prima-twig 0.62.104 → 0.63.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: b6c54b3a3791a28fa90267acdd10c4717f04a62d885eacc758353484f361712a
4
- data.tar.gz: 504ea97726bc241d4296a475a335d1b8b74a9d9389f61936b831b4cab040ef3f
3
+ metadata.gz: 0f6064a412bd928d6b63a4d6811c6060645360ede176c2324bd6a3a8eab9974e
4
+ data.tar.gz: 82fd5101c483f83b695288aac15f4a8a874446faaec7194c8b8198789718de48
5
5
  SHA512:
6
- metadata.gz: e62575269b68619f9de0dc84aa3b9f75bb49d0e0f8b34690956de96aa205cfbdb063fee767151b106ee542df751cdd22c5aa67d785476c87aa314624650207b6
7
- data.tar.gz: 8361b9105ad5272d4bba1e8c3d3f34cfa62b52237cee14a9cc463904b583f7e0951d7db788209bf1f2da4b94ae83a259be3ab508d8d69fc11c1f3705ffdd9d4e
6
+ metadata.gz: 1bcf0b8613af34ecec22b1183e613a6b895767643add0ed39eb87a1da8fb1a3ca75f867252bd729f0cc06bee6be309f6a152c7d0b01a5c0ab9d9956ad7dd5bb2
7
+ data.tar.gz: 51081bfe711750a98a7d020793af938693289632c502951c3e294896a9bbdb8f8f23131094d6f48f3c56f93bbc0ca2b215962637aeef4c1d625e4a97c05fd9b3
@@ -22,9 +22,18 @@ class Release
22
22
  exec "twig feature #{ARGV.join ' '}"
23
23
  end
24
24
  end
25
+ @batch = Aws::Batch::Client.new
26
+ @s3 = Aws::S3::Client.new
27
+ @s3_bucket = 'prima-artifacts'
28
+ @artifact_path = '/tmp/prima-artifact.zip'
29
+ @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-P358L3UYOC6F:1'
30
+ @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
25
31
  @dns_record_identifier = nil
26
32
  @ecs_cluster_name = nil
27
33
  @deploy_update = false
34
+ @qainit = false
35
+ @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
36
+ @qainit_folder = "/drone/src/github.com/project/primait/qainit"
28
37
  @projects = {
29
38
  'prima' => {},
30
39
  'urania' => {},
@@ -33,7 +42,7 @@ class Release
33
42
  'hal9000' => {},
34
43
  'fidaty' => {},
35
44
  'peano' => {},
36
- # 'rogoreport' => {},
45
+ 'rogoreport' => {},
37
46
  'assange' => {},
38
47
  'borat' => {},
39
48
  'crash' => {},
@@ -65,6 +74,8 @@ class Release
65
74
  qainit_deploy_shutdown!
66
75
  elsif 'update' == args[1]
67
76
  qainit_deploy_update!
77
+ elsif 'read' == args[1]
78
+ qainit_read_config! args[2]
68
79
  else
69
80
  if args[1]
70
81
  select_branches(args[1..-1])
@@ -81,8 +92,19 @@ class Release
81
92
  end
82
93
  when 'deploy'
83
94
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
84
- if 'lock' == args[1]
95
+ if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
96
+ deploy_shutdown!
97
+ elsif 'update' == args[1]
98
+ deploy_update!
99
+ elsif 'lock' == args[1]
85
100
  deploy_lock!
101
+ else
102
+ if args[1]
103
+ select_branches(args[1])
104
+ else
105
+ select_branches
106
+ end
107
+ deploy_feature!
86
108
  end
87
109
  when 'aggregator'
88
110
  if 'enable' == args[1]
@@ -452,10 +474,8 @@ class Release
452
474
 
453
475
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
454
476
 
455
- update_drone_yml!
456
-
457
477
  `git add projects && \
458
- git add branch_names .drone.yml && \
478
+ git add branch_names && \
459
479
  git commit -m '#{branch_name}' && \
460
480
  git push -f --set-upstream origin #{branch_name} && \
461
481
  git checkout master`
@@ -499,10 +519,7 @@ class Release
499
519
 
500
520
  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
501
521
 
502
- update_drone_yml!
503
-
504
- `git add branch_names .drone.yml`
505
- `git commit -m 'update'`
522
+ `git commit -am 'update'`
506
523
  `git push && git checkout master`
507
524
  end
508
525
 
@@ -580,6 +597,10 @@ class Release
580
597
  output "Cancello il record DNS utilizzato da Lighthouse"
581
598
  delete_lighthouse_dns()
582
599
  output "Finito!".green
600
+
601
+ if @qainit
602
+ qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
603
+ end
583
604
  end
584
605
 
585
606
  def qainit_write_output(file_message, output_message)
@@ -589,16 +610,41 @@ class Release
589
610
  output "#{output_message} #{qa_file_name}".green
590
611
  end
591
612
 
592
- def update_drone_yml!()
593
- drone_yml = File.read('.drone.yml')
594
- @projects.each do |key, project|
595
- drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
613
+ def qainit_read_config!(action)
614
+ projects = ''
615
+
616
+ File.open('branch_names', 'r') do |file|
617
+ file.each_line do |line|
618
+ projects = JSON.parse(line)
619
+ end
596
620
  end
597
- File.open(".drone.yml", "w") do |f|
598
- f.write(drone_yml)
621
+
622
+ projects.each do |key, project|
623
+ @projects[key] = project
624
+ end
625
+
626
+ get_s3_config_files
627
+ @qainit = true
628
+ case action
629
+ when 'shutdown'
630
+ output 'Shutting down'.green
631
+ qainit_drone_shutdown!
632
+ else
633
+ output 'Starting standard deploy'.green
634
+ deploy_feature!
599
635
  end
600
636
  end
601
637
 
638
+ def get_s3_config_files
639
+ # manteniamo la struttura per lanciarlo facilmente anche da locale
640
+ `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
641
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
642
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
643
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
644
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
645
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
646
+ end
647
+
602
648
  def get_deploy_id
603
649
  if @deploy_id
604
650
  @deploy_id
@@ -608,6 +654,1261 @@ class Release
608
654
  end
609
655
  end
610
656
 
657
+ def deploy_feature!
658
+ `git pull && git submodule init && git submodule update`
659
+ @ami_id = get_ami_id("ecs-fleet-allinone-staging")
660
+ deploy_id = get_deploy_id
661
+ stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
662
+ stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
663
+ unless @qainit
664
+ @projects.each_key do |project_key|
665
+ if @projects[project_key]['revision']
666
+ git_checkout_version(project_key, @projects[project_key]['revision'])
667
+ end
668
+ end
669
+ end
670
+ @dns_record_identifier = deploy_id
671
+ @git_branch = ENV['DRONE_BRANCH']
672
+ hostname_pattern_priority = hostname_pattern_priority()
673
+ tags = [
674
+ {
675
+ key: "qainit",
676
+ value: @git_branch
677
+ },
678
+ {
679
+ key: "hostname_pattern_priority",
680
+ value: hostname_pattern_priority
681
+ }
682
+ ]
683
+ @projects.each do |key, value|
684
+ case key.to_s
685
+ when 'crash'
686
+ tags << { key: 'crash', value: @projects['crash']['name'] } if deploy_crash?
687
+ when 'starsky', 'hutch'
688
+ tags << { key: key.to_s, value: @projects[key.to_s]['name'] } if deploy_starsky_hutch?
689
+ else
690
+ tags << { key: key, value: value['name'] }
691
+ end
692
+ end
693
+
694
+ cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
695
+
696
+ if stack_exists?(cluster_stack_name)
697
+ tags = get_stack_tags(cluster_stack_name)
698
+ hostname_pattern_priority = tags.detect do |tag|
699
+ tag.key == 'hostname_pattern_priority'
700
+ end.value
701
+ end
702
+
703
+ create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
704
+ wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
705
+
706
+ create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
707
+ create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
708
+
709
+ resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
710
+ @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
711
+
712
+ asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
713
+ create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
714
+
715
+ stack_name_db = "ecs-task-db-qa-#{deploy_id}"
716
+ stack_body = IO.read('cloudformation/stacks/task/db.yml')
717
+ parameters = [
718
+ {
719
+ parameter_key: "Environment",
720
+ parameter_value: "qa"
721
+ },
722
+ {
723
+ parameter_key: "ECSClusterName",
724
+ parameter_value: @ecs_cluster_name
725
+ }
726
+ ]
727
+ create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
728
+
729
+ output "check pyxis \n".yellow
730
+
731
+ create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
732
+ create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
733
+ # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
734
+ wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
735
+ db_task = ''
736
+ db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
737
+
738
+ create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
739
+ create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
740
+ create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
741
+ create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
742
+ create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
743
+ create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
744
+ create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
745
+ create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
746
+ create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
747
+ create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
748
+ create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
749
+ create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
750
+ create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
751
+ create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
752
+ create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
753
+ create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
754
+ create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
755
+ create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}-qa.tar.gz")
756
+
757
+
758
+ wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
759
+
760
+ import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
761
+
762
+ wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
763
+ wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
764
+
765
+ stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
766
+ stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
767
+ parameters = [
768
+ {
769
+ parameter_key: "DnsRecordIdentifier",
770
+ parameter_value: @dns_record_identifier
771
+ },
772
+ {
773
+ parameter_key: "PrimaElbHostname",
774
+ parameter_value: get_alb_host(stack_name_alb)
775
+ },
776
+ {
777
+ parameter_key: "UraniaIp",
778
+ parameter_value: ec2_ip_address(asg_stack_name)
779
+ },
780
+ {
781
+ parameter_key: "BburagoIp",
782
+ parameter_value: ec2_ip_address(asg_stack_name)
783
+ },
784
+ {
785
+ parameter_key: "Hal9000Ip",
786
+ parameter_value: ec2_ip_address(asg_stack_name)
787
+ },
788
+ {
789
+ parameter_key: "FidatyIp",
790
+ parameter_value: ec2_ip_address(asg_stack_name)
791
+ },
792
+ {
793
+ parameter_key: "PeanoIp",
794
+ parameter_value: ec2_ip_address(asg_stack_name)
795
+ },
796
+ {
797
+ parameter_key: "ErmesIp",
798
+ parameter_value: ec2_ip_address(asg_stack_name)
799
+ },
800
+ {
801
+ parameter_key: "ActiviaIp",
802
+ parameter_value: ec2_ip_address(asg_stack_name)
803
+ },
804
+ {
805
+ parameter_key: "SkynetIp",
806
+ parameter_value: ec2_ip_address(asg_stack_name)
807
+ },
808
+ {
809
+ parameter_key: "RogerIp",
810
+ parameter_value: ec2_ip_address(asg_stack_name)
811
+ },
812
+ {
813
+ parameter_key: "LeftoriumIp",
814
+ parameter_value: ec2_ip_address(asg_stack_name)
815
+ },
816
+ {
817
+ parameter_key: "RacheleIp",
818
+ parameter_value: ec2_ip_address(asg_stack_name)
819
+ },
820
+ {
821
+ parameter_key: "RedisIp",
822
+ parameter_value: ec2_ip_address(asg_stack_name)
823
+ },
824
+ {
825
+ parameter_key: "AssangeElbHostname",
826
+ parameter_value: get_alb_host(stack_name_alb)
827
+ },
828
+ {
829
+ parameter_key: "BoratElbHostname",
830
+ parameter_value: get_alb_host(stack_name_alb_ws)
831
+ },
832
+ {
833
+ parameter_key: 'CrashElbHostname',
834
+ parameter_value: get_alb_host(stack_name_alb_ws)
835
+ },
836
+ {
837
+ parameter_key: 'StarskyElbHostname',
838
+ parameter_value: get_alb_host(stack_name_alb)
839
+ },
840
+ {
841
+ parameter_key: 'HutchElbHostname',
842
+ parameter_value: get_alb_host(stack_name_alb)
843
+ },
844
+ {
845
+ parameter_key: 'MaiaElbHostname',
846
+ parameter_value: get_alb_host(stack_name_alb)
847
+ }
848
+ ]
849
+
850
+ create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
851
+ wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
852
+
853
+ stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
854
+ git_checkout_version('skynet', @projects["skynet"]['revision'])
855
+ stack_body = File.read('projects/skynet/deploy/task.yml')
856
+ parameters = [
857
+ {
858
+ parameter_key: "Environment",
859
+ parameter_value: "qa"
860
+ },
861
+ {
862
+ parameter_key: "ReleaseVersion",
863
+ parameter_value: @projects["skynet"]['revision']
864
+ },
865
+ {
866
+ parameter_key: "TaskDesiredCount",
867
+ parameter_value: "1"
868
+ },
869
+ {
870
+ parameter_key: "ECSClusterName",
871
+ parameter_value: @ecs_cluster_name
872
+ },
873
+ {
874
+ parameter_key: "HostnamePattern",
875
+ parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
876
+ },
877
+ {
878
+ parameter_key: "HostnamePatternPriority",
879
+ parameter_value: hostname_pattern_priority
880
+ }
881
+ ]
882
+ if stack_exists?(stack_name_skynet)
883
+ cur_version = get_currently_deployed_version(stack_name_skynet)
884
+ update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
885
+ else
886
+ create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
887
+ end
888
+
889
+ stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
890
+ git_checkout_version('urania', @projects["urania"]['revision'])
891
+ stack_body = File.read('projects/urania/deploy/task.yml')
892
+ parameters = [
893
+ {
894
+ parameter_key: "Environment",
895
+ parameter_value: "qa"
896
+ },
897
+ {
898
+ parameter_key: "ReleaseVersion",
899
+ parameter_value: @projects["urania"]['revision']
900
+ },
901
+ {
902
+ parameter_key: "TaskDesiredCount",
903
+ parameter_value: "1"
904
+ },
905
+ {
906
+ parameter_key: "ECSClusterName",
907
+ parameter_value: @ecs_cluster_name
908
+ },
909
+ {
910
+ parameter_key: "HostnamePattern",
911
+ parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
912
+ },
913
+ {
914
+ parameter_key: "HostnamePatternPriority",
915
+ parameter_value: hostname_pattern_priority
916
+ }
917
+ ]
918
+ if stack_exists?(stack_name_urania)
919
+ cur_version = get_currently_deployed_version(stack_name_urania)
920
+ update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
921
+ else
922
+ create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
923
+ end
924
+
925
+ stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
926
+ git_checkout_version('ermes', @projects["ermes"]['revision'])
927
+ stack_body = File.read('projects/ermes/deploy/task.yml')
928
+ parameters = [
929
+ {
930
+ parameter_key: "Environment",
931
+ parameter_value: "qa"
932
+ },
933
+ {
934
+ parameter_key: "ReleaseVersion",
935
+ parameter_value: "#{@projects['ermes']['revision']}"
936
+ },
937
+ {
938
+ parameter_key: "TaskDesiredCount",
939
+ parameter_value: "1"
940
+ },
941
+ {
942
+ parameter_key: "ECSClusterName",
943
+ parameter_value: @ecs_cluster_name
944
+ },
945
+ {
946
+ parameter_key: "HostnamePattern",
947
+ parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
948
+ },
949
+ {
950
+ parameter_key: "HostnamePatternPriority",
951
+ parameter_value: hostname_pattern_priority
952
+ },
953
+ {
954
+ parameter_key: "WebHost",
955
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
956
+ },
957
+ {
958
+ parameter_key: "PeanoHost",
959
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
960
+ }
961
+ ]
962
+ if stack_exists?(stack_name_ermes)
963
+ cur_version = get_currently_deployed_version(stack_name_ermes)
964
+ update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
965
+ else
966
+ create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
967
+ end
968
+
969
+ stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
970
+ git_checkout_version('bburago', @projects["bburago"]['revision'])
971
+ stack_body = File.read('projects/bburago/deploy/task.yml')
972
+ parameters = [
973
+ {
974
+ parameter_key: "Environment",
975
+ parameter_value: "qa"
976
+ },
977
+ {
978
+ parameter_key: "ReleaseVersion",
979
+ parameter_value: @projects["bburago"]['revision']
980
+ },
981
+ {
982
+ parameter_key: "ECSClusterName",
983
+ parameter_value: @ecs_cluster_name
984
+ },
985
+ {
986
+ parameter_key: "TaskDesiredCount",
987
+ parameter_value: "1"
988
+ },
989
+ {
990
+ parameter_key: "HostnamePattern",
991
+ parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
992
+ },
993
+ {
994
+ parameter_key: "HostnamePatternPriority",
995
+ parameter_value: hostname_pattern_priority
996
+ }
997
+ ]
998
+ if stack_exists?(stack_name_bburago)
999
+ cur_version = get_currently_deployed_version(stack_name_bburago)
1000
+ update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1001
+ else
1002
+ create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1003
+ end
1004
+
1005
+ stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1006
+ git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1007
+ stack_body = File.read('projects/hal9000/deploy/task.yml')
1008
+ parameters = [
1009
+ {
1010
+ parameter_key: "Environment",
1011
+ parameter_value: "qa"
1012
+ },
1013
+ {
1014
+ parameter_key: "ReleaseVersion",
1015
+ parameter_value: @projects["hal9000"]['revision']
1016
+ },
1017
+ {
1018
+ parameter_key: "ECSClusterName",
1019
+ parameter_value: @ecs_cluster_name
1020
+ },
1021
+ {
1022
+ parameter_key: "TaskDesiredCount",
1023
+ parameter_value: "1"
1024
+ },
1025
+ {
1026
+ parameter_key: "HostnamePattern",
1027
+ parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1028
+ },
1029
+ {
1030
+ parameter_key: "HostnamePatternPriority",
1031
+ parameter_value: hostname_pattern_priority
1032
+ }
1033
+ ]
1034
+ if stack_exists?(stack_name_hal9000)
1035
+ cur_version = get_currently_deployed_version(stack_name_hal9000)
1036
+ update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1037
+ else
1038
+ create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1039
+ end
1040
+
1041
+ stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1042
+ git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1043
+ stack_body = File.read('projects/fidaty/deploy/task.yml')
1044
+ parameters = [
1045
+ {
1046
+ parameter_key: "Environment",
1047
+ parameter_value: "qa"
1048
+ },
1049
+ {
1050
+ parameter_key: "ReleaseVersion",
1051
+ parameter_value: "#{@projects["fidaty"]['revision']}"
1052
+ },
1053
+ {
1054
+ parameter_key: "ECSClusterName",
1055
+ parameter_value: @ecs_cluster_name
1056
+ },
1057
+ {
1058
+ parameter_key: "TaskDesiredCount",
1059
+ parameter_value: "1"
1060
+ },
1061
+ {
1062
+ parameter_key: "HostnamePattern",
1063
+ parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1064
+ },
1065
+ {
1066
+ parameter_key: "HostnamePatternPriority",
1067
+ parameter_value: hostname_pattern_priority
1068
+ },
1069
+ {
1070
+ parameter_key: "PeanoHost",
1071
+ parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1072
+ }
1073
+ ]
1074
+ if stack_exists?(stack_name_fidaty)
1075
+ cur_version = get_currently_deployed_version(stack_name_fidaty)
1076
+ update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1077
+ else
1078
+ create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1079
+ end
1080
+
1081
+ stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1082
+ git_checkout_version('peano', @projects["peano"]['revision'])
1083
+ stack_body = File.read('projects/peano/deploy/task.yml')
1084
+ parameters = [
1085
+ {
1086
+ parameter_key: "Environment",
1087
+ parameter_value: "qa"
1088
+ },
1089
+ {
1090
+ parameter_key: "ReleaseVersion",
1091
+ parameter_value: "#{@projects['peano']['revision']}"
1092
+ },
1093
+ {
1094
+ parameter_key: "ECSClusterName",
1095
+ parameter_value: @ecs_cluster_name
1096
+ },
1097
+ {
1098
+ parameter_key: "TaskDesiredCount",
1099
+ parameter_value: "1"
1100
+ },
1101
+ {
1102
+ parameter_key: "HostnamePattern",
1103
+ parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1104
+ },
1105
+ {
1106
+ parameter_key: "HostnamePatternPriority",
1107
+ parameter_value: hostname_pattern_priority
1108
+ },
1109
+ {
1110
+ parameter_key: "WebHost",
1111
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1112
+ },
1113
+ {
1114
+ parameter_key: "AssangeHost",
1115
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1116
+ }
1117
+ ]
1118
+ if stack_exists?(stack_name_peano)
1119
+ cur_version = get_currently_deployed_version(stack_name_peano)
1120
+ update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1121
+ else
1122
+ create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1123
+ end
1124
+
1125
+ stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1126
+ git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1127
+ stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1128
+ parameters = [
1129
+ {
1130
+ parameter_key: "Environment",
1131
+ parameter_value: "qa"
1132
+ },
1133
+ {
1134
+ parameter_key: "ReleaseVersion",
1135
+ parameter_value: "#{@projects["rogoreport"]['revision']}"
1136
+ },
1137
+ {
1138
+ parameter_key: "ReleaseName",
1139
+ parameter_value: "rogoreport"
1140
+ },
1141
+ {
1142
+ parameter_key: "ECSClusterName",
1143
+ parameter_value: @ecs_cluster_name
1144
+ }
1145
+ ]
1146
+ if stack_exists?(stack_name_rogoreport)
1147
+ cur_version = get_currently_deployed_version(stack_name_rogoreport)
1148
+ update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1149
+ else
1150
+ create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1151
+ end
1152
+
1153
+ stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1154
+ git_checkout_version('assange', @projects["assange"]['revision'])
1155
+ stack_body = IO.read('projects/assange/deploy/task.yml')
1156
+ parameters = [
1157
+ {
1158
+ parameter_key: "Environment",
1159
+ parameter_value: "qa"
1160
+ },
1161
+ {
1162
+ parameter_key: "ReleaseVersion",
1163
+ parameter_value: "#{@projects["assange"]['revision']}"
1164
+ },
1165
+ {
1166
+ parameter_key: "ECSClusterName",
1167
+ parameter_value: @ecs_cluster_name
1168
+ },
1169
+ {
1170
+ parameter_key: "TaskDesiredCount",
1171
+ parameter_value: "1"
1172
+ },
1173
+ {
1174
+ parameter_key: "ALBShortName",
1175
+ parameter_value: "assange-qa-#{deploy_id}"[0..27]
1176
+ },
1177
+ {
1178
+ parameter_key: "HostnamePattern",
1179
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1180
+ },
1181
+ {
1182
+ parameter_key: "HostnamePatternPriority",
1183
+ parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1184
+ },
1185
+ {
1186
+ parameter_key: "EnvHash",
1187
+ parameter_value: deploy_id
1188
+ },
1189
+ {
1190
+ parameter_key: "WebHost",
1191
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1192
+ },
1193
+ {
1194
+ parameter_key: "AssangeHost",
1195
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1196
+ }
1197
+ ]
1198
+ if stack_exists?(stack_name_assange)
1199
+ cur_version = get_currently_deployed_version(stack_name_assange)
1200
+ update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1201
+ else
1202
+ create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1203
+ end
1204
+
1205
+ stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1206
+ git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1207
+ stack_body = File.read('projects/leftorium/deploy/task.yml')
1208
+ parameters = [
1209
+ {
1210
+ parameter_key: "Environment",
1211
+ parameter_value: "qa"
1212
+ },
1213
+ {
1214
+ parameter_key: "ReleaseVersion",
1215
+ parameter_value: "#{@projects["leftorium"]['revision']}"
1216
+ },
1217
+ {
1218
+ parameter_key: "ECSClusterName",
1219
+ parameter_value: @ecs_cluster_name
1220
+ },
1221
+ {
1222
+ parameter_key: "TaskDesiredCount",
1223
+ parameter_value: "1"
1224
+ },
1225
+ {
1226
+ parameter_key: "HostnamePattern",
1227
+ parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1228
+ },
1229
+ {
1230
+ parameter_key: "HostnamePatternPriority",
1231
+ parameter_value: hostname_pattern_priority
1232
+ }
1233
+ ]
1234
+ if stack_exists?(stack_name_leftorium)
1235
+ cur_version = get_currently_deployed_version(stack_name_leftorium)
1236
+ update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1237
+ else
1238
+ create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1239
+ end
1240
+
1241
+ stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1242
+ git_checkout_version('rachele', @projects["rachele"]['revision'])
1243
+ stack_body = File.read('projects/rachele/deploy/task.yml')
1244
+ parameters = [
1245
+ {
1246
+ parameter_key: "Environment",
1247
+ parameter_value: "qa"
1248
+ },
1249
+ {
1250
+ parameter_key: "ReleaseVersion",
1251
+ parameter_value: "#{@projects["rachele"]['revision']}"
1252
+ },
1253
+ {
1254
+ parameter_key: "ECSClusterName",
1255
+ parameter_value: @ecs_cluster_name
1256
+ },
1257
+ {
1258
+ parameter_key: "TaskDesiredCount",
1259
+ parameter_value: "1"
1260
+ },
1261
+ {
1262
+ parameter_key: "WebHost",
1263
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1264
+ },
1265
+ {
1266
+ parameter_key: "HostnamePattern",
1267
+ parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1268
+ },
1269
+ {
1270
+ parameter_key: "HostnamePatternPriority",
1271
+ parameter_value: hostname_pattern_priority
1272
+ }
1273
+ ]
1274
+ if stack_exists?(stack_name_rachele)
1275
+ cur_version = get_currently_deployed_version(stack_name_rachele)
1276
+ unless cur_version.include?(@projects["rachele"]['revision'])
1277
+ delete_stack(stack_name_rachele)
1278
+ wait_for_stack_removal(stack_name_rachele)
1279
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1280
+ end
1281
+ else
1282
+ create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1283
+ end
1284
+
1285
+ stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1286
+ git_checkout_version('borat', @projects["borat"]['revision'])
1287
+ stack_body = IO.read('projects/borat/deploy/task.yml')
1288
+ parameters = [
1289
+ {
1290
+ parameter_key: "Environment",
1291
+ parameter_value: "qa"
1292
+ },
1293
+ {
1294
+ parameter_key: "ReleaseVersion",
1295
+ parameter_value: "#{@projects["borat"]['revision']}"
1296
+ },
1297
+ {
1298
+ parameter_key: "ECSClusterName",
1299
+ parameter_value: @ecs_cluster_name
1300
+ },
1301
+ {
1302
+ parameter_key: "TaskDesiredCount",
1303
+ parameter_value: "1"
1304
+ },
1305
+ {
1306
+ parameter_key: "ALBShortName",
1307
+ parameter_value: "borat-qa-#{deploy_id}"[0..27]
1308
+ },
1309
+ {
1310
+ parameter_key: "HostnamePattern",
1311
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1312
+ },
1313
+ {
1314
+ parameter_key: "HostnamePatternPriority",
1315
+ parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1316
+ },
1317
+ {
1318
+ parameter_key: "EnvHash",
1319
+ parameter_value: deploy_id
1320
+ },
1321
+ {
1322
+ parameter_key: "WsEndpoint",
1323
+ parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1324
+ },
1325
+ {
1326
+ parameter_key: "GraphqlEndpoint",
1327
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1328
+ },
1329
+ {
1330
+ parameter_key: "GraphqlInsuranceEndpoint",
1331
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql/insurance"
1332
+ },
1333
+ {
1334
+ parameter_key: "AuthEndpoint",
1335
+ parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1336
+ },
1337
+ {
1338
+ parameter_key: "FrontendEndpoint",
1339
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1340
+ }
1341
+ ]
1342
+ if stack_exists?(stack_name_borat)
1343
+ cur_version = get_currently_deployed_version(stack_name_borat)
1344
+ update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1345
+ else
1346
+ create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1347
+ end
1348
+
1349
+ if deploy_crash?
1350
+ git_checkout_version('crash', @projects['crash']['revision'])
1351
+ stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1352
+ stack_body = IO.read('projects/crash/deploy/task.yml')
1353
+ parameters = [
1354
+ {
1355
+ parameter_key: 'Environment',
1356
+ parameter_value: 'qa'
1357
+ },
1358
+ {
1359
+ parameter_key: 'ReleaseVersion',
1360
+ parameter_value: "#{@projects['crash']['revision']}"
1361
+ },
1362
+ {
1363
+ parameter_key: 'TaskDesiredCount',
1364
+ parameter_value: '1'
1365
+ },
1366
+ {
1367
+ parameter_key: 'ECSClusterName',
1368
+ parameter_value: @ecs_cluster_name
1369
+ },
1370
+ {
1371
+ parameter_key: 'ALBShortName',
1372
+ parameter_value: "crash-qa-#{deploy_id}"[0..27]
1373
+ },
1374
+ {
1375
+ parameter_key: 'HostnamePattern',
1376
+ parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1377
+ },
1378
+ {
1379
+ parameter_key: 'HostnamePatternPriority',
1380
+ parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1381
+ },
1382
+ {
1383
+ parameter_key: "EnvHash",
1384
+ parameter_value: deploy_id
1385
+ },
1386
+ {
1387
+ parameter_key: "WsEndpoint",
1388
+ parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1389
+ },
1390
+ {
1391
+ parameter_key: "GraphqlEndpoint",
1392
+ parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1393
+ },
1394
+ {
1395
+ parameter_key: "AuthDomain",
1396
+ parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1397
+ },
1398
+ ]
1399
+ if stack_exists?(stack_name_crash)
1400
+ cur_version = get_currently_deployed_version(stack_name_crash)
1401
+ update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1402
+ else
1403
+ create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1404
+ end
1405
+ end
1406
+
1407
+ if deploy_starsky_hutch?
1408
+ stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1409
+ git_checkout_version('starsky', @projects["starsky"]['revision'])
1410
+ stack_body = IO.read('projects/starsky/deploy/task.yml')
1411
+ parameters = [
1412
+ {
1413
+ parameter_key: "Environment",
1414
+ parameter_value: "qa"
1415
+ },
1416
+ {
1417
+ parameter_key: "ReleaseVersion",
1418
+ parameter_value: "#{@projects["starsky"]['revision']}"
1419
+ },
1420
+ {
1421
+ parameter_key: "TaskDesiredCount",
1422
+ parameter_value: "1"
1423
+ },
1424
+ {
1425
+ parameter_key: "ECSClusterName",
1426
+ parameter_value: @ecs_cluster_name
1427
+ },
1428
+ {
1429
+ parameter_key: "ALBShortName",
1430
+ parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1431
+ },
1432
+ {
1433
+ parameter_key: "EnvHash",
1434
+ parameter_value: deploy_id
1435
+ },
1436
+ {
1437
+ parameter_key: "HostnamePattern",
1438
+ parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1439
+ },
1440
+ {
1441
+ parameter_key: "HostnamePatternPriority",
1442
+ parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1443
+ }
1444
+ ]
1445
+ if stack_exists?(stack_name_starsky)
1446
+ cur_version = get_currently_deployed_version(stack_name_starsky)
1447
+ unless cur_version.include?(@projects["starsky"]['revision'])
1448
+ delete_stack(stack_name_starsky)
1449
+ wait_for_stack_removal(stack_name_starsky)
1450
+ create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1451
+ end
1452
+ else
1453
+ create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1454
+ end
1455
+ end
1456
+
1457
+ stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1458
+ git_checkout_version('activia', @projects["activia"]['revision'])
1459
+ stack_body = File.read('projects/activia/deploy/task.yml')
1460
+ parameters = [
1461
+ {
1462
+ parameter_key: "Environment",
1463
+ parameter_value: "qa"
1464
+ },
1465
+ {
1466
+ parameter_key: "ReleaseVersion",
1467
+ parameter_value: "#{@projects["activia"]['revision']}"
1468
+ },
1469
+ {
1470
+ parameter_key: "ECSClusterName",
1471
+ parameter_value: @ecs_cluster_name
1472
+ },
1473
+ {
1474
+ parameter_key: "TaskDesiredCount",
1475
+ parameter_value: "1"
1476
+ },
1477
+ {
1478
+ parameter_key: "HostnamePattern",
1479
+ parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1480
+ },
1481
+ {
1482
+ parameter_key: "HostnamePatternPriority",
1483
+ parameter_value: hostname_pattern_priority
1484
+ },
1485
+ {
1486
+ parameter_key: "WebHost",
1487
+ parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1488
+ },
1489
+ {
1490
+ parameter_key: "PeanoHost",
1491
+ parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1492
+ }
1493
+ ]
1494
+ if stack_exists?(stack_name_activia)
1495
+ cur_version = get_currently_deployed_version(stack_name_activia)
1496
+ update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1497
+ else
1498
+ create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1499
+ end
1500
+
1501
+ # Waiting for prima healthcheck dependencies
1502
+ wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1503
+ wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1504
+ wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1505
+ wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1506
+ wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1507
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1508
+ wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1509
+ wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1510
+
1511
+ stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1512
+ git_checkout_version('prima', @projects["prima"]['revision'])
1513
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1514
+ parameters = [
1515
+ {
1516
+ parameter_key: "Environment",
1517
+ parameter_value: "qa"
1518
+ },
1519
+ {
1520
+ parameter_key: "ReleaseVersion",
1521
+ parameter_value: "#{@projects["prima"]['revision']}"
1522
+ },
1523
+ {
1524
+ parameter_key: "TaskDesiredCount",
1525
+ parameter_value: "1"
1526
+ },
1527
+ {
1528
+ parameter_key: "ECSClusterName",
1529
+ parameter_value: @ecs_cluster_name
1530
+ },
1531
+ {
1532
+ parameter_key: "ALBShortName",
1533
+ parameter_value: "web-qa-#{deploy_id}"[0..27]
1534
+ },
1535
+ {
1536
+ parameter_key: "WebQaBaseHostname",
1537
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1538
+ },
1539
+ {
1540
+ parameter_key: "HostnamePatternPriority",
1541
+ parameter_value: hostname_pattern_priority
1542
+ },
1543
+ {
1544
+ parameter_key: "HostnamePatternAggregatorPriority",
1545
+ parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1546
+ },
1547
+ {
1548
+ parameter_key: "EnvHash",
1549
+ parameter_value: deploy_id
1550
+ },
1551
+ {
1552
+ parameter_key: "AssangeHostname",
1553
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1554
+ },
1555
+ {
1556
+ parameter_key: "BackofficeHostname",
1557
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1558
+ },
1559
+ {
1560
+ parameter_key: "WebHostname",
1561
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1562
+ },
1563
+ {
1564
+ parameter_key: "FePrimaDomain",
1565
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1566
+ },
1567
+ {
1568
+ parameter_key: "HostnamePattern",
1569
+ parameter_value: "www-#{@dns_record_identifier}.*"
1570
+ }
1571
+ ]
1572
+ if stack_exists?(stack_name_web)
1573
+ cur_version = get_currently_deployed_version(stack_name_web)
1574
+ update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1575
+ else
1576
+ create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1577
+ end
1578
+
1579
+ stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1580
+ git_checkout_version('prima', @projects["prima"]['revision'])
1581
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1582
+ parameters = [
1583
+ {
1584
+ parameter_key: "Environment",
1585
+ parameter_value: "qa"
1586
+ },
1587
+ {
1588
+ parameter_key: "ReleaseVersion",
1589
+ parameter_value: "#{@projects["prima"]['revision']}"
1590
+ },
1591
+ {
1592
+ parameter_key: "ECSClusterName",
1593
+ parameter_value: @ecs_cluster_name
1594
+ },
1595
+ {
1596
+ parameter_key: "NginxHttpHost",
1597
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1598
+ },
1599
+ {
1600
+ parameter_key: "AssangeHostname",
1601
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1602
+ },
1603
+ {
1604
+ parameter_key: "BackofficeHostname",
1605
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1606
+ },
1607
+ {
1608
+ parameter_key: "WebHostname",
1609
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1610
+ },
1611
+ {
1612
+ parameter_key: "FePrimaDomain",
1613
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1614
+ },
1615
+ {
1616
+ parameter_key: "HostnamePattern",
1617
+ parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1618
+ }
1619
+ ]
1620
+ if stack_exists?(stack_name_consumer)
1621
+ cur_version = get_currently_deployed_version(stack_name_consumer)
1622
+ update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1623
+ else
1624
+ create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1625
+ end
1626
+
1627
+ stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1628
+ git_checkout_version('roger', @projects["roger"]['revision'])
1629
+ stack_body = File.read('projects/roger/deploy/task.yml')
1630
+ parameters = [
1631
+ {
1632
+ parameter_key: "Environment",
1633
+ parameter_value: "qa"
1634
+ },
1635
+ {
1636
+ parameter_key: "ReleaseVersion",
1637
+ parameter_value: @projects["roger"]['revision']
1638
+ },
1639
+ {
1640
+ parameter_key: "TaskDesiredCount",
1641
+ parameter_value: "1"
1642
+ },
1643
+ {
1644
+ parameter_key: "ECSClusterName",
1645
+ parameter_value: @ecs_cluster_name
1646
+ },
1647
+ {
1648
+ parameter_key: "HostnamePattern",
1649
+ parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1650
+ },
1651
+ {
1652
+ parameter_key: "HostnamePatternPriority",
1653
+ parameter_value: hostname_pattern_priority
1654
+ }
1655
+ ]
1656
+ if stack_exists?(stack_name_roger)
1657
+ cur_version = get_currently_deployed_version(stack_name_roger)
1658
+ update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1659
+ else
1660
+ create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1661
+ end
1662
+
1663
+
1664
+ if deploy_starsky_hutch?
1665
+ wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1666
+
1667
+ stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1668
+ git_checkout_version('hutch', @projects["hutch"]['revision'])
1669
+ stack_body = File.read('projects/hutch/deploy/task.yml')
1670
+ parameters = [
1671
+ {
1672
+ parameter_key: "Environment",
1673
+ parameter_value: "qa"
1674
+ },
1675
+ {
1676
+ parameter_key: "ReleaseVersion",
1677
+ parameter_value: "#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}"
1678
+ },
1679
+ {
1680
+ parameter_key: "ALBShortName",
1681
+ parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1682
+ },
1683
+ {
1684
+ parameter_key: "ECSClusterName",
1685
+ parameter_value: @ecs_cluster_name
1686
+ },
1687
+ {
1688
+ parameter_key: "EnvHash",
1689
+ parameter_value: deploy_id
1690
+ },
1691
+ {
1692
+ parameter_key: "HostnamePattern",
1693
+ parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1694
+ },
1695
+ {
1696
+ parameter_key: "HostnamePatternPriority",
1697
+ parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1698
+ },
1699
+ {
1700
+ parameter_key: "ApiUrl",
1701
+ parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1702
+ }
1703
+ ]
1704
+ if stack_exists?(stack_name_hutch)
1705
+ cur_version = get_currently_deployed_version(stack_name_hutch)
1706
+ update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1707
+ else
1708
+ create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1709
+ end
1710
+ end
1711
+
1712
+ stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1713
+ git_checkout_version('maia', @projects["maia"]['revision'])
1714
+ stack_body = File.read('projects/maia/deploy/task.yml')
1715
+ parameters = [
1716
+ {
1717
+ parameter_key: "Environment",
1718
+ parameter_value: "qa"
1719
+ },
1720
+ {
1721
+ parameter_key: "ReleaseVersion",
1722
+ parameter_value: "#{@projects["maia"]['revision']}"
1723
+ },
1724
+ {
1725
+ parameter_key: "ALBShortName",
1726
+ parameter_value: "maia-qa-#{deploy_id}"[0..15]
1727
+ },
1728
+ {
1729
+ parameter_key: "ECSClusterName",
1730
+ parameter_value: @ecs_cluster_name
1731
+ },
1732
+ {
1733
+ parameter_key: "EnvHash",
1734
+ parameter_value: deploy_id
1735
+ },
1736
+ {
1737
+ parameter_key: "HostnamePatternPublic",
1738
+ parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1739
+ },
1740
+ {
1741
+ parameter_key: "HostnamePatternPriority",
1742
+ parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1743
+ },
1744
+ {
1745
+ parameter_key: "ProxyHostnameIntermediari",
1746
+ parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1747
+ },
1748
+ {
1749
+ parameter_key: "ProxyHostnameApp",
1750
+ parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1751
+ }
1752
+ ]
1753
+ if stack_exists?(stack_name_maia)
1754
+ cur_version = get_currently_deployed_version(stack_name_maia)
1755
+ update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1756
+ else
1757
+ create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1758
+ end
1759
+
1760
+ wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1761
+ wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1762
+ wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1763
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1764
+ wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1765
+ wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1766
+ wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1767
+ wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1768
+ wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1769
+ wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1770
+ wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1771
+
1772
+
1773
+ update_service_defaults(stack_name_web)
1774
+ update_service_defaults(stack_name_consumer)
1775
+ update_service_defaults(stack_name_urania)
1776
+ update_service_defaults(stack_name_ermes)
1777
+ update_service_defaults(stack_name_bburago)
1778
+ update_service_defaults(stack_name_hal9000)
1779
+ update_service_defaults(stack_name_fidaty)
1780
+ update_service_defaults(stack_name_peano)
1781
+ update_service_defaults(stack_name_rogoreport)
1782
+ update_service_defaults(stack_name_assange)
1783
+ update_service_defaults(stack_name_borat)
1784
+ update_service_defaults(stack_name_activia)
1785
+ update_service_defaults(stack_name_skynet)
1786
+ update_service_defaults(stack_name_leftorium)
1787
+ update_service_defaults(stack_name_rachele)
1788
+ update_service_defaults(stack_name_maia)
1789
+ update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1790
+ update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1791
+ update_service_defaults(stack_name_crash) unless !deploy_crash?
1792
+
1793
+ activia_hostname = get_route53_hostname("activia")
1794
+ assange_hostname = get_route53_hostname("assange")
1795
+ bburago_hostname = get_route53_hostname("bburago")
1796
+ borat_hostname = get_route53_hostname("borat")
1797
+ ermes_hostname = get_route53_hostname("ermes")
1798
+ fidaty_hostname = get_route53_hostname("fidaty")
1799
+ hal9000_hostname = get_route53_hostname("hal9000")
1800
+ prima_hostname = get_route53_hostname("web")
1801
+ peano_hostname = get_route53_hostname("peano")
1802
+ skynet_hostname = get_route53_hostname("skynet")
1803
+ urania_hostname = get_route53_hostname("urania")
1804
+ roger_hostname = get_route53_hostname("roger")
1805
+ leftorium_hostname = get_route53_hostname("leftorium")
1806
+ rachele_hostname = get_route53_hostname("rachele")
1807
+ maia_app_hostname = get_route53_hostname("maia-app")
1808
+ maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1809
+ crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1810
+ starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1811
+ hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1812
+
1813
+
1814
+ # launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
1815
+
1816
+ projects_text = "
1817
+ > Prima url: https://#{prima_hostname}
1818
+ > Backoffice (Borat) url: https://#{borat_hostname}
1819
+ > Urania url: http://#{urania_hostname}:81
1820
+ > Bburago url: http://#{bburago_hostname}:83
1821
+ > Ermes url: http://#{ermes_hostname}:10002
1822
+ > Hal9000 url: http://#{hal9000_hostname}:10031
1823
+ > Fidaty url: http://#{fidaty_hostname}:10021
1824
+ > Peano url: http://#{peano_hostname}:10039
1825
+ > Assange url: https://#{assange_hostname}
1826
+ > Activia url: http://#{activia_hostname}:10041
1827
+ > Skynet url: http://#{skynet_hostname}:8050
1828
+ > Roger url: http://#{roger_hostname}:10051
1829
+ > Leftorium url: http://#{leftorium_hostname}:10061
1830
+ > Rachele url: http://#{rachele_hostname}:10040
1831
+ > Maia App url: https://#{maia_app_hostname}
1832
+ > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1833
+ projects_text.concat "
1834
+ > Crash url: https://#{crash_hostname}" if deploy_crash?
1835
+ projects_text.concat "
1836
+ > Starsky url: https://#{starsky_hostname}
1837
+ > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1838
+ projects_text.concat "
1839
+ > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1840
+ > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1841
+ > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1842
+ > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1843
+ output projects_text.cyan
1844
+ output "Deploy effettuato, everything is awesome!\n".green
1845
+
1846
+ if @projects['prima']['name'] != 'master' then
1847
+ output "Lancio il batch job per la visual regression..."
1848
+ launch_bocelli_test(prima_hostname)
1849
+ output "Visual regression lanciata con successo!"
1850
+
1851
+ output "Lancio i test con Lighthouse..."
1852
+ launch_lighthouse_test(prima_hostname, "mobile")
1853
+ launch_lighthouse_test(prima_hostname, "desktop")
1854
+ output "Test con Lighthouse lanciati con successo..."
1855
+ end
1856
+
1857
+ qainit_write_output(projects_text, 'Indirizzi scritti su ')
1858
+ end
1859
+
1860
# Maps a project/stack name onto its QA Route53 hostname.
#
# project - a String that contains one of the known project keywords
#           (e.g. "ecs-task-web-qa-…", "maia-intermediari").
#
# Returns the "<subdomain>-<dns_record_identifier>.qa.colaster.com" hostname,
# or nil when no keyword matches.
def get_route53_hostname(project)
  # Ordered (keyword, subdomain) pairs: the FIRST keyword contained in the
  # project name wins, mirroring the original case/when ordering.
  mappings = [
    ['web', 'www'],
    ['urania', 'urania'],
    ['bburago', 'bburago'],
    ['hal9000', 'hal9000'],
    ['fidaty', 'fidaty'],
    ['peano', 'peano'],
    ['assange', 'assange'],
    ['borat', 'backoffice'],
    ['crash', 'crash'],
    ['ermes', 'ermes'],
    ['activia', 'activia'],
    ['skynet', 'skynet'],
    ['roger', 'roger'],
    ['leftorium', 'leftorium'],
    ['rachele', 'rachele'],
    ['starsky', 'starsky'],
    ['hutch', 'hutch'],
    ['maia-app', 'api'],
    ['maia-intermediari', 'api-intermediari']
  ]
  match = mappings.find { |keyword, _| project.include?(keyword) }
  match && "#{match[1]}-#{@dns_record_identifier}.qa.colaster.com"
end
1903
+
1904
# Resolves the private IP address of the first EC2 instance that belongs to
# the ECSAutoScalingGroup of the given CloudFormation stack.
#
# asg_stack_name - name of the stack that owns the autoscaling group.
#
# Returns the instance's private IPv4 address as a String.
def ec2_ip_address(asg_stack_name)
  asg_resource = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
  asg_name = asg_resource.stack_resource_detail.physical_resource_id
  groups = describe_auto_scaling_groups([asg_name], 1)
  first_instance_id = groups.auto_scaling_groups[0].instances[0].instance_id
  reservations = describe_instances([first_instance_id]).reservations
  reservations[0].instances[0].private_ip_address
end
1911
+
611
1912
  def get_alb_host(stack_name)
612
1913
  case
613
1914
  when stack_name.include?('web')
@@ -656,6 +1957,735 @@ class Release
656
1957
  resp.load_balancers[0].dns_name
657
1958
  end
658
1959
 
1960
# Relaxes the deployment thresholds of the ECS service behind the given
# stack (min healthy 0%, max 100%) so QA rollouts can replace tasks freely.
#
# stack_name - name of the CloudFormation stack that owns the service.
#
# Raises a RuntimeError when the stack name matches no known service.
def update_service_defaults(stack_name)
  # Ordered (keyword, logical id) pairs: the FIRST keyword contained in the
  # stack name wins, exactly like the original case/when chain.
  service_ids = [
    ['web', 'ECSServiceWebQA'],
    ['consumer', 'ECSServiceConsumerQa'],
    ['urania', 'ECSServiceUraniaQA'],
    ['backoffice', 'ECSServiceBackoffice'],
    ['ermes', 'ECSServiceErmesQA'],
    ['bburago', 'ECSServiceBburagoQA'],
    ['hal9000', 'ECSServiceHal9000QA'],
    ['fidaty', 'ECSServiceFidatyQA'],
    ['skynet', 'ECSServiceSkynetQA'],
    ['roger', 'ECSServiceRogerQA'],
    ['activia', 'ECSServiceActiviaQA'],
    ['peano', 'ECSServicePeanoQA'],
    ['rogoreport', 'ECSServiceRogoreport'],
    ['assange', 'ECSServiceAssangeQA'],
    ['borat', 'ECSServiceBorat'],
    ['leftorium', 'ECSServiceLeftoriumQA'],
    ['rachele', 'ECSServiceRacheleQA'],
    ['crash', 'ECSServiceCrashQA'],
    ['starsky', 'ECSServiceStarskyQA'],
    ['hutch', 'ECSServiceHutch'],
    ['maia', 'ECSServiceMaia']
  ]
  _keyword, logical_resource_id = service_ids.find { |keyword, _| stack_name.include?(keyword) }
  raise "Service name non gestito per lo stack #{stack_name}" if logical_resource_id.nil?

  service = describe_stack_resource(stack_name, logical_resource_id)
  update_ecs_service(@ecs_cluster_name,
                     service.stack_resource_detail.physical_resource_id,
                     {minimum_healthy_percent: 0, maximum_percent: 100})
end
2010
+
2011
# Submits an AWS Batch Lighthouse job for the QA environment.
# A Cloudflare CNAME for the QA www hostname is created first, unless
# get_lighthouse_dns already reports one.
#
# url    - content of the CNAME record to create (the QA endpoint).
# device - "mobile" or "desktop", forwarded to the job via DEVICE.
def launch_lighthouse_test(url, device)
  unless get_lighthouse_dns()
    record = {type: 'CNAME', name: "www-#{@dns_record_identifier}", content: url, proxied: true, ttl: 1}
    @cloudflare.post("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", record)
  end

  job_definition_arn = describe_stack_resource('batch-job-lighthouse-production', 'JobDefinition')
                         .stack_resource_detail.physical_resource_id
  environment = [
    {name: "URL_TO_TEST", value: "https://www-#{@dns_record_identifier}.prima.it/?superprima"},
    {name: "DEVICE", value: device},
    {name: "BRANCH_NAME", value: @projects['prima']['name']},
    {name: "COMMITTER_EMAIL", value: @projects['prima']['committer']}
  ]

  @batch.submit_job({
    job_name: "lighthouse-#{device}-#{@dns_record_identifier}",
    job_queue: "tools-production",
    job_definition: job_definition_arn,
    container_overrides: {environment: environment}
  })
end
2040
+
2041
# Looks up the Cloudflare CNAME record used by the Lighthouse jobs.
#
# Returns the record id when one exists, false otherwise.
def get_lighthouse_dns()
  response = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records",
                             {per_page: 100, type: 'CNAME', name: "www-#{@dns_record_identifier}.prima.it"})
  return false unless response.body[:result_info][:count] > 0

  response.body[:result][0][:id]
end
2048
+
2049
# Deletes the Lighthouse CNAME record from Cloudflare, if one exists.
def delete_lighthouse_dns()
  record_id = get_lighthouse_dns()
  return unless record_id

  @cloudflare.delete("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{record_id}")
end
2053
+
2054
# Submits an AWS Batch "bocelli" visual-regression job against the QA host.
#
# url - QA hostname passed to the job via QA_HOSTNAME.
def launch_bocelli_test(url)
  environment = [
    {name: "BATCH_COMMAND", value: "test"},
    {name: "QA_HOSTNAME", value: url},
    {name: "BRANCH_NAME", value: @projects['prima']['name']},
    {name: "COMMITTER_EMAIL", value: @projects['prima']['committer']}
  ]

  job_definition_arn = describe_stack_resource('batch-job-bocelli-production', 'JobDefinition')
                         .stack_resource_detail.physical_resource_id

  @batch.submit_job({
    job_name: "bocelli-test-#{@dns_record_identifier}",
    job_queue: "tools-production",
    job_definition: job_definition_arn,
    container_overrides: {environment: environment}
  })
end
2081
+
2082
# Builds the activia QA release artifact and uploads it to the encrypted
# S3 artifacts bucket.
#
# revision - git revision of activia to check out and build.
#
# Raises a RuntimeError when the build produced no release tarball.
def create_activia_artifact(revision)
  output "Preparo l'artifact activia .zip\n".yellow

  git_checkout_version('activia', revision)

  # Block form of chdir restores the previous working directory even when a
  # build step raises; the original chdir/chdir('../../') pair leaked the cwd.
  Dir.chdir('projects/activia') do
    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'

    # File.exists? is deprecated; File.exist? is the supported predicate.
    if File.exist? 'deploy/build_qa_artifact'
      execute_command "deploy/build_qa_artifact"
    else
      [
        "docker-compose build web",
        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
          '-c' 'mix local.hex --force && mix hex.info && \
          mix deps.get && mix compile && mix deps.compile && \
          cd assets && \
          rm -rf node_modules && \
          yarn --cache-folder ~/.cache/yarn && \
          sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
          cd .. && \
          mix phx.digest && \
          rm -rf _build/qa/rel/ && \
          mix release --env=qa'"
      ].each do |cmd|
        execute_command cmd
      end
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
    # Guard: a failed release left glob empty and the original would have
    # called upload_artifact(nil, ...) — fail loudly instead.
    raise "Artifact activia non trovato in _build/qa/rel/activia/releases" if artifact_path.nil?

    upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
  end
end
2123
+
2124
# Builds the assange QA release artifact and uploads it to the encrypted
# S3 artifacts bucket.
#
# revision - git revision of assange to check out and build.
#
# Raises a RuntimeError when the build produced no release tarball.
def create_assange_artifact(revision)
  output "Preparo l'artifact assange .zip\n".yellow

  git_checkout_version('assange', revision)

  # Block form of chdir restores the previous working directory even when a
  # build step raises; the original chdir/chdir('../../') pair leaked the cwd.
  Dir.chdir('projects/assange') do
    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
    exec_step 'deploy/build_qa_artifact'

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
    # Guard against uploading a nil path when the release step failed silently.
    raise "Artifact assange non trovato in _build/qa/rel/assange/releases" if artifact_path.nil?

    upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
  end
end
2144
+
2145
# Builds the bburago QA release artifact and uploads it to the encrypted
# S3 artifacts bucket.
#
# revision - git revision of bburago to check out and build.
#
# Raises a RuntimeError when the build produced no release tarball.
def create_bburago_artifact(revision)
  output "Preparo l'artifact bburago .zip\n".yellow

  git_checkout_version('bburago', revision)

  # Block form of chdir restores the previous working directory even when a
  # build step raises; the original chdir/chdir('../../') pair leaked the cwd.
  Dir.chdir('projects/bburago') do
    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
    [
      "docker-compose build web",
      "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers

    artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
    # Guard against uploading a nil path when the release step failed silently.
    raise "Artifact bburago non trovato in _build/qa/rel/bburago/releases" if artifact_path.nil?

    upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
  end
end
2170
+
2171
# Builds the borat (backoffice) QA release artifact and uploads it to the
# encrypted S3 artifacts bucket.
#
# revision - git revision of borat to check out and build.
#
# Raises a RuntimeError when the build produced no release tarball.
def create_borat_artifact(revision)
  output "Preparo l'artifact borat .zip\n".yellow

  git_checkout_version('borat', revision)

  # Block form of chdir restores the previous working directory even when a
  # build step raises; the original chdir/chdir('../../') pair leaked the cwd.
  Dir.chdir('projects/borat') do
    decrypt_secrets()

    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'

    # File.exists? is deprecated; File.exist? is the supported predicate.
    if File.exist? 'deploy/build_qa_artifact'
      execute_command "deploy/build_qa_artifact"
    else
      [
        "docker network create borat_network || true",
        "docker-compose build web",
        "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
          '-c' 'mix local.hex --force && mix hex.info && \
          mix deps.get && \
          cd assets && \
          yarn --cache-folder ~/.cache/yarn && \
          sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
          cd ../ && \
          mix phx.digest && \
          mix compile && mix deps.compile && \
          rm -rf _build/qa/rel/ && \
          mix distillery.release --env=qa'"
      ].each do |cmd|
        execute_command cmd
      end
    end

    cleanup_containers

    artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
    # Guard against uploading a nil path when the release step failed silently.
    raise "Artifact borat non trovato in _build/qa/rel/borat/releases" if artifact_path.nil?

    upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
  end
end
2212
+
2213
# Builds and uploads the QA release artifact for the "crash" microservice
# by delegating the build to the project's deploy/build_qa_artifact script.
#
# revision  - git revision (branch name or sha) to build.
# deploy_id - deploy identifier forwarded to the build script.
def create_crash_artifact(revision, deploy_id)
  output "Preparo l'artifact crash .zip\n".yellow

  git_checkout_version('crash', revision)

  Dir.chdir 'projects/crash'

  # NOTE(review): crash_qa_host appears unused below — confirm whether the
  # Route53 lookup is still needed or is leftover code.
  crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')

  decrypt_secrets()

  `mv docker-compose-ci.yml docker-compose.yml`
  exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "deploy/build_qa_artifact #{deploy_id}"

  cleanup_containers

  artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
  upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2236
+
2237
# Builds and uploads the QA release artifact for the "ermes" microservice.
# Ermes needs the crash_default docker network to exist before building,
# and compiles several custom mix tasks (sms/html/heml/app_notification)
# in dev mode before cutting the QA release.
#
# revision - git revision (branch name or sha) to build.
def create_ermes_artifact(revision)
  output "Preparo l'artifact ermes .zip\n".yellow

  git_checkout_version('ermes', revision)

  Dir.chdir 'projects/ermes'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'

  [
    # Create the crash_default network only if it does not exist yet.
    "if echo `docker network ls` | grep crash_default; \
    then echo 'crash_default network already existing'; \
    else docker network create crash_default; fi",
    'docker-compose build web'
  ].each do |cmd|
    execute_command cmd
  end

  # NOTE(review): 'compile.heml' may be a typo for 'compile.html' — confirm
  # the custom mix task actually exists in the ermes project.
  [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    mix phx.digest && \
    MIX_ENV=dev mix compile.sms && \
    MIX_ENV=dev mix compile.html && \
    MIX_ENV=dev mix compile.heml && \
    MIX_ENV=dev mix compile.app_notification && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
  upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2279
+
2280
# Builds and uploads the QA release artifact for the "fidaty" microservice
# (standard Elixir/Phoenix build inside the `web` container).
#
# revision - git revision (branch name or sha) to build.
def create_fidaty_artifact(revision)
  output "Preparo l'artifact fidaty .zip\n".yellow

  git_checkout_version('fidaty', revision)

  Dir.chdir 'projects/fidaty'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
  [
    "docker-compose build web",
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    mix phx.digest && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
  upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2310
+
2311
# Builds and uploads the QA release artifact for the "hal9000" microservice.
# Same shape as the other Elixir builds; note the digest step reads from
# `assets` and writes to priv/static explicitly.
#
# revision - git revision (branch name or sha) to build.
def create_hal9000_artifact(revision)
  output "Preparo l'artifact hal9000 .zip\n".yellow

  git_checkout_version('hal9000', revision)

  Dir.chdir 'projects/hal9000'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
  [
    "docker-compose build web",
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    mix phx.digest assets -o priv/static && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
  upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2341
+
2342
# Builds and uploads the QA artifact for the "hutch" microservice via the
# project's deploy/build_qa_artifact script, passing it the maia-intermediari
# hostname. Unlike the other builders, the uploaded key also embeds the first
# 8 chars of @dns_record_identifier.
#
# revision - git revision (branch name or sha) to build.
def create_hutch_artifact(revision)
  output "Preparo l'artifact hutch\n".yellow

  git_checkout_version('hutch', revision)

  Dir.chdir 'projects/hutch'

  # Skip decryption when the secrets file is already present.
  decrypt_secrets() unless File.exist?('config/secrets.yml')

  exec_step 'cp docker-compose-ci.yml docker-compose.yml'
  exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"

  cleanup_containers

  # The build script leaves the tarball in the project root.
  artifact_path = "./hutch.tar.gz"
  upload_artifact(artifact_path, "microservices/hutch/#{revision}-#{@dns_record_identifier[0..7]}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2363
+
2364
# Builds and uploads the QA release artifact for the "leftorium" microservice
# (standard Elixir build, no asset digest step).
#
# revision - git revision (branch name or sha) to build.
def create_leftorium_artifact(revision)
  output "Preparo l'artifact leftorium .zip\n".yellow

  git_checkout_version('leftorium', revision)

  Dir.chdir 'projects/leftorium'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
  [
    "docker-compose build web",
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
  upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2393
+
2394
# Builds and uploads the QA release artifact for the "maia" microservice via
# the project's own deploy/build_qa_artifact script.
#
# revision - git revision (branch name or sha) to build.
def create_maia_artifact(revision)
  output "Preparo l'artifact maia .zip\n".yellow

  git_checkout_version('maia', revision)

  Dir.chdir 'projects/maia'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command 'deploy/build_qa_artifact'

  cleanup_containers

  artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
  upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2415
+
2416
# Builds and uploads the QA release artifact for the "peano" microservice via
# the project's own deploy/build_qa_artifact script.
#
# revision - git revision (branch name or sha) to build.
def create_peano_artifact(revision)
  output "Preparo l'artifact peano .zip\n".yellow

  git_checkout_version('peano', revision)

  Dir.chdir 'projects/peano'

  # Skip decryption when the secrets file is already present.
  decrypt_secrets() unless File.exist?('config/secrets.yml')

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "deploy/build_qa_artifact"

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
  upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2437
+
2438
# Builds the QA artifact for the main "prima" application.
# Reuses the host checkout's vendor/ directory when available to speed up the
# build, rewrites the pyxis-npm dependency to the freshly published QA version
# when pyxis is part of the deploy, then delegates to bin/qa_build_artifact.sh.
#
# revision    - git revision (branch name or sha) to build.
# branch_name - branch name forwarded to the build script.
# deploy_id   - kept for call-site compatibility; not used here.
def create_prima_artifact(revision, branch_name, deploy_id)
  output "Preparo l'artifact prima .zip\n".yellow

  git_checkout_version('prima', revision)

  Dir.chdir 'projects/prima'

  # Copy the vendor dir from the sibling checkout if this one lacks it.
  ['vendor'].each do |dir|
    unless File.directory?(dir)
      if File.directory?("../../../prima/#{dir}")
        exec_step "rsync -a ../../../prima/#{dir} ."
      end
    end
  end

  exec_step 'mv docker-compose-ci.yml docker-compose.yml'
  exec_step 'prepare-docker-compose --directory prima'
  exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
  # BUGFIX: the pattern contains '/', so it cannot also be the s-command
  # delimiter (sed would abort with "unknown option to `s'"); use '|' instead.
  `sed -i 's|"@prima-assicurazioni/pyxis-npm": ".*",|"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",|' package.json` if deploy_pyxis?
  [
    "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  Dir.chdir "../../"
end
2467
+
2468
# Publishes a QA version of the pyxis-npm package when pyxis is part of this
# deploy. The QA version is "0.<digits-from-deploy-id>.<n>" where n counts
# previously published QA versions with the same middle segment, so repeated
# deploys of the same id get unique versions. Sets @pyxis_version as a side
# effect (read later by create_prima_artifact). No-op when deploy_pyxis? is
# false.
#
# revision  - git revision (branch name or sha) of pyxis-npm to build.
# deploy_id - deploy identifier used to derive the version number.
def create_pyxis_artifact(revision, deploy_id)
  if (deploy_pyxis?)
    output "Preparo l'artifact pyxis\n".yellow

    git_checkout_version('pyxis-npm', revision)

    Dir.chdir 'projects/pyxis-npm'

    decrypt_secrets()

    exec_step 'mv .fakenpmrc .npmrc'
    exec_step 'cp docker-compose.yml docker-compose-ci.yml'
    exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
    exec_step 'docker-compose build web'

    # Published versions can only be queried from inside the container, so
    # write them to a file and read it back right after.
    # NOTE(review): 'npm view prima-assicurazioni/pyxis-npm' lacks the '@'
    # scope prefix used elsewhere ("@prima-assicurazioni/pyxis-npm") — verify
    # the package name.
    exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
    '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'"
    published_versions = `cat versions.json`
    qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }

    @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"

    # Rewrite the "version" field on line 3 of package.json before publishing.
    `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
    [
      "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
      '-c' 'yarn install && \
      yarn build:prod && \
      npm publish'"
    ].each do |cmd|
      execute_command cmd
    end

    cleanup_containers
    Dir.chdir '../../'
  end
end
2504
+
2505
# Builds and uploads the QA release artifact for the "rachele" microservice
# (standard Elixir build, no asset digest step).
#
# revision - git revision (branch name or sha) to build.
def create_rachele_artifact(revision)
  output "Preparo l'artifact rachele .zip\n".yellow

  git_checkout_version('rachele', revision)

  Dir.chdir 'projects/rachele'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "docker-compose build web"

  [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
  upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2535
+
2536
# Builds and uploads the QA release artifact for the "roger" microservice.
# Uses distillery.release (unlike most siblings which call `mix release`).
#
# revision - git revision (branch name or sha) to build.
def create_roger_artifact(revision)
  output "Preparo l'artifact roger .zip\n".yellow

  git_checkout_version('roger', revision)

  Dir.chdir 'projects/roger'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
  [
    "docker-compose build web",
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    mix phx.digest && \
    rm -rf _build/qa/rel/ && \
    mix distillery.release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
  upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2566
+
2567
# Builds and uploads the QA release artifact for the "rogoreport" microservice.
# Note the upload key is "rogoreport-#{revision}" (doubled prefix), unlike the
# sibling builders.
#
# revision - git revision (branch name or sha) to build.
def create_rogoreport_artifact(revision)
  output "Preparo l'artifact rogoreport .zip\n".yellow

  git_checkout_version('rogoreport', revision)

  Dir.chdir 'projects/rogoreport'

  # Skip decryption when the secrets file is already present.
  decrypt_secrets() unless File.exist?('config/secrets.yml')

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
  [
    "docker-compose build web",
    "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    rm -rf _build/qa/rel/ && \
    mix release --name=rogoreport --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
  upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2596
+
2597
# Builds and uploads the QA artifact for the "skynet" Python service.
# No docker build here: the sources are simply tarred up and uploaded.
#
# revision - git revision (branch name or sha) to build.
def create_skynet_artifact(revision)
  output "Preparo l'artifact skynet\n".yellow

  git_checkout_version('skynet', revision)

  Dir.chdir 'projects/skynet'

  # (removed unused `version = \`git rev-parse HEAD\`` local)
  artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"

  exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"

  upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2614
+
2615
# Builds and uploads the QA artifact for the "starsky" Rust service.
# Compiles the main binary plus the migrate and rabbit_worker binaries inside
# the web container and tars them together with the config and .env templates.
#
# revision - git revision (branch name or sha) to build.
def create_starsky_artifact(revision)
  output "Preparo l'artifact starsky\n".yellow

  git_checkout_version('starsky', revision)

  Dir.chdir 'projects/starsky'

  # Skip decryption when the secrets file is already present.
  decrypt_secrets() unless File.exist?('config/secrets.yml')

  `mv docker-compose-ci.yml docker-compose.yml`
  exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
  # BUGFIX: without -i sed only prints to stdout and .env.dist.qa is copied
  # unmodified below; edit the file in place so the deploy id substitution
  # actually lands in .env.
  exec_step "sed -i s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
  exec_step "cp .env.dist.qa .env"

  [
    "sed -i 's/USER app/USER root/g' Dockerfile",
    # Create the peano_default network only if it does not exist yet.
    "if echo `docker network ls` | grep peano_default; \
    then echo 'peano_default network already existing'; \
    else docker network create peano_default; fi",
    "docker-compose build web",
    "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
    '-c' 'cargo build --release -vv --features=qa \
    && cargo build --bin migrate --release --features=qa \
    && cargo build --bin rabbit_worker --release --features=qa \
    && cp -p target/release/starsky . \
    && cp -p target/release/migrate . \
    && cp -p target/release/rabbit_worker . \
    && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
  ].each do |cmd|
    execute_command cmd
  end

  artifact_path = "./#{revision}-qa.tar.gz"

  upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2657
+
2658
# Builds and uploads the QA release artifact for the "urania" microservice
# (standard Elixir build, no asset digest step).
#
# revision - git revision (branch name or sha) to build.
def create_urania_artifact(revision)
  output "Preparo l'artifact urania .zip\n".yellow

  git_checkout_version('urania', revision)

  Dir.chdir 'projects/urania'

  decrypt_secrets()

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'

  execute_command "docker-compose build web"

  [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
    '-c' 'mix local.hex --force && mix hex.info && \
    mix deps.get && mix compile && mix deps.compile && \
    rm -rf _build/qa/rel/ && \
    mix release --env=qa'"
  ].each do |cmd|
    execute_command cmd
  end

  cleanup_containers

  artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
  upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
end
2688
+
659
2689
  def deploy_pyxis?
660
2690
  if defined? @deploy_pyxis
661
2691
  @deploy_pyxis
@@ -669,11 +2699,145 @@ class Release
669
2699
  end
670
2700
  end
671
2701
 
2702
# Whether crash should be part of the deploy. Currently hard-wired to true:
# leftorium always needs to talk to crash, so crash is always deployed until
# a better policy is agreed upon. The intended branch-based check is kept
# below for reference.
def deploy_crash?
  # crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
  # leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
  # crash_present || leftorium_present
  true
end
2708
+
2709
# Whether starsky and/or hutch should be part of the deploy: true when either
# project has a selected branch that is neither 'master' nor its default.
def deploy_starsky_hutch?
  on_feature_branch = lambda do |project|
    return false if @projects[project].nil? || @projects[project].empty?

    @projects[project]['name'] != 'master' && !@projects[project]['default_branch']
  end
  on_feature_branch.call('starsky') || on_feature_branch.call('hutch')
end
2714
+
2715
# Derives a numeric version segment from a deploy id by stripping lowercase
# letters, zeroes and square brackets (the same character set the original
# String#delete('[a-z0]') removes), keeping at most the first 10 characters.
def get_pyxis_version(deploy_id)
  stripped = deploy_id.gsub(/[\[\]a-z0]/, '')
  stripped[0..9]
end
2718
+
2719
# Tears down the current docker-compose project (containers, volumes and
# orphans), then removes any leftover containers in "exited" state.
# Failures are intentionally ignored (backticks, exit status unchecked).
def cleanup_containers
  %x(docker-compose kill && docker-compose down -v --remove-orphans)
  %x(docker rm $(docker ps -q -f status=exited))
end
2723
+
2724
# Checks out +revision+ in the given project's working copy, discarding any
# local modifications first. Assumes the current directory is the repo root;
# restores it by climbing two levels back up (no chdir-block so a failure in
# exec_step leaves the cwd inside the project — matches callers' expectations).
def git_checkout_version(project, revision)
  Dir.chdir "projects/#{project}"
  exec_step "git checkout -- . && git checkout #{revision}"
  Dir.chdir "../../"
end
2729
+
2730
# Creates the QA ECS autoscaling-group CloudFormation stack.
# The instance type is fixed to t3.large; cluster name and AMI come from
# instance state populated earlier in the deploy flow.
#
# stack_name - name of the CloudFormation stack to create.
# tags       - optional list of stack tags.
def create_asg_stack(stack_name, tags = [])
  template_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
  parameters = {
    "Environment" => "qa",
    "InstanceType" => "t3.large",
    "ECSClusterName" => @ecs_cluster_name,
    "AMIID" => @ami_id
  }.map do |key, value|
    { parameter_key: key, parameter_value: value }
  end
  create_stack(stack_name, template_body, parameters, tags, @cf_role)
end
2752
+
2753
# Creates the ECS cluster CloudFormation stack; the template needs no
# parameters, only the optional tags.
def create_cluster_stack(stack_name, tags = [])
  template_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
  create_stack(stack_name, template_body, [], tags)
end
2757
+
672
2758
# Updates an existing ECS cluster CloudFormation stack with the current
# template; no parameters beyond the optional tags.
def update_cluster_stack(stack_name, tags = [])
  template_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
  update_stack(stack_name, template_body, [], tags)
end
676
2762
 
2763
# Creates a public ALB CloudFormation stack for a QA environment.
#
# stack_name  - name of the CloudFormation stack to create.
# role        - logical role of the balanced service (template parameter).
# hash        - environment hash identifying this QA instance.
# environment - environment name, defaults to 'qa'.
def create_alb_stack(stack_name, role, hash, environment = 'qa')
  template_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
  parameters = {
    "Environment" => environment,
    "Role" => role,
    "EnvHash" => hash
  }.map do |key, value|
    { parameter_key: key, parameter_value: value }
  end
  create_stack(stack_name, template_body, parameters, [], @cf_role)
end
2781
+
2782
# Copies selected Redis keys (CODICI* and fun_with_flags*) from the staging
# Redis into the QA box's Redis, using DUMP/RESTORE. Keys already present on
# the QA side are left untouched. Both connections use database 10.
#
# qa_ip_address - IP of the QA host running Redis on the default port.
def import_redis_crash(qa_ip_address)
  output "Importo chiavi di Redis da staging\n".yellow

  prefixes = ['CODICI', 'fun_with_flags']
  redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
  redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')

  prefixes.each do |prefix|
    redis_staging.keys("#{prefix}*").each do |key|
      # Never overwrite a key that already exists on QA.
      next unless redis_qa.keys(key).empty?
      output "Importo #{key} dal Redis di staging\n".yellow
      dump_staging = redis_staging.dump key
      # RESTORE with ttl 0 = no expiry on the imported key.
      redis_qa.restore key, 0, dump_staging
    end
  end
end
2798
+
2799
# Launches the DB-restore ECS task against the QA cluster, overriding the
# dbrestore container's environment with the target EC2 host's IP.
# Returns the run_ecs_task response (used by wait_for_db_import).
#
# ip_address - IP of the EC2 instance whose databases must be restored.
def import_dbs(ip_address)
  environment_override = [
    {
      name: 'EC2_IP_ADDRESS',
      value: ip_address
    }
  ]
  overrides = {
    container_overrides: [
      {
        name: 'dbrestore',
        environment: environment_override
      }
    ]
  }
  run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
end
2816
+
2817
# Polls ECS until the DB-import task reaches a stopped state, printing a dot
# per second while waiting. Aborts via stop_if when ECS returns no task at all.
#
# task - the describe/run response for the import task (must expose .tasks).
def wait_for_db_import(task)
  output "Attendo che i DB vengano importati...\n".yellow
  stopped_at = nil
  sleep 15 # otherwise the freshly started task is not yet visible to describe
  while stopped_at.nil?
    if task.tasks[0].nil?
      pp @ecs_cluster_name
      pp task
      stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
    end
    task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
    stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
    # Wait up to 10s between polls; the redundant `while true && ...` of the
    # original is folded into a single loop condition.
    sleep_seconds = 10
    seconds_elapsed = 0
    while stopped_at.nil? && seconds_elapsed < sleep_seconds
      print '.'.yellow; STDOUT.flush
      sleep 1
      seconds_elapsed += 1
    end
  end
  print "\n"
end
2840
+
677
2841
  def choose_branch_to_deploy(project_name, select_master = false)
678
2842
  Dir.chdir "projects/#{project_name}"
679
2843
  output "Recupero la lista dei branch del progetto #{project_name}..."
@@ -761,6 +2925,70 @@ class Release
761
2925
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
762
2926
  end
763
2927
 
2928
# Submits the "marley" job to the AWS Batch tools-production queue, pointing
# it at this QA environment. The job definition ARN is read from the
# batch-job-marley CloudFormation stack's JobDefinition resource.
#
# ip_address     - QA host IP, exported to the job as PRIMA_IP.
# prima_hostname - public hostname of the prima app (PRIMA_URL).
# borat_hostname - public hostname of the borat backoffice (BACKOFFICE_URL).
def launch_marley(ip_address, prima_hostname, borat_hostname)
  resp = describe_stack_resource('batch-job-marley', 'JobDefinition')

  @batch.submit_job({
    job_name: "marley-#{@dns_record_identifier}", # required
    job_queue: "tools-production", # required
    job_definition: resp.stack_resource_detail.physical_resource_id, # required
    container_overrides: {
      environment: [
        {
          name: 'PRIMA_URL',
          value: "https://#{prima_hostname}/?superprima"
        },
        {
          name: 'PRIMA_IP',
          value: ip_address
        },
        {
          # Full branch selection, serialized for the job to introspect.
          name: 'PROJECTS_JSON',
          value: @projects.to_json
        },
        {
          name: 'BACKOFFICE_URL',
          value: "https://#{borat_hostname}"
        }
      ]
    }
  })

  output "Marley lanciato con successo!\n".green
end
2959
+
2960
# Reads the "ReleaseVersion" parameter from the given CloudFormation stack.
# Returns its value, or nil when the stack has no such parameter.
def get_currently_deployed_version(stack_name)
  release_parameter = get_stack_parameters(stack_name).find do |parameter|
    parameter.parameter_key == "ReleaseVersion"
  end
  release_parameter && release_parameter.parameter_value
end
2970
+
2971
# Decrypts/populates the current project's config secrets by running the
# biscuit helper image against the working directory, sharing the host
# container's volumes and the local AWS credentials.
def decrypt_secrets()
  image = "prima/biscuit_populate_configs"
  execute_command "docker pull #{image}"
  execute_command "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{image}"
end
2980
+
2981
# Returns (and memoizes) the docker container name of the container we are
# currently running inside, resolved by grepping `docker ps` for this host's
# hostname. The extra execute_command call echoes the lookup for the logs,
# exactly as the original did.
def get_host_container_name()
  return @host_container_name if @host_container_name

  hostname = `cat /etc/hostname`.gsub("\n", '')
  execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
  @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
end
2991
+
764
2992
  def select_branches(project_names = nil)
765
2993
  output "Deploy feature menu"
766
2994
  if project_names.nil?
@@ -774,6 +3002,14 @@ class Release
774
3002
  end
775
3003
  end
776
3004
  end
3005
+
3006
# Returns the value of the "AMIID" parameter of the given CloudFormation
# stack, or nil when the stack has no such parameter.
# (The original fell through and returned the `.each` result — the whole
# parameters array — when AMIID was absent; callers relying on the found
# value are unaffected, while the not-found case now returns nil.)
def get_ami_id(stack_name)
  ami_parameter = get_stack_parameters(stack_name).find do |param|
    param.parameter_key == "AMIID"
  end
  ami_parameter && ami_parameter.parameter_value
end
777
3013
  end
778
3014
 
779
3015
  def help_content