prima-twig 0.59.7 → 0.59.123

data/bin/twig-feature CHANGED
@@ -22,18 +22,9 @@ class Release
  exec "twig feature #{ARGV.join ' '}"
  end
  end
- @batch = Aws::Batch::Client.new
- @s3 = Aws::S3::Client.new
- @s3_bucket = 'prima-artifacts'
- @artifact_path = '/tmp/prima-artifact.zip'
- @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-1BXH13XEVLPP0:1'
- @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
  @dns_record_identifier = nil
  @ecs_cluster_name = nil
  @deploy_update = false
- @qainit = false
- @qainit_host_folder = "/var/ci/#{ENV['DRONE_REPO']}/#{ENV['DRONE_BRANCH']}/#{ENV['DRONE_BUILD_NUMBER']}"
- @qainit_folder = "/drone/src/github.com/project/primait/qainit"
  @projects = {
  'prima' => {},
  'urania' => {},
@@ -42,7 +33,7 @@ class Release
  'hal9000' => {},
  'fidaty' => {},
  'peano' => {},
- 'rogoreport' => {},
+ # 'rogoreport' => {},
  'assange' => {},
  'borat' => {},
  'crash' => {},
@@ -59,6 +50,7 @@ class Release
  @base_stack_name_alb = 'ecs-alb-http-public-qa-'
  @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
  @git_branch = ''
+ @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
  end

  def execute!(args)
@@ -73,8 +65,6 @@ class Release
  qainit_deploy_shutdown!
  elsif 'update' == args[1]
  qainit_deploy_update!
- elsif 'read' == args[1]
- qainit_read_config! args[2]
  else
  if args[1]
  select_branches(args[1..-1])
@@ -91,19 +81,8 @@ class Release
  end
  when 'deploy'
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
- if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
- deploy_shutdown!
- elsif 'update' == args[1]
- deploy_update!
- elsif 'lock' == args[1]
+ if 'lock' == args[1]
  deploy_lock!
- else
- if args[1]
- select_branches(args[1])
- else
- select_branches
- end
- deploy_feature!
  end
  when 'aggregator'
  if 'enable' == args[1]
@@ -139,7 +118,6 @@ class Release
  output 'Disable aggregator'

  output "Recupero le informazioni relative al puntamento dei record DNS..."
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
  output "Recupero le informazioni sui QA attivi..."
  stack_list, envs = get_stacks()

@@ -151,7 +129,7 @@ class Release
  end.is_a?(Aws::CloudFormation::Types::Tag)
  aggregator_enabled
  end[0]
- dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
  change_hostname_priority(env_hash, hostname_pattern_priority())
  dns_to_staging(env_hash)
@@ -167,8 +145,7 @@ class Release
  output 'Enable aggregator'

  output 'Recupero le informazioni relative al puntamento dei record DNS...'
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
- dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
+ dns_records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red

  output "Recupero le informazioni sui QA attivi..."
@@ -198,7 +175,7 @@ class Release
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
+ @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
  end
  end

@@ -257,12 +234,11 @@ class Release

  def dns_to_staging(env_hash)
  output "Recupero le informazioni relative al puntamento dei record DNS..."
- cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
- dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
+ @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
  end
  end
  end
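Note: this release drops the per-method Rubyflare connections built from `@prima.config` in favour of the single `@cloudflare` client created in the constructor from the `CLOUDFLARE_EMAIL` and `CLOUDFLARE_APIKEY` environment variables. A minimal sketch of the resulting usage pattern, using only the rubyflare calls that already appear in this diff (`connect_with`, `get`, `put`); the zone id and record filter are copied from the code above, while `qa_alb_host` is a placeholder for the value returned by `get_alb_host(...)`:

require 'rubyflare'

cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])

zone        = '1fb634f19c43dfb0162cc4cb91915da2'          # zone id used throughout this file
qa_alb_host = 'example-qa-alb.eu-west-1.elb.amazonaws.com' # placeholder for get_alb_host(...)

# Find the *-*-staging.prima.it CNAMEs currently pointing at the QA load balancer...
records = cloudflare.get("zones/#{zone}/dns_records",
                         { per_page: 100, type: 'CNAME', content: qa_alb_host })

# ...and point each of them back to staging, as dns_to_staging does.
records.body[:result].each do |dns|
  next unless dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  cloudflare.put("zones/#{zone}/dns_records/#{dns[:id]}",
                 { type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1 })
end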
@@ -476,8 +452,10 @@ class Release

  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

+ update_drone_yml!
+
  `git add projects && \
- git add branch_names && \
+ git add branch_names .drone.yml && \
  git commit -m '#{branch_name}' && \
  git push -f --set-upstream origin #{branch_name} && \
  git checkout master`
@@ -521,7 +499,10 @@ class Release

  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

- `git commit -am 'update'`
+ update_drone_yml!
+
+ `git add branch_names .drone.yml`
+ `git commit -m 'update'`
  `git push && git checkout master`
  end

@@ -596,11 +577,9 @@ class Release
  delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
  delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
  `git checkout master && git push origin --delete ${DRONE_BRANCH}`
+ output "Cancello il record DNS utilizzato da Lighthouse"
+ delete_lighthouse_dns()
  output "Finito!".green
-
- if @qainit
- qainit_write_output("QA #{ENV["DRONE_BRANCH"]} spento.", 'Indirizzi scritti su ')
- end
  end

  def qainit_write_output(file_message, output_message)
@@ -610,41 +589,16 @@ class Release
  output "#{output_message} #{qa_file_name}".green
  end

- def qainit_read_config!(action)
- projects = ''
-
- File.open('branch_names', 'r') do |file|
- file.each_line do |line|
- projects = JSON.parse(line)
- end
- end
-
- projects.each do |key, project|
- @projects[key] = project
+ def update_drone_yml!()
+ drone_yml = File.read('.drone.yml')
+ @projects.each do |key, project|
+ drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
  end
-
- get_s3_config_files
- @qainit = true
- case action
- when 'shutdown'
- output 'Shutting down'.green
- qainit_drone_shutdown!
- else
- output 'Starting standard deploy'.green
- deploy_feature!
+ File.open(".drone.yml", "w") do |f|
+ f.write(drone_yml)
  end
  end

- def get_s3_config_files
- # manteniamo la struttura per lanciarlo facilmente anche da locale
- `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
- @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
- end
-
  def get_deploy_id
  if @deploy_id
  @deploy_id
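Note: the new `update_drone_yml!` above pins each project's revision inside `.drone.yml` by rewriting every `<project>@<revision>` occurrence with a regex keyed on the project name. A minimal sketch of that substitution on a hypothetical `.drone.yml` fragment (the file contents, registry name and revisions below are invented for illustration; only the `<project>@<revision>` convention matters):

# Sketch of the revision-pinning substitution performed by update_drone_yml!
projects = {
  'urania' => { 'revision' => 'abc1234' },
  'peano'  => { 'revision' => 'def5678' }
}

drone_yml = <<~YML
  pipeline:
    build:
      image: registry.example.com/urania@0000000
      commands:
        - ./deploy registry.example.com/peano@0000000
YML

projects.each do |key, project|
  # Replaces everything after "<key>@" up to the end of the line with the pinned revision.
  drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
end

puts drone_yml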
@@ -654,1246 +608,6 @@ class Release
654
608
  end
655
609
  end
656
610
 
657
- def deploy_feature!
658
- `git pull && git submodule init && git submodule update`
659
- @ami_id = get_ami_id("ecs-fleet-allinone-staging")
660
- deploy_id = get_deploy_id
661
- stack_name_alb = 'ecs-alb-http-public-qa-' + deploy_id[0..5]
662
- stack_name_alb_ws = 'ecs-alb-ws-public-qa-' + deploy_id[0..5]
663
- unless @qainit
664
- @projects.each_key do |project_key|
665
- if @projects[project_key]['revision']
666
- git_checkout_version(project_key, @projects[project_key]['revision'])
667
- end
668
- end
669
- end
670
- @dns_record_identifier = deploy_id
671
- @git_branch = ENV['DRONE_BRANCH']
672
- hostname_pattern_priority = hostname_pattern_priority()
673
- tags = [
674
- {
675
- key: "qainit",
676
- value: @git_branch
677
- },
678
- {
679
- key: "hostname_pattern_priority",
680
- value: hostname_pattern_priority
681
- }
682
- ]
683
- @projects.each do |key, value|
684
- case key.to_s
685
- when 'crash'
686
- tags << { key: 'crash', value: @projects['crash']['name'] } if deploy_crash?
687
- when 'starsky', 'hutch'
688
- tags << { key: key.to_s, value: @projects[key.to_s]['name'] } if deploy_starsky_hutch?
689
- else
690
- tags << { key: key, value: value['name'] }
691
- end
692
- end
693
-
694
- cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
695
-
696
- if stack_exists?(cluster_stack_name)
697
- tags = get_stack_tags(cluster_stack_name)
698
- hostname_pattern_priority = tags.detect do |tag|
699
- tag.key == 'hostname_pattern_priority'
700
- end.value
701
- end
702
-
703
- create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
704
- wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
705
-
706
- create_alb_stack(stack_name_alb, "http", deploy_id) unless stack_exists?(stack_name_alb)
707
- create_alb_stack(stack_name_alb_ws, "websocket", deploy_id) unless stack_exists?(stack_name_alb_ws)
708
-
709
- resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
710
- @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
711
-
712
- asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
713
- create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
714
-
715
- stack_name_db = "ecs-task-db-qa-#{deploy_id}"
716
- stack_body = IO.read('cloudformation/stacks/task/db.yml')
717
- parameters = [
718
- {
719
- parameter_key: "Environment",
720
- parameter_value: "qa"
721
- },
722
- {
723
- parameter_key: "ECSClusterName",
724
- parameter_value: @ecs_cluster_name
725
- }
726
- ]
727
- create_stack(stack_name_db, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_db) # creazione asincrona stack dei db (~4 min)
728
-
729
- output "check pyxis \n".yellow
730
-
731
- create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id) unless @projects["pyxis-npm"].nil? # deve essere creato prima di quello di prima, per avere la versione
732
- create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
733
- # l'artefatto di prima viene creato sempre (puntamenti all'ambiente compilati nel js) e richiede molto più di 4 minuti
734
- wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db) # dovrebbe essere istantaneo
735
- db_task = ''
736
- db_task = import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # import asincrono dei dati
737
-
738
- create_crash_artifact(@projects['crash']['revision'], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash']['revision']}-qa.tar.gz")
739
- create_urania_artifact(@projects["urania"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"]['revision']}-qa.tar.gz")
740
- create_roger_artifact(@projects["roger"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/roger/#{@projects["roger"]['revision']}-qa.tar.gz")
741
- create_ermes_artifact(@projects["ermes"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"]['revision']}-qa.tar.gz")
742
- create_bburago_artifact(@projects["bburago"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"]['revision']}-qa.tar.gz")
743
- create_hal9000_artifact(@projects["hal9000"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"]['revision']}-qa.tar.gz")
744
- create_rachele_artifact(@projects["rachele"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rachele/#{@projects["rachele"]['revision']}-qa.tar.gz")
745
- create_fidaty_artifact(@projects["fidaty"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"]['revision']}-qa.tar.gz")
746
- create_peano_artifact(@projects["peano"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"]['revision']}-qa.tar.gz")
747
- create_rogoreport_artifact(@projects["rogoreport"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport-#{@projects["rogoreport"]['revision']}-qa.tar.gz")
748
- create_assange_artifact(@projects["assange"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"]['revision']}-qa.tar.gz")
749
- create_borat_artifact(@projects["borat"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"]['revision']}-qa.tar.gz")
750
- create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
751
- create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
752
- create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
753
- create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
754
- create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
755
- create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-qa.tar.gz")
756
-
757
-
758
- wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo
759
-
760
- import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
761
-
762
- wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
763
- wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
764
-
765
- stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
766
- stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
767
- parameters = [
768
- {
769
- parameter_key: "DnsRecordIdentifier",
770
- parameter_value: @dns_record_identifier
771
- },
772
- {
773
- parameter_key: "PrimaElbHostname",
774
- parameter_value: get_alb_host(stack_name_alb)
775
- },
776
- {
777
- parameter_key: "UraniaIp",
778
- parameter_value: ec2_ip_address(asg_stack_name)
779
- },
780
- {
781
- parameter_key: "BburagoIp",
782
- parameter_value: ec2_ip_address(asg_stack_name)
783
- },
784
- {
785
- parameter_key: "Hal9000Ip",
786
- parameter_value: ec2_ip_address(asg_stack_name)
787
- },
788
- {
789
- parameter_key: "FidatyIp",
790
- parameter_value: ec2_ip_address(asg_stack_name)
791
- },
792
- {
793
- parameter_key: "PeanoIp",
794
- parameter_value: ec2_ip_address(asg_stack_name)
795
- },
796
- {
797
- parameter_key: "ErmesIp",
798
- parameter_value: ec2_ip_address(asg_stack_name)
799
- },
800
- {
801
- parameter_key: "ActiviaIp",
802
- parameter_value: ec2_ip_address(asg_stack_name)
803
- },
804
- {
805
- parameter_key: "SkynetIp",
806
- parameter_value: ec2_ip_address(asg_stack_name)
807
- },
808
- {
809
- parameter_key: "RogerIp",
810
- parameter_value: ec2_ip_address(asg_stack_name)
811
- },
812
- {
813
- parameter_key: "LeftoriumIp",
814
- parameter_value: ec2_ip_address(asg_stack_name)
815
- },
816
- {
817
- parameter_key: "RacheleIp",
818
- parameter_value: ec2_ip_address(asg_stack_name)
819
- },
820
- {
821
- parameter_key: "RedisIp",
822
- parameter_value: ec2_ip_address(asg_stack_name)
823
- },
824
- {
825
- parameter_key: "AssangeElbHostname",
826
- parameter_value: get_alb_host(stack_name_alb)
827
- },
828
- {
829
- parameter_key: "BoratElbHostname",
830
- parameter_value: get_alb_host(stack_name_alb_ws)
831
- },
832
- {
833
- parameter_key: 'CrashElbHostname',
834
- parameter_value: get_alb_host(stack_name_alb_ws)
835
- },
836
- {
837
- parameter_key: 'StarskyElbHostname',
838
- parameter_value: get_alb_host(stack_name_alb)
839
- },
840
- {
841
- parameter_key: 'HutchElbHostname',
842
- parameter_value: get_alb_host(stack_name_alb)
843
- },
844
- {
845
- parameter_key: 'MaiaElbHostname',
846
- parameter_value: get_alb_host(stack_name_alb)
847
- }
848
- ]
849
-
850
- create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
851
- wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
852
-
853
- stack_name_skynet = "ecs-task-skynet-qa-#{deploy_id}"
854
- git_checkout_version('skynet', @projects["skynet"]['revision'])
855
- stack_body = File.read('projects/skynet/deploy/task.yml')
856
- parameters = [
857
- {
858
- parameter_key: "Environment",
859
- parameter_value: "qa"
860
- },
861
- {
862
- parameter_key: "ReleaseVersion",
863
- parameter_value: @projects["skynet"]['revision']
864
- },
865
- {
866
- parameter_key: "TaskDesiredCount",
867
- parameter_value: "1"
868
- },
869
- {
870
- parameter_key: "ECSClusterName",
871
- parameter_value: @ecs_cluster_name
872
- },
873
- {
874
- parameter_key: "HostnamePattern",
875
- parameter_value: "skynet-#{@dns_record_identifier}.qa.colaster.com"
876
- },
877
- {
878
- parameter_key: "HostnamePatternPriority",
879
- parameter_value: hostname_pattern_priority
880
- }
881
- ]
882
- if stack_exists?(stack_name_skynet)
883
- cur_version = get_currently_deployed_version(stack_name_skynet)
884
- update_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["skynet"]['revision'])
885
- else
886
- create_stack(stack_name_skynet, stack_body, parameters, tags, @cf_role)
887
- end
888
-
889
- stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
890
- git_checkout_version('urania', @projects["urania"]['revision'])
891
- stack_body = File.read('projects/urania/deploy/task.yml')
892
- parameters = [
893
- {
894
- parameter_key: "Environment",
895
- parameter_value: "qa"
896
- },
897
- {
898
- parameter_key: "ReleaseVersion",
899
- parameter_value: @projects["urania"]['revision']
900
- },
901
- {
902
- parameter_key: "TaskDesiredCount",
903
- parameter_value: "1"
904
- },
905
- {
906
- parameter_key: "ECSClusterName",
907
- parameter_value: @ecs_cluster_name
908
- },
909
- {
910
- parameter_key: "HostnamePattern",
911
- parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
912
- },
913
- {
914
- parameter_key: "HostnamePatternPriority",
915
- parameter_value: hostname_pattern_priority
916
- }
917
- ]
918
- if stack_exists?(stack_name_urania)
919
- cur_version = get_currently_deployed_version(stack_name_urania)
920
- update_stack(stack_name_urania, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["urania"]['revision'])
921
- else
922
- create_stack(stack_name_urania, stack_body, parameters, tags, @cf_role)
923
- end
924
-
925
- stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
926
- git_checkout_version('ermes', @projects["ermes"]['revision'])
927
- stack_body = File.read('projects/ermes/deploy/task.yml')
928
- parameters = [
929
- {
930
- parameter_key: "Environment",
931
- parameter_value: "qa"
932
- },
933
- {
934
- parameter_key: "ReleaseVersion",
935
- parameter_value: "#{@projects['ermes']['revision']}"
936
- },
937
- {
938
- parameter_key: "TaskDesiredCount",
939
- parameter_value: "1"
940
- },
941
- {
942
- parameter_key: "ECSClusterName",
943
- parameter_value: @ecs_cluster_name
944
- },
945
- {
946
- parameter_key: "HostnamePattern",
947
- parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
948
- },
949
- {
950
- parameter_key: "HostnamePatternPriority",
951
- parameter_value: hostname_pattern_priority
952
- },
953
- {
954
- parameter_key: "WebHost",
955
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
956
- },
957
- {
958
- parameter_key: "PeanoHost",
959
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
960
- }
961
- ]
962
- if stack_exists?(stack_name_ermes)
963
- cur_version = get_currently_deployed_version(stack_name_ermes)
964
- update_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["ermes"]['revision'])
965
- else
966
- create_stack(stack_name_ermes, stack_body, parameters, tags, @cf_role)
967
- end
968
-
969
- stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
970
- git_checkout_version('bburago', @projects["bburago"]['revision'])
971
- stack_body = File.read('projects/bburago/deploy/task.yml')
972
- parameters = [
973
- {
974
- parameter_key: "Environment",
975
- parameter_value: "qa"
976
- },
977
- {
978
- parameter_key: "ReleaseVersion",
979
- parameter_value: @projects["bburago"]['revision']
980
- },
981
- {
982
- parameter_key: "ECSClusterName",
983
- parameter_value: @ecs_cluster_name
984
- },
985
- {
986
- parameter_key: "TaskDesiredCount",
987
- parameter_value: "1"
988
- },
989
- {
990
- parameter_key: "HostnamePattern",
991
- parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
992
- },
993
- {
994
- parameter_key: "HostnamePatternPriority",
995
- parameter_value: hostname_pattern_priority
996
- }
997
- ]
998
- if stack_exists?(stack_name_bburago)
999
- cur_version = get_currently_deployed_version(stack_name_bburago)
1000
- update_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["bburago"]['revision'])
1001
- else
1002
- create_stack(stack_name_bburago, stack_body, parameters, tags, @cf_role)
1003
- end
1004
-
1005
- stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
1006
- git_checkout_version('hal9000', @projects["hal9000"]['revision'])
1007
- stack_body = File.read('projects/hal9000/deploy/task.yml')
1008
- parameters = [
1009
- {
1010
- parameter_key: "Environment",
1011
- parameter_value: "qa"
1012
- },
1013
- {
1014
- parameter_key: "ReleaseVersion",
1015
- parameter_value: @projects["hal9000"]['revision']
1016
- },
1017
- {
1018
- parameter_key: "ECSClusterName",
1019
- parameter_value: @ecs_cluster_name
1020
- },
1021
- {
1022
- parameter_key: "TaskDesiredCount",
1023
- parameter_value: "1"
1024
- },
1025
- {
1026
- parameter_key: "HostnamePattern",
1027
- parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1028
- },
1029
- {
1030
- parameter_key: "HostnamePatternPriority",
1031
- parameter_value: hostname_pattern_priority
1032
- }
1033
- ]
1034
- if stack_exists?(stack_name_hal9000)
1035
- cur_version = get_currently_deployed_version(stack_name_hal9000)
1036
- update_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hal9000"]['revision'])
1037
- else
1038
- create_stack(stack_name_hal9000, stack_body, parameters, tags, @cf_role)
1039
- end
1040
-
1041
- stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
1042
- git_checkout_version('fidaty', @projects["fidaty"]['revision'])
1043
- stack_body = File.read('projects/fidaty/deploy/task.yml')
1044
- parameters = [
1045
- {
1046
- parameter_key: "Environment",
1047
- parameter_value: "qa"
1048
- },
1049
- {
1050
- parameter_key: "ReleaseVersion",
1051
- parameter_value: "#{@projects["fidaty"]['revision']}"
1052
- },
1053
- {
1054
- parameter_key: "ECSClusterName",
1055
- parameter_value: @ecs_cluster_name
1056
- },
1057
- {
1058
- parameter_key: "TaskDesiredCount",
1059
- parameter_value: "1"
1060
- },
1061
- {
1062
- parameter_key: "HostnamePattern",
1063
- parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1064
- },
1065
- {
1066
- parameter_key: "HostnamePatternPriority",
1067
- parameter_value: hostname_pattern_priority
1068
- },
1069
- {
1070
- parameter_key: "PeanoHost",
1071
- parameter_value: get_route53_hostname("ecs-task-peano-qa-notneeded")
1072
- }
1073
- ]
1074
- if stack_exists?(stack_name_fidaty)
1075
- cur_version = get_currently_deployed_version(stack_name_fidaty)
1076
- update_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["fidaty"]['revision'])
1077
- else
1078
- create_stack(stack_name_fidaty, stack_body, parameters, tags, @cf_role)
1079
- end
1080
-
1081
- stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
1082
- git_checkout_version('peano', @projects["peano"]['revision'])
1083
- stack_body = File.read('projects/peano/deploy/task.yml')
1084
- parameters = [
1085
- {
1086
- parameter_key: "Environment",
1087
- parameter_value: "qa"
1088
- },
1089
- {
1090
- parameter_key: "ReleaseVersion",
1091
- parameter_value: "#{@projects['peano']['revision']}"
1092
- },
1093
- {
1094
- parameter_key: "ECSClusterName",
1095
- parameter_value: @ecs_cluster_name
1096
- },
1097
- {
1098
- parameter_key: "TaskDesiredCount",
1099
- parameter_value: "1"
1100
- },
1101
- {
1102
- parameter_key: "HostnamePattern",
1103
- parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
1104
- },
1105
- {
1106
- parameter_key: "HostnamePatternPriority",
1107
- parameter_value: hostname_pattern_priority
1108
- },
1109
- {
1110
- parameter_key: "WebHost",
1111
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1112
- },
1113
- {
1114
- parameter_key: "AssangeHost",
1115
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1116
- }
1117
- ]
1118
- if stack_exists?(stack_name_peano)
1119
- cur_version = get_currently_deployed_version(stack_name_peano)
1120
- update_stack(stack_name_peano, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["peano"]['revision'])
1121
- else
1122
- create_stack(stack_name_peano, stack_body, parameters, tags, @cf_role)
1123
- end
1124
-
1125
- stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
1126
- git_checkout_version('rogoreport', @projects["rogoreport"]['revision'])
1127
- stack_body = IO.read('projects/rogoreport/deploy/task.yml')
1128
- parameters = [
1129
- {
1130
- parameter_key: "Environment",
1131
- parameter_value: "qa"
1132
- },
1133
- {
1134
- parameter_key: "ReleaseVersion",
1135
- parameter_value: "#{@projects["rogoreport"]['revision']}"
1136
- },
1137
- {
1138
- parameter_key: "ReleaseName",
1139
- parameter_value: "rogoreport"
1140
- },
1141
- {
1142
- parameter_key: "ECSClusterName",
1143
- parameter_value: @ecs_cluster_name
1144
- }
1145
- ]
1146
- if stack_exists?(stack_name_rogoreport)
1147
- cur_version = get_currently_deployed_version(stack_name_rogoreport)
1148
- update_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rogoreport"]['revision'])
1149
- else
1150
- create_stack(stack_name_rogoreport, stack_body, parameters, tags, @cf_role)
1151
- end
1152
-
1153
- stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
1154
- git_checkout_version('assange', @projects["assange"]['revision'])
1155
- stack_body = IO.read('projects/assange/deploy/task.yml')
1156
- parameters = [
1157
- {
1158
- parameter_key: "Environment",
1159
- parameter_value: "qa"
1160
- },
1161
- {
1162
- parameter_key: "ReleaseVersion",
1163
- parameter_value: "#{@projects["assange"]['revision']}"
1164
- },
1165
- {
1166
- parameter_key: "ECSClusterName",
1167
- parameter_value: @ecs_cluster_name
1168
- },
1169
- {
1170
- parameter_key: "TaskDesiredCount",
1171
- parameter_value: "1"
1172
- },
1173
- {
1174
- parameter_key: "ALBShortName",
1175
- parameter_value: "assange-qa-#{deploy_id}"[0..27]
1176
- },
1177
- {
1178
- parameter_key: "HostnamePattern",
1179
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1180
- },
1181
- {
1182
- parameter_key: "HostnamePatternPriority",
1183
- parameter_value: (hostname_pattern_priority.to_i + 20).to_s
1184
- },
1185
- {
1186
- parameter_key: "EnvHash",
1187
- parameter_value: deploy_id
1188
- },
1189
- {
1190
- parameter_key: "WebHost",
1191
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1192
- },
1193
- {
1194
- parameter_key: "AssangeHost",
1195
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1196
- }
1197
- ]
1198
- if stack_exists?(stack_name_assange)
1199
- cur_version = get_currently_deployed_version(stack_name_assange)
1200
- update_stack(stack_name_assange, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["assange"]['revision'])
1201
- else
1202
- create_stack(stack_name_assange, stack_body, parameters, tags, @cf_role)
1203
- end
1204
-
1205
- stack_name_leftorium = "ecs-task-leftorium-qa-#{deploy_id}"
1206
- git_checkout_version('leftorium', @projects["leftorium"]['revision'])
1207
- stack_body = File.read('projects/leftorium/deploy/task.yml')
1208
- parameters = [
1209
- {
1210
- parameter_key: "Environment",
1211
- parameter_value: "qa"
1212
- },
1213
- {
1214
- parameter_key: "ReleaseVersion",
1215
- parameter_value: "#{@projects["leftorium"]['revision']}"
1216
- },
1217
- {
1218
- parameter_key: "ECSClusterName",
1219
- parameter_value: @ecs_cluster_name
1220
- },
1221
- {
1222
- parameter_key: "TaskDesiredCount",
1223
- parameter_value: "1"
1224
- },
1225
- {
1226
- parameter_key: "HostnamePattern",
1227
- parameter_value: "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1228
- },
1229
- {
1230
- parameter_key: "HostnamePatternPriority",
1231
- parameter_value: hostname_pattern_priority
1232
- }
1233
- ]
1234
- if stack_exists?(stack_name_leftorium)
1235
- cur_version = get_currently_deployed_version(stack_name_leftorium)
1236
- update_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["leftorium"]['revision'])
1237
- else
1238
- create_stack(stack_name_leftorium, stack_body, parameters, tags, @cf_role)
1239
- end
1240
-
1241
- stack_name_rachele = "ecs-task-rachele-qa-#{deploy_id}"
1242
- git_checkout_version('rachele', @projects["rachele"]['revision'])
1243
- stack_body = File.read('projects/rachele/deploy/task.yml')
1244
- parameters = [
1245
- {
1246
- parameter_key: "Environment",
1247
- parameter_value: "qa"
1248
- },
1249
- {
1250
- parameter_key: "ReleaseVersion",
1251
- parameter_value: "#{@projects["rachele"]['revision']}"
1252
- },
1253
- {
1254
- parameter_key: "ECSClusterName",
1255
- parameter_value: @ecs_cluster_name
1256
- },
1257
- {
1258
- parameter_key: "TaskDesiredCount",
1259
- parameter_value: "1"
1260
- },
1261
- {
1262
- parameter_key: "WebHost",
1263
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1264
- },
1265
- {
1266
- parameter_key: "HostnamePattern",
1267
- parameter_value: "rachele-#{@dns_record_identifier}.qa.colaster.com"
1268
- },
1269
- {
1270
- parameter_key: "HostnamePatternPriority",
1271
- parameter_value: hostname_pattern_priority
1272
- }
1273
- ]
1274
- if stack_exists?(stack_name_rachele)
1275
- cur_version = get_currently_deployed_version(stack_name_rachele)
1276
- unless cur_version.include?(@projects["rachele"]['revision'])
1277
- delete_stack(stack_name_rachele)
1278
- wait_for_stack_removal(stack_name_rachele)
1279
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1280
- end
1281
- else
1282
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
1283
- end
1284
-
1285
- stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
1286
- git_checkout_version('borat', @projects["borat"]['revision'])
1287
- stack_body = IO.read('projects/borat/deploy/task.yml')
1288
- parameters = [
1289
- {
1290
- parameter_key: "Environment",
1291
- parameter_value: "qa"
1292
- },
1293
- {
1294
- parameter_key: "ReleaseVersion",
1295
- parameter_value: "#{@projects["borat"]['revision']}"
1296
- },
1297
- {
1298
- parameter_key: "ECSClusterName",
1299
- parameter_value: @ecs_cluster_name
1300
- },
1301
- {
1302
- parameter_key: "TaskDesiredCount",
1303
- parameter_value: "1"
1304
- },
1305
- {
1306
- parameter_key: "ALBShortName",
1307
- parameter_value: "borat-qa-#{deploy_id}"[0..27]
1308
- },
1309
- {
1310
- parameter_key: "HostnamePattern",
1311
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1312
- },
1313
- {
1314
- parameter_key: "HostnamePatternPriority",
1315
- parameter_value: (hostname_pattern_priority.to_i + 30).to_s
1316
- },
1317
- {
1318
- parameter_key: "EnvHash",
1319
- parameter_value: deploy_id
1320
- },
1321
- {
1322
- parameter_key: "WsEndpoint",
1323
- parameter_value: "wss://backoffice-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1324
- },
1325
- {
1326
- parameter_key: "GraphqlEndpoint",
1327
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
1328
- },
1329
- {
1330
- parameter_key: "AuthEndpoint",
1331
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
1332
- },
1333
- {
1334
- parameter_key: "FrontendEndpoint",
1335
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1336
- }
1337
- ]
1338
- if stack_exists?(stack_name_borat)
1339
- cur_version = get_currently_deployed_version(stack_name_borat)
1340
- update_stack(stack_name_borat, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["borat"]['revision'])
1341
- else
1342
- create_stack(stack_name_borat, stack_body, parameters, tags, @cf_role)
1343
- end
1344
-
1345
- if deploy_crash?
1346
- git_checkout_version('crash', @projects['crash']['revision'])
1347
- stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1348
- stack_body = IO.read('projects/crash/deploy/task.yml')
1349
- parameters = [
1350
- {
1351
- parameter_key: 'Environment',
1352
- parameter_value: 'qa'
1353
- },
1354
- {
1355
- parameter_key: 'ReleaseVersion',
1356
- parameter_value: "#{@projects['crash']['revision']}"
1357
- },
1358
- {
1359
- parameter_key: 'TaskDesiredCount',
1360
- parameter_value: '1'
1361
- },
1362
- {
1363
- parameter_key: 'ECSClusterName',
1364
- parameter_value: @ecs_cluster_name
1365
- },
1366
- {
1367
- parameter_key: 'ALBShortName',
1368
- parameter_value: "crash-qa-#{deploy_id}"[0..27]
1369
- },
1370
- {
1371
- parameter_key: 'HostnamePattern',
1372
- parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1373
- },
1374
- {
1375
- parameter_key: 'HostnamePatternPriority',
1376
- parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1377
- },
1378
- {
1379
- parameter_key: "EnvHash",
1380
- parameter_value: deploy_id
1381
- },
1382
- {
1383
- parameter_key: "WsEndpoint",
1384
- parameter_value: "wss://crash-#{@dns_record_identifier}.qa.colaster.com/socket/websocket?vsn=1.0.0"
1385
- },
1386
- {
1387
- parameter_key: "GraphqlEndpoint",
1388
- parameter_value: "https://crash-#{@dns_record_identifier}.qa.colaster.com/graphql"
1389
- },
1390
- {
1391
- parameter_key: "AuthDomain",
1392
- parameter_value: "https://www-#{@dns_record_identifier}.qa.colaster.com/"
1393
- },
1394
- ]
1395
- if stack_exists?(stack_name_crash)
1396
- cur_version = get_currently_deployed_version(stack_name_crash)
1397
- update_stack(stack_name_crash, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["crash"]['revision'])
1398
- else
1399
- create_stack(stack_name_crash, stack_body, parameters, tags, @cf_role)
1400
- end
1401
- end
1402
-
1403
- if deploy_starsky_hutch?
1404
- stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
1405
- git_checkout_version('starsky', @projects["starsky"]['revision'])
1406
- stack_body = IO.read('projects/starsky/deploy/task.yml')
1407
- parameters = [
1408
- {
1409
- parameter_key: "Environment",
1410
- parameter_value: "qa"
1411
- },
1412
- {
1413
- parameter_key: "ReleaseVersion",
1414
- parameter_value: "#{@projects["starsky"]['revision']}"
1415
- },
1416
- {
1417
- parameter_key: "TaskDesiredCount",
1418
- parameter_value: "1"
1419
- },
1420
- {
1421
- parameter_key: "ECSClusterName",
1422
- parameter_value: @ecs_cluster_name
1423
- },
1424
- {
1425
- parameter_key: "ALBShortName",
1426
- parameter_value: "starsky-qa-#{deploy_id}"[0..27]
1427
- },
1428
- {
1429
- parameter_key: "EnvHash",
1430
- parameter_value: deploy_id
1431
- },
1432
- {
1433
- parameter_key: "HostnamePattern",
1434
- parameter_value: "starsky-#{@dns_record_identifier}.qa.colaster.com"
1435
- },
1436
- {
1437
- parameter_key: "HostnamePatternPriority",
1438
- parameter_value: (hostname_pattern_priority.to_i + 74).to_s
1439
- }
1440
- ]
1441
- if stack_exists?(stack_name_starsky)
1442
- cur_version = get_currently_deployed_version(stack_name_starsky)
1443
- update_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["starsky"]['revision'])
1444
- else
1445
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
1446
- end
1447
- end
1448
-
1449
- stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1450
- git_checkout_version('activia', @projects["activia"]['revision'])
1451
- stack_body = File.read('projects/activia/deploy/task.yml')
1452
- parameters = [
1453
- {
1454
- parameter_key: "Environment",
1455
- parameter_value: "qa"
1456
- },
1457
- {
1458
- parameter_key: "ReleaseVersion",
1459
- parameter_value: "#{@projects["activia"]['revision']}"
1460
- },
1461
- {
1462
- parameter_key: "ECSClusterName",
1463
- parameter_value: @ecs_cluster_name
1464
- },
1465
- {
1466
- parameter_key: "TaskDesiredCount",
1467
- parameter_value: "1"
1468
- },
1469
- {
1470
- parameter_key: "HostnamePattern",
1471
- parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1472
- },
1473
- {
1474
- parameter_key: "HostnamePatternPriority",
1475
- parameter_value: hostname_pattern_priority
1476
- },
1477
- {
1478
- parameter_key: "WebHost",
1479
- parameter_value: get_route53_hostname('ecs-task-web-qa-notneeded')
1480
- },
1481
- {
1482
- parameter_key: "PeanoHost",
1483
- parameter_value: get_route53_hostname('ecs-task-peano-qa-notneeded')
1484
- }
1485
- ]
1486
- if stack_exists?(stack_name_activia)
1487
- cur_version = get_currently_deployed_version(stack_name_activia)
1488
- update_stack(stack_name_activia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["activia"]['revision'])
1489
- else
1490
- create_stack(stack_name_activia, stack_body, parameters, tags, @cf_role)
1491
- end
1492
-
1493
- # Waiting for prima healtcheck dependencies
1494
- wait_for_stack_ready(stack_name_skynet) unless stack_ready?(stack_name_skynet)
1495
- wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1496
- wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1497
- wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1498
- wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1499
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1500
- wait_for_stack_ready(stack_name_leftorium) unless stack_ready?(stack_name_leftorium)
1501
- wait_for_stack_ready(stack_name_rachele) unless stack_ready?(stack_name_rachele)
1502
-
1503
- stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1504
- git_checkout_version('prima', @projects["prima"]['revision'])
1505
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1506
- parameters = [
1507
- {
1508
- parameter_key: "Environment",
1509
- parameter_value: "qa"
1510
- },
1511
- {
1512
- parameter_key: "ReleaseVersion",
1513
- parameter_value: "#{@projects["prima"]['revision']}"
1514
- },
1515
- {
1516
- parameter_key: "TaskDesiredCount",
1517
- parameter_value: "1"
1518
- },
1519
- {
1520
- parameter_key: "ECSClusterName",
1521
- parameter_value: @ecs_cluster_name
1522
- },
1523
- {
1524
- parameter_key: "ALBShortName",
1525
- parameter_value: "web-qa-#{deploy_id}"[0..27]
1526
- },
1527
- {
1528
- parameter_key: "WebQaBaseHostname",
1529
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1530
- },
1531
- {
1532
- parameter_key: "HostnamePattern",
1533
- parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
1534
- },
1535
- {
1536
- parameter_key: "HostnamePatternPriority",
1537
- parameter_value: hostname_pattern_priority
1538
- },
1539
- {
1540
- parameter_key: "HostnamePatternAggregatorPriority",
1541
- parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1542
- },
1543
- {
1544
- parameter_key: "EnvHash",
1545
- parameter_value: deploy_id
1546
- },
1547
- {
1548
- parameter_key: "AssangeHostname",
1549
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1550
- },
1551
- {
1552
- parameter_key: "BackofficeHostname",
1553
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1554
- },
1555
- {
1556
- parameter_key: "WebHostname",
1557
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1558
- },
1559
- {
1560
- parameter_key: "FePrimaDomain",
1561
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1562
- },
1563
- {
1564
- parameter_key: "HostnamePattern",
1565
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1566
- }
1567
- ]
1568
- if stack_exists?(stack_name_web)
1569
- cur_version = get_currently_deployed_version(stack_name_web)
1570
- update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1571
- else
1572
- create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
1573
- end
1574
-
1575
- stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1576
- git_checkout_version('prima', @projects["prima"]['revision'])
1577
- stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1578
- parameters = [
1579
- {
1580
- parameter_key: "Environment",
1581
- parameter_value: "qa"
1582
- },
1583
- {
1584
- parameter_key: "ReleaseVersion",
1585
- parameter_value: "#{@projects["prima"]['revision']}"
1586
- },
1587
- {
1588
- parameter_key: "ECSClusterName",
1589
- parameter_value: @ecs_cluster_name
1590
- },
1591
- {
1592
- parameter_key: "NginxHttpHost",
1593
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1594
- },
1595
- {
1596
- parameter_key: "AssangeHostname",
1597
- parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
1598
- },
1599
- {
1600
- parameter_key: "BackofficeHostname",
1601
- parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1602
- },
1603
- {
1604
- parameter_key: "WebHostname",
1605
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1606
- },
1607
- {
1608
- parameter_key: "FePrimaDomain",
1609
- parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
1610
- },
1611
- {
1612
- parameter_key: "HostnamePattern",
1613
- parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
1614
- }
1615
- ]
1616
- if stack_exists?(stack_name_consumer)
1617
- cur_version = get_currently_deployed_version(stack_name_consumer)
1618
- update_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
1619
- else
1620
- create_stack(stack_name_consumer, stack_body, parameters, tags, @cf_role)
1621
- end
1622
-
1623
- stack_name_roger = "ecs-task-roger-qa-#{deploy_id}"
1624
- git_checkout_version('roger', @projects["roger"]['revision'])
1625
- stack_body = File.read('projects/roger/deploy/task.yml')
1626
- parameters = [
1627
- {
1628
- parameter_key: "Environment",
1629
- parameter_value: "qa"
1630
- },
1631
- {
1632
- parameter_key: "ReleaseVersion",
1633
- parameter_value: @projects["roger"]['revision']
1634
- },
1635
- {
1636
- parameter_key: "TaskDesiredCount",
1637
- parameter_value: "1"
1638
- },
1639
- {
1640
- parameter_key: "ECSClusterName",
1641
- parameter_value: @ecs_cluster_name
1642
- },
1643
- {
1644
- parameter_key: "HostnamePattern",
1645
- parameter_value: "roger-#{@dns_record_identifier}.qa.colaster.com"
1646
- },
1647
- {
1648
- parameter_key: "HostnamePatternPriority",
1649
- parameter_value: hostname_pattern_priority
1650
- }
1651
- ]
1652
- if stack_exists?(stack_name_roger)
1653
- cur_version = get_currently_deployed_version(stack_name_roger)
1654
- update_stack(stack_name_roger, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["roger"]['revision'])
1655
- else
1656
- create_stack(stack_name_roger, stack_body, parameters, tags, @cf_role)
1657
- end
1658
-
1659
-
1660
- if deploy_starsky_hutch?
1661
- wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
1662
-
1663
- stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
1664
- git_checkout_version('hutch', @projects["hutch"]['revision'])
1665
- stack_body = File.read('projects/hutch/deploy/task.yml')
1666
- parameters = [
1667
- {
1668
- parameter_key: "Environment",
1669
- parameter_value: "qa"
1670
- },
1671
- {
1672
- parameter_key: "ReleaseVersion",
1673
- parameter_value: "#{@projects["hutch"]['revision']}"
1674
- },
1675
- {
1676
- parameter_key: "ALBShortName",
1677
- parameter_value: "hutch-qa-#{deploy_id}"[0..27]
1678
- },
1679
- {
1680
- parameter_key: "ECSClusterName",
1681
- parameter_value: @ecs_cluster_name
1682
- },
1683
- {
1684
- parameter_key: "EnvHash",
1685
- parameter_value: deploy_id
1686
- },
1687
- {
1688
- parameter_key: "HostnamePattern",
1689
- parameter_value: "hutch-#{@dns_record_identifier}.qa.colaster.com"
1690
- },
1691
- {
1692
- parameter_key: "HostnamePatternPriority",
1693
- parameter_value: (hostname_pattern_priority.to_i + 254).to_s
1694
- },
1695
- {
1696
- parameter_key: "ApiUrl",
1697
- parameter_value: "https://#{get_route53_hostname('maia-intermediari')}"
1698
- }
1699
- ]
1700
- if stack_exists?(stack_name_hutch)
1701
- cur_version = get_currently_deployed_version(stack_name_hutch)
1702
- update_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["hutch"]['revision'])
1703
- else
1704
- create_stack(stack_name_hutch, stack_body, parameters, tags, @cf_role)
1705
- end
1706
- end
1707
-
1708
- stack_name_maia = "ecs-task-maia-qa-#{deploy_id}"
1709
- git_checkout_version('maia', @projects["maia"]['revision'])
1710
- stack_body = File.read('projects/maia/deploy/task.yml')
1711
- parameters = [
1712
- {
1713
- parameter_key: "Environment",
1714
- parameter_value: "qa"
1715
- },
1716
- {
1717
- parameter_key: "ReleaseVersion",
1718
- parameter_value: "#{@projects["maia"]['revision']}"
1719
- },
1720
- {
1721
- parameter_key: "ALBShortName",
1722
- parameter_value: "maia-qa-#{deploy_id}"[0..15]
1723
- },
1724
- {
1725
- parameter_key: "ECSClusterName",
1726
- parameter_value: @ecs_cluster_name
1727
- },
1728
- {
1729
- parameter_key: "EnvHash",
1730
- parameter_value: deploy_id
1731
- },
1732
- {
1733
- parameter_key: "HostnamePatternPublic",
1734
- parameter_value: "api*-#{@dns_record_identifier}.qa.colaster.com"
1735
- },
1736
- {
1737
- parameter_key: "HostnamePatternPriority",
1738
- parameter_value: (hostname_pattern_priority.to_i + 128).to_s
1739
- },
1740
- {
1741
- parameter_key: "ProxyHostnameIntermediari",
1742
- parameter_value: "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1743
- },
1744
- {
1745
- parameter_key: "ProxyHostnameApp",
1746
- parameter_value: "api-#{@dns_record_identifier}.qa.colaster.com"
1747
- }
1748
- ]
1749
- if stack_exists?(stack_name_maia)
1750
- cur_version = get_currently_deployed_version(stack_name_maia)
1751
- update_stack(stack_name_maia, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["maia"]['revision'])
1752
- else
1753
- create_stack(stack_name_maia, stack_body, parameters, tags, @cf_role)
1754
- end
1755
-
1756
- wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1757
- wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1758
- wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1759
- wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1760
- wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1761
- wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1762
- wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1763
- wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1764
- wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
1765
- wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
1766
- wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
1767
-
1768
-
1769
- update_service_defaults(stack_name_web)
1770
- update_service_defaults(stack_name_consumer)
1771
- update_service_defaults(stack_name_urania)
1772
- update_service_defaults(stack_name_ermes)
1773
- update_service_defaults(stack_name_bburago)
1774
- update_service_defaults(stack_name_hal9000)
1775
- update_service_defaults(stack_name_fidaty)
1776
- update_service_defaults(stack_name_peano)
1777
- update_service_defaults(stack_name_rogoreport)
1778
- update_service_defaults(stack_name_assange)
1779
- update_service_defaults(stack_name_borat)
1780
- update_service_defaults(stack_name_activia)
1781
- update_service_defaults(stack_name_skynet)
1782
- update_service_defaults(stack_name_leftorium)
1783
- update_service_defaults(stack_name_rachele)
1784
- update_service_defaults(stack_name_maia)
1785
- update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
1786
- update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
1787
- update_service_defaults(stack_name_crash) unless !deploy_crash?
1788
-
1789
- activia_hostname = get_route53_hostname("activia")
1790
- assange_hostname = get_route53_hostname("assange")
1791
- bburago_hostname = get_route53_hostname("bburago")
1792
- borat_hostname = get_route53_hostname("borat")
1793
- ermes_hostname = get_route53_hostname("ermes")
1794
- fidaty_hostname = get_route53_hostname("fidaty")
1795
- hal9000_hostname = get_route53_hostname("hal9000")
1796
- prima_hostname = get_route53_hostname("web")
1797
- peano_hostname = get_route53_hostname("peano")
1798
- skynet_hostname = get_route53_hostname("skynet")
1799
- urania_hostname = get_route53_hostname("urania")
1800
- roger_hostname = get_route53_hostname("roger")
1801
- leftorium_hostname = get_route53_hostname("leftorium")
1802
- rachele_hostname = get_route53_hostname("rachele")
1803
- maia_app_hostname = get_route53_hostname("maia-app")
1804
- maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
1805
- crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
1806
- starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
1807
- hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
1808
-
1809
-
1810
- # launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
1811
-
1812
- projects_text = "
1813
- > Prima url: https://#{prima_hostname}
1814
- > Backoffice (Borat) url: https://#{borat_hostname}
1815
- > Urania url: http://#{urania_hostname}:81
1816
- > Bburago url: http://#{bburago_hostname}:83
1817
- > Ermes url: http://#{ermes_hostname}:10002
1818
- > Hal9000 url: http://#{hal9000_hostname}:10031
1819
- > Fidaty url: http://#{fidaty_hostname}:10021
1820
- > Peano url: http://#{peano_hostname}:10039
1821
- > Assange url: https://#{assange_hostname}
1822
- > Activia url: http://#{activia_hostname}:10041
1823
- > Skynet url: http://#{skynet_hostname}:8050
1824
- > Roger url: http://#{roger_hostname}:10051
1825
- > Leftorium url: http://#{leftorium_hostname}:10061
1826
- > Rachele url: http://#{rachele_hostname}:10040
1827
- > Maia App url: https://#{maia_app_hostname}
1828
- > Maia Intermediari url: https://#{maia_intermediari_hostname}"
1829
- projects_text.concat "
1830
- > Crash url: https://#{crash_hostname}" if deploy_crash?
1831
- projects_text.concat "
1832
- > Starsky url: https://#{starsky_hostname}
1833
- > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
1834
- projects_text.concat "
1835
- > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1836
- > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1837
- > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1838
- > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1839
- output projects_text.cyan
1840
- output "Deploy effettuato, everything is awesome!\n".green
1841
-
1842
- qainit_write_output(projects_text, 'Indirizzi scritti su ')
1843
- end
1844
-
1845
- def get_route53_hostname(project)
1846
- case
1847
- when project.include?('web')
1848
- host = "www-#{@dns_record_identifier}.qa.colaster.com"
1849
- when project.include?('urania')
1850
- host = "urania-#{@dns_record_identifier}.qa.colaster.com"
1851
- when project.include?('bburago')
1852
- host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
1853
- when project.include?('hal9000')
1854
- host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1855
- when project.include?('fidaty')
1856
- host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1857
- when project.include?('peano')
1858
- host = "peano-#{@dns_record_identifier}.qa.colaster.com"
1859
- when project.include?('assange')
1860
- host = "assange-#{@dns_record_identifier}.qa.colaster.com"
1861
- when project.include?('borat')
1862
- host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1863
- when project.include?('crash')
1864
- host = "crash-#{@dns_record_identifier}.qa.colaster.com"
1865
- when project.include?('ermes')
1866
- host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
1867
- when project.include?('activia')
1868
- host = "activia-#{@dns_record_identifier}.qa.colaster.com"
1869
- when project.include?('skynet')
1870
- host = "skynet-#{@dns_record_identifier}.qa.colaster.com"
1871
- when project.include?('roger')
1872
- host = "roger-#{@dns_record_identifier}.qa.colaster.com"
1873
- when project.include?('leftorium')
1874
- host = "leftorium-#{@dns_record_identifier}.qa.colaster.com"
1875
- when project.include?('rachele')
1876
- host = "rachele-#{@dns_record_identifier}.qa.colaster.com"
1877
- when project.include?('starsky')
1878
- host = "starsky-#{@dns_record_identifier}.qa.colaster.com"
1879
- when project.include?('hutch')
1880
- host = "hutch-#{@dns_record_identifier}.qa.colaster.com"
1881
- when project.include?('maia-app')
1882
- host = "api-#{@dns_record_identifier}.qa.colaster.com"
1883
- when project.include?('maia-intermediari')
1884
- host = "api-intermediari-#{@dns_record_identifier}.qa.colaster.com"
1885
- end
1886
- host
1887
- end
1888
-
1889
- def ec2_ip_address(asg_stack_name)
1890
- resp = describe_stack_resource(asg_stack_name, 'ECSAutoScalingGroup')
1891
- resp = describe_auto_scaling_groups([resp.stack_resource_detail.physical_resource_id], 1)
1892
- instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
1893
- resp = describe_instances([instance_id])
1894
- resp.reservations[0].instances[0].private_ip_address
1895
- end
1896
-
1897
611
  def get_alb_host(stack_name)
1898
612
  case
1899
613
  when stack_name.include?('web')
@@ -1942,664 +656,6 @@ class Release
1942
656
  resp.load_balancers[0].dns_name
1943
657
  end
1944
658
 
1945
- def update_service_defaults(stack_name)
1946
- case
1947
- when stack_name.include?('web')
1948
- logical_resource_id = 'ECSServiceWebQA'
1949
- when stack_name.include?('consumer')
1950
- logical_resource_id = 'ECSServiceConsumerQa'
1951
- when stack_name.include?('urania')
1952
- logical_resource_id = 'ECSServiceUraniaQA'
1953
- when stack_name.include?('backoffice')
1954
- logical_resource_id = 'ECSServiceBackoffice'
1955
- when stack_name.include?('ermes')
1956
- logical_resource_id = 'ECSServiceErmesQA'
1957
- when stack_name.include?('bburago')
1958
- logical_resource_id = 'ECSServiceBburagoQA'
1959
- when stack_name.include?('hal9000')
1960
- logical_resource_id = 'ECSServiceHal9000QA'
1961
- when stack_name.include?('fidaty')
1962
- logical_resource_id = 'ECSServiceFidatyQA'
1963
- when stack_name.include?('skynet')
1964
- logical_resource_id = 'ECSServiceSkynetQA'
1965
- when stack_name.include?('roger')
1966
- logical_resource_id = 'ECSServiceRogerQA'
1967
- when stack_name.include?('activia')
1968
- logical_resource_id = 'ECSServiceActiviaQA'
1969
- when stack_name.include?('peano')
1970
- logical_resource_id = 'ECSServicePeanoQA'
1971
- when stack_name.include?('rogoreport')
1972
- logical_resource_id = 'ECSServiceRogoreport'
1973
- when stack_name.include?('assange')
1974
- logical_resource_id = 'ECSServiceAssangeQA'
1975
- when stack_name.include?('borat')
1976
- logical_resource_id = 'ECSServiceBorat'
1977
- when stack_name.include?('leftorium')
1978
- logical_resource_id = 'ECSServiceLeftoriumQA'
1979
- when stack_name.include?('rachele')
1980
- logical_resource_id = 'ECSServiceRacheleQA'
1981
- when stack_name.include?('crash')
1982
- logical_resource_id = 'ECSServiceCrashQA'
1983
- when stack_name.include?('starsky')
1984
- logical_resource_id = 'ECSServiceStarskyQA'
1985
- when stack_name.include?('hutch')
1986
- logical_resource_id = 'ECSServiceHutch'
1987
- when stack_name.include?('maia')
1988
- logical_resource_id = 'ECSServiceMaia'
1989
- else
1990
- raise "Service name non gestito per lo stack #{stack_name}"
1991
- end
1992
- resp = describe_stack_resource(stack_name, logical_resource_id)
1993
- update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
1994
- end
1995
-
1996
- def create_activia_artifact(revision)
1997
- output "Preparo l'artifact activia .zip\n".yellow
1998
-
1999
- git_checkout_version('activia', revision)
2000
-
2001
- Dir.chdir 'projects/activia'
2002
-
2003
- decrypt_secrets()
2004
-
2005
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2006
- exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
2007
-
2008
- if File.exists? 'deploy/build_qa_artifact'
2009
- execute_command "deploy/build_qa_artifact"
2010
- else
2011
- [
2012
- "docker-compose build web",
2013
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2014
- '-c' 'mix local.hex --force && mix hex.info && \
2015
- mix deps.get && mix compile && mix deps.compile && \
2016
- cd assets && \
2017
- rm -rf node_modules && \
2018
- yarn --cache-folder ~/.cache/yarn && \
2019
- sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
2020
- cd .. && \
2021
- mix phx.digest && \
2022
- rm -rf _build/qa/rel/ && \
2023
- mix release --env=qa'"
2024
- ].each do |cmd|
2025
- execute_command cmd
2026
- end
2027
- end
2028
-
2029
- cleanup_containers
2030
-
2031
- artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
2032
-
2033
- upload_artifact(artifact_path, "microservices/activia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2034
-
2035
- Dir.chdir '../../'
2036
- end
2037
-
2038
- def create_assange_artifact(revision)
2039
- output "Preparo l'artifact assange .zip\n".yellow
2040
-
2041
- git_checkout_version('assange', revision)
2042
-
2043
- Dir.chdir 'projects/assange'
2044
-
2045
- decrypt_secrets()
2046
-
2047
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2048
- exec_step 'prepare-docker-compose --directory assange && cp docker-compose-qainit.yml docker-compose.yml'
2049
- exec_step 'deploy/build_qa_artifact'
2050
-
2051
- cleanup_containers
2052
-
2053
- artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
2054
- upload_artifact(artifact_path, "microservices/assange/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2055
-
2056
- Dir.chdir '../../'
2057
- end
2058
-
2059
- def create_bburago_artifact(revision)
2060
- output "Preparo l'artifact bburago .zip\n".yellow
2061
-
2062
- git_checkout_version('bburago', revision)
2063
-
2064
- Dir.chdir 'projects/bburago'
2065
-
2066
- decrypt_secrets()
2067
-
2068
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2069
- exec_step 'prepare-docker-compose --directory bburago && cp docker-compose-qainit.yml docker-compose.yml'
2070
- [
2071
- "docker-compose build web",
2072
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint deploy/create_qa_artifact web"
2073
- ].each do |cmd|
2074
- execute_command cmd
2075
- end
2076
-
2077
- cleanup_containers
2078
-
2079
- artifact_path = Dir.glob('_build/qa/rel/bburago/releases/*/bburago.tar.gz').first
2080
- upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2081
-
2082
- Dir.chdir '../../'
2083
- end
2084
-
2085
- def create_borat_artifact(revision)
2086
- output "Preparo l'artifact borat .zip\n".yellow
2087
-
2088
- git_checkout_version('borat', revision)
2089
-
2090
- Dir.chdir 'projects/borat'
2091
-
2092
- decrypt_secrets()
2093
-
2094
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2095
- exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
2096
-
2097
- if File.exists? 'deploy/build_qa_artifact'
2098
- execute_command "deploy/build_qa_artifact"
2099
- else
2100
- [
2101
- "docker network create borat_network || true",
2102
- "docker-compose build web",
2103
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2104
- '-c' 'mix local.hex --force && mix hex.info && \
2105
- mix deps.get && \
2106
- cd assets && \
2107
- yarn --cache-folder ~/.cache/yarn && \
2108
- sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
2109
- cd ../ && \
2110
- mix phx.digest && \
2111
- mix compile && mix deps.compile && \
2112
- rm -rf _build/qa/rel/ && \
2113
- mix distillery.release --env=qa'"
2114
- ].each do |cmd|
2115
- execute_command cmd
2116
- end
2117
- end
2118
-
2119
- cleanup_containers
2120
-
2121
- artifact_path = Dir.glob("_build/qa/rel/borat/releases/*/borat.tar.gz").first
2122
- upload_artifact(artifact_path, "microservices/borat/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2123
-
2124
- Dir.chdir '../../'
2125
- end
2126
-
2127
- def create_crash_artifact(revision, deploy_id)
2128
- output "Preparo l'artifact crash .zip\n".yellow
2129
-
2130
- git_checkout_version('crash', revision)
2131
-
2132
- Dir.chdir 'projects/crash'
2133
-
2134
- crash_qa_host = get_route53_hostname('ecs-task-crash-qa-notneeded')
2135
-
2136
- decrypt_secrets()
2137
-
2138
- `mv docker-compose-ci.yml docker-compose.yml`
2139
- exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'
2140
-
2141
- execute_command "deploy/build_qa_artifact #{deploy_id}"
2142
-
2143
- cleanup_containers
2144
-
2145
- artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
2146
- upload_artifact(artifact_path, "microservices/crash/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2147
-
2148
- Dir.chdir '../../'
2149
- end
2150
-
2151
- def create_ermes_artifact(revision)
2152
- output "Preparo l'artifact ermes .zip\n".yellow
2153
-
2154
- git_checkout_version('ermes', revision)
2155
-
2156
- Dir.chdir 'projects/ermes'
2157
-
2158
- decrypt_secrets()
2159
-
2160
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2161
- exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'
2162
-
2163
- [
2164
- "if echo `docker network ls` | grep crash_default; \
2165
- then echo 'crash_default network already existing'; \
2166
- else docker network create crash_default; fi",
2167
- 'docker-compose build web'
2168
- ].each do |cmd|
2169
- execute_command cmd
2170
- end
2171
-
2172
- [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2173
- '-c' 'mix local.hex --force && mix hex.info && \
2174
- mix deps.get && mix compile && mix deps.compile && \
2175
- mix phx.digest && \
2176
- MIX_ENV=dev mix compile.sms && \
2177
- MIX_ENV=dev mix compile.html && \
2178
- MIX_ENV=dev mix compile.heml && \
2179
- MIX_ENV=dev mix compile.app_notification && \
2180
- rm -rf _build/qa/rel/ && \
2181
- mix release --env=qa'"
2182
- ].each do |cmd|
2183
- execute_command cmd
2184
- end
2185
-
2186
- cleanup_containers
2187
-
2188
- artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
2189
- upload_artifact(artifact_path, "microservices/ermes/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2190
-
2191
- Dir.chdir '../../'
2192
- end
2193
-
2194
- def create_fidaty_artifact(revision)
2195
- output "Preparo l'artifact fidaty .zip\n".yellow
2196
-
2197
- git_checkout_version('fidaty', revision)
2198
-
2199
- Dir.chdir 'projects/fidaty'
2200
-
2201
- decrypt_secrets()
2202
-
2203
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2204
- exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
2205
- [
2206
- "docker-compose build web",
2207
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2208
- '-c' 'mix local.hex --force && mix hex.info && \
2209
- mix deps.get && mix compile && mix deps.compile && \
2210
- mix phx.digest && \
2211
- rm -rf _build/qa/rel/ && \
2212
- mix release --env=qa'"
2213
- ].each do |cmd|
2214
- execute_command cmd
2215
- end
2216
-
2217
- cleanup_containers
2218
-
2219
- artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
2220
- upload_artifact(artifact_path, "microservices/fidaty/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2221
-
2222
- Dir.chdir '../../'
2223
- end
2224
-
2225
- def create_hal9000_artifact(revision)
2226
- output "Preparo l'artifact hal9000 .zip\n".yellow
2227
-
2228
- git_checkout_version('hal9000', revision)
2229
-
2230
- Dir.chdir 'projects/hal9000'
2231
-
2232
- decrypt_secrets()
2233
-
2234
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2235
- exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
2236
- [
2237
- "docker-compose build web",
2238
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2239
- '-c' 'mix local.hex --force && mix hex.info && \
2240
- mix deps.get && mix compile && mix deps.compile && \
2241
- mix phx.digest assets -o priv/static && \
2242
- rm -rf _build/qa/rel/ && \
2243
- mix release --env=qa'"
2244
- ].each do |cmd|
2245
- execute_command cmd
2246
- end
2247
-
2248
- cleanup_containers
2249
-
2250
- artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
2251
- upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2252
-
2253
- Dir.chdir '../../'
2254
- end
2255
-
2256
- def create_hutch_artifact(revision)
2257
- output "Preparo l'artifact hutch\n".yellow
2258
-
2259
- git_checkout_version('hutch', revision)
2260
-
2261
- Dir.chdir 'projects/hutch'
2262
-
2263
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2264
-
2265
- exec_step 'cp docker-compose-ci.yml docker-compose.yml'
2266
- exec_step 'prepare-docker-compose --directory hutch && cp docker-compose-qainit.yml docker-compose.yml'
2267
-
2268
- execute_command "deploy/build_qa_artifact #{get_route53_hostname("maia-intermediari")}"
2269
-
2270
- cleanup_containers
2271
-
2272
- artifact_path = "./hutch.tar.gz"
2273
- upload_artifact(artifact_path, "microservices/hutch/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2274
-
2275
- Dir.chdir '../../'
2276
- end
2277
-
2278
- def create_leftorium_artifact(revision)
2279
- output "Preparo l'artifact leftorium .zip\n".yellow
2280
-
2281
- git_checkout_version('leftorium', revision)
2282
-
2283
- Dir.chdir 'projects/leftorium'
2284
-
2285
- decrypt_secrets()
2286
-
2287
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2288
- exec_step 'prepare-docker-compose --directory leftorium && cp docker-compose-qainit.yml docker-compose.yml'
2289
- [
2290
- "docker-compose build web",
2291
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2292
- '-c' 'mix local.hex --force && mix hex.info && \
2293
- mix deps.get && mix compile && mix deps.compile && \
2294
- rm -rf _build/qa/rel/ && \
2295
- mix release --env=qa'"
2296
- ].each do |cmd|
2297
- execute_command cmd
2298
- end
2299
-
2300
- cleanup_containers
2301
-
2302
- artifact_path = Dir.glob('_build/qa/rel/leftorium/releases/*/leftorium.tar.gz').first
2303
- upload_artifact(artifact_path, "microservices/leftorium/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2304
-
2305
- Dir.chdir '../../'
2306
- end
2307
-
2308
- def create_maia_artifact(revision)
2309
- output "Preparo l'artifact maia .zip\n".yellow
2310
-
2311
- git_checkout_version('maia', revision)
2312
-
2313
- Dir.chdir 'projects/maia'
2314
-
2315
- decrypt_secrets()
2316
-
2317
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2318
- exec_step 'prepare-docker-compose --directory maia && cp docker-compose-qainit.yml docker-compose.yml'
2319
-
2320
- execute_command 'deploy/build_qa_artifact'
2321
-
2322
- cleanup_containers
2323
-
2324
- artifact_path = Dir.glob('_build/qa/rel/maia/releases/*/maia.tar.gz').first
2325
- upload_artifact(artifact_path, "microservices/maia/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2326
-
2327
- Dir.chdir '../../'
2328
- end
2329
-
2330
- def create_peano_artifact(revision)
2331
- output "Preparo l'artifact peano .zip\n".yellow
2332
-
2333
- git_checkout_version('peano', revision)
2334
-
2335
- Dir.chdir 'projects/peano'
2336
-
2337
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2338
-
2339
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2340
- exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'
2341
-
2342
- execute_command "deploy/build_qa_artifact"
2343
-
2344
- cleanup_containers
2345
-
2346
- artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
2347
- upload_artifact(artifact_path, "microservices/peano/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2348
-
2349
- Dir.chdir '../../'
2350
- end
2351
-
2352
- def create_prima_artifact(revision, branch_name, deploy_id)
2353
- output "Preparo l'artifact prima .zip\n".yellow
2354
-
2355
- git_checkout_version('prima', revision)
2356
-
2357
- Dir.chdir 'projects/prima'
2358
-
2359
- ['vendor'].each do |dir|
2360
- unless File.directory?(dir)
2361
- if File.directory?("../../../prima/#{dir}")
2362
- exec_step "rsync -a ../../../prima/#{dir} ."
2363
- end
2364
- end
2365
- end
2366
-
2367
- exec_step 'mv docker-compose-ci.yml docker-compose.yml'
2368
- exec_step 'prepare-docker-compose --directory prima'
2369
- exec_step 'sed -i "s/\\/home\\/app/\\/root/g" docker-compose.yml'
2370
- `sed -i 's/"@prima-assicurazioni/pyxis-npm": ".*",/"@prima-assicurazioni/pyxis-npm": "#{@pyxis_version}",/' package.json` if deploy_pyxis?
2371
- [
2372
- "bin/qa_build_artifact.sh #{branch_name} #{'update' if @deploy_update}"
2373
- ].each do |cmd|
2374
- execute_command cmd
2375
- end
2376
-
2377
- cleanup_containers
2378
-
2379
- Dir.chdir "../../"
2380
- end
2381
-
2382
- def create_pyxis_artifact(revision, deploy_id)
2383
- if (deploy_pyxis?)
2384
- output "Preparo l'artifact pyxis\n".yellow
2385
-
2386
- git_checkout_version('pyxis-npm', revision)
2387
-
2388
- Dir.chdir 'projects/pyxis-npm'
2389
-
2390
- decrypt_secrets()
2391
-
2392
- exec_step 'mv .fakenpmrc .npmrc'
2393
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2394
- exec_step 'prepare-docker-compose --directory pyxis-npm && cp docker-compose-qainit.yml docker-compose.yml'
2395
- exec_step 'docker-compose build web'
2396
-
2397
- exec_step "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2398
- '-c' 'npm view prima-assicurazioni/pyxis-npm versions --json > versions.json'" # published versions can only be read from inside the container, so write them to a file and read it right back
2399
- published_versions = `cat versions.json`
2400
- qa_versions = JSON.parse(published_versions).select{ |version| version.include? get_pyxis_version(deploy_id) }
2401
-
2402
- @pyxis_version = "0.#{get_pyxis_version(deploy_id)}.#{qa_versions.size}"
2403
-
2404
- `sed -i '3s/".*/"version": "#{@pyxis_version}",/' package.json`
2405
- [
2406
- "docker-compose run -w $PWD -u root -e NPM_TOKEN=$NPM_TOKEN --entrypoint /bin/sh web \
2407
- '-c' 'yarn install && \
2408
- yarn build:prod && \
2409
- npm publish'"
2410
- ].each do |cmd|
2411
- execute_command cmd
2412
- end
2413
-
2414
- cleanup_containers
2415
- Dir.chdir '../../'
2416
- end
2417
- end
2418
-
2419
- def create_rachele_artifact(revision)
2420
- output "Preparo l'artifact rachele .zip\n".yellow
2421
-
2422
- git_checkout_version('rachele', revision)
2423
-
2424
- Dir.chdir 'projects/rachele'
2425
-
2426
- decrypt_secrets()
2427
-
2428
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2429
- exec_step 'prepare-docker-compose --directory rachele && cp docker-compose-qainit.yml docker-compose.yml'
2430
-
2431
- execute_command "docker-compose build web"
2432
-
2433
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2434
- '-c' 'mix local.hex --force && mix hex.info && \
2435
- mix deps.get && mix compile && mix deps.compile && \
2436
- rm -rf _build/qa/rel/ && \
2437
- mix release --env=qa'"
2438
- ].each do |cmd|
2439
- execute_command cmd
2440
- end
2441
-
2442
- cleanup_containers
2443
-
2444
- artifact_path = Dir.glob("_build/qa/rel/rachele/releases/*/rachele.tar.gz").first
2445
- upload_artifact(artifact_path, "microservices/rachele/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2446
-
2447
- Dir.chdir '../../'
2448
- end
2449
-
2450
- def create_roger_artifact(revision)
2451
- output "Preparo l'artifact roger .zip\n".yellow
2452
-
2453
- git_checkout_version('roger', revision)
2454
-
2455
- Dir.chdir 'projects/roger'
2456
-
2457
- decrypt_secrets()
2458
-
2459
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2460
- exec_step 'prepare-docker-compose --directory roger && cp docker-compose-qainit.yml docker-compose.yml'
2461
- [
2462
- "docker-compose build web",
2463
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2464
- '-c' 'mix local.hex --force && mix hex.info && \
2465
- mix deps.get && mix compile && mix deps.compile && \
2466
- mix phx.digest && \
2467
- rm -rf _build/qa/rel/ && \
2468
- mix distillery.release --env=qa'"
2469
- ].each do |cmd|
2470
- execute_command cmd
2471
- end
2472
-
2473
- cleanup_containers
2474
-
2475
- artifact_path = Dir.glob("_build/qa/rel/roger/releases/*/roger.tar.gz").first
2476
- upload_artifact(artifact_path, "microservices/roger/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2477
-
2478
- Dir.chdir '../../'
2479
- end
2480
-
2481
- def create_rogoreport_artifact(revision)
2482
- output "Preparo l'artifact rogoreport .zip\n".yellow
2483
-
2484
- git_checkout_version('rogoreport', revision)
2485
-
2486
- Dir.chdir 'projects/rogoreport'
2487
-
2488
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2489
-
2490
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2491
- exec_step 'prepare-docker-compose --directory rogoreport && cp docker-compose-qainit.yml docker-compose.yml'
2492
- [
2493
- "docker-compose build web",
2494
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2495
- '-c' 'mix local.hex --force && mix hex.info && \
2496
- mix deps.get && mix compile && mix deps.compile && \
2497
- rm -rf _build/qa/rel/ && \
2498
- mix release --name=rogoreport --env=qa'"
2499
- ].each do |cmd|
2500
- execute_command cmd
2501
- end
2502
-
2503
- cleanup_containers
2504
-
2505
- artifact_path = Dir.glob("_build/qa/rel/rogoreport/releases/*/rogoreport.tar.gz").first
2506
- upload_artifact(artifact_path, "microservices/rogoreport/rogoreport-#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2507
-
2508
- Dir.chdir '../../'
2509
- end
2510
-
2511
- def create_skynet_artifact(revision)
2512
- output "Preparo l'artifact skynet\n".yellow
2513
-
2514
- git_checkout_version('skynet', revision)
2515
-
2516
- Dir.chdir 'projects/skynet'
2517
-
2518
- version = `git rev-parse HEAD`
2519
-
2520
- artifact_path = "/tmp/skynet-#{revision}-qa.tar.gz"
2521
-
2522
- exec_step "tar cfz #{artifact_path} core config oracles pickles web tests logstash application_log configuration.py wsgi.py requirements.txt requirements_training.txt"
2523
-
2524
- upload_artifact(artifact_path, "microservices/skynet/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2525
-
2526
- Dir.chdir '../../'
2527
- end
2528
-
2529
- def create_starsky_artifact(revision)
2530
- output "Preparo l'artifact starsky\n".yellow
2531
-
2532
- git_checkout_version('starsky', revision)
2533
-
2534
- Dir.chdir 'projects/starsky'
2535
-
2536
- version = `git rev-parse HEAD`
2537
-
2538
- #artifact_path = "/tmp/starsky-#{revision}-qa.tar.gz"
2539
-
2540
- decrypt_secrets() unless File.exist?('config/secrets.yml')
2541
-
2542
- `mv docker-compose-ci.yml docker-compose.yml`
2543
- exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
2544
- exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
2545
- exec_step "cp .env.dist.qa .env"
2546
-
2547
- [
2548
- "sed -i 's/USER app/USER root/g' Dockerfile",
2549
- "if echo `docker network ls` | grep peano_default; \
2550
- then echo 'peano_default network already existing'; \
2551
- else docker network create peano_default; fi",
2552
- "docker-compose build web",
2553
- "docker-compose run -w $PWD -e BUILD_ENV=qa -u root --entrypoint /bin/sh web \
2554
- '-c' 'cargo build --release -vv --features=qa \
2555
- && cargo build --bin migrate --release --features=qa \
2556
- && cargo build --bin rabbit_worker --release --features=qa \
2557
- && cp -p target/release/starsky . \
2558
- && cp -p target/release/migrate . \
2559
- && cp -p target/release/rabbit_worker . \
2560
- && tar cfz #{revision}-qa.tar.gz config starsky migrate rabbit_worker .env.dist.*'"
2561
- ].each do |cmd|
2562
- execute_command cmd
2563
- end
2564
-
2565
- artifact_path = "./#{revision}-qa.tar.gz"
2566
-
2567
- upload_artifact(artifact_path, "microservices/starsky/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2568
-
2569
- Dir.chdir '../../'
2570
- end
2571
-
2572
- def create_urania_artifact(revision)
2573
- output "Preparo l'artifact urania .zip\n".yellow
2574
-
2575
- git_checkout_version('urania', revision)
2576
-
2577
- Dir.chdir 'projects/urania'
2578
-
2579
- decrypt_secrets()
2580
-
2581
- exec_step 'cp docker-compose.yml docker-compose-ci.yml'
2582
- exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'
2583
-
2584
- execute_command "docker-compose build web"
2585
-
2586
- [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
2587
- '-c' 'mix local.hex --force && mix hex.info && \
2588
- mix deps.get && mix compile && mix deps.compile && \
2589
- rm -rf _build/qa/rel/ && \
2590
- mix release --env=qa'"
2591
- ].each do |cmd|
2592
- execute_command cmd
2593
- end
2594
-
2595
- cleanup_containers
2596
-
2597
- artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
2598
- upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
2599
-
2600
- Dir.chdir '../../'
2601
- end
2602
-
2603
659
  def deploy_pyxis?
2604
660
  if defined? @deploy_pyxis
2605
661
  @deploy_pyxis
@@ -2613,146 +669,12 @@ class Release
2613
669
  end
2614
670
  end
2615
671
 
2616
- def deploy_crash?
2617
- crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
2618
- leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
2619
- crash_present || leftorium_present
2620
- end
2621
-
2622
- def deploy_starsky_hutch?
2623
- starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
2624
- hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
2625
- starsky_present || hutch_present
2626
- end
2627
-
2628
- def get_pyxis_version(deploy_id)
2629
- (deploy_id.delete '[a-z0]')[0..9]
2630
- end
2631
-
2632
- def cleanup_containers
2633
- `docker-compose kill && docker-compose down -v --remove-orphans`
2634
- `docker rm $(docker ps -q -f status=exited)`
2635
- end
2636
-
2637
- def git_checkout_version(project, revision)
2638
- Dir.chdir "projects/#{project}"
2639
- exec_step "git checkout -- . && git checkout #{revision}"
2640
- Dir.chdir "../../"
2641
- end
2642
-
2643
- def create_asg_stack(stack_name, tags = [])
2644
- stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
2645
- parameters = [
2646
- {
2647
- parameter_key: "Environment",
2648
- parameter_value: "qa"
2649
- },
2650
- {
2651
- parameter_key: "InstanceType",
2652
- parameter_value: "t3.large"
2653
- },
2654
- {
2655
- parameter_key: "ECSClusterName",
2656
- parameter_value: @ecs_cluster_name
2657
- },
2658
- {
2659
- parameter_key: "AMIID",
2660
- parameter_value: @ami_id
2661
- }
2662
- ]
2663
- create_stack(stack_name, stack_body, parameters, tags, @cf_role)
2664
- end
2665
-
2666
- def create_cluster_stack(stack_name, tags = [])
2667
- stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2668
- create_stack(stack_name, stack_body, [], tags)
2669
- end
2670
-
2671
672
  def update_cluster_stack(stack_name, tags = [])
2672
673
  stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
2673
674
  update_stack(stack_name, stack_body, [], tags)
2674
675
  end
2675
676
 
2676
- def create_alb_stack(stack_name, role, hash, environment = 'qa')
2677
- stack_body = IO.read('cloudformation/stacks/elb/alb-public-qa.yml')
2678
- parameters = [
2679
- {
2680
- parameter_key: "Environment",
2681
- parameter_value: environment
2682
- },
2683
- {
2684
- parameter_key: "Role",
2685
- parameter_value: role
2686
- },
2687
- {
2688
- parameter_key: "EnvHash",
2689
- parameter_value: hash
2690
- }
2691
- ]
2692
- create_stack(stack_name, stack_body, parameters, [], @cf_role)
2693
- end
2694
-
2695
- def import_redis_crash(qa_ip_address)
2696
- output "Importo chiavi di Redis da staging\n".yellow
2697
-
2698
- prefixes = ['CODICI', 'fun_with_flags']
2699
- redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
2700
- redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
2701
-
2702
- prefixes.each do |prefix|
2703
- redis_staging.keys("#{prefix}*").each do |key|
2704
- next unless redis_qa.keys(key).empty?
2705
- output "Importo #{key} dal Redis di staging\n".yellow
2706
- dump_staging = redis_staging.dump key
2707
- redis_qa.restore key, 0, dump_staging
2708
- end
2709
- end
2710
- end
2711
-
2712
- def import_dbs(ip_address)
2713
- overrides = {
2714
- container_overrides: [
2715
- {
2716
- name: 'dbrestore',
2717
- environment: [
2718
- {
2719
- name: 'EC2_IP_ADDRESS',
2720
- value: ip_address
2721
- }
2722
- ]
2723
- }
2724
- ]
2725
- }
2726
- resp = run_ecs_task(@ecs_cluster_name, @import_db_task, overrides, 1)
2727
- return resp
2728
- end
2729
-
2730
- def wait_for_db_import(task)
2731
- output "Attendo che i DB vengano importati...\n".yellow
2732
- stopped_at = nil
2733
- sleep 15 # otherwise the just-launched task is not found yet...
2734
- while stopped_at.nil?
2735
- if task.tasks[0].nil?
2736
- pp @ecs_cluster_name
2737
- pp task
2738
- stop_if true, "Task di import DB lanciato, ma risposta vuota!".red
2739
- end
2740
- task = describe_ecs_tasks(task.tasks[0].cluster_arn, [task.tasks[0].task_arn])
2741
- stopped_at = task.tasks[0].stopped_at unless task.tasks[0].nil?
2742
- sleep_seconds = 10
2743
- seconds_elapsed = 0
2744
- while true && stopped_at.nil?
2745
- break if seconds_elapsed >= sleep_seconds
2746
- print '.'.yellow; STDOUT.flush
2747
- sleep 1
2748
- seconds_elapsed += 1
2749
- end
2750
- end
2751
- print "\n"
2752
- end
2753
-
2754
677
  def choose_branch_to_deploy(project_name, select_master = false)
2755
- return {'name' => 'master', 'revision' => '399653d555b8864', 'committer' => 'crash@prima.it', 'default_branch' => true} if project_name == 'crash' && select_master
2756
678
  Dir.chdir "projects/#{project_name}"
2757
679
  output "Recupero la lista dei branch del progetto #{project_name}..."
2758
680
  `git remote prune origin`
@@ -2792,7 +714,6 @@ class Release
2792
714
  name = branch_name.split(' ')[0]
2793
715
  revision = branch_name.split(' ')[1]
2794
716
  committer_email = branch_name.split(' ')[2].tr('<>', '')
2795
- return { 'name' => 'crash', 'default_branch' => true } if project_name == 'crash' && branch_name == 'master' # remove this line if the QA environments ever need crash on a branch other than master
2796
717
  { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email, 'default_branch' => select_master }
2797
718
  end
2798
719
 
@@ -2840,70 +761,6 @@ class Release
2840
761
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
2841
762
  end
2842
763
 
2843
- def launch_marley(ip_address, prima_hostname, borat_hostname)
2844
- resp = describe_stack_resource('batch-job-marley', 'JobDefinition')
2845
-
2846
- @batch.submit_job({
2847
- job_name: "marley-#{@dns_record_identifier}", # required
2848
- job_queue: "tools-production", # required
2849
- job_definition: resp.stack_resource_detail.physical_resource_id, # required
2850
- container_overrides: {
2851
- environment: [
2852
- {
2853
- name: 'PRIMA_URL',
2854
- value: "https://#{prima_hostname}/?superprima"
2855
- },
2856
- {
2857
- name: 'PRIMA_IP',
2858
- value: ip_address
2859
- },
2860
- {
2861
- name: 'PROJECTS_JSON',
2862
- value: @projects.to_json
2863
- },
2864
- {
2865
- name: 'BACKOFFICE_URL',
2866
- value: "https://#{borat_hostname}"
2867
- }
2868
- ]
2869
- }
2870
- })
2871
-
2872
- output "Marley lanciato con successo!\n".green
2873
- end
2874
-
2875
- def get_currently_deployed_version(stack_name)
2876
- parameters = get_stack_parameters(stack_name)
2877
- currently_deployed_version = nil
2878
- parameters.each do |parameter|
2879
- if parameter.parameter_key == "ReleaseVersion"
2880
- currently_deployed_version = parameter.parameter_value
2881
- end
2882
- end
2883
- currently_deployed_version
2884
- end
2885
-
2886
- def decrypt_secrets()
2887
- docker_image = "prima/biscuit_populate_configs"
2888
- [
2889
- "docker pull #{docker_image}",
2890
- "docker run -t --rm -v $HOME/.aws:/root/.aws -w $PWD --volumes-from #{get_host_container_name} #{docker_image}"
2891
- ].each do |cmd|
2892
- execute_command cmd
2893
- end
2894
- end
2895
-
2896
- def get_host_container_name()
2897
- if @host_container_name
2898
- @host_container_name
2899
- else
2900
- hostname = `cat /etc/hostname`.gsub("\n", '')
2901
- execute_command "docker ps | grep #{hostname} | awk '{print $NF}'"
2902
- @host_container_name = `docker ps | grep #{hostname} | awk '{print $NF}'`.gsub("\n", '')
2903
- # @host_container_name = `docker ps | grep #{hostname} | sed -r "s/.+ ([^\s].+)$/\1/p"`
2904
- end
2905
- end
2906
-
2907
764
  def select_branches(project_names = nil)
2908
765
  output "Deploy feature menu"
2909
766
  if project_names.nil?
@@ -2917,14 +774,6 @@ class Release
2917
774
  end
2918
775
  end
2919
776
  end
2920
-
2921
- def get_ami_id(stack_name)
2922
- get_stack_parameters(stack_name).each do |param|
2923
- if param.parameter_key == "AMIID"
2924
- return param.parameter_value
2925
- end
2926
- end
2927
- end
2928
777
  end
2929
778
 
2930
779
  def help_content