prima-twig 0.58.509 → 0.59.0

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these versions as they appear in the public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 6d7efabe467aabe7d0b7c8bdb1db47dae48d875c8d8ec86e84611a163e8b193a
- data.tar.gz: f0bd66f5c5999dd5bed1bbc6b203585f202059ae7ed81dbab266d714da2a72ee
+ metadata.gz: 4448114bff03d2c305f17e84214de309861f50f53b73fb19fbbe434c514e315c
+ data.tar.gz: '0587c2196fed9292bd60989b8fbf331203cc9ba608608946854279e21f406de0'
  SHA512:
- metadata.gz: ec5177ad64ac62333c9f1850c521e22b964f887911ef7db6fafff3dfbc7a29d893c434104f42bd190f066877dd867ac2b93a552d49b6756c265a1015179f118e
- data.tar.gz: 3aececf6beac593056a3652793fd403a19fa0681024e8d18020277158bd894a3f7abf2f119d9a386547a823cabc165bc5ae78c2bb71774aae9d889d7dfa37f90
+ metadata.gz: 2fdd34b834da97bb1d6107f67e27349c69ebc63fc79859e94415009b48b78b35388a8e2b23451f91698c2746eb2b88bc36df8d33264f4ed5b63c2d39e2a9e93c
+ data.tar.gz: 0b8d730ad5f305b64a5edb3f3cccc32b50eed6d1946bdc20730b498f3d78ec4ad88e8d2cd964982f472ec56aa5bb60aa97204034adc6d45264b18db0e6c75aa0
@@ -5,10 +5,7 @@ require_relative '../lib/prima_twig.rb'
  require_relative '../lib/prima_aws_client.rb'
  require 'colorize'
  require 'highline/import'
- require 'aws-sdk-batch'
- require 'aws-sdk-cloudformation'
- require 'aws-sdk-ecs'
- require 'aws-sdk-s3'
+ require 'aws-sdk'
  require 'redcarpet'
  require 'mail'
  require 'erb'
@@ -58,7 +55,6 @@ class Review
  @cf = Aws::CloudFormation::Client.new
  @ecs = Aws::ECS::Client.new
  @s3 = Aws::S3::Client.new
- @batch = Aws::Batch::Client.new
  @s3_bucket = "prima-artifacts-encrypted"
  end

@@ -98,6 +94,15 @@ class Review
  artifact = artifacts.select {|v| v[:rev] == artifact_rev}.first

  do_deploy! artifact_rev
+ # exec_step "terminal-notifier -message 'Deploy terminato, vuoi lanciare paparatzinger?'" if which 'terminal-notifier'
+ #
+ # confirm_message = "Vuoi lanciare paparatzinger?"
+ # launch_paparatzinger = @prima.yesno confirm_message.blue
+ #
+ # if launch_paparatzinger
+ # output "Avvio paparatzinger per gli screenshot".yellow
+ # job_name = launch_paparatzinger(artifact[:commit_msg])
+ # end

  mail = Mail.new do
  from 'deploy@prima.it'
@@ -113,6 +118,7 @@ class Review
  body << "Revision: [#{artifact[:rev]}](https://github.com/primait/prima/commit/#{artifact[:rev]}) del #{artifact[:created_at].strftime('%d/%m/%Y %H:%M:%S')}\n\n"
  body << "Branch: [#{artifact[:branch]}](https://github.com/primait/prima/tree/#{artifact[:branch]})\n\n"
  body << "Commit: #{commit_msg.gsub(/_/, '\_')}\n\n"
+ #body << "Screenshots (tra qualche minuto): [BrowserStack](https://www.browserstack.com/automate) (Filtrare per: \"#{get_paparatzinger_job_name(commit_msg).gsub(/_/, '\_')}\")" if launch_paparatzinger

  htmlBody = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new).render body

@@ -138,8 +144,6 @@ class Review

  invalidate_prismic_cache

- launch_crawler
-
  exec_step "terminal-notifier -message 'Deploy terminato'" if which 'terminal-notifier'
  end

@@ -213,16 +217,63 @@ class Review
  artifacts.sort_by { |v| v[:created_at] }.reverse
  end

- def launch_crawler()
- resp = describe_stack_resource('batch-job-crawler-production', 'JobDefinition')
+ def launch_paparatzinger(job_name)
+ @s3.get_object(
+ response_target: '/tmp/paparatzinger_twig.yml',
+ bucket: 'prima-deploy',
+ key: 'paparatzinger_twig.yml')
+
+ paparatzinger_config = YAML.load_file '/tmp/paparatzinger_twig.yml'
+
+ uri = URI.parse(paparatzinger_config['prima_api_search_url'])
+ body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
+ req = Net::HTTP::Get.new(uri)
+ req["x-apikey"] = paparatzinger_config['prima_api_token']
+ response = http.request req
+ response.body
+ end
+
+ saves = JSON.parse body

- @batch.submit_job({
- job_name: "crawler", # required
- job_queue: "tools-production", # required
- job_definition: resp.stack_resource_detail.physical_resource_id # required
+ save_code = saves.sample['unique_identifier']
+ url_garanzie = "https://www.prima.it/preventivo/auto/#{save_code}/garanzie?browserstack=true"
+ job_name = get_paparatzinger_job_name(clean_commit_message(job_name))
+
+ logical_resource_id = 'TaskDefinitionPaparatzinger'
+ resp = @cf.describe_stack_resource({
+ stack_name: 'ecs-task-paparatzinger-production',
+ logical_resource_id: logical_resource_id
+ })
+
+ resp = @ecs.run_task({
+ cluster: 'ecs-cluster-tools-vpc-production-ECSCluster-1WJQLW5EVLYEB',
+ task_definition: resp.stack_resource_detail.physical_resource_id,
+ overrides: {
+ container_overrides: [
+ {
+ name: 'paparatzinger',
+ environment: [
+ {
+ name: 'JOB_NAME',
+ value: job_name,
+ },
+ {
+ name: 'VERSION',
+ value: paparatzinger_config['version'],
+ },
+ {
+ name: 'URL_GARANZIE',
+ value: url_garanzie
+ }
+ ]
+ }
+ ]
+ },
+ count: 1
  })
+ output "Paparatzinger lanciato con successo. URL: #{url_garanzie}\n".green

- output "Crawler lanciato con successo!\n".green
+ job_name
  end

  end
@@ -234,6 +285,10 @@ def clean_commit_message(commit_msg)
  commit_msg[0..99]
  end

+ def get_paparatzinger_job_name(job_name)
+ job_name.gsub /[^0-9a-z]/i, '-'
+ end
+
  def which(cmd)
  exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
  ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
@@ -26,7 +26,7 @@ class Release
  @s3 = Aws::S3::Client.new
  @s3_bucket = 'prima-artifacts'
  @artifact_path = '/tmp/prima-artifact.zip'
- @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-4UBHMCZBE5WM:1'
+ @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-1BXH13XEVLPP0:1'
  @cf_role = 'arn:aws:iam::001575623345:role/qainit-service-role-cloudformat-CloudformationRole-18KBZQIS148R9'
  @dns_record_identifier = nil
  @ecs_cluster_name = nil
@@ -59,7 +59,6 @@ class Release
  @base_stack_name_alb = 'ecs-alb-http-public-qa-'
  @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
  @git_branch = ''
- @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
  end

  def execute!(args)
@@ -76,6 +75,8 @@ class Release
  qainit_deploy_update!
  elsif 'read' == args[1]
  qainit_read_config! args[2]
+ elsif 'minimal' == args[1]
+ qainit_minimal_deploy! args[2]
  else
  if args[1]
  select_branches(args[1..-1])
@@ -89,6 +90,8 @@ class Release
  if 'deploy' == args[1]
  suite_py_branches(args[2])
  qainit_deploy!(true)
+ else
+ qainit_deploy_shutdown!(args[2])
  end
  when 'deploy'
  abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
@@ -98,6 +101,8 @@ class Release
  deploy_update!
  elsif 'lock' == args[1]
  deploy_lock!
+ elsif 'minimal' == args[1]
+ qainit_drone_minimal_deploy!
  else
  if args[1]
  select_branches(args[1])
@@ -140,6 +145,7 @@ class Release
  output 'Disable aggregator'

  output "Recupero le informazioni relative al puntamento dei record DNS..."
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
  output "Recupero le informazioni sui QA attivi..."
  stack_list, envs = get_stacks()

@@ -151,7 +157,7 @@ class Release
  end.is_a?(Aws::CloudFormation::Types::Tag)
  aggregator_enabled
  end[0]
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
  change_hostname_priority(env_hash, hostname_pattern_priority())
  dns_to_staging(env_hash)
@@ -167,7 +173,8 @@ class Release
  output 'Enable aggregator'

  output 'Recupero le informazioni relative al puntamento dei record DNS...'
- dns_records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+ dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red

  output "Recupero le informazioni sui QA attivi..."
@@ -197,7 +204,7 @@ class Release
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
  end
  end

@@ -256,11 +263,12 @@ class Release

  def dns_to_staging(env_hash)
  output "Recupero le informazioni relative al puntamento dei record DNS..."
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
  end
  end
  end
@@ -427,20 +435,26 @@ class Release
  end

  def get_default_branch_name(projects)
- projects.each_key do |project|
- return projects[project]['name'] if not projects[project]['default_branch']
+ if !projects['prima']['default_branch']
+ return projects['prima']['name']
+ elsif ![nil, 'master'].include? projects['crash']['name']
+ return projects['crash']['name']
+ else
+ projects.each_key do |project_key|
+ return projects[project_key]['name'] if projects[project_key]['name'] != 'master'
+ end
  end
  end

  def suite_py_branches(args_json)
- arg_projects = JSON.parse(args_json)
+ args = JSON.parse(args_json)

- @projects.merge!(arg_projects)
+ args['projects'].each_key do |project|
+ @projects[project] = { name: args['projects'][project]['branch'], revision: args['projects'][project]['revision'], committer: '', default_branch: false }
+ end

  @projects.each_key do |project|
- if @projects[project].empty?
- @projects[project] = choose_branch_to_deploy(project, true)
- end
+ @projects[project] = choose_branch_to_deploy(project, true) unless args['projects'].key? project
  end
  end

@@ -453,7 +467,7 @@ class Release
  end

  def qainit_deploy!(quiet = false)
- `git checkout user-story/pipeline/PRIMA-4333 && git pull && git submodule update --init --recursive && git remote prune origin`
+ `git checkout master && git pull && git submodule update --init --recursive && git remote prune origin`

  default_name = get_default_branch_name @projects
  feature_number = ''
@@ -474,15 +488,42 @@ class Release

  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

- update_drone_yml!
-
  `git add projects && \
- git add branch_names .drone.yml && \
+ git add branch_names && \
  git commit -m '#{branch_name}' && \
  git push -f --set-upstream origin #{branch_name} && \
  git checkout master`
  end

+ def qainit_minimal_deploy!(project)
+ abort('L\'unico progetto permesso è prima') unless ['prima'].include? project
+ project_definition = choose_branch_to_deploy(project)
+
+ `git checkout master && git pull && git remote prune origin`
+
+ default_name = project_definition[:name]
+ output "Inserisci la feature a cui si riferisce il QA: [#{default_name}]".cyan
+ feature_number = String(STDIN.gets.chomp)
+ feature_number = default_name if feature_number.empty?
+
+ if `git branch -l | grep #{feature_number}`.size > 0
+ `git checkout #{feature_number} && git pull`
+ else
+ `git checkout -b #{feature_number}`
+ end
+
+ # così recupero le informazioni sul branch, poi vado a scrivere il file branch_names con una sola riga
+ branch = {'project' => { 'name' => project_definition[:name], 'revision' => project_definition['revision'], 'default_branch' => project_definition[:default_branch]}}
+
+ File.open('branch_names', 'w') { |file| file.write(JSON.generate(branch)) }
+
+ `git add projects && \
+ git add branch_names && \
+ git commit -m 'minimal_#{feature_number}' && \
+ git push --set-upstream origin #{feature_number} && \
+ git checkout master`
+ end
+
  def qainit_deploy_update!
  `git checkout master && git pull`
  # cancelliamo tutti i branch che non sono più sul repo remoto
@@ -521,9 +562,11 @@ class Release

  File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }

- update_drone_yml!
-
- `git commit -am 'update'`
+ if `git log -1` =~ /minimal_/
+ `git commit -am 'minimal_update'`
+ else
+ `git commit -am 'update'`
+ end
  `git push && git checkout master`
  end

@@ -598,8 +641,6 @@ class Release
  delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
  delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
  `git checkout master && git push origin --delete ${DRONE_BRANCH}`
- output "Cancello il record DNS utilizzato da Lighthouse"
- delete_lighthouse_dns()
  output "Finito!".green

  if @qainit
@@ -633,27 +674,21 @@ class Release
  when 'shutdown'
  output 'Shutting down'.green
  qainit_drone_shutdown!
+ when 'minimal'
+ output 'Starting minimal deploy'.green
+ qainit_drone_minimal_deploy!
  else
  output 'Starting standard deploy'.green
  deploy_feature!
  end
  end

- def update_drone_yml!()
- drone_yml = File.read('.drone.yml')
- @projects.each do |key, project|
- drone_yml = drone_yml.gsub("#{key}_placeholder", project['name'])
- end
- File.open(".drone.yml", "w") do |f|
- f.write(drone_yml)
- end
- end
-
  def get_s3_config_files
  # manteniamo la struttura per lanciarlo facilmente anche da locale
  `mkdir -p cloudformation/stacks/task cloudformation/stacks/route53 cloudformation/stacks/asg cloudformation/stacks/elb`
  @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/task/db.yml', response_target: 'cloudformation/stacks/task/db.yml'})
  @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/qa.yml', response_target: 'cloudformation/stacks/route53/qa.yml'})
+ @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/route53/minimal_qa.yml', response_target: 'cloudformation/stacks/route53/minimal_qa.yml'})
  @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/asg/ecs-asg-allinone.yml', response_target: 'cloudformation/stacks/asg/ecs-asg-allinone.yml'})
  @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/ecs-cluster.yml', response_target: 'cloudformation/stacks/ecs-cluster.yml'})
  @s3.get_object({bucket: "prima-deploy", key: 'cloudformation/stacks/elb/alb-public-qa.yml', response_target: 'cloudformation/stacks/elb/alb-public-qa.yml'})
@@ -668,6 +703,173 @@ class Release
  end
  end

+ def qainit_drone_minimal_deploy!
+ # tenere solo il project da deployare (l'unico project è prima)
+ @ami_id = get_ami_id("ecs-fleet-allinone-staging")
+ project = ''
+ @projects.each_key do |project_key|
+ if @projects[project_key]['revision']
+ project = project_key
+ git_checkout_version(project_key, @projects[project_key]['revision'])
+ end
+ end
+ deploy_id = get_deploy_id
+
+ @git_branch = ENV['DRONE_BRANCH']
+ @dns_record_identifier = deploy_id
+ hostname_pattern_priority = hostname_pattern_priority()
+ tags = [
+ {
+ key: "qainit",
+ value: @git_branch
+ },
+ {
+ key: project,
+ value: @projects[project]['name']
+ },
+ {
+ key: "hostname_pattern_priority",
+ value: hostname_pattern_priority
+ }
+ ]
+
+ cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
+
+ if stack_exists?(cluster_stack_name)
+ tags = get_stack_tags(cluster_stack_name)
+ hostname_pattern_priority = tags.detect do |tag|
+ tag.key == 'hostname_pattern_priority'
+ end.value
+ end
+
+ stack_name_alb = @base_stack_name_alb + deploy_id[0..5]
+ stack_name_alb_ws = @base_stack_name_alb_ws + deploy_id[0..5]
+
+ create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
+ wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
+
+ create_alb_stack(stack_name_alb, "http", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb)
+ create_alb_stack(stack_name_alb_ws, "websocket", deploy_id, 'qa-minimal') unless stack_exists?(stack_name_alb_ws)
+
+ resp = describe_stack_resource(cluster_stack_name, 'ECSCluster')
+ @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
+
+ asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
+ create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
+
+ deploy_id = get_deploy_id
+ create_pyxis_artifact(@projects["pyxis-npm"]['revision'], deploy_id)
+ create_prima_artifact(@projects["prima"]['revision'], @projects["prima"]['name'], deploy_id, true) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"]['revision']}.tar.gz")
+
+ wait_for_stack_ready(stack_name_alb) unless stack_ready?(stack_name_alb)
+ wait_for_stack_ready(stack_name_alb_ws) unless stack_ready?(stack_name_alb_ws)
+
+ stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
+ stack_body = IO.read('cloudformation/stacks/route53/minimal_qa.yml')
+ parameters = [
+ {
+ parameter_key: "DnsRecordIdentifier",
+ parameter_value: @dns_record_identifier
+ },
+ {
+ parameter_key: "PrimaElbHostname",
+ parameter_value: get_alb_host(stack_name_alb)
+ },
+ {
+ parameter_key: 'CrashElbHostname',
+ parameter_value: get_alb_host(stack_name_alb_ws)
+ }
+ ]
+
+ create_stack(stack_name_route53, stack_body, parameters, tags, @cf_role) unless stack_exists?(stack_name_route53)
+ wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
+
+ stack_name_web = "ecs-task-web-qa-#{deploy_id}"
+ git_checkout_version('prima', @projects["prima"]['revision'])
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
+ parameters = [
+ {
+ parameter_key: "Environment",
+ parameter_value: "qa-minimal"
+ },
+ {
+ parameter_key: "ReleaseVersion",
+ parameter_value: "#{@projects["prima"]['revision']}"
+ },
+ {
+ parameter_key: "TaskDesiredCount",
+ parameter_value: "1"
+ },
+ {
+ parameter_key: "ECSClusterName",
+ parameter_value: @ecs_cluster_name
+ },
+ {
+ parameter_key: "ALBShortName",
+ parameter_value: "web-qa-#{deploy_id}"[0..27]
+ },
+ {
+ parameter_key: "WebQaBaseHostname",
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "HostnamePattern",
+ parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "HostnamePatternPriority",
+ parameter_value: hostname_pattern_priority
+ },
+ {
+ parameter_key: "HostnamePatternAggregatorPriority",
+ parameter_value: (hostname_pattern_priority.to_i + 1).to_s
+ },
+ {
+ parameter_key: "EnvHash",
+ parameter_value: deploy_id
+ },
+ {
+ parameter_key: "AssangeHostname",
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "BackofficeHostname",
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "WebHostname",
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "FePrimaDomain",
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+ },
+ {
+ parameter_key: "HostnamePattern",
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
+ }
+ ]
+ if stack_exists?(stack_name_web)
+ cur_version = get_currently_deployed_version(stack_name_web)
+ update_stack(stack_name_web, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["prima"]['revision'])
+ else
+ create_stack(stack_name_web, stack_body, parameters, tags, @cf_role)
+ end
+ wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
+ update_service_defaults(stack_name_web)
+ prima_hostname = get_route53_hostname(stack_name_web)
+
+ projects_text = "
+ > Prima url: https://#{prima_hostname}
+ > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
+
+ output projects_text.cyan
+ output "Deploy effettuato, everything is awesome!\n".green
+ if @qainit
+ qainit_write_output(projects_text, 'Indirizzi scritti su ')
+ end
+ end
+
  def deploy_feature!
  `git pull && git submodule init && git submodule update`
  @ami_id = get_ami_id("ecs-fleet-allinone-staging")
@@ -687,7 +889,7 @@ class Release
  tags = [
  {
  key: "qainit",
- value: @git_branch.gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '')
+ value: @git_branch
  },
  {
  key: "hostname_pattern_priority",
@@ -697,11 +899,11 @@ class Release
  @projects.each do |key, value|
  case key.to_s
  when 'crash'
- tags << { key: 'crash', value: @projects['crash']['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_crash?
- when 'starsky', 'hutch'
- tags << { key: key.to_s, value: @projects[key.to_s]['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') } if deploy_starsky_hutch?
+ tags << { key: 'crash', value: @projects['crash']['name'] } if deploy_crash?
+ when 'starsky', 'hutch', 'maia'
+ tags << { key: key.to_s, value: @projects[key.to_s]['name'] } if deploy_starsky_hutch_maia?
  else
- tags << { key: key, value: value['name'].gsub(/[^a-zA-Z\_\.\/\=\+\-\:\@\.]/, '') }
+ tags << { key: key, value: value['name'] }
  end
  end

@@ -764,10 +966,9 @@ class Release
  create_activia_artifact(@projects["activia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"]['revision']}-qa.tar.gz")
  create_leftorium_artifact(@projects["leftorium"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/leftorium/#{@projects["leftorium"]['revision']}-qa.tar.gz")
  create_skynet_artifact(@projects["skynet"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/skynet/#{@projects["skynet"]['revision']}-qa.tar.gz")
- create_maia_artifact(@projects["maia"]['revision']) unless artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")
- create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
- create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}-qa.tar.gz")
-
+ create_starsky_artifact(@projects["starsky"]['revision']) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/starsky/#{@projects["starsky"]['revision']}-qa.tar.gz")
+ create_hutch_artifact(@projects["hutch"]['revision']) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/hutch/#{@projects["hutch"]['revision']}-qa.tar.gz")
+ create_maia_artifact(@projects["maia"]['revision']) unless !deploy_starsky_hutch_maia? || artifact_exists?('prima-artifacts-encrypted', "microservices/maia/#{@projects["maia"]['revision']}-qa.tar.gz")

  wait_for_db_import(db_task) unless stack_exists?("ecs-route53-qa-#{deploy_id}") # dovrebbe essere istantaneo

@@ -1287,11 +1488,7 @@ class Release
  ]
  if stack_exists?(stack_name_rachele)
  cur_version = get_currently_deployed_version(stack_name_rachele)
- unless cur_version.include?(@projects["rachele"]['revision'])
- delete_stack(stack_name_rachele)
- wait_for_stack_removal(stack_name_rachele)
- create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
- end
+ update_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["rachele"]['revision'])
  else
  create_stack(stack_name_rachele, stack_body, parameters, tags, @cf_role)
  end
@@ -1340,10 +1537,6 @@ class Release
  parameter_key: "GraphqlEndpoint",
  parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql"
  },
- {
- parameter_key: "GraphqlInsuranceEndpoint",
- parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/graphql/insurance"
- },
  {
  parameter_key: "AuthEndpoint",
  parameter_value: "https://backoffice-#{@dns_record_identifier}.qa.colaster.com/auth"
@@ -1418,7 +1611,7 @@ class Release
  end
  end

- if deploy_starsky_hutch?
+ if deploy_starsky_hutch_maia?
  stack_name_starsky = "ecs-task-starsky-qa-#{deploy_id}"
  git_checkout_version('starsky', @projects["starsky"]['revision'])
  stack_body = IO.read('projects/starsky/deploy/task.yml')
@@ -1458,11 +1651,7 @@ class Release
  ]
  if stack_exists?(stack_name_starsky)
  cur_version = get_currently_deployed_version(stack_name_starsky)
- unless cur_version.include?(@projects["starsky"]['revision'])
- delete_stack(stack_name_starsky)
- wait_for_stack_removal(stack_name_starsky)
- create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
- end
+ update_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role) unless cur_version.include?(@projects["starsky"]['revision'])
  else
  create_stack(stack_name_starsky, stack_body, parameters, tags, @cf_role)
  end
@@ -1550,6 +1739,10 @@ class Release
  parameter_key: "WebQaBaseHostname",
  parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
  },
+ {
+ parameter_key: "HostnamePattern",
+ parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
+ },
  {
  parameter_key: "HostnamePatternPriority",
  parameter_value: hostname_pattern_priority
@@ -1580,7 +1773,7 @@ class Release
  },
  {
  parameter_key: "HostnamePattern",
- parameter_value: "www-#{@dns_record_identifier}.*"
+ parameter_value: "www-#{@dns_record_identifier}.qa.colaster.com"
  }
  ]
  if stack_exists?(stack_name_web)
@@ -1629,10 +1822,6 @@ class Release
  {
  parameter_key: "HostnamePattern",
  parameter_value: "consumer-#{@dns_record_identifier}.qa.colaster.com"
- },
- {
- parameter_key: "WebQaBaseHostname",
- parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
  }
  ]
  if stack_exists?(stack_name_consumer)
@@ -1679,8 +1868,8 @@ class Release
  end


- if deploy_starsky_hutch?
- wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch?
+ if deploy_starsky_hutch_maia?
+ wait_for_stack_ready(stack_name_starsky) unless stack_ready?(stack_name_starsky) unless !deploy_starsky_hutch_maia?

  stack_name_hutch = "ecs-task-hutch-qa-#{deploy_id}"
  git_checkout_version('hutch', @projects["hutch"]['revision'])
@@ -1692,7 +1881,7 @@ class Release
  },
  {
  parameter_key: "ReleaseVersion",
- parameter_value: "#{@projects["hutch"]['revision']}-#{@dns_record_identifier[0..7]}"
+ parameter_value: "#{@projects["hutch"]['revision']}"
  },
  {
  parameter_key: "ALBShortName",
@@ -1783,10 +1972,9 @@ class Release
  wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
  wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
  wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
- wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia)
  wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) unless !deploy_crash?
- wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch?
-
+ wait_for_stack_ready(stack_name_hutch) unless stack_ready?(stack_name_hutch) unless !deploy_starsky_hutch_maia?
+ wait_for_stack_ready(stack_name_maia) unless stack_ready?(stack_name_maia) unless !deploy_starsky_hutch_maia?

  update_service_defaults(stack_name_web)
  update_service_defaults(stack_name_consumer)
@@ -1803,9 +1991,9 @@ class Release
  update_service_defaults(stack_name_skynet)
  update_service_defaults(stack_name_leftorium)
  update_service_defaults(stack_name_rachele)
- update_service_defaults(stack_name_maia)
- update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch?
- update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch?
+ update_service_defaults(stack_name_starsky) unless !deploy_starsky_hutch_maia?
+ update_service_defaults(stack_name_hutch) unless !deploy_starsky_hutch_maia?
+ update_service_defaults(stack_name_maia) unless !deploy_starsky_hutch_maia?
  update_service_defaults(stack_name_crash) unless !deploy_crash?

  activia_hostname = get_route53_hostname("activia")
@@ -1822,13 +2010,13 @@ class Release
  roger_hostname = get_route53_hostname("roger")
  leftorium_hostname = get_route53_hostname("leftorium")
  rachele_hostname = get_route53_hostname("rachele")
- maia_app_hostname = get_route53_hostname("maia-app")
- maia_intermediari_hostname = get_route53_hostname("maia-intermediari")
  crash_hostname = get_route53_hostname("crash") unless !deploy_crash?
- starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch?
- hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch?
+ starsky_hostname = get_route53_hostname("starsky") unless !deploy_starsky_hutch_maia?
+ hutch_hostname = get_route53_hostname("hutch") unless !deploy_starsky_hutch_maia?
+ maia_app_hostname = get_route53_hostname("maia-app") unless !deploy_starsky_hutch_maia?
+ maia_intermediari_hostname = get_route53_hostname("maia-intermediari") unless !deploy_starsky_hutch_maia?

- launch_mimo(deploy_id) if deploy_starsky_hutch?
+ # launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname

  projects_text = "
  > Prima url: https://#{prima_hostname}
@@ -1844,14 +2032,14 @@ class Release
  > Skynet url: http://#{skynet_hostname}:8050
  > Roger url: http://#{roger_hostname}:10051
  > Leftorium url: http://#{leftorium_hostname}:10061
- > Rachele url: http://#{rachele_hostname}:10040
- > Maia App url: https://#{maia_app_hostname}
- > Maia Intermediari url: https://#{maia_intermediari_hostname}"
+ > Rachele url: http://#{rachele_hostname}:10040"
  projects_text.concat "
  > Crash url: https://#{crash_hostname}" if deploy_crash?
  projects_text.concat "
  > Starsky url: https://#{starsky_hostname}
- > Hutch url: https://#{hutch_hostname}" if deploy_starsky_hutch?
+ > Hutch url: https://#{hutch_hostname}
+ > Maia App url: https://#{maia_app_hostname}
+ > Maia Intermediari url: https://#{maia_intermediari_hostname}" if deploy_starsky_hutch_maia?
  projects_text.concat "
  > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
  > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
@@ -1860,17 +2048,6 @@ class Release
  output projects_text.cyan
  output "Deploy effettuato, everything is awesome!\n".green

- if @projects['prima']['name'] != 'master' then
- # output "Lancio il batch job per la visual regression..."
- # launch_bocelli_test(prima_hostname)
- # output "Visual regression lanciata con successo!"
-
- output "Lancio i test con Lighthouse..."
- launch_lighthouse_test(prima_hostname, "mobile")
- launch_lighthouse_test(prima_hostname, "desktop")
- output "Test con Lighthouse lanciati con successo..."
- end
-
  qainit_write_output(projects_text, 'Indirizzi scritti su ')
  end

@@ -1979,7 +2156,7 @@ class Release
  when stack_name.include?('web')
  logical_resource_id = 'ECSServiceWebQA'
  when stack_name.include?('consumer')
- logical_resource_id = 'ECSServiceConsumerApiQa'
+ logical_resource_id = 'ECSServiceConsumerQa'
  when stack_name.include?('urania')
  logical_resource_id = 'ECSServiceUraniaQA'
  when stack_name.include?('backoffice')
@@ -2025,77 +2202,6 @@ class Release
  update_ecs_service(@ecs_cluster_name, resp.stack_resource_detail.physical_resource_id, {minimum_healthy_percent: 0, maximum_percent: 100})
  end

- def launch_lighthouse_test(url, device)
- @cloudflare.post("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {type: 'CNAME', name: "www-#{@dns_record_identifier}", content: url, proxied: true, ttl: 1}) unless get_lighthouse_dns()
-
- @batch.submit_job({
- job_name: "lighthouse-#{device}-#{@dns_record_identifier}",
- job_queue: "tools-production",
- job_definition: describe_stack_resource('batch-job-lighthouse-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
- container_overrides: {
- environment: [
- {
- name: "URL_TO_TEST",
- value: "https://www-#{@dns_record_identifier}.prima.it/?superprima"
- },
- {
- name: "DEVICE",
- value: device
- },
- {
- name: "BRANCH_NAME",
- value: @projects['prima']['name']
- },
- {
- name: "COMMITTER_EMAIL",
- value: @projects['prima']['committer']
- }
- ]
- }
- })
- end
-
- def get_lighthouse_dns()
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', name: "www-#{@dns_record_identifier}.prima.it"})
- if dns_records.body[:result_info][:count] > 0
- return dns_records.body[:result][0][:id]
- end
- false
- end
-
- def delete_lighthouse_dns()
- dns_id = get_lighthouse_dns()
- @cloudflare.delete("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns_id}") if dns_id
- end
-
- def launch_bocelli_test(url)
- @batch.submit_job({
- job_name: "bocelli-test-#{@dns_record_identifier}",
- job_queue: "tools-production",
- job_definition: describe_stack_resource('batch-job-bocelli-production', 'JobDefinition').stack_resource_detail.physical_resource_id,
- container_overrides: {
- environment: [
- {
- name: "BATCH_COMMAND",
- value: "test"
- },
- {
- name: "QA_HOSTNAME",
- value: url
- },
- {
- name: "BRANCH_NAME",
- value: @projects['prima']['name']
- },
- {
- name: "COMMITTER_EMAIL",
- value: @projects['prima']['committer']
- }
- ]
- }
- })
- end
-
  def create_activia_artifact(revision)
  output "Preparo l'artifact activia .zip\n".yellow

@@ -2107,25 +2213,22 @@ class Release

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory activia && cp docker-compose-qainit.yml docker-compose.yml'
-
- # execute_command "deploy/build_qa_artifact"
-
  [
- "docker-compose build web",
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
- '-c' 'mix local.hex --force && mix hex.info && \
- mix deps.get && mix compile && mix deps.compile && \
- cd assets && \
- rm -rf node_modules && \
- yarn --cache-folder ~/.cache/yarn && \
- sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
- cd .. && \
- mix phx.digest && \
- rm -rf _build/qa/rel/ && \
- mix distillery.release --env=qa'"
- ].each do |cmd|
- execute_command cmd
- end
+ "docker-compose build web",
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ cd assets && \
+ rm -rf node_modules && \
+ yarn --cache-folder ~/.cache/yarn && \
+ sysconfcpus -n 2 ./node_modules/.bin/webpack --mode production && \
+ cd .. && \
+ mix phx.digest && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
+ end

  cleanup_containers

@@ -2194,8 +2297,23 @@ class Release

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory borat && cp docker-compose-qainit.yml docker-compose.yml'
-
- execute_command "deploy/build_qa_artifact"
+ [
+ "docker network create borat_network || true",
+ "docker-compose build web",
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && \
+ cd assets && \
+ yarn --cache-folder ~/.cache/yarn && \
+ sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
+ cd ../ && \
+ mix phx.digest && \
+ mix compile && mix deps.compile && \
+ rm -rf _build/qa/rel/ && \
+ mix distillery.release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
+ end

  cleanup_containers

@@ -2219,7 +2337,28 @@ class Release
  `mv docker-compose-ci.yml docker-compose.yml`
  exec_step 'prepare-docker-compose --directory crash && cp docker-compose-qainit.yml docker-compose.yml'

- execute_command "deploy/build_qa_artifact #{deploy_id}"
+ if File.exists? 'deploy/build_qa_artifact'
+ `deploy/build_qa_artifact #{deploy_id}`
+ else # TODO remove when deploy/build_qa_artifact is merged
+ [
+ 'docker-compose build web',
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa -e ENV_HASH=#{deploy_id} web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && \
+ cd assets && \
+ yarn --cache-folder ~/.cache/yarn && \
+ NODE_ENV=production sysconfcpus -n 1 yarn run build && \
+ cd ../ && \
+ mix release.clean --implode --no-confirm && \
+ mix phx.digest && \
+ mix deps.clean --all && \
+ mix deps.get && \
+ mix compile && mix release --env=qa'",
+ 'docker-compose down'
+ ].each do |cmd|
+ execute_command cmd
+ end
+ end

  cleanup_containers

@@ -2241,26 +2380,27 @@ class Release
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory ermes && cp docker-compose-qainit.yml docker-compose.yml'

- if File.exists? 'deploy/build_qa_artifact'
- execute_command "deploy/build_qa_artifact"
- else
- [
- "if echo `docker network ls` | grep crash_default; \
- then echo 'crash_default network already existing'; \
- else docker network create crash_default; fi",
- 'docker-compose build web',"docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
- '-c' 'mix local.hex --force && mix hex.info && \
- mix deps.get && mix compile && mix deps.compile && \
- mix phx.digest && \
- MIX_ENV=dev mix compile.sms && \
- MIX_ENV=dev mix compile.html && \
- MIX_ENV=dev mix compile.heml && \
- MIX_ENV=dev mix compile.app_notification && \
- rm -rf _build/qa/rel/ && \
- mix release --env=qa'"
- ].each do |cmd|
- execute_command cmd
- end
+ [
+ "if echo `docker network ls` | grep crash_default; \
+ then echo 'crash_default network already existing'; \
+ else docker network create crash_default; fi",
+ 'docker-compose build web'
+ ].each do |cmd|
+ execute_command cmd
+ end
+
+ [ "docker-compose run --rm -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ mix phx.digest && \
+ MIX_ENV=dev mix compile.sms && \
+ MIX_ENV=dev mix compile.html && \
+ MIX_ENV=dev mix compile.heml && \
+ MIX_ENV=dev mix compile.app_notification && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
  end

  cleanup_containers
@@ -2282,21 +2422,16 @@ class Release

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory fidaty && cp docker-compose-qainit.yml docker-compose.yml'
-
- if File.exists? 'deploy/build_qa_artifact'
- execute_command "deploy/build_qa_artifact"
- else
- [
- "docker-compose build web",
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
- '-c' 'mix local.hex --force && mix hex.info && \
- mix deps.get && mix compile && mix deps.compile && \
- mix phx.digest && \
- rm -rf _build/qa/rel/ && \
- mix release --env=qa'"
- ].each do |cmd|
- execute_command cmd
- end
+ [
+ "docker-compose build web",
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ mix phx.digest && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
  end

  cleanup_containers
@@ -2318,19 +2453,17 @@ class Release

  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory hal9000 && cp docker-compose-qainit.yml docker-compose.yml'
- # [
- # # "docker-compose build web",
- # # "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
- # # '-c' 'mix local.hex --force && mix hex.info && \
- # # mix deps.get && mix compile && mix deps.compile && \
- # # mix phx.digest assets -o priv/static && \
- # # rm -rf _build/qa/rel/ && \
- # # mix release --env=qa'"
- # ].each do |cmd|
- # execute_command cmd
- # end
-
- execute_command "deploy/build_qa_artifact"
+ [
+ "docker-compose build web",
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ mix phx.digest assets -o priv/static && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
+ end

  cleanup_containers

@@ -2357,7 +2490,7 @@ class Release
  cleanup_containers

  artifact_path = "./hutch.tar.gz"
- upload_artifact(artifact_path, "microservices/hutch/#{revision}-#{@dns_record_identifier[0..7]}-qa.tar.gz", "#{@s3_bucket}-encrypted")
+ upload_artifact(artifact_path, "microservices/hutch/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")

  Dir.chdir '../../'
  end
@@ -2426,7 +2559,20 @@ class Release
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory peano && cp docker-compose-qainit.yml docker-compose.yml'

- execute_command "deploy/build_qa_artifact"
+ if File.exists? 'deploy/build_qa_artifact'
+ `deploy/build_qa_artifact`
+ else # TODO remove when deploy/build_qa_artifact is merged
+ [
+ "docker-compose build web",
+ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
+ end
+ end

  cleanup_containers

@@ -2436,7 +2582,7 @@ class Release

  Dir.chdir '../../'
  end

- def create_prima_artifact(revision, branch_name, deploy_id)
+ def create_prima_artifact(revision, branch_name, deploy_id, minimal = false)
  output "Preparo l'artifact prima .zip\n".yellow

  git_checkout_version('prima', revision)
@@ -2628,7 +2774,7 @@ class Release

  `mv docker-compose-ci.yml docker-compose.yml`
  exec_step 'prepare-docker-compose --directory starsky && cp docker-compose-qainit.yml docker-compose.yml'
- exec_step "sed -i s/qa_deploy_id/#{get_deploy_id}/g .env.dist.qa"
+ exec_step "sed s/qa_deploy_id/#{get_deploy_id}/ .env.dist.qa"
  exec_step "cp .env.dist.qa .env"

  [
@@ -2668,19 +2814,15 @@ class Release
  exec_step 'cp docker-compose.yml docker-compose-ci.yml'
  exec_step 'prepare-docker-compose --directory urania && cp docker-compose-qainit.yml docker-compose.yml'

- if File.exists? 'deploy/build_qa_artifact'
- execute_command "deploy/build_qa_artifact"
- else
- [
- "docker-compose build web",
- "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
- '-c' 'mix local.hex --force && mix hex.info && \
- mix deps.get && mix compile && mix deps.compile && \
- rm -rf _build/qa/rel/ && \
- mix release --env=qa'"
- ].each do |cmd|
- execute_command cmd
- end
+ execute_command "docker-compose build web"
+
+ [ "docker-compose run -w $PWD -u root -e MIX_ENV=qa --entrypoint /bin/sh web \
+ '-c' 'mix local.hex --force && mix hex.info && \
+ mix deps.get && mix compile && mix deps.compile && \
+ rm -rf _build/qa/rel/ && \
+ mix release --env=qa'"
+ ].each do |cmd|
+ execute_command cmd
  end

  cleanup_containers
@@ -2705,17 +2847,16 @@ class Release
  end

  def deploy_crash?
- true # fino a che non ci mettiamo d'accordo su come gestire il fatto che leftorium ha bisogno di comunicare con crash
- # crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
- # leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
- # crash_present || leftorium_present
+ crash_present = !@projects['crash'].empty? && @projects['crash']['name'] != 'master' && !@projects['crash']['default_branch']
+ leftorium_present = !@projects['leftorium'].empty? && @projects['leftorium']['name'] != 'master' && !@projects['leftorium']['default_branch']
+ crash_present || leftorium_present
  end

- def deploy_starsky_hutch?
- true
- #starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
- #hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
- #starsky_present || hutch_present
+ def deploy_starsky_hutch_maia?
+ starsky_present = !@projects['starsky'].nil? && !@projects['starsky'].empty? && @projects['starsky']['name'] != 'master' && !@projects['starsky']['default_branch']
+ hutch_present = !@projects['hutch'].nil? && !@projects['hutch'].empty? && @projects['hutch']['name'] != 'master' && !@projects['hutch']['default_branch']
+ maia_present = !@projects['maia'].nil? && !@projects['maia'].empty? && @projects['maia']['name'] != 'master' && !@projects['maia']['default_branch']
+ starsky_present || hutch_present || maia_present
  end

  def get_pyxis_version(deploy_id)
@@ -2742,7 +2883,7 @@ class Release
  },
  {
  parameter_key: "InstanceType",
- parameter_value: "t3a.xlarge"
+ parameter_value: "t3.large"
  },
  {
  parameter_key: "ECSClusterName",
@@ -2845,6 +2986,7 @@ class Release
  end

  def choose_branch_to_deploy(project_name, select_master = false)
+ return {name: 'master', revision: '399653d555b8864', committer: 'crash@prima.it', default_branch: true} if project_name == 'crash' && select_master
  Dir.chdir "projects/#{project_name}"
  output "Recupero la lista dei branch del progetto #{project_name}..."
  `git remote prune origin`
@@ -2884,7 +3026,8 @@ class Release
  name = branch_name.split(' ')[0]
  revision = branch_name.split(' ')[1]
  committer_email = branch_name.split(' ')[2].tr('<>', '')
- { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email, 'default_branch' => select_master }
+ return { name: 'crash', default_branch: true } if project_name == 'crash' && branch_name == 'master' #rimuovere questa riga se mai nei qa servirà crash con un branch diverso da master
+ { name: name, revision: revision[0..14], committer: committer_email, default_branch: select_master }
  end

  def select_branch_to_deploy(project_name, branch_name)
@@ -2900,7 +3043,7 @@ class Release
  name = branch_name.split(' ')[0]
  revision = branch_name.split(' ')[1]
  committer_email = branch_name.split(' ')[2].tr('<>', '')
- { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email }
+ { name: name, revision: revision[0..14], committer: committer_email }
  end

  def get_stacks()
@@ -2931,44 +3074,36 @@ class Release
  (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
  end

- def launch_mimo(env_hash)
- resp = describe_stack_resource('batch-job-mimo', 'JobDefinition')
+ def launch_marley(ip_address, prima_hostname, borat_hostname)
+ resp = describe_stack_resource('batch-job-marley', 'JobDefinition')

  @batch.submit_job({
- job_name: "mimo-#{@dns_record_identifier}", # required
+ job_name: "marley-#{@dns_record_identifier}", # required
  job_queue: "tools-production", # required
  job_definition: resp.stack_resource_detail.physical_resource_id, # required
  container_overrides: {
  environment: [
  {
- name: 'ENV_HASH',
- value: env_hash
+ name: 'PRIMA_URL',
+ value: "https://#{prima_hostname}/?superprima"
  },
  {
- name: 'APP_ENV',
- value: 'qa'
- },
- {
- name: 'CYPRESS_BASE_URL',
- value: "https://hutch-#{env_hash}.qa.colaster.com"
- },
- {
- name: 'CYPRESS_PEANO_BASE_URL',
- value: "http://peano-#{env_hash}.qa.colaster.com:10039/quotation"
+ name: 'PRIMA_IP',
+ value: ip_address
  },
  {
- name: 'CYPRESS_API_BASE_URL',
- value: "https://#{get_route53_hostname("starsky")}/graphql"
+ name: 'PROJECTS_JSON',
+ value: @projects.to_json
  },
  {
- name: 'QA_NAME',
- value: @git_branch
+ name: 'BACKOFFICE_URL',
+ value: "https://#{borat_hostname}"
  }
  ]
  }
  })

- output "Mimo lanciato con successo!\n".green
+ output "Marley lanciato con successo!\n".green
  end

  def get_currently_deployed_version(stack_name)
@@ -3049,6 +3184,7 @@ Description
  finish finishes the feature by merging to dev and master
  qainit deploys a new environment with selected branches from every project
  qainit $PROJECT_NAME deploys a new environment allowing to selected a branch from the input project (everything else is master)
+ qainit minimal prima deploys a new copy of prima project, using staging microservices and database
  qainit shutdown deletes a specific qa environment

  Available only to devops (from artemide)