prima-twig 0.34.130 → 0.35.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA256:
- metadata.gz: e643c0321391de8e61ac20f750796aea7c82a1d4c3780952522cfce31b0785aa
- data.tar.gz: 0a4eeebd6a438e44e5c26a1f8d3a1d5de3bb5986e8e26c827c1137794803af71
+ SHA1:
+ metadata.gz: 843837d907be87ecc4ef38e0415c589e037a0fc2
+ data.tar.gz: 5aae6c8a0a983e85c22cc379becb68bf20aff41c
  SHA512:
- metadata.gz: fcb774ae41561b09aedd92a986197937f5c0670077d33d1ea38ce3655f0bd966d042e3b9b3a26eb4f1cb5aa573d2791a59641474c0b5f3dde8886e7219dd684d
- data.tar.gz: 676510c75c34cb05596dc99913e20d0e4b30c25e442d945dbad7f55fde338e9bf6d86a0ef3a823fa9edeaca31c17b0dae85f8be5975418b64c681712c4a453c5
+ metadata.gz: 1faa1c7079dabca4bc423a6a1fca628b642f9f9ec35cbb624064ed186b90a97629c18620ab590542bbd8da8e134b274c40effb5b633eab92bfb01bcaf4e6e84a
+ data.tar.gz: ccd537c66283bb29704bd577a48961261d1a475198f8cc731aa6ca7288cfa669a3f854d76c5d42399d7ce66f71d530a2f98024eb4efd3f4d46a0e6ee198bafe0
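Note: the 0.35.0 release records SHA1 (plus SHA512) digests where 0.34.130 recorded SHA256; both cover the metadata.gz and data.tar.gz members packed inside the .gem archive, not the .gem file itself. A quick verification sketch (not part of the gem; the local file name is an assumption):

```ruby
# Verification sketch only: recompute the digests recorded in checksums.yaml.
# The local .gem file name is an assumption.
require 'digest'
require 'rubygems/package'

File.open('prima-twig-0.35.0.gem', 'rb') do |io|
  Gem::Package::TarReader.new(io).each do |entry|
    next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
    body = entry.read
    puts "#{entry.full_name} SHA1:   #{Digest::SHA1.hexdigest(body)}"
    puts "#{entry.full_name} SHA512: #{Digest::SHA512.hexdigest(body)}"
  end
end
```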
@@ -5,10 +5,7 @@ require_relative '../lib/prima_twig.rb'
  require_relative '../lib/prima_aws_client.rb'
  require 'colorize'
  require 'highline/import'
- require 'aws-sdk-batch'
- require 'aws-sdk-cloudformation'
- require 'aws-sdk-ecs'
- require 'aws-sdk-s3'
+ require 'aws-sdk'
  require 'redcarpet'
  require 'mail'
  require 'erb'
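The four service-specific SDK gems are collapsed into the single umbrella `aws-sdk` require, which still provides every client class used below. A minimal smoke test, assuming the umbrella gem is installed (region and `stub_responses` are choices of this sketch, not the gem's):

```ruby
# Smoke test: the umbrella gem exposes the same client classes used below.
require 'aws-sdk'

clients = [Aws::CloudFormation::Client, Aws::ECS::Client, Aws::S3::Client].map do |klass|
  klass.new(region: 'eu-west-1', stub_responses: true)
end
clients.each { |c| puts c.class.name }
```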
@@ -58,7 +55,6 @@ class Review
  @cf = Aws::CloudFormation::Client.new
  @ecs = Aws::ECS::Client.new
  @s3 = Aws::S3::Client.new
- @batch = Aws::Batch::Client.new
  @s3_bucket = "prima-artifacts-encrypted"
  end
 
@@ -98,6 +94,15 @@ class Review
  artifact = artifacts.select {|v| v[:rev] == artifact_rev}.first
 
  do_deploy! artifact_rev
+ exec_step "terminal-notifier -message 'Deploy terminato, vuoi lanciare paparatzinger?'" if which 'terminal-notifier'
+
+ confirm_message = "Vuoi lanciare paparatzinger?"
+ launch_paparatzinger = @prima.yesno confirm_message.blue
+
+ if launch_paparatzinger
+ output "Avvio paparatzinger per gli screenshot".yellow
+ job_name = launch_paparatzinger(artifact[:commit_msg])
+ end
 
  mail = Mail.new do
  from 'deploy@prima.it'
@@ -113,6 +118,7 @@ class Review
  body << "Revision: [#{artifact[:rev]}](https://github.com/primait/prima/commit/#{artifact[:rev]}) del #{artifact[:created_at].strftime('%d/%m/%Y %H:%M:%S')}\n\n"
  body << "Branch: [#{artifact[:branch]}](https://github.com/primait/prima/tree/#{artifact[:branch]})\n\n"
  body << "Commit: #{commit_msg.gsub(/_/, '\_')}\n\n"
+ body << "Screenshots (tra qualche minuto): [BrowserStack](https://www.browserstack.com/automate) (Filtrare per: \"#{get_paparatzinger_job_name(commit_msg).gsub(/_/, '\_')}\")" if launch_paparatzinger
 
  htmlBody = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new).render body
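The deploy mail is still plain `mail` + `redcarpet`: the markdown body is rendered to HTML and delivered over SMTP with the credentials from the twig config. A condensed sketch of that path, with placeholder recipient and SMTP settings:

```ruby
# Sketch of the notification path: markdown body -> Redcarpet HTML -> SMTP.
# Recipient and SMTP settings are placeholders, not values from the gem.
require 'mail'
require 'redcarpet'

markdown  = "Branch: [master](https://github.com/primait/prima/tree/master)\n\n" \
            "Commit: fix pricing rules\n\n"
html_body = Redcarpet::Markdown.new(Redcarpet::Render::HTML.new).render(markdown)

mail = Mail.new do
  from         'deploy@prima.it'
  to           'dev@example.com'
  subject      'Deploy effettuato'
  content_type 'text/html; charset=UTF-8'
  body         html_body
end
mail.delivery_method(:smtp, address: 'smtp.example.com', port: 587,
                            user_name: 'deploy', password: 'secret')
mail.deliver
```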
 
@@ -131,34 +137,17 @@ class Review
  opts[:user_name] = @prima.config['aws_username']
  opts[:password] = @prima.config['aws_password']
 
- exec_step "git checkout master"
-
  mail.delivery_method(:smtp, opts)
  mail.deliver
 
- invalidate_prismic_cache
-
- launch_crawler
-
+ exec_step "git checkout master"
  exec_step "terminal-notifier -message 'Deploy terminato'" if which 'terminal-notifier'
  end
 
- def invalidate_prismic_cache
- [
- "guarantee",
- "glossary",
- "guide",
- "faq"
- ].each do |page|
-
- exec_step "curl -X POST -H \"Content-Type: application/json\" https://www.prima.it/api/cms/update/#{page}?apikey=#{@prima.config['prima_apikey']}"
- end
- end
-
  def reload_parameters!
  artifact_rev = ''
  resp = @cf.describe_stacks({
- stack_name: "ecs-task-web-vpc-production"
+ stack_name: "ecs-task-web-production"
  })
  resp.stacks[0].parameters.each do |param|
  if param.parameter_key == 'ReleaseVersion'
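`reload_parameters!` now reads the `ReleaseVersion` parameter from the renamed `ecs-task-web-production` stack. The lookup in isolation, assuming eu-west-1 (the region is not stated in this hunk):

```ruby
# The ReleaseVersion lookup in isolation (region is an assumption).
require 'aws-sdk'

cf   = Aws::CloudFormation::Client.new(region: 'eu-west-1')
resp = cf.describe_stacks(stack_name: 'ecs-task-web-production')

release = resp.stacks[0].parameters.find { |p| p.parameter_key == 'ReleaseVersion' }
puts release ? release.parameter_value : 'ReleaseVersion not set'
```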
@@ -187,42 +176,86 @@ class Review
  exec_step "git checkout #{artifact_rev}"
  exec_step deploy_command
 
- stack_name_web = 'ecs-task-web-vpc-production'
- stack_name_consumer = 'ecs-task-consumer-vpc-production'
- stack_name_cron = 'ecs-task-consumer-vpc-production'
- stack_name_job = 'batch-job-php-production'
+ stack_name_web = 'ecs-task-web-production'
+ stack_name_consumer = 'ecs-task-consumer-production'
+ stack_name_cron = 'ecs-task-consumer-production'
  wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
  wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
  wait_for_stack_ready(stack_name_cron) unless stack_ready?(stack_name_cron)
- wait_for_stack_ready(stack_name_job) unless stack_ready?(stack_name_job)
  end
 
  def get_artifacts
  artifacts = []
  resp = @s3.list_objects(bucket: @s3_bucket, prefix: 'prima')
  resp.contents.each do |l|
- # aggiungiamo solo gli artefatti prodotti a partire dal branch master, riconosciuti tramite i metadata
  rev = l.key.match(/^prima\/(\w{15}).tar.gz$/).captures.first if l.key.match(/^prima\/(\w{15}).tar.gz$/)
  if rev
  object = @s3.head_object(bucket: @s3_bucket, key: l.key)
  commit_msg = ''
  commit_msg = Base64.decode64(object.metadata['commit_msg']).strip if object.metadata.has_key? 'commit_msg'
- artifacts << {rev: rev, created_at: object.last_modified, branch: object.metadata['branch'], commit_msg: commit_msg } if (object.metadata.has_key? 'branch') && (object.metadata['branch'] == 'master')
+ artifacts << {rev: rev, created_at: object.last_modified, branch: object.metadata['branch'], commit_msg: commit_msg } if object.metadata.has_key? 'branch'
  end
  end
  artifacts.sort_by { |v| v[:created_at] }.reverse
  end
 
- def launch_crawler()
- resp = describe_stack_resource('batch-job-crawler-production', 'JobDefinition')
+ def launch_paparatzinger(job_name)
+ @s3.get_object(
+ response_target: '/tmp/paparatzinger_twig.yml',
+ bucket: 'prima-deploy',
+ key: 'paparatzinger_twig.yml')
+
+ paparatzinger_config = YAML.load_file '/tmp/paparatzinger_twig.yml'
+
+ uri = URI.parse(paparatzinger_config['prima_api_search_url'])
+ body = Net::HTTP.start(uri.host, uri.port, :use_ssl => uri.scheme == 'https') do |http|
+ req = Net::HTTP::Get.new(uri)
+ req["x-apikey"] = paparatzinger_config['prima_api_token']
+ response = http.request req
+ response.body
+ end
+
+ saves = JSON.parse body
 
- @batch.submit_job({
- job_name: "crawler", # required
- job_queue: "tools-production", # required
- job_definition: resp.stack_resource_detail.physical_resource_id # required
+ save_code = saves.sample['unique_identifier']
+ url_garanzie = "https://www.prima.it/preventivo/auto/#{save_code}/garanzie?browserstack=true"
+ job_name = get_paparatzinger_job_name(clean_commit_message(job_name))
+
+ logical_resource_id = 'TaskDefinitionPaparatzinger'
+ resp = @cf.describe_stack_resource({
+ stack_name: 'ecs-task-paparatzinger-production',
+ logical_resource_id: logical_resource_id
+ })
+
+ resp = @ecs.run_task({
+ cluster: 'ecs-cluster-tools-ECSCluster-1I4THZHRXOTO5',
+ task_definition: resp.stack_resource_detail.physical_resource_id,
+ overrides: {
+ container_overrides: [
+ {
+ name: 'paparatzinger',
+ environment: [
+ {
+ name: 'JOB_NAME',
+ value: job_name,
+ },
+ {
+ name: 'VERSION',
+ value: paparatzinger_config['version'],
+ },
+ {
+ name: 'URL_GARANZIE',
+ value: url_garanzie
+ }
+ ]
+ }
+ ]
+ },
+ count: 1
  })
+ output "Paparatzinger lanciato con successo. URL: #{url_garanzie}\n".green
 
- output "Crawler lanciato con successo!\n".green
+ job_name
  end
 
  end
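`launch_paparatzinger` fires the screenshot task with `run_task` and returns right after printing the URL. If you wanted to block until the task stops, a polling loop over `describe_tasks` would look roughly like this (not in the gem; `resp` is the `run_task` response above):

```ruby
# Not in the gem: block until the paparatzinger task started above stops.
# `resp` is the run_task response; cluster name copied from the code above.
require 'aws-sdk'

ecs      = Aws::ECS::Client.new(region: 'eu-west-1')
task_arn = resp.tasks.first.task_arn

loop do
  task = ecs.describe_tasks(cluster: 'ecs-cluster-tools-ECSCluster-1I4THZHRXOTO5',
                            tasks: [task_arn]).tasks.first
  break if task.last_status == 'STOPPED'
  sleep 10
end
```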
@@ -234,6 +267,10 @@ def clean_commit_message(commit_msg)
  commit_msg[0..99]
  end
 
+ def get_paparatzinger_job_name(job_name)
+ job_name.gsub /[^0-9a-z]/i, '-'
+ end
+
  def which(cmd)
  exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
  ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
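`get_paparatzinger_job_name` collapses every non-alphanumeric character into a dash, which is also the string the deploy mail tells you to filter by on BrowserStack. Illustrative input/output (the commit message is made up):

```ruby
# Illustrative only: the commit message below is made up.
get_paparatzinger_job_name('Fix pricing: handle nil province!')
# => "Fix-pricing--handle-nil-province-"
```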
@@ -13,48 +13,40 @@ class Release
  include Command
  include PrimaAwsClient
 
- def initialize(update_gem=true)
+ def initialize
  @prima = Prima.new
- if update_gem
- output 'Controllo se ci sono aggiornamenti da fare (potrebbe richiedere qualche minuto)'
- unless `gem update prima-twig`=="Updating installed gems\nNothing to update\n"
- output 'Gemma prima-twig aggiornata'
- exec "twig feature #{ARGV.join ' '}"
- end
- end
+ output 'Controllo se ci sono aggiornamenti da fare...'
+ exec "gem update prima-twig && twig feature #{ARGV.join ' '}" unless `gem outdated`.lines.grep(/^prima-twig \(.*\)/).empty?
+ @cf = Aws::CloudFormation::Client.new
+ @alb = Aws::ElasticLoadBalancingV2::Client.new
+ @ec2 = Aws::EC2::Client.new
+ @ecs = Aws::ECS::Client.new
+ @batch = Aws::Batch::Client.new
+ @asg = Aws::AutoScaling::Client.new
+ @s3_bucket = 'prima-artifacts'
+ @artifact_path = '/tmp/prima-artifact.zip'
+ @import_db_task = 'arn:aws:ecs:eu-west-1:001575623345:task-definition/ecs-task-db-restore2-TaskDefinitionDbRestore-UDIDEVPA4H7Q:1'
  @dns_record_identifier = nil
  @ecs_cluster_name = nil
  @deploy_update = false
  @projects = {
  'prima' => {},
+ 'backoffice' => {},
  'urania' => {},
  'ermes' => {},
  'bburago' => {},
  'hal9000' => {},
  'fidaty' => {},
  'peano' => {},
- # 'rogoreport' => {},
+ 'rogoreport' => {},
  'assange' => {},
  'borat' => {},
+ 'bolla' => {},
  'crash' => {},
- 'activia' => {},
- 'skynet' => {},
- 'roger' => {},
- 'rachele' => {},
- 'leftorium' => {},
- 'pyxis-npm' => {},
- 'starsky' => {},
- 'hutch' => {},
- 'maia' => {},
- 'legion' => {},
- 'vianello' => {},
- 'domus' => {},
- 'toretto' => {}
+ 'activia' => {}
  }
- @base_stack_name_alb = 'ecs-alb-http-public-qa-'
- @base_stack_name_alb_ws = 'ecs-alb-ws-public-qa-'
- @git_branch = ''
- @cloudflare = Rubyflare.connect_with(ENV['CLOUDFLARE_EMAIL'], ENV['CLOUDFLARE_APIKEY'])
+ @stack_name_alb = 'ecs-alb-http-public-qa'
+ @stack_name_alb_ws = 'ecs-alb-ws-public-qa'
  end
 
  def execute!(args)
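`initialize` replaces the old `gem update prima-twig` round-trip with a single `gem outdated` check and re-execs through the updater only when prima-twig shows up in that list. The check on its own (the commented line shows the usual `gem outdated` output format):

```ruby
# The update check on its own. `gem outdated` prints lines such as
#   prima-twig (0.34.130 < 0.35.0)
# so the grep only matches when a newer prima-twig exists.
unless `gem outdated`.lines.grep(/^prima-twig \(.*\)/).empty?
  exec "gem update prima-twig && twig feature #{ARGV.join ' '}"
end
```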
@@ -63,30 +55,21 @@ class Release
  start_feature!
  when 'finish'
  finish_feature!
- when 'qainit'
- abort('Non sei nella cartella di qainit') unless Dir.pwd.match 'qainit$' or Dir.pwd.match '/drone/src'
+ when 'deploy'
+ abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
  if ['terminate', 'stop', 'shutdown', 'halt', 'destroy'].include? args[1]
- qainit_deploy_shutdown!
+ deploy_shutdown!
  elsif 'update' == args[1]
- qainit_deploy_update!
+ deploy_update!
+ elsif 'lock' == args[1]
+ deploy_lock!
  else
  if args[1]
- select_branches(args[1..-1])
+ select_branches(args[1])
  else
  select_branches
  end
- qainit_deploy!
- end
- when 'suite'
- abort('Non sei nella cartella di qainit') unless Dir.pwd.match 'qainit$'
- if 'deploy' == args[1]
- suite_py_branches(args[2])
- qainit_deploy!(true)
- end
- when 'deploy'
- abort('Non sei nella cartella di artemide') unless Dir.pwd.match 'artemide$'
- if 'lock' == args[1]
- deploy_lock!
+ deploy_feature!
  end
  when 'aggregator'
  if 'enable' == args[1]
@@ -102,7 +85,6 @@ class Release
  end
 
  def stop_for_wrong_args
- puts help_content
  stop_if true, [:wrong_args, ['start', 'finish', 'deploy', 'deploy project_name', 'deploy stop', 'deploy update', 'aggregator enable', 'aggregator disable']]
  end
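With the `qainit`/`suite` subcommands gone, `execute!` dispatches only the commands listed in `stop_for_wrong_args`. Illustrative calls (each must be run from the artemide checkout, as the `abort` above enforces):

```ruby
# Dispatch examples for the reduced command set (illustrative only).
release = Release.new
release.execute!(['deploy', 'lock'])        # -> deploy_lock!
release.execute!(['deploy', 'update'])      # -> deploy_update!
release.execute!(['aggregator', 'enable'])  # -> aggregator enable branch
```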
 
@@ -122,6 +104,10 @@ class Release
  output 'Disable aggregator'
 
  output "Recupero le informazioni relative al puntamento dei record DNS..."
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@stack_name_alb)})
+ stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
+
  output "Recupero le informazioni sui QA attivi..."
  stack_list, envs = get_stacks()
 
@@ -133,10 +119,8 @@ class Release
  end.is_a?(Aws::CloudFormation::Types::Tag)
  aggregator_enabled
  end[0]
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
- stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori non stanno puntando ad un QA".red
  change_hostname_priority(env_hash, hostname_pattern_priority())
- dns_to_staging(env_hash)
+ dns_to_staging()
  else
  output 'Nessun QA trovato'.red
  exit
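Both aggregator commands now build the Cloudflare client on demand from the twig config instead of the removed `@cloudflare` instance variable, and they check the shared QA ALB rather than a per-environment one. The lookup in isolation (zone id and calls copied from the hunk above; `:name`/`:content` are Cloudflare's usual record fields):

```ruby
# The Cloudflare lookup in isolation, as used by the aggregator commands.
cloudflare  = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records',
                             { per_page: 100, type: 'CNAME', content: get_alb_host(@stack_name_alb) })
dns_records.body[:result].each { |dns| puts "#{dns[:name]} -> #{dns[:content]}" }
```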
@@ -149,7 +133,8 @@ class Release
  output 'Enable aggregator'
 
  output 'Recupero le informazioni relative al puntamento dei record DNS...'
- dns_records = @cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+ dns_records = cloudflare.get('zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records', { per_page: 100, type: 'CNAME', content: 'staging.prima.it' })
  stop_if dns_records.body[:result].empty?, "I record DNS degli aggregatori stanno gia' puntando ad un QA".red
 
  output "Recupero le informazioni sui QA attivi..."
@@ -179,7 +164,7 @@ class Release
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@base_stack_name_alb + env_hash[3..8]), proxied: true, ttl: 1})
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: get_alb_host(@stack_name_alb), proxied: true, ttl: 1})
  end
  end
 
@@ -236,13 +221,14 @@ class Release
  wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
  end
 
- def dns_to_staging(env_hash)
+ def dns_to_staging
  output "Recupero le informazioni relative al puntamento dei record DNS..."
- dns_records = @cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@base_stack_name_alb + env_hash[3..8])})
+ cloudflare = Rubyflare.connect_with(@prima.config['cloudflare_email'], @prima.config['cloudflare_apikey'])
+ dns_records = cloudflare.get("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records", {per_page: 100, type: 'CNAME', content: get_alb_host(@stack_name_alb)})
  dns_records.body[:result].each do |dns|
  if dns[:name] =~ /^\w+\-\w+\-staging\.prima\.it$/
  output "Changing #{dns[:name]} DNS record"
- @cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
+ cloudflare.put("zones/1fb634f19c43dfb0162cc4cb91915da2/dns_records/#{dns[:id]}", {type: 'CNAME', name: dns[:name], content: 'staging.prima.it', proxied: true, ttl: 1})
  end
  end
  end
@@ -310,7 +296,7 @@ class Release
  end.is_a?(Aws::CloudFormation::Types::Tag)
 
  if aggregator_enabled
- dns_to_staging(env_hash)
+ dns_to_staging()
  end
 
  # Se non ha finito di cancellare le altre non si puo' cancellare il cluster
@@ -325,8 +311,8 @@ class Release
  end
 
  delete_stack(cluster_stack_name)
- delete_stack(@base_stack_name_alb + env_hash[3..8])
- delete_stack(@base_stack_name_alb_ws + env_hash[3..8])
+ delete_stack(@stack_name_alb) if envs.length < 2
+ delete_stack(@stack_name_alb_ws) if envs.length < 2
  output "Finito!".green
  end
 
@@ -337,7 +323,7 @@ class Release
  @deploy_update = true
 
  output "Recupero le informazioni sui QA attivi..."
- stack_list, envs = get_clusters()
+ stack_list, envs = get_stacks()
 
  env_hash = nil
  unless envs.empty?
@@ -347,7 +333,7 @@ class Release
  envs.each do |key, env|
  title = ""
  env.each do |e|
- title << "#{e.value}" if e.key == 'qainit'
+ title << "\n#{e.key.upcase}: #{e.value}"
  end
  msg = "#{@prima.reduce_size(title, 1000)}".light_blue
  menu.choice(msg) { key }
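The QA picker now prints every tag of each environment (`KEY: value` per line) instead of only the `qainit` tag. The menu itself is plain HighLine; a self-contained sketch with made-up environments:

```ruby
# Plain HighLine menu, with made-up environments.
require 'highline/import'
require 'colorize'

envs = {
  'qa-123' => "\nPRIMA: feature/pricing\nERMES: master",
  'qa-456' => "\nPRIMA: master\nERMES: feature/mailer"
}
chosen = choose do |menu|
  menu.prompt = 'Scegli il QA che vuoi aggiornare: '.cyan
  envs.each { |key, title| menu.choice(title.light_blue) { key } }
end
puts "Selected: #{chosen}"
```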
@@ -408,210 +394,944 @@ class Release
408
394
  output "Finito!".green
409
395
  end
410
396
 
411
- def get_default_branch_name(projects)
412
- projects.each_key do |project|
413
- return projects[project]['name'] if not projects[project]['default_branch']
397
+ def deploy_feature!
398
+ `git pull && git submodule init && git submodule update`
399
+ if deploy_crash?
400
+ deploy_id = Digest::MD5.hexdigest(
401
+ @projects["prima"][:name] +
402
+ @projects["backoffice"][:name] +
403
+ @projects["urania"][:name] +
404
+ @projects["ermes"][:name] +
405
+ @projects["bburago"][:name] +
406
+ @projects["hal9000"][:name] +
407
+ @projects["fidaty"][:name] +
408
+ @projects["peano"][:name] +
409
+ @projects["rogoreport"][:name] +
410
+ @projects["assange"][:name] +
411
+ @projects["borat"][:name] +
412
+ @projects["bolla"][:name] +
413
+ @projects['crash'][:name] +
414
+ @projects["activia"][:name]
415
+ )
416
+ else
417
+ deploy_id = Digest::MD5.hexdigest(
418
+ @projects["prima"][:name] +
419
+ @projects["backoffice"][:name] +
420
+ @projects["urania"][:name] +
421
+ @projects["ermes"][:name] +
422
+ @projects["bburago"][:name] +
423
+ @projects["hal9000"][:name] +
424
+ @projects["fidaty"][:name] +
425
+ @projects["peano"][:name] +
426
+ @projects["rogoreport"][:name] +
427
+ @projects["assange"][:name] +
428
+ @projects["borat"][:name] +
429
+ @projects["bolla"][:name] +
430
+ @projects["activia"][:name]
431
+ )
414
432
  end
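`deploy_id` is just an MD5 over the concatenated branch names of the selected projects (crash included only when it is part of the deploy), so the same selection always maps onto the same `qa-<id>` stack suffix. Illustration with made-up branch names:

```ruby
# Illustration only: branch names are made up.
require 'digest'

branch_names = ['master', 'feature/backoffice-login', 'master'] # one entry per project
deploy_id    = Digest::MD5.hexdigest(branch_names.join)
# deploy_id is a 32-char hex string, reused as the qa-#{deploy_id} stack suffix
```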
415
- end
416
-
417
- def suite_py_branches(args_json)
418
- arg_projects = JSON.parse(args_json)
433
+ @dns_record_identifier = deploy_id
434
+ hostname_pattern_priority = hostname_pattern_priority()
435
+ tags = [
436
+ {
437
+ key: "prima",
438
+ value: @projects["prima"][:name]
439
+ },
440
+ {
441
+ key: "urania",
442
+ value: @projects["urania"][:name]
443
+ },
444
+ {
445
+ key: "backoffice",
446
+ value: @projects["backoffice"][:name]
447
+ },
448
+ {
449
+ key: "ermes",
450
+ value: @projects["ermes"][:name]
451
+ },
452
+ {
453
+ key: "bburago",
454
+ value: @projects["bburago"][:name]
455
+ },
456
+ {
457
+ key: "hal9000",
458
+ value: @projects["hal9000"][:name]
459
+ },
460
+ {
461
+ key: "fidaty",
462
+ value: @projects["fidaty"][:name]
463
+ },
464
+ {
465
+ key: "hostname_pattern_priority",
466
+ value: hostname_pattern_priority
467
+ },
468
+ {
469
+ key: "peano",
470
+ value: @projects["peano"][:name]
471
+ },
472
+ {
473
+ key: "rogoreport",
474
+ value: @projects["rogoreport"][:name]
475
+ },
476
+ {
477
+ key: "assange",
478
+ value: @projects["assange"][:name]
479
+ },
480
+ {
481
+ key: "borat",
482
+ value: @projects["borat"][:name]
483
+ },
484
+ {
485
+ key: "bolla",
486
+ value: @projects["bolla"][:name]
487
+ },
488
+ {
489
+ key: "activia",
490
+ value: @projects["activia"][:name]
491
+ }
492
+ ]
493
+ tags << { key: 'crash', value: @projects['crash'][:name] } if deploy_crash?
494
+
495
+ cluster_stack_name = "ecs-cluster-qa-#{deploy_id}"
419
496
 
420
- @projects.merge!(arg_projects)
421
-
422
- @projects.each_key do |project|
423
- if @projects[project].empty?
424
- @projects[project] = choose_branch_to_deploy(project, true)
425
- end
497
+ if stack_exists?(cluster_stack_name)
498
+ tags = get_stack_tags(cluster_stack_name)
499
+ hostname_pattern_priority = tags.detect do |tag|
500
+ tag.key == 'hostname_pattern_priority'
501
+ end.value
426
502
  end
427
- end
428
-
429
- def get_git_user()
430
- `git config user.name`.gsub(/[^A-Za-z]/, '').gsub("\n", '')
431
- end
432
503
 
433
- def get_git_mail()
434
- `git config user.email`.gsub("\n", '')
435
- end
436
-
437
- def qainit_deploy!(quiet = false)
438
- `git checkout master && git pull && git submodule update --init --recursive && git remote prune origin`
439
-
440
- `git branch -r | awk '{print $1}' | egrep -v -f /dev/fd/0 <(git branch -vv | grep origin) | awk '{print $1}' | xargs git branch -D`
441
-
442
- default_name = get_default_branch_name @projects
443
- feature_number = ''
444
- unless quiet
445
- output "Inserisci la feature a cui si riferisce il QA: [#{default_name}]".cyan
446
- feature_number = String(STDIN.gets.chomp)
504
+ create_cluster_stack(cluster_stack_name, tags) unless stack_exists?(cluster_stack_name)
505
+ wait_for_stack_ready(cluster_stack_name) unless stack_ready?(cluster_stack_name)
506
+
507
+ create_alb_stack(@stack_name_alb, "http") unless stack_exists?(@stack_name_alb)
508
+ create_alb_stack(@stack_name_alb_ws, "websocket") unless stack_exists?(@stack_name_alb_ws)
509
+
510
+ resp = @cf.describe_stack_resource({stack_name: cluster_stack_name, logical_resource_id: 'ECSCluster'})
511
+ @ecs_cluster_name = resp.stack_resource_detail.physical_resource_id
512
+
513
+ asg_stack_name = "ecs-asg-allinone-qa-#{deploy_id}"
514
+ create_asg_stack(asg_stack_name, tags) unless stack_exists?(asg_stack_name)
515
+
516
+ stack_name_db = "ecs-task-db-qa-#{deploy_id}"
517
+ stack_body = IO.read('cloudformation/stacks/task/db.yml')
518
+ parameters = [
519
+ {
520
+ parameter_key: "Environment",
521
+ parameter_value: "qa"
522
+ },
523
+ {
524
+ parameter_key: "ECSClusterName",
525
+ parameter_value: @ecs_cluster_name
526
+ }
527
+ ]
528
+ create_stack(stack_name_db, stack_body, parameters, tags) unless stack_exists?(stack_name_db)
529
+
530
+ create_prima_artifact(@projects["prima"][:revision], @projects["prima"][:name], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "prima/#{@projects["prima"][:revision]}-#{deploy_id}.tar.gz")
531
+ create_crash_artifact(@projects['crash'][:revision], deploy_id) unless !deploy_crash? || artifact_exists?('prima-artifacts-encrypted', "microservices/crash/#{@projects['crash'][:revision]}-#{deploy_id}-qa.tar.gz")
532
+ create_urania_artifact(@projects["urania"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/urania/#{@projects["urania"][:revision]}-qa.tar.gz")
533
+ create_ermes_artifact(@projects["ermes"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/ermes/#{@projects["ermes"][:revision]}-#{deploy_id}-qa.tar.gz")
534
+ create_bburago_artifact(@projects["bburago"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bburago/#{@projects["bburago"][:revision]}-qa.tar.gz")
535
+ create_hal9000_artifact(@projects["hal9000"][:revision]) unless artifact_exists?('prima-artifacts-encrypted', "microservices/hal9000/#{@projects["hal9000"][:revision]}-qa.tar.gz")
536
+ create_fidaty_artifact(@projects["fidaty"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/fidaty/#{@projects["fidaty"][:revision]}-#{deploy_id}-qa.tar.gz")
537
+ create_backoffice_artifact(@projects["backoffice"][:revision], deploy_id) unless artifact_exists?('prima-artifacts', "backoffice/#{@projects["backoffice"][:revision]}-#{deploy_id}.zip")
538
+ create_peano_artifact(@projects["peano"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/peano/#{@projects["peano"][:revision]}-#{deploy_id}-qa.tar.gz")
539
+ create_rogoreport_artifact(@projects["rogoreport"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/rogoreport/rogoreport_qa-#{@projects["rogoreport"][:revision]}-#{deploy_id}-qa.tar.gz")
540
+ create_assange_artifact(@projects["assange"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/assange/#{@projects["assange"][:revision]}-#{deploy_id}-qa.tar.gz")
541
+ create_borat_artifact(@projects["borat"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/borat/#{@projects["borat"][:revision]}-#{deploy_id}-qa.tar.gz")
542
+ create_bolla_artifact(@projects["bolla"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/bolla/#{@projects["bolla"][:revision]}-#{deploy_id}-qa-migrator.tar.gz")
543
+ create_activia_artifact(@projects["activia"][:revision], deploy_id) unless artifact_exists?('prima-artifacts-encrypted', "microservices/activia/#{@projects["activia"][:revision]}-#{deploy_id}-qa.tar.gz")
544
+
545
+ wait_for_stack_ready(stack_name_db) unless stack_ready?(stack_name_db)
546
+ import_dbs(ec2_ip_address(asg_stack_name)) unless stack_exists?("ecs-task-web-qa-#{deploy_id}")
547
+
548
+ import_redis_crash(ec2_ip_address(asg_stack_name)) if deploy_crash? && !stack_exists?("ecs-task-web-qa-#{deploy_id}")
549
+
550
+ wait_for_stack_ready(@stack_name_alb) unless stack_ready?(@stack_name_alb)
551
+ wait_for_stack_ready(@stack_name_alb_ws) unless stack_ready?(@stack_name_alb_ws)
552
+
553
+ stack_name_route53 = "ecs-route53-qa-#{deploy_id}"
554
+ stack_body = IO.read('cloudformation/stacks/route53/qa.yml')
555
+ parameters = [
556
+ {
557
+ parameter_key: "DnsRecordIdentifier",
558
+ parameter_value: @dns_record_identifier
559
+ },
560
+ {
561
+ parameter_key: "PrimaElbHostname",
562
+ parameter_value: get_alb_host(@stack_name_alb)
563
+ },
564
+ {
565
+ parameter_key: "BackofficeElbHostname",
566
+ parameter_value: get_alb_host(@stack_name_alb)
567
+ },
568
+ {
569
+ parameter_key: "UraniaIp",
570
+ parameter_value: ec2_ip_address(asg_stack_name)
571
+ },
572
+ {
573
+ parameter_key: "BburagoIp",
574
+ parameter_value: ec2_ip_address(asg_stack_name)
575
+ },
576
+ {
577
+ parameter_key: "Hal9000Ip",
578
+ parameter_value: ec2_ip_address(asg_stack_name)
579
+ },
580
+ {
581
+ parameter_key: "FidatyIp",
582
+ parameter_value: ec2_ip_address(asg_stack_name)
583
+ },
584
+ {
585
+ parameter_key: "PeanoIp",
586
+ parameter_value: ec2_ip_address(asg_stack_name)
587
+ },
588
+ {
589
+ parameter_key: "ErmesIp",
590
+ parameter_value: ec2_ip_address(asg_stack_name)
591
+ },
592
+ {
593
+ parameter_key: "ActiviaIp",
594
+ parameter_value: ec2_ip_address(asg_stack_name)
595
+ },
596
+ {
597
+ parameter_key: "RedisIp",
598
+ parameter_value: ec2_ip_address(asg_stack_name)
599
+ },
600
+ {
601
+ parameter_key: "AssangeElbHostname",
602
+ parameter_value: get_alb_host(@stack_name_alb)
603
+ },
604
+ {
605
+ parameter_key: "BoratElbHostname",
606
+ parameter_value: get_alb_host(@stack_name_alb_ws)
607
+ },
608
+ {
609
+ parameter_key: 'CrashElbHostname',
610
+ parameter_value: get_alb_host(@stack_name_alb_ws)
611
+ }
612
+ ]
613
+
614
+ create_stack(stack_name_route53, stack_body, parameters, tags) unless stack_exists?(stack_name_route53)
615
+ wait_for_stack_ready(stack_name_route53) unless stack_ready?(stack_name_route53)
616
+
617
+ stack_name_urania = "ecs-task-urania-qa-#{deploy_id}"
618
+ git_checkout_version('urania', @projects["urania"][:revision])
619
+ stack_body = YAML.load_file('projects/urania/deploy/task.yml')
620
+ stack_body['Resources']['ECSServiceUrania']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
621
+ stack_body = stack_body.to_yaml
622
+ parameters = [
623
+ {
624
+ parameter_key: "Environment",
625
+ parameter_value: "qa"
626
+ },
627
+ {
628
+ parameter_key: "ReleaseVersion",
629
+ parameter_value: @projects["urania"][:revision]
630
+ },
631
+ {
632
+ parameter_key: "TaskDesiredCount",
633
+ parameter_value: "1"
634
+ },
635
+ {
636
+ parameter_key: "ECSClusterName",
637
+ parameter_value: @ecs_cluster_name
638
+ },
639
+ {
640
+ parameter_key: "ALBShortName",
641
+ parameter_value: "urania-qa-#{deploy_id}"[0..31]
642
+ },
643
+ {
644
+ parameter_key: "HostnamePattern",
645
+ parameter_value: "urania-#{@dns_record_identifier}.qa.colaster.com"
646
+ },
647
+ {
648
+ parameter_key: "HostnamePatternPriority",
649
+ parameter_value: hostname_pattern_priority
650
+ }
651
+ ]
652
+ if stack_exists?(stack_name_urania)
653
+ cur_version = get_currently_deployed_version(stack_name_urania)
654
+ update_stack(stack_name_urania, stack_body, parameters, tags) unless cur_version.include?(@projects["urania"][:revision])
655
+ else
656
+ create_stack(stack_name_urania, stack_body, parameters, tags)
447
657
  end
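Every service below repeats the same create-or-update shape introduced here for urania: load the task template, drop the `LoadBalancers`/`Role` properties for QA, then update the stack only if the deployed revision changed. A condensed sketch of that recurring pattern (not code from the gem; `parameters` and `tags` are assumed to be built per service as in the blocks below):

```ruby
# Not code from the gem: the per-service blocks below all share this shape and
# could be table-driven. `parameters`/`tags` are assumed to be built per service.
{ 'urania' => 'ECSServiceUrania', 'ermes' => 'ECSServiceErmes' }.each do |svc, resource|
  stack_name = "ecs-task-#{svc}-qa-#{deploy_id}"
  git_checkout_version(svc, @projects[svc][:revision])
  body = YAML.load_file("projects/#{svc}/deploy/task.yml")
  body['Resources'][resource]['Properties'].reject! { |k| %w[LoadBalancers Role].include?(k) }
  if stack_exists?(stack_name)
    cur = get_currently_deployed_version(stack_name)
    update_stack(stack_name, body.to_yaml, parameters, tags) unless cur.include?(@projects[svc][:revision])
  else
    create_stack(stack_name, body.to_yaml, parameters, tags)
  end
end
```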
448
- feature_number = default_name if feature_number.empty?
449
- branch_name = get_git_user + '_' + feature_number
450
658
 
451
- if `git branch -l | grep #{branch_name}`.size > 0
452
- `git checkout #{branch_name} && git pull`
659
+ stack_name_ermes = "ecs-task-ermes-qa-#{deploy_id}"
660
+ git_checkout_version('ermes', @projects["ermes"][:revision])
661
+ stack_body = YAML.load_file('projects/ermes/deploy/task.yml')
662
+ stack_body['Resources']['ECSServiceErmes']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
663
+ stack_body = stack_body.to_yaml
664
+ parameters = [
665
+ {
666
+ parameter_key: "Environment",
667
+ parameter_value: "qa"
668
+ },
669
+ {
670
+ parameter_key: "ReleaseVersion",
671
+ parameter_value: "#{@projects['ermes'][:revision]}-#{deploy_id}"
672
+ },
673
+ {
674
+ parameter_key: "TaskDesiredCount",
675
+ parameter_value: "1"
676
+ },
677
+ {
678
+ parameter_key: "ECSClusterName",
679
+ parameter_value: @ecs_cluster_name
680
+ },
681
+ {
682
+ parameter_key: "ALBShortName",
683
+ parameter_value: "ermes-qa-#{deploy_id}"[0..31]
684
+ },
685
+ {
686
+ parameter_key: "HostnamePattern",
687
+ parameter_value: "ermes-#{@dns_record_identifier}.qa.colaster.com"
688
+ },
689
+ {
690
+ parameter_key: "HostnamePatternPriority",
691
+ parameter_value: hostname_pattern_priority
692
+ }
693
+ ]
694
+ if stack_exists?(stack_name_ermes)
695
+ cur_version = get_currently_deployed_version(stack_name_ermes)
696
+ update_stack(stack_name_ermes, stack_body, parameters, tags) unless cur_version.include?(@projects["ermes"][:revision])
453
697
  else
454
- `git checkout -b #{branch_name}`
455
- end
456
-
457
- @git_branch = branch_name
458
-
459
- File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
460
-
461
- update_drone_yml!
462
-
463
- `git add projects && \
464
- git add branch_names .drone.yml && \
465
- git commit -m '#{branch_name}' && \
466
- git push -f --set-upstream origin #{branch_name} && \
467
- git checkout master`
468
- end
469
-
470
- def qainit_deploy_update!
471
- `git checkout master && git pull`
472
- # cancelliamo tutti i branch che non sono più sul repo remoto
473
- `git fetch -p && for branch in \`git branch -vv | grep ': gone]' | awk '{print $1}'\`; do git branch -D $branch; done`
474
- # leggiamo i nomi dei branch superstiti
475
- former_branches = `git branch -a | grep remotes/ | grep -v HEAD | sed 's/ remotes\\/origin\\///g'`.split "\n"
476
- git_user = get_git_user
477
- # stampiamo la lista
478
- chosen_branch = choose do |menu|
479
- menu.prompt = "Scegli il QA che vuoi aggiornare: ".cyan
480
- menu.shell = true
481
- former_branches.delete('master')
482
- former_branches.each_with_index do |branch, index|
483
- msg = index.odd? ? branch.white : branch.light_yellow # uno bianco e uno giallo alternati
484
- msg = branch.start_with?(git_user) ? msg.on_blue : msg.on_black # i branch creati da chi lancia l'update sono su sfondo più chiaro
485
- menu.choice(msg) { branch }
486
- end
698
+ create_stack(stack_name_ermes, stack_body, parameters, tags)
487
699
  end
488
- # checkout master, checkout branch, pull branch
489
- `git checkout master && git checkout #{chosen_branch} && git pull`
490
-
491
- # aggiornare il commit (revision a cui fa riferimento)
492
700
 
493
- # leggo il file branch_names / recupero i nomi dei branch / riscrivo tutto
494
- projects = ''
495
- File.open('branch_names', 'r') do |file|
496
- file.each_line do |line|
497
- projects = JSON.parse(line)
498
- end
701
+ stack_name_bburago = "ecs-task-bburago-qa-#{deploy_id}"
702
+ git_checkout_version('bburago', @projects["bburago"][:revision])
703
+ stack_body = YAML.load_file('projects/bburago/deploy/task.yml')
704
+ stack_body['Resources']['ECSServiceBburago']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
705
+ stack_body = stack_body.to_yaml
706
+ parameters = [
707
+ {
708
+ parameter_key: "Environment",
709
+ parameter_value: "qa"
710
+ },
711
+ {
712
+ parameter_key: "ReleaseVersion",
713
+ parameter_value: @projects["bburago"][:revision]
714
+ },
715
+ {
716
+ parameter_key: "ECSClusterName",
717
+ parameter_value: @ecs_cluster_name
718
+ },
719
+ {
720
+ parameter_key: "TaskDesiredCount",
721
+ parameter_value: "1"
722
+ },
723
+ {
724
+ parameter_key: "ALBShortName",
725
+ parameter_value: "bburago-qa-#{deploy_id}"[0..31]
726
+ },
727
+ {
728
+ parameter_key: "HostnamePattern",
729
+ parameter_value: "bburago-#{@dns_record_identifier}.qa.colaster.com"
730
+ },
731
+ {
732
+ parameter_key: "HostnamePatternPriority",
733
+ parameter_value: hostname_pattern_priority
734
+ }
735
+ ]
736
+ if stack_exists?(stack_name_bburago)
737
+ cur_version = get_currently_deployed_version(stack_name_bburago)
738
+ update_stack(stack_name_bburago, stack_body, parameters, tags) unless cur_version.include?(@projects["bburago"][:revision])
739
+ else
740
+ create_stack(stack_name_bburago, stack_body, parameters, tags)
499
741
  end
500
742
 
501
- projects.each do |key, project|
502
- @projects[key] = select_branch_to_deploy(key, project['name'])
503
- @projects[key]['default_branch'] = project['default_branch']
743
+ stack_name_hal9000 = "ecs-task-hal9000-qa-#{deploy_id}"
744
+ git_checkout_version('hal9000', @projects["hal9000"][:revision])
745
+ stack_body = YAML.load_file('projects/hal9000/deploy/task.yml')
746
+ stack_body['Resources']['ECSServiceHal9000']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
747
+ stack_body = stack_body.to_yaml
748
+ parameters = [
749
+ {
750
+ parameter_key: "Environment",
751
+ parameter_value: "qa"
752
+ },
753
+ {
754
+ parameter_key: "ReleaseVersion",
755
+ parameter_value: @projects["hal9000"][:revision]
756
+ },
757
+ {
758
+ parameter_key: "ECSClusterName",
759
+ parameter_value: @ecs_cluster_name
760
+ },
761
+ {
762
+ parameter_key: "TaskDesiredCount",
763
+ parameter_value: "1"
764
+ },
765
+ {
766
+ parameter_key: "ALBShortName",
767
+ parameter_value: "hal9000-qa-#{deploy_id}"[0..31]
768
+ },
769
+ {
770
+ parameter_key: "HostnamePattern",
771
+ parameter_value: "hal9000-#{@dns_record_identifier}.qa.colaster.com"
772
+ },
773
+ {
774
+ parameter_key: "HostnamePatternPriority",
775
+ parameter_value: hostname_pattern_priority
776
+ }
777
+ ]
778
+ if stack_exists?(stack_name_hal9000)
779
+ cur_version = get_currently_deployed_version(stack_name_hal9000)
780
+ update_stack(stack_name_hal9000, stack_body, parameters, tags) unless cur_version.include?(@projects["hal9000"][:revision])
781
+ else
782
+ create_stack(stack_name_hal9000, stack_body, parameters, tags)
504
783
  end
505
784
 
506
- File.open('branch_names', 'w') { |file| file.write(JSON.generate(@projects)) }
785
+ stack_name_fidaty = "ecs-task-fidaty-qa-#{deploy_id}"
786
+ git_checkout_version('fidaty', @projects["fidaty"][:revision])
787
+ stack_body = YAML.load_file('projects/fidaty/deploy/task.yml')
788
+ stack_body['Resources']['ECSServiceFidaty']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
789
+ stack_body = stack_body.to_yaml
790
+ parameters = [
791
+ {
792
+ parameter_key: "Environment",
793
+ parameter_value: "qa"
794
+ },
795
+ {
796
+ parameter_key: "ReleaseVersion",
797
+ parameter_value: "#{@projects["fidaty"][:revision]}-#{deploy_id}"
798
+ },
799
+ {
800
+ parameter_key: "ECSClusterName",
801
+ parameter_value: @ecs_cluster_name
802
+ },
803
+ {
804
+ parameter_key: "TaskDesiredCount",
805
+ parameter_value: "1"
806
+ },
807
+ {
808
+ parameter_key: "ALBShortName",
809
+ parameter_value: "fidaty-qa-#{deploy_id}"[0..31]
810
+ },
811
+ {
812
+ parameter_key: "HostnamePattern",
813
+ parameter_value: "fidaty-#{@dns_record_identifier}.qa.colaster.com"
814
+ },
815
+ {
816
+ parameter_key: "HostnamePatternPriority",
817
+ parameter_value: hostname_pattern_priority
818
+ }
819
+ ]
820
+ if stack_exists?(stack_name_fidaty)
821
+ cur_version = get_currently_deployed_version(stack_name_fidaty)
822
+ update_stack(stack_name_fidaty, stack_body, parameters, tags) unless cur_version.include?(@projects["fidaty"][:revision])
823
+ else
824
+ create_stack(stack_name_fidaty, stack_body, parameters, tags)
825
+ end
507
826
 
508
- update_drone_yml!
827
+ stack_name_peano = "ecs-task-peano-qa-#{deploy_id}"
828
+ git_checkout_version('peano', @projects["peano"][:revision])
829
+ stack_body = YAML.load_file('projects/peano/deploy/task.yml')
830
+ stack_body['Resources']['ECSServicePeano']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
831
+ stack_body = stack_body.to_yaml
832
+ parameters = [
833
+ {
834
+ parameter_key: "Environment",
835
+ parameter_value: "qa"
836
+ },
837
+ {
838
+ parameter_key: "ReleaseVersion",
839
+ parameter_value: "#{@projects['peano'][:revision]}-#{deploy_id}"
840
+ },
841
+ {
842
+ parameter_key: "ECSClusterName",
843
+ parameter_value: @ecs_cluster_name
844
+ },
845
+ {
846
+ parameter_key: "TaskDesiredCount",
847
+ parameter_value: "1"
848
+ },
849
+ {
850
+ parameter_key: "ALBShortName",
851
+ parameter_value: "peano-qa-#{deploy_id}"[0..31]
852
+ },
853
+ {
854
+ parameter_key: "HostnamePattern",
855
+ parameter_value: "peano-#{@dns_record_identifier}.qa.colaster.com"
856
+ },
857
+ {
858
+ parameter_key: "HostnamePatternPriority",
859
+ parameter_value: hostname_pattern_priority
860
+ }
861
+ ]
862
+ if stack_exists?(stack_name_peano)
863
+ cur_version = get_currently_deployed_version(stack_name_peano)
864
+ update_stack(stack_name_peano, stack_body, parameters, tags) unless cur_version.include?(@projects["peano"][:revision])
865
+ else
866
+ create_stack(stack_name_peano, stack_body, parameters, tags)
867
+ end
509
868
 
510
- `git add branch_names .drone.yml`
511
- `git commit -m 'update'`
512
- `git push && git checkout master`
513
- end
869
+ stack_name_rogoreport = "ecs-task-rogoreport-qa-#{deploy_id}"
870
+ git_checkout_version('rogoreport', @projects["rogoreport"][:revision])
871
+ stack_body = IO.read('projects/rogoreport/deploy/task.yml')
872
+ parameters = [
873
+ {
874
+ parameter_key: "Environment",
875
+ parameter_value: "qa"
876
+ },
877
+ {
878
+ parameter_key: "ReleaseVersion",
879
+ parameter_value: "#{@projects["rogoreport"][:revision]}-#{deploy_id}"
880
+ },
881
+ {
882
+ parameter_key: "ReleaseName",
883
+ parameter_value: "rogoreport_qa"
884
+ },
885
+ {
886
+ parameter_key: "ECSClusterName",
887
+ parameter_value: @ecs_cluster_name
888
+ }
889
+ ]
890
+ if stack_exists?(stack_name_rogoreport)
891
+ cur_version = get_currently_deployed_version(stack_name_rogoreport)
892
+ update_stack(stack_name_rogoreport, stack_body, parameters, tags) unless cur_version.include?(@projects["rogoreport"][:revision])
893
+ else
894
+ create_stack(stack_name_rogoreport, stack_body, parameters, tags)
895
+ end
514
896
 
515
- def qainit_deploy_shutdown!(selection = nil)
516
- `git checkout master && git pull && git remote prune origin`
517
- # leggiamo i nomi dei branch
518
- former_branches = `git branch -a | grep remotes/ | grep -v HEAD | sed 's/ remotes\\/origin\\///g'`.split "\n"
519
- if selection.nil?
520
- # stampiamo la lista
521
- chosen_branch = choose do |menu|
522
- menu.prompt = "Scegli il QA che vuoi spegnere: ".cyan
523
- menu.shell = true
524
- git_user = get_git_user
525
- former_branches.delete('master')
526
- former_branches.each_with_index do |branch, index|
527
- msg = index.odd? ? branch.white : branch.light_yellow # uno bianco e uno giallo alternati
528
- msg = branch.start_with?(git_user) ? msg.on_blue : msg.on_black # i branch creati da chi lancia l'update sono su sfondo blu
529
- menu.choice(msg) { branch }
530
- end
531
- end
897
+ stack_name_assange = "ecs-task-assange-qa-#{deploy_id}"
898
+ git_checkout_version('assange', @projects["assange"][:revision])
899
+ stack_body = IO.read('projects/assange/deploy/task.yml')
900
+ parameters = [
901
+ {
902
+ parameter_key: "Environment",
903
+ parameter_value: "qa"
904
+ },
905
+ {
906
+ parameter_key: "ReleaseVersion",
907
+ parameter_value: "#{@projects["assange"][:revision]}-#{deploy_id}"
908
+ },
909
+ {
910
+ parameter_key: "ECSClusterName",
911
+ parameter_value: @ecs_cluster_name
912
+ },
913
+ {
914
+ parameter_key: "TaskDesiredCount",
915
+ parameter_value: "1"
916
+ },
917
+ {
918
+ parameter_key: "ALBShortName",
919
+ parameter_value: "assange-qa-#{deploy_id}"[0..31]
920
+ },
921
+ {
922
+ parameter_key: "HostnamePattern",
923
+ parameter_value: "assange-#{@dns_record_identifier}.qa.colaster.com"
924
+ },
925
+ {
926
+ parameter_key: "HostnamePatternPriority",
927
+ parameter_value: (hostname_pattern_priority.to_i + 20).to_s
928
+ }
929
+ ]
930
+ if stack_exists?(stack_name_assange)
931
+ cur_version = get_currently_deployed_version(stack_name_assange)
932
+ update_stack(stack_name_assange, stack_body, parameters, tags) unless cur_version.include?(@projects["assange"][:revision])
532
933
  else
533
- chosen_branch = selection
934
+ create_stack(stack_name_assange, stack_body, parameters, tags)
534
935
  end
535
- # checkout master, checkout branch, pull branch, push sul branch con commit vuoto
536
- `git checkout master && git checkout #{chosen_branch} && git pull`
537
- `git commit --allow-empty -m 'shutdown' && git push && git checkout master`
538
- end
539
936
 
540
- def qainit_drone_shutdown!
541
- output "Recupero le informazioni sui QA attivi..."
542
- stack_list, envs = get_stacks
937
+ stack_name_borat = "ecs-task-borat-qa-#{deploy_id}"
938
+ git_checkout_version('borat', @projects["borat"][:revision])
939
+ stack_body = IO.read('projects/borat/deploy/task.yml')
940
+ parameters = [
941
+ {
942
+ parameter_key: "Environment",
943
+ parameter_value: "qa"
944
+ },
945
+ {
946
+ parameter_key: "ReleaseVersion",
947
+ parameter_value: "#{@projects["borat"][:revision]}-#{deploy_id}"
948
+ },
949
+ {
950
+ parameter_key: "ECSClusterName",
951
+ parameter_value: @ecs_cluster_name
952
+ },
953
+ {
954
+ parameter_key: "TaskDesiredCount",
955
+ parameter_value: "1"
956
+ },
957
+ {
958
+ parameter_key: "ALBShortName",
959
+ parameter_value: "borat-qa-#{deploy_id}"[0..31]
960
+ },
961
+ {
962
+ parameter_key: "HostnamePattern",
963
+ parameter_value: "backoffice-#{@dns_record_identifier}.qa.colaster.com"
964
+ },
965
+ {
966
+ parameter_key: "HostnamePatternPriority",
967
+ parameter_value: (hostname_pattern_priority.to_i + 30).to_s
968
+ }
969
+ ]
970
+ if stack_exists?(stack_name_borat)
971
+ cur_version = get_currently_deployed_version(stack_name_borat)
972
+ update_stack(stack_name_borat, stack_body, parameters, tags) unless cur_version.include?(@projects["borat"][:revision])
973
+ else
974
+ create_stack(stack_name_borat, stack_body, parameters, tags)
975
+ end
543
976
 
544
- env_hash = "qa-" + get_deploy_id
977
+ git_checkout_version('backoffice', @projects["backoffice"][:revision])
978
+ stack_name_backoffice = "ecs-task-backoffice-qa-#{deploy_id}"
979
+ stack_body = IO.read('projects/backoffice/deploy/task.yml')
980
+ parameters = [
981
+ {
982
+ parameter_key: "Environment",
983
+ parameter_value: "qa"
984
+ },
985
+ {
986
+ parameter_key: "ReleaseVersion",
987
+ parameter_value: "#{@projects["backoffice"][:revision]}-#{deploy_id}"
988
+ },
989
+ {
990
+ parameter_key: "TaskDesiredCount",
991
+ parameter_value: "1"
992
+ },
993
+ {
994
+ parameter_key: "ECSClusterName",
995
+ parameter_value: @ecs_cluster_name
996
+ },
997
+ {
998
+ parameter_key: "ALBShortName",
999
+ parameter_value: "backoffice-qa-#{deploy_id}"[0..31]
1000
+ },
1001
+ {
1002
+ parameter_key: "HostnamePattern",
1003
+ parameter_value: "backoffice-legacy-#{@dns_record_identifier}.qa.colaster.com"
1004
+ },
1005
+ {
1006
+ parameter_key: "HostnamePatternPriority",
1007
+ parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1008
+ }
1009
+ ]
1010
+ if stack_exists?(stack_name_backoffice)
1011
+ cur_version = get_currently_deployed_version(stack_name_backoffice)
1012
+ update_stack(stack_name_backoffice, stack_body, parameters, tags) unless cur_version.include?(@projects["backoffice"][:revision])
1013
+ else
1014
+ create_stack(stack_name_backoffice, stack_body, parameters, tags)
1015
+ end
545
1016
 
546
- cluster_stack_name = nil
547
- stacks_to_delete = []
548
- stack_list.each do |stack|
549
- if stack.stack_name.match(/#{env_hash}$/)
550
- if stack.stack_name.match(/ecs-cluster/)
551
- cluster_stack_name = stack.stack_name
552
- else
553
- break unless stack.stack_name.match(/#{env_hash}$/)
554
- stacks_to_delete.push(stack.stack_name)
555
- delete_stack(stack.stack_name)
556
- end
1017
+ if deploy_crash?
1018
+ git_checkout_version('crash', @projects['crash'][:revision])
1019
+ stack_name_crash = "ecs-task-crash-qa-#{deploy_id}"
1020
+ stack_body = IO.read('projects/crash/deploy/task.yml')
1021
+ parameters = [
1022
+ {
1023
+ parameter_key: 'Environment',
1024
+ parameter_value: 'qa'
1025
+ },
1026
+ {
1027
+ parameter_key: 'ReleaseVersion',
1028
+ parameter_value: "#{@projects['crash'][:revision]}-#{deploy_id}"
1029
+ },
1030
+ {
1031
+ parameter_key: 'TaskDesiredCount',
1032
+ parameter_value: '1'
1033
+ },
1034
+ {
1035
+ parameter_key: 'ECSClusterName',
1036
+ parameter_value: @ecs_cluster_name
1037
+ },
1038
+ {
1039
+ parameter_key: 'ALBShortName',
1040
+ parameter_value: "crash-qa-#{deploy_id}"[0..31]
1041
+ },
1042
+ {
1043
+ parameter_key: 'HostnamePattern',
1044
+ parameter_value: "crash-#{@dns_record_identifier}.qa.colaster.com"
1045
+ },
1046
+ {
1047
+ parameter_key: 'HostnamePatternPriority',
1048
+ parameter_value: (hostname_pattern_priority.to_i + 10).to_s
1049
+ }
1050
+ ]
1051
+ if stack_exists?(stack_name_crash)
1052
+ cur_version = get_currently_deployed_version(stack_name_crash)
1053
+ update_stack(stack_name_crash, stack_body, parameters, tags) unless cur_version.include?(@projects["crash"][:revision])
1054
+ else
1055
+ create_stack(stack_name_crash, stack_body, parameters, tags)
557
1056
  end
558
1057
  end
559
1058
 
560
- cluster_stack_name = "ecs-cluster-#{env_hash}"
561
- if stack_exists?(cluster_stack_name)
562
- aggregator_enabled = get_stack_tags(cluster_stack_name).detect do |tag|
563
- tag.key === "hostname_pattern_priority" and tag.value === "1"
564
- end.is_a?(Aws::CloudFormation::Types::Tag)
565
-
566
- if aggregator_enabled
567
- dns_to_staging(env_hash)
568
- end
1059
+ stack_name_activia = "ecs-task-activia-qa-#{deploy_id}"
1060
+ git_checkout_version('activia', @projects["activia"][:revision])
1061
+ stack_body = YAML.load_file('projects/activia/deploy/task.yml')
1062
+ stack_body['Resources']['ECSServiceActivia']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
1063
+ stack_body = stack_body.to_yaml
1064
+ parameters = [
1065
+ {
1066
+ parameter_key: "Environment",
1067
+ parameter_value: "qa"
1068
+ },
1069
+ {
1070
+ parameter_key: "ReleaseVersion",
1071
+ parameter_value: "#{@projects["activia"][:revision]}-#{deploy_id}"
1072
+ },
1073
+ {
1074
+ parameter_key: "ECSClusterName",
1075
+ parameter_value: @ecs_cluster_name
1076
+ },
1077
+ {
1078
+ parameter_key: "TaskDesiredCount",
1079
+ parameter_value: "1"
1080
+ },
1081
+ {
1082
+ parameter_key: "ALBShortName",
1083
+ parameter_value: "activia-qa-#{deploy_id}"[0..31]
1084
+ },
1085
+ {
1086
+ parameter_key: "HostnamePattern",
1087
+ parameter_value: "activia-#{@dns_record_identifier}.qa.colaster.com"
1088
+ },
1089
+ {
1090
+ parameter_key: "HostnamePatternPriority",
1091
+ parameter_value: hostname_pattern_priority
1092
+ }
1093
+ ]
1094
+ if stack_exists?(stack_name_activia)
1095
+ cur_version = get_currently_deployed_version(stack_name_activia)
1096
+ update_stack(stack_name_activia, stack_body, parameters, tags) unless cur_version.include?(@projects["activia"][:revision])
1097
+ else
1098
+ create_stack(stack_name_activia, stack_body, parameters, tags)
569
1099
  end
570
1100
 
571
- # Se non ha finito di cancellare le altre non si puo' cancellare il cluster
572
- output "Attendo 10 secondi per poter eliminare il cluster ECS"
1101
+ # Waiting for prima healthcheck dependencies
1102
+ wait_for_stack_ready(stack_name_urania) unless stack_ready?(stack_name_urania)
1103
+ wait_for_stack_ready(stack_name_bburago) unless stack_ready?(stack_name_bburago)
1104
+ wait_for_stack_ready(stack_name_hal9000) unless stack_ready?(stack_name_hal9000)
1105
+ wait_for_stack_ready(stack_name_assange) unless stack_ready?(stack_name_assange)
1106
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1107
+
1108
+ stack_name_bolla = "ecs-task-bolla-qa-#{deploy_id}"
1109
+ git_checkout_version('bolla', @projects["bolla"][:revision])
1110
+ stack_body = YAML.load_file('projects/bolla/deploy/task.yml')
1111
+ stack_body['Resources']['ECSServiceBolla']['Properties'].reject!{ |k| ['LoadBalancers', 'Role'].include?(k) }
1112
+ stack_body = stack_body.to_yaml
1113
+ parameters = [
1114
+ {
1115
+ parameter_key: "Environment",
1116
+ parameter_value: "qa"
1117
+ },
1118
+ {
1119
+ parameter_key: "ReleaseVersion",
1120
+ parameter_value: "#{@projects["bolla"][:revision]}-#{deploy_id}"
1121
+ },
1122
+ {
1123
+ parameter_key: "TaskDesiredCount",
1124
+ parameter_value: "1"
1125
+ },
1126
+ {
1127
+ parameter_key: "ECSClusterName",
1128
+ parameter_value: @ecs_cluster_name
1129
+ },
1130
+ {
1131
+ parameter_key: "ALBShortName",
1132
+ parameter_value: "bolla-qa-#{deploy_id}"[0..31]
1133
+ },
1134
+ {
1135
+ parameter_key: "HostnamePattern",
1136
+ parameter_value: "bolla-#{@dns_record_identifier}.qa.colaster.com"
1137
+ },
1138
+ {
1139
+ parameter_key: "HostnamePatternPriority",
1140
+ parameter_value: hostname_pattern_priority
1141
+ },
1142
+ ]
1143
+ if stack_exists?(stack_name_bolla)
1144
+ cur_version = get_currently_deployed_version(stack_name_bolla)
1145
+ update_stack(stack_name_bolla, stack_body, parameters, tags) unless cur_version.include?(@projects["bolla"][:revision])
1146
+ else
1147
+ create_stack(stack_name_bolla, stack_body, parameters, tags)
1148
+ end
573
1149
 
574
- while stacks_to_delete.length > 0
575
- sleep 13
576
- stacks_to_delete.each do |stack_name|
577
- stacks_to_delete = stacks_to_delete - [stack_name] unless stack_exists?(stack_name)
578
- end
579
- output "Stack ancora attivi: #{stacks_to_delete.length.to_s}. Attendo altri 10 secondi per eliminare il cluster ECS"
1150
+ stack_name_web = "ecs-task-web-qa-#{deploy_id}"
1151
+ git_checkout_version('prima', @projects["prima"][:revision])
1152
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/web.yml')
1153
+ parameters = [
1154
+ {
1155
+ parameter_key: "Environment",
1156
+ parameter_value: "qa"
1157
+ },
1158
+ {
1159
+ parameter_key: "ReleaseVersion",
1160
+ parameter_value: "#{@projects["prima"][:revision]}-#{deploy_id}"
1161
+ },
1162
+ {
1163
+ parameter_key: "TaskDesiredCount",
1164
+ parameter_value: "1"
1165
+ },
1166
+ {
1167
+ parameter_key: "ECSClusterName",
1168
+ parameter_value: @ecs_cluster_name
1169
+ },
1170
+ {
1171
+ parameter_key: "ALBShortName",
1172
+ parameter_value: "web-qa-#{deploy_id}"[0..31]
1173
+ },
1174
+ {
1175
+ parameter_key: "WebQaBaseHostname",
1176
+ parameter_value: "#{@dns_record_identifier}.qa.colaster.com"
1177
+ },
1178
+ {
1179
+ parameter_key: "HostnamePattern",
1180
+ parameter_value: "ww*-#{@dns_record_identifier}.qa.colaster.com"
1181
+ },
1182
+ {
1183
+ parameter_key: "HostnamePatternPriority",
1184
+ parameter_value: hostname_pattern_priority
1185
+ },
1186
+ {
1187
+ parameter_key: "HostnamePatternAggregatorPriority",
1188
+ parameter_value: (hostname_pattern_priority.to_i + 1).to_s
1189
+ }
1190
+ ]
1191
+ if stack_exists?(stack_name_web)
1192
+ cur_version = get_currently_deployed_version(stack_name_web)
1193
+ update_stack(stack_name_web, stack_body, parameters, tags) unless cur_version.include?(@projects["prima"][:revision])
1194
+ else
1195
+ create_stack(stack_name_web, stack_body, parameters, tags)
580
1196
  end
581
1197
 
582
- delete_stack(cluster_stack_name) if stack_exists?(cluster_stack_name)
583
- delete_stack(@base_stack_name_alb + env_hash[3..8]) if stack_exists?(@base_stack_name_alb + env_hash[3..8])
584
- delete_stack(@base_stack_name_alb_ws + env_hash[3..8]) if stack_exists?(@base_stack_name_alb_ws + env_hash[3..8])
585
- `git checkout master && git push origin --delete ${DRONE_BRANCH}`
586
- output "Cancello il record DNS utilizzato da Lighthouse"
587
- delete_lighthouse_dns()
588
- output "Finito!".green
589
- end
1198
+ stack_name_consumer = "ecs-task-consumer-qa-#{deploy_id}"
1199
+ git_checkout_version('prima', @projects["prima"][:revision])
1200
+ stack_body = IO.read('projects/prima/app/cloudformation/tasks/consumer.yml')
1201
+ parameters = [
1202
+ {
1203
+ parameter_key: "Environment",
1204
+ parameter_value: "qa"
1205
+ },
1206
+ {
1207
+ parameter_key: "ReleaseVersion",
1208
+ parameter_value: "#{@projects["prima"][:revision]}-#{deploy_id}"
1209
+ },
1210
+ {
1211
+ parameter_key: "ECSClusterName",
1212
+ parameter_value: @ecs_cluster_name
1213
+ }
1214
+ ]
1215
+ if stack_exists?(stack_name_consumer)
1216
+ cur_version = get_currently_deployed_version(stack_name_consumer)
1217
+ update_stack(stack_name_consumer, stack_body, parameters, tags) unless cur_version.include?(@projects["prima"][:revision])
1218
+ else
1219
+ create_stack(stack_name_consumer, stack_body, parameters, tags)
1220
+ end
590
1221
 
591
- def qainit_write_output(file_message, output_message)
592
- `mkdir -p /etc/qainit-output`
593
- qa_file_name = "/etc/qainit-output/url_qa"
594
- File.open(qa_file_name + '.txt', 'w') { |file| file.write(file_message) }
595
- output "#{output_message} #{qa_file_name}".green
1222
+ wait_for_stack_ready(stack_name_web) unless stack_ready?(stack_name_web)
1223
+ wait_for_stack_ready(stack_name_consumer) unless stack_ready?(stack_name_consumer)
1224
+ wait_for_stack_ready(stack_name_backoffice) unless stack_ready?(stack_name_backoffice)
1225
+ wait_for_stack_ready(stack_name_ermes) unless stack_ready?(stack_name_ermes)
1226
+ wait_for_stack_ready(stack_name_fidaty) unless stack_ready?(stack_name_fidaty)
1227
+ wait_for_stack_ready(stack_name_peano) unless stack_ready?(stack_name_peano)
1228
+ wait_for_stack_ready(stack_name_rogoreport) unless stack_ready?(stack_name_rogoreport)
1229
+ wait_for_stack_ready(stack_name_borat) unless stack_ready?(stack_name_borat)
1230
+ wait_for_stack_ready(stack_name_bolla) unless stack_ready?(stack_name_bolla)
1231
+ wait_for_stack_ready(stack_name_activia) unless stack_ready?(stack_name_activia)
1232
+ wait_for_stack_ready(stack_name_crash) unless stack_ready?(stack_name_crash) || !deploy_crash?
1233
+
1234
+ update_service_defaults(stack_name_web)
1235
+ update_service_defaults(stack_name_consumer)
1236
+ update_service_defaults(stack_name_urania)
1237
+ update_service_defaults(stack_name_backoffice)
1238
+ update_service_defaults(stack_name_ermes)
1239
+ update_service_defaults(stack_name_bburago)
1240
+ update_service_defaults(stack_name_hal9000)
1241
+ update_service_defaults(stack_name_fidaty)
1242
+ update_service_defaults(stack_name_peano)
1243
+ update_service_defaults(stack_name_rogoreport)
1244
+ update_service_defaults(stack_name_assange)
1245
+ update_service_defaults(stack_name_borat)
1246
+ update_service_defaults(stack_name_bolla)
1247
+ update_service_defaults(stack_name_activia)
1248
+ update_service_defaults(stack_name_crash) unless stack_ready?(stack_name_crash) || !deploy_crash?
1249
+
1250
+ prima_hostname = get_route53_hostname(stack_name_web)
1251
+ urania_hostname = get_route53_hostname(stack_name_urania)
1252
+ bburago_hostname = get_route53_hostname(stack_name_bburago)
1253
+ hal9000_hostname = get_route53_hostname(stack_name_hal9000)
1254
+ ermes_hostname = get_route53_hostname(stack_name_ermes)
1255
+ fidaty_hostname = get_route53_hostname(stack_name_fidaty)
1256
+ peano_hostname = get_route53_hostname(stack_name_peano)
1257
+ backoffice_hostname = get_route53_hostname(stack_name_backoffice)
1258
+ assange_hostname = get_route53_hostname(stack_name_assange)
1259
+ borat_hostname = get_route53_hostname(stack_name_borat)
1260
+ bolla_hostname = get_route53_hostname(stack_name_bolla)
1261
+ activia_hostname = get_route53_hostname(stack_name_activia)
1262
+ crash_hostname = get_route53_hostname(stack_name_crash) if deploy_crash?
1263
+
1264
+ launch_marley ec2_ip_address(asg_stack_name), prima_hostname, borat_hostname
1265
+
1266
+ projects_text = "
1267
+ > Prima url: https://#{prima_hostname}
1268
+ > Prima RI url: https://#{prima_hostname.sub("www", "wwwri")}
1269
+ > Backoffice (Borat) url: https://#{borat_hostname}
1270
+ > Urania url: http://#{urania_hostname}:81
1271
+ > Bburago url: http://#{bburago_hostname}:83
1272
+ > Ermes url: http://#{ermes_hostname}:10002
1273
+ > Hal9000 url: http://#{hal9000_hostname}:10031
1274
+ > Fidaty url: http://#{fidaty_hostname}:10021
1275
+ > Peano url: http://#{peano_hostname}:10039
1276
+ > Bolla url: http://#{ec2_ip_address(asg_stack_name)}:10046
1277
+ > Assange url: https://#{assange_hostname}
1278
+ > Activia url: http://#{activia_hostname}:10041
1279
+ > Backoffice (legacy) url: https://#{backoffice_hostname}"
1280
+ projects_text.concat "
1281
+ > Crash url: https://#{crash_hostname}" if deploy_crash?
1282
+ projects_text.concat "
1283
+ > RabbitMQ url: http://#{ec2_ip_address(asg_stack_name)}:15672
1284
+ > Supervisor url: http://#{ec2_ip_address(asg_stack_name)}:9001
1285
+ > Elasticsearch url: http://#{ec2_ip_address(asg_stack_name)}:9200
1286
+ > SSH connection: ssh -o UserKnownHostsFile=/dev/null -o StrictHostKeyChecking=no githubUsername@#{ec2_ip_address(asg_stack_name)}\n"
1287
+ output projects_text.cyan
1288
+ output "Deploy effettuato, everything is awesome!\n".green
596
1289
  end
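The deploy flow above relies on the wait_for_stack_ready and stack_ready? helpers (defined elsewhere in the gem, presumably prima_aws_client.rb) rather than calling CloudFormation waiters directly. As a minimal sketch of the equivalent polling with aws-sdk-cloudformation's built-in waiters, assuming the same @cf client used above (illustrative only, not the gem's implementation):

    # Block until a stack settles, picking the create or update waiter
    # from its current status.
    def wait_for_stack_ready_sketch(stack_name)
      status = @cf.describe_stacks(stack_name: stack_name).stacks[0].stack_status
      waiter = status.include?('UPDATE') ? :stack_update_complete : :stack_create_complete
      @cf.wait_until(waiter, stack_name: stack_name) do |w|
        w.delay = 15          # poll every 15 seconds
        w.max_attempts = 120  # give up after ~30 minutes
      end
    end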
597
1290
 
598
- def update_drone_yml!()
599
- drone_yml = File.read('.drone.yml')
600
- @projects.each do |key, project|
601
- drone_yml = drone_yml.gsub(/#{key}@.+\n/, "#{key}@#{project['revision']}\n")
602
- end
603
- File.open(".drone.yml", "w") do |f|
604
- f.write(drone_yml)
1291
+ def get_route53_hostname(stack_name)
1292
+ case
1293
+ when stack_name.include?('web')
1294
+ host = "www-#{@dns_record_identifier}.qa.colaster.com"
1295
+ when stack_name.include?('urania')
1296
+ host = "urania-#{@dns_record_identifier}.qa.colaster.com"
1297
+ when stack_name.include?('backoffice')
1298
+ host = "backoffice-legacy-#{@dns_record_identifier}.qa.colaster.com"
1299
+ when stack_name.include?('bburago')
1300
+ host = "bburago-#{@dns_record_identifier}.qa.colaster.com"
1301
+ when stack_name.include?('hal9000')
1302
+ host = "hal9000-#{@dns_record_identifier}.qa.colaster.com"
1303
+ when stack_name.include?('fidaty')
1304
+ host = "fidaty-#{@dns_record_identifier}.qa.colaster.com"
1305
+ when stack_name.include?('peano')
1306
+ host = "peano-#{@dns_record_identifier}.qa.colaster.com"
1307
+ when stack_name.include?('assange')
1308
+ host = "assange-#{@dns_record_identifier}.qa.colaster.com"
1309
+ when stack_name.include?('borat')
1310
+ host = "backoffice-#{@dns_record_identifier}.qa.colaster.com"
1311
+ when stack_name.include?('crash')
1312
+ host = "crash-#{@dns_record_identifier}.qa.colaster.com"
1313
+ when stack_name.include?('ermes')
1314
+ host = "ermes-#{@dns_record_identifier}.qa.colaster.com"
1315
+ when stack_name.include?('bolla')
1316
+ host = "bolla-#{@dns_record_identifier}.qa.colaster.com"
1317
+ when stack_name.include?('activia')
1318
+ host = "activia-#{@dns_record_identifier}.qa.colaster.com"
605
1319
  end
1320
+ host
606
1321
  end
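Every branch of get_route53_hostname builds "<prefix>-#{@dns_record_identifier}.qa.colaster.com", with 'web' mapped to 'www', 'borat' to 'backoffice' and 'backoffice' to 'backoffice-legacy'. The same mapping expressed as a lookup table, purely as an illustrative sketch (the gem keeps the case/when above):

    HOSTNAME_PREFIXES = {
      'web' => 'www', 'urania' => 'urania', 'backoffice' => 'backoffice-legacy',
      'bburago' => 'bburago', 'hal9000' => 'hal9000', 'fidaty' => 'fidaty',
      'peano' => 'peano', 'assange' => 'assange', 'borat' => 'backoffice',
      'crash' => 'crash', 'ermes' => 'ermes', 'bolla' => 'bolla', 'activia' => 'activia'
    }.freeze

    def get_route53_hostname_sketch(stack_name)
      # Keys are checked in insertion order, mirroring the order of the case/when above.
      service = HOSTNAME_PREFIXES.keys.find { |key| stack_name.include?(key) }
      "#{HOSTNAME_PREFIXES[service]}-#{@dns_record_identifier}.qa.colaster.com"
    end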
607
1322
 
608
- def get_deploy_id
609
- if @deploy_id
610
- @deploy_id
611
- else
612
- @deploy_id = Digest::MD5.hexdigest(ENV['DRONE_BRANCH'])
613
- @deploy_id
614
- end
1323
+ def ec2_ip_address(asg_stack_name)
1324
+ resp = @cf.describe_stack_resource({
1325
+ stack_name: asg_stack_name,
1326
+ logical_resource_id: 'ECSAutoScalingGroup'
1327
+ })
1328
+ resp = @asg.describe_auto_scaling_groups({
1329
+ auto_scaling_group_names: [resp.stack_resource_detail.physical_resource_id],
1330
+ max_records: 1
1331
+ })
1332
+ instance_id = resp.auto_scaling_groups[0].instances[0].instance_id
1333
+ resp = @ec2.describe_instances({instance_ids: [instance_id]})
1334
+ resp.reservations[0].instances[0].private_ip_address
615
1335
  end
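ec2_ip_address assumes @asg (an Aws::AutoScaling::Client) and @ec2 (an Aws::EC2::Client) are initialized alongside @cf, and that the single-instance QA auto scaling group has already launched its instance; otherwise it fails with a NoMethodError on nil. A guarded variant, sketched under those same assumptions:

    def ec2_ip_address_sketch(asg_stack_name)
      asg_name = @cf.describe_stack_resource(
        stack_name: asg_stack_name,
        logical_resource_id: 'ECSAutoScalingGroup'
      ).stack_resource_detail.physical_resource_id
      group = @asg.describe_auto_scaling_groups(
        auto_scaling_group_names: [asg_name], max_records: 1
      ).auto_scaling_groups.first
      instance = group && group.instances.first
      raise "No instance registered yet in #{asg_name}" unless instance
      @ec2.describe_instances(instance_ids: [instance.instance_id])
          .reservations[0].instances[0].private_ip_address
    end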
616
1336
 
617
1337
  def get_alb_host(stack_name)
@@ -630,51 +1350,588 @@ class Release
630
1350
  logical_resource_id = 'EcsApplicationLoadBalancerInternal'
631
1351
  when stack_name.include?('activia')
632
1352
  logical_resource_id = 'EcsApplicationLoadBalancerInternal'
633
- when stack_name.include?('skynet')
634
- logical_resource_id = 'EcsApplicationLoadBalancerInternal'
635
- when stack_name.include?('roger')
636
- logical_resource_id = 'EcsApplicationLoadBalancerInternal'
637
1353
  when stack_name.include?('alb-http-public')
638
1354
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
639
1355
  when stack_name.include?('alb-ws-public')
640
1356
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
641
1357
  when stack_name.include?('peano')
642
1358
  logical_resource_id = 'EcsApplicationLoadBalancerInternal'
643
- when stack_name.include?('leftorium')
644
- logical_resource_id = 'EcsApplicationLoadBalancerInternal'
645
1359
  when stack_name.include?('assange')
646
1360
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
647
1361
  when stack_name.include?('borat')
648
1362
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
649
1363
  when stack_name.include?('crash')
650
1364
  logical_resource_id = 'EcsApplicationLoadBalancerPublic'
651
- when stack_name.include?('rachele')
652
- logical_resource_id = 'EcsApplicationLoadBalancerInternal'
653
- when stack_name.include?('starsky')
654
- logical_resource_id = 'EcsApplicationLoadBalancerPublic'
655
- when stack_name.include?('hutch')
656
- logical_resource_id = 'EcsApplicationLoadBalancerPublic'
657
- when stack_name.include?('maia')
658
- logical_resource_id = 'EcsApplicationLoadBalancerPublic'
659
- when stack_name.include?('legion')
660
- logical_resource_id = 'EcsApplicationLoadBalancerInternal'
661
1365
  end
662
- resp = describe_stack_resource(stack_name, logical_resource_id)
663
- resp = describe_load_balancers([resp.stack_resource_detail.physical_resource_id])
1366
+ resp = @cf.describe_stack_resource({
1367
+ stack_name: stack_name,
1368
+ logical_resource_id: logical_resource_id
1369
+ })
1370
+ resp = @alb.describe_load_balancers({
1371
+ load_balancer_arns: [resp.stack_resource_detail.physical_resource_id]
1372
+ })
664
1373
  resp.load_balancers[0].dns_name
665
1374
  end
666
1375
 
667
- def deploy_pyxis?
668
- if defined? @deploy_pyxis
669
- @deploy_pyxis
1376
+ def update_service_defaults(stack_name)
1377
+ case
1378
+ when stack_name.include?('web')
1379
+ logical_resource_id = 'ECSServiceWeb'
1380
+ when stack_name.include?('consumer')
1381
+ logical_resource_id = 'ECSServiceConsumer'
1382
+ when stack_name.include?('urania')
1383
+ logical_resource_id = 'ECSServiceUrania'
1384
+ when stack_name.include?('backoffice')
1385
+ logical_resource_id = 'ECSServiceBackoffice'
1386
+ when stack_name.include?('ermes')
1387
+ logical_resource_id = 'ECSServiceErmes'
1388
+ when stack_name.include?('bburago')
1389
+ logical_resource_id = 'ECSServiceBburago'
1390
+ when stack_name.include?('hal9000')
1391
+ logical_resource_id = 'ECSServiceHal9000'
1392
+ when stack_name.include?('fidaty')
1393
+ logical_resource_id = 'ECSServiceFidaty'
1394
+ when stack_name.include?('activia')
1395
+ logical_resource_id = 'ECSServiceActivia'
1396
+ when stack_name.include?('peano')
1397
+ logical_resource_id = 'ECSServicePeano'
1398
+ when stack_name.include?('rogoreport')
1399
+ logical_resource_id = 'ECSServiceRogoreport'
1400
+ when stack_name.include?('assange')
1401
+ logical_resource_id = 'ECSServiceAssange'
1402
+ when stack_name.include?('borat')
1403
+ logical_resource_id = 'ECSServiceBorat'
1404
+ when stack_name.include?('crash')
1405
+ logical_resource_id = 'ECSServiceCrash'
1406
+ when stack_name.include?('bolla')
1407
+ logical_resource_id = 'ECSServiceBolla'
670
1408
  else
671
- pyxis_updated = `git log -p -1 --unified=0 | grep pyxis-npm:`.length > 0
1409
+ raise "Service name non gestito per lo stack #{stack_name}"
1410
+ end
1411
+ resp = @cf.describe_stack_resource(
1412
+ stack_name: stack_name,
1413
+ logical_resource_id: logical_resource_id
1414
+ )
1415
+ @ecs.update_service(
1416
+ cluster: @ecs_cluster_name,
1417
+ service: resp.stack_resource_detail.physical_resource_id,
1418
+ deployment_configuration: {
1419
+ minimum_healthy_percent: 0,
1420
+ maximum_percent: 100
1421
+ }
1422
+ )
1423
+ end
1424
+
1425
+ def create_urania_artifact(revision)
1426
+ output "Preparo l'artifact urania .zip\n".yellow
1427
+
1428
+ git_checkout_version('urania', revision)
1429
+
1430
+ Dir.chdir 'projects/urania'
1431
+
1432
+ decrypt_secrets()
1433
+
1434
+ [
1435
+ "docker-compose build web",
1436
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh urania_web \
1437
+ '-c' 'mix local.hex --force && mix hex.info && \
1438
+ mix deps.get && mix compile && mix deps.compile && \
1439
+ rm -rf _build/qa/rel/ && \
1440
+ mix release --env=qa'"
1441
+ ].each do |cmd|
1442
+ execute_command cmd
1443
+ end
1444
+
1445
+ artifact_path = Dir.glob("_build/qa/rel/urania/releases/*/urania.tar.gz").first
1446
+ upload_artifact(artifact_path, "microservices/urania/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1447
+
1448
+ Dir.chdir '../../'
1449
+ end
1450
+
1451
+ def create_ermes_artifact(revision, deploy_id)
1452
+ output "Preparo l'artifact ermes .zip\n".yellow
1453
+
1454
+ git_checkout_version('ermes', revision)
1455
+
1456
+ Dir.chdir 'projects/ermes'
1457
+
1458
+ stack_name_web = 'ecs-task-web-qa-notneeded'
1459
+ web_qa_host = get_route53_hostname(stack_name_web)
1460
+ stack_name_peano = 'ecs-task-peano-qa-notneeded'
1461
+ peano_qa_host = "#{get_route53_hostname(stack_name_peano)}:10039"
1462
+
1463
+ decrypt_secrets()
1464
+
1465
+ [
1466
+ "if echo `docker network ls` | grep crash_default; \
1467
+ then echo 'crash_default network already existing'; \
1468
+ else docker network create crash_default; fi",
1469
+ 'docker-compose build web',
1470
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh ermes_web \
1471
+ '-c' 'sed -i \"s/peano-qa-host/#{peano_qa_host}/g\" config/qa.exs && \
1472
+ sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1473
+ mix local.hex --force && mix hex.info && \
1474
+ mix deps.get && mix compile && mix deps.compile && \
1475
+ mix phx.digest && \
1476
+ rm -rf _build/qa/rel/ && \
1477
+ mix release --env=qa'",
1478
+ "if echo `docker ps` | grep crash; \
1479
+ then echo 'cannot delete crash_default network'; \
1480
+ else docker network rm crash_default; fi "
1481
+ ].each do |cmd|
1482
+ execute_command cmd
1483
+ end
1484
+
1485
+ artifact_path = Dir.glob("_build/qa/rel/ermes/releases/*/ermes.tar.gz").first
1486
+ upload_artifact(artifact_path, "microservices/ermes/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1487
+
1488
+ Dir.chdir '../../'
1489
+ end
1490
+
1491
+ def deploy_crash?
1492
+ !@projects['crash'].empty? && !@projects['crash'][:default_branch]
1493
+ end
1494
+
1495
+ def create_crash_artifact(revision, deploy_id)
1496
+ output "Preparo l'artifact crash .zip\n".yellow
1497
+
1498
+ git_checkout_version('crash', revision)
1499
+
1500
+ Dir.chdir 'projects/crash'
1501
+
1502
+ stack_name_web = 'ecs-task-web-qa-notneeded'
1503
+ web_qa_host = get_route53_hostname(stack_name_web)
1504
+ stack_name_crash = 'ecs-task-crash-qa-notneeded'
1505
+ crash_qa_host = get_route53_hostname(stack_name_crash)
1506
+
1507
+ ws_endpoint = "wss://#{crash_qa_host}/socket/websocket?vsn=1.0.0"
1508
+ frontend_endpoint = "https://#{crash_qa_host}/graphql"
1509
+
1510
+ decrypt_secrets()
1511
+
1512
+ [
1513
+ 'mv docker-compose-ci.yml docker-compose.yml',
1514
+ 'docker-compose build web',
1515
+ "docker-compose run -w $PWD -e WS_ENDPOINT=#{ws_endpoint} -e GRAPHQL_ENDPOINT=#{frontend_endpoint} -e MIX_ENV=qa web \
1516
+ '-c' 'sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1517
+ sed -i \"s/crash-qa-host/#{crash_qa_host}/g\" config/qa.exs && \
1518
+ mix local.hex --force && mix hex.info && \
1519
+ mix deps.get && \
1520
+ cd assets && \
1521
+ yarn --cache-folder ~/.cache/yarn && \
1522
+ ./node_modules/.bin/elm-github-install && \
1523
+ NODE_ENV=production sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
1524
+ cd ../ && \
1525
+ mix release.clean --implode --no-confirm && \
1526
+ mix phx.digest && \
1527
+ mix deps.clean --all && \
1528
+ mix deps.get && \
1529
+ mix compile && mix release --env=qa'",
1530
+ 'docker-compose down'
1531
+ ].each do |cmd|
1532
+ execute_command cmd
1533
+ end
1534
+
1535
+ artifact_path = Dir.glob('_build/qa/rel/crash/releases/*/crash.tar.gz').first
1536
+ upload_artifact(artifact_path, "microservices/crash/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1537
+
1538
+ Dir.chdir '../../'
1539
+ end
1540
+
1541
+ def create_bburago_artifact(revision)
1542
+ output "Preparo l'artifact bburago .zip\n".yellow
1543
+
1544
+ git_checkout_version('bburago', revision)
672
1545
 
673
- update_pyxis = !@projects['pyxis-npm'].empty? && @projects['pyxis-npm']['name'] != 'master' && pyxis_updated
1546
+ Dir.chdir 'projects/bburago'
674
1547
 
675
- @deploy_pyxis = update_pyxis
676
- return update_pyxis
1548
+ decrypt_secrets()
1549
+
1550
+ [
1551
+ "docker-compose build web",
1552
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh bburago_web \
1553
+ '-c' 'mix local.hex --force && mix hex.info && \
1554
+ mix deps.get && mix compile && mix deps.compile && \
1555
+ rm -rf _build/qa/rel/ && \
1556
+ mix release --env=qa'"
1557
+ ].each do |cmd|
1558
+ execute_command cmd
1559
+ end
1560
+
1561
+ artifact_path = Dir.glob("_build/qa/rel/bburago/releases/*/bburago.tar.gz").first
1562
+ upload_artifact(artifact_path, "microservices/bburago/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1563
+
1564
+ Dir.chdir '../../'
1565
+ end
1566
+
1567
+ def create_hal9000_artifact(revision)
1568
+ output "Preparo l'artifact hal9000 .zip\n".yellow
1569
+
1570
+ git_checkout_version('hal9000', revision)
1571
+
1572
+ Dir.chdir 'projects/hal9000'
1573
+
1574
+ decrypt_secrets()
1575
+
1576
+ [
1577
+ "docker-compose build web",
1578
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh hal9000_web \
1579
+ '-c' 'mix local.hex --force && mix hex.info && \
1580
+ mix deps.get && mix compile && mix deps.compile && \
1581
+ mix phx.digest && \
1582
+ rm -rf _build/qa/rel/ && \
1583
+ mix release --env=qa'"
1584
+ ].each do |cmd|
1585
+ execute_command cmd
1586
+ end
1587
+
1588
+ artifact_path = Dir.glob("_build/qa/rel/hal9000/releases/*/hal9000.tar.gz").first
1589
+ upload_artifact(artifact_path, "microservices/hal9000/#{revision}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1590
+
1591
+ Dir.chdir '../../'
1592
+ end
1593
+
1594
+ def create_fidaty_artifact(revision, deploy_id)
1595
+ output "Preparo l'artifact fidaty .zip\n".yellow
1596
+
1597
+ git_checkout_version('fidaty', revision)
1598
+
1599
+ Dir.chdir 'projects/fidaty'
1600
+
1601
+ stack_name_web = "ecs-task-web-qa-notneeded"
1602
+ web_qa_host = get_route53_hostname(stack_name_web)
1603
+
1604
+ stack_name_peano = "ecs-task-peano-qa-notneeded"
1605
+ peano_qa_host = "#{get_route53_hostname(stack_name_peano)}:10039"
1606
+
1607
+ decrypt_secrets()
1608
+
1609
+ [
1610
+ "docker-compose build web",
1611
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh fidaty_web \
1612
+ '-c' 'sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1613
+ sed -i \"s/peano-qa-host/#{peano_qa_host}/g\" config/qa.exs && \
1614
+ mix local.hex --force && mix hex.info && \
1615
+ mix deps.get && mix compile && mix deps.compile && \
1616
+ mix phx.digest && \
1617
+ rm -rf _build/qa/rel/ && \
1618
+ mix release --env=qa'"
1619
+ ].each do |cmd|
1620
+ execute_command cmd
1621
+ end
1622
+
1623
+ artifact_path = Dir.glob("_build/qa/rel/fidaty/releases/*/fidaty.tar.gz").first
1624
+ upload_artifact(artifact_path, "microservices/fidaty/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1625
+
1626
+ Dir.chdir '../../'
1627
+ end
1628
+
1629
+ def create_peano_artifact(revision, deploy_id)
1630
+ output "Preparo l'artifact peano .zip\n".yellow
1631
+
1632
+ git_checkout_version('peano', revision)
1633
+
1634
+ stack_name_assange = "ecs-task-assange-qa-notneeded"
1635
+ assange_qa_host = get_route53_hostname(stack_name_assange)
1636
+ stack_name_web = 'ecs-task-web-qa-notneeded'
1637
+ web_qa_host = get_route53_hostname(stack_name_web)
1638
+
1639
+ Dir.chdir 'projects/peano'
1640
+
1641
+ stop_unless is_branch_compatible_with_current_twig_version?("peano"), "Il tuo branch del progetto peano non e' compatibile con questa versione di twig, devi REBASARE DA MASTER!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!".red
1642
+
1643
+ decrypt_secrets()
1644
+
1645
+ [
1646
+ "docker-compose build web",
1647
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh peano_web \
1648
+ '-c' 'mix local.hex --force && mix hex.info && \
1649
+ sed -i \"s/assange-qa-host/#{assange_qa_host}/g\" config/qa.exs && \
1650
+ sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1651
+ mix deps.get && mix compile && mix deps.compile && \
1652
+ rm -rf _build/qa/rel/ && \
1653
+ mix release --env=qa'"
1654
+ ].each do |cmd|
1655
+ execute_command cmd
1656
+ end
1657
+
1658
+ artifact_path = Dir.glob("_build/qa/rel/peano/releases/*/peano.tar.gz").first
1659
+ upload_artifact(artifact_path, "microservices/peano/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1660
+
1661
+ Dir.chdir '../../'
1662
+ end
1663
+
1664
+ def create_rogoreport_artifact(revision, deploy_id)
1665
+ output "Preparo l'artifact rogoreport .zip\n".yellow
1666
+
1667
+ git_checkout_version('rogoreport', revision)
1668
+
1669
+ Dir.chdir 'projects/rogoreport'
1670
+
1671
+ stack_name_peano = 'ecs-task-peano-qa-notneeded'
1672
+ stack_name_web = 'ecs-task-web-qa-notneeded'
1673
+ peano_qa_host = "#{get_route53_hostname(stack_name_peano)}:10039"
1674
+ web_qa_host = get_route53_hostname(stack_name_web)
1675
+
1676
+ decrypt_secrets()
1677
+
1678
+ [
1679
+ "docker-compose build web",
1680
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh rogoreport_web \
1681
+ '-c' 'sed -i \"s/peano-qa-host/#{peano_qa_host}/g\" apps/escile/config/qa.exs && \
1682
+ sed -i \"s/web-qa-host/#{web_qa_host}/g\" apps/escile/config/qa.exs && \
1683
+ cat apps/escile/config/qa.exs && \
1684
+ mix local.hex --force && mix hex.info && \
1685
+ mix deps.get && mix compile && mix deps.compile && \
1686
+ rm -rf _build/qa/rel/ && \
1687
+ mix release --name=rogoreport_qa --env=qa'"
1688
+ ].each do |cmd|
1689
+ execute_command cmd
1690
+ end
1691
+
1692
+ artifact_path = Dir.glob("_build/qa/rel/rogoreport*/releases/*/rogoreport*.tar.gz").first
1693
+ upload_artifact(artifact_path, "microservices/rogoreport/rogoreport_qa-#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1694
+
1695
+ Dir.chdir '../../'
1696
+ end
1697
+
1698
+ def create_assange_artifact(revision, deploy_id)
1699
+ output "Preparo l'artifact assange .zip\n".yellow
1700
+
1701
+ git_checkout_version('assange', revision)
1702
+
1703
+ Dir.chdir 'projects/assange'
1704
+
1705
+ stack_name_web = 'ecs-task-web-qa-notneeded'
1706
+ web_qa_host = get_route53_hostname(stack_name_web)
1707
+
1708
+ decrypt_secrets()
1709
+
1710
+ [
1711
+ "docker-compose build web",
1712
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh assange_web \
1713
+ '-c' 'mix local.hex --force && mix hex.info && \
1714
+ sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1715
+ mix deps.get && mix compile && mix deps.compile && \
1716
+ mix phx.digest && \
1717
+ rm -rf _build/qa/rel/ && \
1718
+ mix release --env=qa'"
1719
+ ].each do |cmd|
1720
+ execute_command cmd
1721
+ end
1722
+
1723
+ artifact_path = Dir.glob("_build/qa/rel/assange/releases/*/assange.tar.gz").first
1724
+ upload_artifact(artifact_path, "microservices/assange/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1725
+
1726
+ Dir.chdir '../../'
1727
+ end
1728
+
1729
+ def create_activia_artifact(revision, deploy_id)
1730
+ output "Preparo l'artifact activia .zip\n".yellow
1731
+
1732
+ git_checkout_version('activia', revision)
1733
+
1734
+ Dir.chdir 'projects/activia'
1735
+
1736
+ stack_name_web = "ecs-task-web-qa-notneeded"
1737
+ web_qa_host = get_route53_hostname(stack_name_web)
1738
+
1739
+ stack_name_peano = "ecs-task-peano-qa-notneeded"
1740
+ peano_qa_host = "#{get_route53_hostname(stack_name_peano)}:10039"
1741
+
1742
+ decrypt_secrets()
1743
+
1744
+ [
1745
+ "docker-compose build web",
1746
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh activia_web \
1747
+ '-c' 'sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1748
+ sed -i \"s/peano-qa-host/#{peano_qa_host}/g\" config/qa.exs && \
1749
+ mix local.hex --force && mix hex.info && \
1750
+ mix deps.get && mix compile && mix deps.compile && \
1751
+ mix phx.digest && \
1752
+ rm -rf _build/qa/rel/ && \
1753
+ mix release --env=qa'"
1754
+ ].each do |cmd|
1755
+ execute_command cmd
1756
+ end
1757
+
1758
+ artifact_path = Dir.glob("_build/qa/rel/activia/releases/*/activia.tar.gz").first
1759
+ upload_artifact(artifact_path, "microservices/activia/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1760
+
1761
+ Dir.chdir '../../'
1762
+ end
1763
+
1764
+ def create_bolla_artifact(revision, deploy_id)
1765
+ output "Preparo l'artifact bolla .zip\n".yellow
1766
+
1767
+ git_checkout_version('bolla', revision)
1768
+
1769
+ Dir.chdir 'projects/bolla'
1770
+
1771
+ decrypt_secrets()
1772
+
1773
+ [
1774
+ "docker-compose build web",
1775
+ "docker run -v $PWD:/code -w /code -e MIX_ENV=qa --entrypoint /bin/sh assange_web \
1776
+ '-c' 'mix local.hex --force && mix hex.info && \
1777
+ mix deps.get && mix compile && mix deps.compile && \
1778
+ rm -rf _build/qa/rel/ && \
1779
+ mix release --env=qa'"
1780
+ ].each do |cmd|
1781
+ execute_command cmd
1782
+ end
1783
+
1784
+ artifact_path = Dir.glob("_build/qa/rel/migrator/releases/*/migrator.tar.gz").first
1785
+ upload_artifact(artifact_path, "microservices/bolla/#{revision}-#{deploy_id}-qa-migrator.tar.gz", "#{@s3_bucket}-encrypted")
1786
+
1787
+ Dir.chdir '../../'
1788
+ end
1789
+
1790
+ def create_borat_artifact(revision, deploy_id)
1791
+ output "Preparo l'artifact borat .zip\n".yellow
1792
+
1793
+ git_checkout_version('borat', revision)
1794
+
1795
+ Dir.chdir 'projects/borat'
1796
+
1797
+ stack_name_web = "ecs-task-web-qa-notneeded"
1798
+ web_qa_host = get_route53_hostname(stack_name_web)
1799
+ stack_name_backoffice = "ecs-task-backoffice-qa-notneeded"
1800
+ backoffice_qa_host = get_route53_hostname(stack_name_backoffice)
1801
+ stack_name_assange = "ecs-task-assange-qa-notneeded"
1802
+ assange_qa_host = get_route53_hostname(stack_name_assange)
1803
+ stack_name_peano = "ecs-task-peano-qa-notneeded"
1804
+ peano_qa_host = "#{get_route53_hostname(stack_name_peano)}:10039"
1805
+ stack_name_borat = "ecs-task-borat-qa-notneeded"
1806
+ borat_qa_host = get_route53_hostname(stack_name_borat)
1807
+
1808
+ ws_endpoint = "wss://#{borat_qa_host}/socket/websocket?vsn=1.0.0"
1809
+ frontend_endpoint = "https://#{web_qa_host}/"
1810
+
1811
+ stop_unless is_branch_compatible_with_current_twig_version?("borat"), "Il tuo branch del progetto borat non e' compatibile con questa versione di twig, devi REBASARE DA MASTER!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!".red
1812
+
1813
+ decrypt_secrets()
1814
+
1815
+ [
1816
+ "docker-compose build backend",
1817
+ "docker run -v $PWD:/code -w /code -e WS_ENDPOINT=#{ws_endpoint} -e FRONTEND=#{frontend_endpoint} -e MIX_ENV=qa --entrypoint /bin/sh borat_backend \
1818
+ '-c' 'sed -i \"s/web-qa-host/#{web_qa_host}/g\" config/qa.exs && \
1819
+ sed -i \"s/backoffice-legacy-qa-host/#{backoffice_qa_host}/g\" config/qa.exs && \
1820
+ sed -i \"s/assange-qa-host/#{assange_qa_host}/g\" config/qa.exs && \
1821
+ sed -i \"s/peano-qa-host/#{peano_qa_host}/g\" config/qa.exs && \
1822
+ sed -i \"s/borat-qa-host/#{borat_qa_host}/g\" config/qa.exs && \
1823
+ mix local.hex --force && mix hex.info && \
1824
+ mix deps.get && \
1825
+ cd assets && \
1826
+ yarn --cache-folder ~/.cache/yarn && \
1827
+ ./node_modules/.bin/elm-github-install && \
1828
+ sysconfcpus -n 2 ./node_modules/.bin/webpack -p --config config/webpack.config.prod.js && \
1829
+ cd ../ && \
1830
+ mix phx.digest && \
1831
+ mix compile && mix deps.compile && \
1832
+ rm -rf _build/qa/rel/ && \
1833
+ mix release --env=qa'"
1834
+ ].each do |cmd|
1835
+ execute_command cmd
1836
+ end
1837
+
1838
+ artifact_path = Dir.glob("_build/qa/rel/backend/releases/*/backend.tar.gz").first
1839
+ upload_artifact(artifact_path, "microservices/borat/#{revision}-#{deploy_id}-qa.tar.gz", "#{@s3_bucket}-encrypted")
1840
+
1841
+ Dir.chdir '../../'
1842
+ end
1843
+
1844
+ def create_backoffice_artifact(revision, deploy_id)
1845
+ output "Preparo l'artifact backoffice .zip\n".yellow
1846
+
1847
+ git_checkout_version('backoffice', revision)
1848
+
1849
+ Dir.chdir 'projects/backoffice'
1850
+
1851
+ ['node_modules'].each do |dir|
1852
+ unless File.directory?(dir)
1853
+ if File.directory?("../../../backoffice/#{dir}")
1854
+ exec_step "rsync -a ../../../backoffice/#{dir} ."
1855
+ end
1856
+ end
1857
+ end
1858
+
1859
+ stack_name_web = "ecs-task-web-qa-notneeded"
1860
+ web_qa_host = get_route53_hostname(stack_name_web)
1861
+ webri_qa_host = web_qa_host.sub("www", "wwwri")
1862
+
1863
+ [
1864
+ "docker-compose build workers",
1865
+ "rm -rf src/ && git checkout -- .",
1866
+ "cd ../../ && docker run -e GIT_DIR=$PWD -v $PWD:/usr/app/src -w /usr/app/src/projects/backoffice blinkmobile/bower install --allow-root",
1867
+ "docker run -v $PWD:/code -w /code -e PHANTOMJS_BIN=/code/node_modules/grunt-selenium-webdriver/node_modules/phantomjs/bin/phantomjs --entrypoint /bin/bash backoffice_workers '-c' 'sed -i \"s/web-qa-url/#{web_qa_host}/g\" Gruntfile.js && sed -i \"s/web-qa-ri-url/#{webri_qa_host}/g\" Gruntfile.js && npm install && grunt qa'"
1868
+ ].each do |cmd|
1869
+ execute_command cmd
1870
+ end
1871
+
1872
+ artifact_path = '/tmp/backoffice.zip'
1873
+ exec_step "rm -f #{artifact_path} && zip -9 -r #{artifact_path} bin/"
1874
+ upload_artifact(artifact_path, "backoffice/#{revision}-#{deploy_id}.zip")
1875
+
1876
+ Dir.chdir '../../'
1877
+ end
1878
+
1879
+ def create_prima_artifact(revision, branch_name, deploy_id)
1880
+ output "Preparo l'artifact prima .zip\n".yellow
1881
+
1882
+ git_checkout_version('prima', revision)
1883
+
1884
+ Dir.chdir 'projects/prima'
1885
+
1886
+ ['vendor'].each do |dir|
1887
+ unless File.directory?(dir)
1888
+ if File.directory?("../../../prima/#{dir}")
1889
+ exec_step "rsync -a ../../../prima/#{dir} ."
1890
+ end
1891
+ end
1892
+ end
1893
+
1894
+ backoffice_qa_host = get_route53_hostname("ecs-task-borat-qa-notneeded")
1895
+ web_qa_host = get_route53_hostname("ecs-task-web-qa-notneeded")
1896
+ assange_qa_host = get_route53_hostname("ecs-task-assange-qa-notneeded")
1897
+
1898
+ [
1899
+ "bin/local_build_artifact.sh #{branch_name} #{web_qa_host} #{backoffice_qa_host} #{assange_qa_host} #{deploy_id} #{'update' if @deploy_update}"
1900
+ ].each do |cmd|
1901
+ execute_command cmd
677
1902
  end
1903
+
1904
+ Dir.chdir "../../"
1905
+ end
1906
+
1907
+ def git_checkout_version(project, revision)
1908
+ Dir.chdir "projects/#{project}"
1909
+ exec_step "git checkout -- . && git checkout #{revision}"
1910
+ Dir.chdir "../../"
1911
+ end
1912
+
1913
+ def create_asg_stack(stack_name, tags = [])
1914
+ stack_body = IO.read('cloudformation/stacks/asg/ecs-asg-allinone.yml')
1915
+ parameters = [
1916
+ {
1917
+ parameter_key: "Environment",
1918
+ parameter_value: "qa"
1919
+ },
1920
+ {
1921
+ parameter_key: "InstanceType",
1922
+ parameter_value: "t2.large"
1923
+ },
1924
+ {
1925
+ parameter_key: "ECSClusterName",
1926
+ parameter_value: @ecs_cluster_name
1927
+ }
1928
+ ]
1929
+ create_stack(stack_name, stack_body, parameters, tags)
1930
+ end
1931
+
1932
+ def create_cluster_stack(stack_name, tags = [])
1933
+ stack_body = IO.read('cloudformation/stacks/ecs-cluster.yml')
1934
+ create_stack(stack_name, stack_body, [], tags)
678
1935
  end
679
1936
 
680
1937
  def update_cluster_stack(stack_name, tags = [])
@@ -682,13 +1939,88 @@ class Release
682
1939
  update_stack(stack_name, stack_body, [], tags)
683
1940
  end
684
1941
 
1942
+ def create_alb_stack(stack_name, role)
1943
+ stack_body = IO.read('cloudformation/stacks/alb/ecs-alb-public.yml')
1944
+ parameters = [
1945
+ {
1946
+ parameter_key: "Environment",
1947
+ parameter_value: "qa"
1948
+ },
1949
+ {
1950
+ parameter_key: "Role",
1951
+ parameter_value: role
1952
+ }
1953
+ ]
1954
+ create_stack(stack_name, stack_body, parameters)
1955
+ end
1956
+
1957
+ def import_redis_crash(qa_ip_address)
1958
+ output "Importo dump di Redis da staging\n".yellow
1959
+
1960
+ redis_qa = Redis.new(:url => "redis://#{qa_ip_address}:6379/10")
1961
+
1962
+ return unless redis_qa.keys('CODICI').empty?
1963
+
1964
+ redis_staging = Redis.new(:url => 'redis://staging.cache-1.prima.it:6379/10')
1965
+
1966
+ dump_staging = redis_staging.dump 'CODICI'
1967
+
1968
+ redis_qa.restore 'CODICI', 0, dump_staging
1969
+ end
1970
+
1971
+ def import_dbs(ip_address)
1972
+ resp = @ecs.run_task({
1973
+ cluster: @ecs_cluster_name,
1974
+ task_definition: @import_db_task,
1975
+ overrides: {
1976
+ container_overrides: [
1977
+ {
1978
+ name: 'dbrestore',
1979
+ environment: [
1980
+ {
1981
+ name: 'EC2_IP_ADDRESS',
1982
+ value: ip_address
1983
+ }
1984
+ ]
1985
+ }
1986
+ ]
1987
+ },
1988
+ count: 1
1989
+ })
1990
+ pp resp
1991
+ output "Attendo che i DB vengano importati...\n".yellow
1992
+ stopped_at = nil
1993
+ while stopped_at.nil?
1994
+ pp "stopped_at == nil"
1995
+ unless resp.tasks[0].nil?
1996
+ pp "describe tasks..."
1997
+ resp = @ecs.describe_tasks({
1998
+ cluster: @ecs_cluster_name,
1999
+ tasks: [resp.tasks[0].task_arn]
2000
+ })
2001
+ pp resp
2002
+ end
2003
+ stopped_at = resp.tasks[0].stopped_at unless resp.tasks[0].nil?
2004
+ sleep_seconds = 10
2005
+ seconds_elapsed = 0
2006
+ while true && stopped_at.nil?
2007
+ break if seconds_elapsed >= sleep_seconds
2008
+ print '.'.yellow; STDOUT.flush
2009
+ sleep 1
2010
+ seconds_elapsed += 1
2011
+ end
2012
+ end
2013
+ print "\n"
2014
+ end
2015
+
685
2016
  def choose_branch_to_deploy(project_name, select_master = false)
2017
+ return {} if project_name == 'crash' && select_master
686
2018
  Dir.chdir "projects/#{project_name}"
687
2019
  output "Recupero la lista dei branch del progetto #{project_name}..."
688
2020
  `git remote prune origin`
689
2021
  out = %x[ git fetch ]
690
2022
  branches = %x[ git for-each-ref --sort=-committerdate refs/remotes/ --format='%(refname) %(objectname) %(committeremail)' | sed 's/refs\\/remotes\\/origin\\///g' ]
691
- .split("\n").delete_if { |b| b.include?('HEAD') }[0..49]
2023
+ .split("\n").delete_if { |b| b.include?('HEAD') || b.include?('dev') }[0..49]
692
2024
 
693
2025
  master_branch = nil
694
2026
 
@@ -697,20 +2029,16 @@ class Release
697
2029
  break unless master_branch.nil?
698
2030
  end
699
2031
 
700
- if select_master || branches.length == 1
2032
+ if select_master
701
2033
  branch_name = master_branch
702
2034
  else
703
- branches.insert(0, branches.delete(master_branch))
704
2035
  branch_name = choose do |menu|
705
2036
  menu.prompt = "Scegli il branch di #{project_name} da deployare: ".cyan
706
2037
  menu.shell = true
707
2038
 
708
- git_mail = get_git_mail
709
-
710
- branches.each_with_index do |branch, index|
2039
+ branches.each do |branch|
711
2040
  title = @prima.reduce_size(branch, 100)
712
- msg = index.odd? ? title.white : title.light_yellow # uno bianco e uno giallo alternati
713
- msg = branch.include?(git_mail) ? msg.on_blue : msg.on_black # i branch aggiornati da chi lancia la creazione sono su sfondo blu
2041
+ msg = "#{title}".light_blue
714
2042
  menu.choice(msg) { branch }
715
2043
  menu.default = branch if branch == master_branch
716
2044
  end
@@ -722,7 +2050,7 @@ class Release
722
2050
  name = branch_name.split(' ')[0]
723
2051
  revision = branch_name.split(' ')[1]
724
2052
  committer_email = branch_name.split(' ')[2].tr('<>', '')
725
- { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email, 'default_branch' => select_master }
2053
+ { name: name, revision: revision[0..14], committer: committer_email, default_branch: select_master }
726
2054
  end
727
2055
 
728
2056
  def select_branch_to_deploy(project_name, branch_name)
@@ -731,14 +2059,43 @@ class Release
731
2059
  `git remote prune origin`
732
2060
  out = %x[ git fetch ]
733
2061
  branch_name = %x[ git for-each-ref --sort=-committerdate refs/remotes/ --format='%(refname) %(objectname) %(committeremail)' | sed 's/refs\\/remotes\\/origin\\///g' ]
734
- .split("\n").delete_if { |b| !b.match("^#{Regexp.escape(branch_name)}") }[0..49]
2062
+ .split("\n").delete_if { |b| !b.include?(branch_name) }[0..49]
735
2063
  .first
736
2064
 
737
2065
  Dir.chdir "../../"
738
2066
  name = branch_name.split(' ')[0]
739
2067
  revision = branch_name.split(' ')[1]
740
2068
  committer_email = branch_name.split(' ')[2].tr('<>', '')
741
- { 'name' => name, 'revision' => revision[0..14], 'committer' => committer_email }
2069
+ { name: name, revision: revision[0..14], committer: committer_email }
2070
+ end
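Both choose_branch_to_deploy and select_branch_to_deploy now return symbol-keyed hashes instead of the previous string-keyed ones, which is why the deploy code above reads @projects["prima"][:revision] and @projects['crash'][:default_branch]. Any caller still using the old string keys would silently get nil:

    branch = { name: 'feature/foo', revision: 'abc123def456789', committer: 'dev@prima.it' }
    branch[:revision]    # => "abc123def456789"
    branch['revision']   # => nil (old string-keyed access no longer works)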
2071
+
2072
+ def is_branch_compatible_with_current_twig_version?(project)
2073
+ case
2074
+ when project == 'prima'
2075
+ File.readlines("app/config/config_qa.yml").grep(/replaceme-assange.qa.staging.colaster.com/).size > 0
2076
+ when project == 'urania'
2077
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2078
+ when project == 'backoffice'
2079
+ File.readlines("deploy/deploy.sh").grep(/HOSTNAME_PATTERN/).size > 0
2080
+ when project == 'bburago'
2081
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2082
+ when project == 'hal9000'
2083
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2084
+ when project == 'fidaty'
2085
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2086
+ when project == 'activia'
2087
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2088
+ when project == 'peano'
2089
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2090
+ when project == 'assange'
2091
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2092
+ when project == 'borat'
2093
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2094
+ when project == 'crash'
2095
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2096
+ when project == 'ermes'
2097
+ File.readlines("deploy/deploy").grep(/HOSTNAME_PATTERN/).size > 0
2098
+ end
742
2099
  end
743
2100
 
744
2101
  def get_stacks()
@@ -753,32 +2110,88 @@ class Release
753
2110
  return stack_list, envs
754
2111
  end
755
2112
 
756
- def get_clusters()
757
- envs = {}
758
- cluster_list = cluster_list()
759
- cluster_list.each do |stack|
760
- unless stack.stack_name.match(/spotfleet-allinone-qa-(\w+)$/)
761
- env_hash = stack.stack_name.match(/qa-(\w+)$/)[0]
762
- envs[env_hash] = stack.tags unless envs.has_key?(env_hash) || stack.tags.empty?
2113
+ def hostname_pattern_priority()
2114
+ (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
2115
+ end
2116
+
2117
+ def launch_marley(ip_address, prima_hostname, borat_hostname)
2118
+ resp = @cf.describe_stack_resource({
2119
+ stack_name: 'batch-job-marley',
2120
+ logical_resource_id: 'JobDefinition'
2121
+ })
2122
+
2123
+ @batch.submit_job({
2124
+ job_name: "marley-#{@dns_record_identifier}", # required
2125
+ job_queue: "marley", # required
2126
+ job_definition: resp.stack_resource_detail.physical_resource_id, # required
2127
+ container_overrides: {
2128
+ environment: [
2129
+ {
2130
+ name: 'PRIMA_URL',
2131
+ value: "https://#{prima_hostname}/"
2132
+ },
2133
+ {
2134
+ name: 'PRIMA_IP',
2135
+ value: ip_address
2136
+ },
2137
+ {
2138
+ name: 'PROJECTS_JSON',
2139
+ value: @projects.to_json
2140
+ },
2141
+ {
2142
+ name: 'BACKOFFICE_URL',
2143
+ value: "https://#{borat_hostname}"
2144
+ }
2145
+ ]
2146
+ }
2147
+ })
2148
+
2149
+ output "Marley lanciato con successo!\n".green
2150
+ end
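launch_marley assumes a @batch client (Aws::Batch::Client) is initialized elsewhere in this script and fires the Batch job without waiting for it. submit_job also returns the job id, so a caller that wants to follow the smoke-test run could poll its status; a sketch only:

    def marley_status_sketch(job_id)
      # One of SUBMITTED / PENDING / RUNNABLE / STARTING / RUNNING / SUCCEEDED / FAILED
      @batch.describe_jobs(jobs: [job_id]).jobs.first&.status
    end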
2151
+
2152
+ def get_currently_deployed_version(stack_name)
2153
+ parameters = get_stack_parameters(stack_name)
2154
+ currently_deployed_version = nil
2155
+ parameters.each do |parameter|
2156
+ if parameter.parameter_key == "ReleaseVersion"
2157
+ currently_deployed_version = parameter.parameter_value
763
2158
  end
764
2159
  end
765
- return cluster_list, envs
2160
+ currently_deployed_version
766
2161
  end
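get_currently_deployed_version is what allows the web and consumer stack updates earlier in this deploy to be skipped when the requested revision is already live (update_stack ... unless cur_version.include?(revision)). The parameter scan can also be written with find; an equivalent sketch:

    def get_currently_deployed_version_sketch(stack_name)
      param = get_stack_parameters(stack_name).find { |p| p.parameter_key == 'ReleaseVersion' }
      param && param.parameter_value
    end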
767
2162
 
768
- def hostname_pattern_priority()
769
- (Time.now.to_i.to_s[-4..-1].to_i + Random.rand(40000)).to_s
2163
+ def decrypt_secrets()
2164
+ docker_image = "prima/biscuit_populate_configs"
2165
+ [
2166
+ "docker pull #{docker_image}",
2167
+ "docker run -t --rm -v $HOME/.aws:/root/.aws -v $PWD:$PWD -w $PWD #{docker_image}"
2168
+ ].each do |cmd|
2169
+ execute_command cmd
2170
+ end
770
2171
  end
771
2172
 
772
- def select_branches(project_names = nil)
2173
+ def select_branches(project_name = nil)
773
2174
  output "Deploy feature menu"
774
- if project_names.nil?
775
- @projects.each{ |key, value| @projects[key] = choose_branch_to_deploy(key) }
2175
+ if project_name.nil?
2176
+ @projects["prima"] = choose_branch_to_deploy('prima')
2177
+ @projects["backoffice"] = choose_branch_to_deploy('backoffice')
2178
+ @projects["urania"] = choose_branch_to_deploy('urania')
2179
+ @projects["ermes"] = choose_branch_to_deploy('ermes')
2180
+ @projects["bburago"] = choose_branch_to_deploy('bburago')
2181
+ @projects["hal9000"] = choose_branch_to_deploy('hal9000')
2182
+ @projects["fidaty"] = choose_branch_to_deploy('fidaty')
2183
+ @projects["peano"] = choose_branch_to_deploy('peano')
2184
+ @projects["rogoreport"] = choose_branch_to_deploy('rogoreport')
2185
+ @projects["assange"] = choose_branch_to_deploy('assange')
2186
+ @projects["borat"] = choose_branch_to_deploy('borat')
2187
+ @projects["bolla"] = choose_branch_to_deploy('bolla')
2188
+ @projects['crash'] = choose_branch_to_deploy('crash')
2189
+ @projects['activia'] = choose_branch_to_deploy('activia')
776
2190
  else
777
- project_names.each do |project|
778
- @projects[project] = choose_branch_to_deploy(project)
779
- end
2191
+ stop_unless File.directory?("./projects/#{project_name}"), "progetto #{project_name} inesistente o non supportato"
2192
+ @projects[project_name] = choose_branch_to_deploy(project_name)
780
2193
  @projects.each_key do |branch_project|
781
- @projects[branch_project] = choose_branch_to_deploy(branch_project, true) unless project_names.include? branch_project
2194
+ @projects[branch_project] = choose_branch_to_deploy(branch_project, true) unless branch_project == project_name
782
2195
  end
783
2196
  end
784
2197
  end
@@ -805,12 +2218,6 @@ Description
805
2218
 
806
2219
  start creates a new feature branch
807
2220
  finish finishes the feature by merging to dev and master
808
- qainit deploys a new environment with selected branches from every project
809
- qainit $PROJECT_NAME deploys a new environment allowing to selected a branch from the input project (everything else is master)
810
- qainit shutdown deletes a specific qa environment
811
-
812
- Available only to devops (from artemide)
813
- -----------
814
2221
  deploy deploys the feature branch to a temporary AWS Elastic Beanstalk env
815
2222
  deploy stop destroys the AWS Elastic Beanstalk env
816
2223
  deploy update updates a feature branch with current branches
@@ -830,11 +2237,4 @@ if args.include?('--help')
830
2237
  exit
831
2238
  end
832
2239
 
833
- gem_update = true
834
- if args.include?('no-gem-update')
835
- gem_update = false
836
- end
837
-
838
- args.delete('no-gem-update')
839
-
840
- Release.new(gem_update).execute!(args)
2240
+ Release.new.execute!(args)