fhcap-cli 0.4.2 → 0.4.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +8 -0
  3. data/README.md +2 -0
  4. data/lib/cookbooks/provision/libraries/provision.rb +24 -0
  5. data/lib/cookbooks/provision/recipes/aws_cluster_create.rb +1 -2
  6. data/lib/cookbooks/provision/recipes/cluster_create_instances.rb +14 -0
  7. data/lib/cookbooks/provision/recipes/cluster_destroy_instances.rb +6 -6
  8. data/lib/cookbooks/provision/recipes/cluster_provision_instances.rb +7 -47
  9. data/lib/cookbooks/provision/recipes/openstack_cluster_create.rb +9 -9
  10. data/lib/cookbooks/provision/recipes/seed_cookbooks.rb +38 -0
  11. data/lib/fhcap/cli.rb +4 -0
  12. data/lib/fhcap/cluster.rb +13 -1
  13. data/lib/fhcap/config.rb +4 -0
  14. data/lib/fhcap/cookbook.rb +10 -0
  15. data/lib/fhcap/dns.rb +60 -0
  16. data/lib/fhcap/misc.rb +17 -24
  17. data/lib/fhcap/tasks/chef/chef_server_task.rb +105 -0
  18. data/lib/fhcap/tasks/chef/chef_task_base.rb +2 -40
  19. data/lib/fhcap/tasks/chef/chef_zero_server.rb +35 -0
  20. data/lib/fhcap/tasks/chef/cookbook/archive.rb +42 -0
  21. data/lib/fhcap/tasks/chef/cookbook/update_metadata.rb +1 -1
  22. data/lib/fhcap/tasks/chef/environments/destroy.rb +10 -10
  23. data/lib/fhcap/tasks/chef/provisioning/chef_provisioning_task.rb +3 -1
  24. data/lib/fhcap/tasks/chef/provisioning/chef_provisioning_task_base.rb +14 -7
  25. data/lib/fhcap/tasks/chef/provisioning/create.rb +12 -3
  26. data/lib/fhcap/tasks/chef/provisioning/destroy.rb +5 -2
  27. data/lib/fhcap/tasks/chef/provisioning/provision.rb +4 -2
  28. data/lib/fhcap/tasks/chef/server/bootstrap.rb +47 -122
  29. data/lib/fhcap/tasks/chef/server/info.rb +8 -7
  30. data/lib/fhcap/tasks/chef/server/provision.rb +17 -17
  31. data/lib/fhcap/tasks/cluster/chef_provisioning_task.rb +16 -0
  32. data/lib/fhcap/tasks/cluster/cluster_task_base.rb +8 -0
  33. data/lib/fhcap/tasks/cluster/create.rb +6 -5
  34. data/lib/fhcap/tasks/cluster/destroy.rb +15 -4
  35. data/lib/fhcap/tasks/cluster/destroy_environment.rb +1 -1
  36. data/lib/fhcap/tasks/cluster/info.rb +1 -1
  37. data/lib/fhcap/tasks/cluster/provision.rb +3 -3
  38. data/lib/fhcap/tasks/cluster/status.rb +1 -1
  39. data/lib/fhcap/tasks/dns/create_record.rb +79 -0
  40. data/lib/fhcap/tasks/dns/delete_record.rb +55 -0
  41. data/lib/fhcap/tasks/dns/list.rb +37 -0
  42. data/lib/fhcap/tasks/dns/route53_helper.rb +48 -0
  43. data/lib/fhcap/tasks/knife/add.rb +43 -8
  44. data/lib/fhcap/tasks/repo/checkout.rb +1 -0
  45. data/lib/fhcap/tasks/task_base.rb +25 -0
  46. data/lib/fhcap/version.rb +1 -1
  47. data/spec/fhcap/tasks/dns/create_record_spec.rb +34 -0
  48. data/spec/fhcap/tasks/dns/delete_record_spec.rb +30 -0
  49. data/templates/chef/environment_core.json.erb +1 -1
  50. data/templates/cluster/aws/common.json.erb +1 -1
  51. data/templates/cluster/aws/core-small-9node.json.erb +3 -3
  52. data/templates/cluster/aws/single-blank.json.erb +2 -1
  53. data/templates/cluster/openstack/core-3node.json.erb +1 -1
  54. data/templates/cluster/openstack/core-small-9node.json.erb +4 -4
  55. data/templates/cluster/openstack/mbaas-3node.json.erb +2 -2
  56. data/templates/cluster/openstack/single-blank.json.erb +0 -1
  57. data/templates/init/knife_local.rb.erb +9 -0
  58. metadata +19 -4
  59. data/lib/fhcap/tasks/misc/create_dns_record.rb +0 -100
data/lib/fhcap/tasks/chef/cookbook/update_metadata.rb
@@ -45,7 +45,7 @@ module Fhcap
             meta_hash = meta.to_hash().merge({cookbook: name})
 
             template_filepath = File.join(Fhcap.source_root, 'templates', 'chef', 'cookbook', 'metadata.erb')
-            eruby = Erubis::Eruby.new(File.read(template_filepath))
+            eruby = ::Erubis::Eruby.new(File.read(template_filepath))
             result = eruby.result(meta_hash)
             thor.create_file(metadata_filepath, result, {force: true})
           end

data/lib/fhcap/tasks/chef/environments/destroy.rb
@@ -1,33 +1,33 @@
-require 'fhcap/tasks/chef/chef_task_base'
+require 'fhcap/tasks/chef/chef_server_task'
 
 module Fhcap
   module Tasks
     module Chef
       module Environments
-        class Destroy < ChefTaskBase
+        class Destroy < ChefServerTask
 
-          attr_reader :name, :chef_server
+          attr_reader :name
 
           def initialize(options)
             super
             @name = options[:name]
-            @repo = options[:repo]
-            @chef_server = options[:chef_server]
           end
 
           def run
             thor.say "Chef::Environments::Destroy #{@name}", :yellow
 
-            if @chef_server
-              if thor.yes? "Remove all environments and config from the chef server (#{@chef_server})?"
-                run_knife_cmd("environment delete #{@name} -y", @chef_server)
-                run_knife_cmd("data bag delete environments #{@name} -y", @chef_server)
+            if chef_server
+              if thor.yes? "Remove all environments and config from the chef server (#{chef_server})?"
+                with_chef_server do
+                  run_knife_cmd("environment delete #{@name} -y", chef_server)
+                  run_knife_cmd("data bag delete environments #{@name} -y", chef_server)
+                end
               end
             end
 
             thor.remove_file find_data_bag_item('environments', @name)
             thor.remove_file find_environment(@name)
-            thor.remove_file find_repo_item(repo_clusters_dir(@repo), "#{@name}_passwords.json").first
+            thor.remove_file find_repo_item(repo_clusters_dir(chef_repo), "#{@name}_passwords.json").first
           end
 
         end

data/lib/fhcap/tasks/chef/provisioning/chef_provisioning_task.rb
@@ -15,7 +15,9 @@ module Fhcap
 
           def run
             thor.say "Chef::Provisioning::Task name = #{@name}, run_list = #{@run_list}", :yellow
-            do_chef_run(run_list)
+            with_chef_server do
+              do_chef_run(run_list)
+            end
           end
 
         end

data/lib/fhcap/tasks/chef/provisioning/chef_provisioning_task_base.rb
@@ -1,13 +1,14 @@
-require 'fhcap/tasks/chef/chef_task_base'
+require 'fhcap/tasks/chef/chef_server_task'
+require 'fhcap/tasks/chef/cookbook/archive'
 require 'fhcap/chef-dk/chef_runner'
 
 module Fhcap
   module Tasks
     module Chef
       module Provisioning
-        class ChefProvisioningTaskBase < ChefTaskBase
+        class ChefProvisioningTaskBase < ChefServerTask
 
-          attr_reader :name, :cluster_config, :cluster_filepath, :chef_server
+          attr_reader :name, :cluster_config, :cluster_filepath
 
           def initialize(options)
             super
@@ -16,17 +17,23 @@ module Fhcap
             if @cluster_filepath
               @cluster_config = JSON.parse(IO.read(@cluster_filepath), {:symbolize_names => true})
               @cluster_config.merge!({
-                local_repo_path: repo_dir(@cluster_config[:repo]),
-                local_repo_clusters_dir: repo_clusters_dir(@cluster_config[:repo]),
-                chef_server_config: chef_server_config_hash_for(@cluster_config[:chef_server]),
+                local_repo_path: repo_dir(@chef_repo),
+                local_repo_clusters_dir: repo_clusters_dir(@chef_repo),
+                chef_server_config: chef_server_config_hash_for(@chef_server),
                 provider_credentials: provider_credentials(@cluster_config[:provider_id])
               })
-              @chef_server = @cluster_config[:chef_server]
             else
               exit_with_error("Unknown cluster #{@name}")
             end
           end
 
+          def seed_cookbooks
+            archive_task = Chef::Cookbook::Archive.new(@options.dup.merge({:'skip-repo-checkout' => true}))
+            archive_task.run
+            @cluster_config[:local_cookbook_archive] = archive_task.archive
+            do_chef_run("provision::seed_cookbooks")
+          end
+
           def do_chef_run(run_list)
             cookbook_path = File.join(Fhcap.source_root, 'lib', 'cookbooks')
             private_key_paths = [File.join(cluster_config[:local_repo_path], cluster_config[:local_repo_clusters_dir], 'key_pairs')]

data/lib/fhcap/tasks/chef/provisioning/create.rb
@@ -8,9 +8,18 @@ module Fhcap
 
           def run
             thor.say "Chef::Provisioning::Create #{@name}", :yellow
-            do_chef_run("provision::cluster_create")
-            do_chef_run("provision::reset_rabbitmq")
-            do_chef_run("provision::restart_services")
+            with_chef_server do
+              do_chef_run("provision::cluster_create")
+
+              knife_download(repo_dir(chef_repo), chef_server, ['/nodes']) if local_chef_server?
+
+              unless options[:'skip-provision']
+                seed_cookbooks if local_chef_server?
+                do_chef_run("provision::cluster_provision")
+                do_chef_run("provision::reset_rabbitmq")
+                do_chef_run("provision::restart_services")
+              end
+            end
           end
 
         end

data/lib/fhcap/tasks/chef/provisioning/destroy.rb
@@ -1,4 +1,5 @@
 require 'fhcap/tasks/chef/provisioning/chef_provisioning_task_base'
+require 'fileutils'
 
 module Fhcap
   module Tasks
@@ -9,8 +10,10 @@ module Fhcap
           def run
             thor.say "Chef::Provisioning::Destroy #{@name}", :yellow
             if thor.yes? "Destroy cluster #{@name}? (y/n)"
-              run_list = "provision::cluster_destroy"
-              do_chef_run(run_list)
+              with_chef_server do
+                run_list = "provision::cluster_destroy"
+                do_chef_run(run_list)
+              end
             end
           end
 

data/lib/fhcap/tasks/chef/provisioning/provision.rb
@@ -8,8 +8,10 @@ module Fhcap
 
           def run
             thor.say "Chef::Provisioning::Provision #{@name}", :yellow
-            run_list = "provision::cluster_provision"
-            do_chef_run(run_list)
+            with_chef_server do
+              seed_cookbooks if local_chef_server?
+              do_chef_run("provision::cluster_provision")
+            end
           end
 
         end

data/lib/fhcap/tasks/chef/server/bootstrap.rb
@@ -1,161 +1,86 @@
-require 'fhcap/tasks/chef/chef_task_base'
+require 'fhcap/tasks/chef/chef_server_task'
 require 'chef/knife/diff'
+require 'chef/knife/cookbook_upload'
 
 module Fhcap
   module Tasks
     module Chef
       module Server
-        class Bootstrap < ChefTaskBase
+        class Bootstrap < ChefServerTask
 
           attr_reader :environments
 
           def initialize(options)
             super
             @environments = options[:environments]
-            @chef_server = options[:chef_server]
-            @diff_strategy = options[:'diff-strategy'] || 'knife'
             @skip_roles = options[:'skip-roles']
+            @skip_nodes = options[:'skip-roles'] || !local_chef_server?
             @skip_data_bags = options[:'skip-data-bags']
             @skip_environments = options[:'skip-environments']
             @skip_cookbooks = options[:'skip-cookbooks']
           end
 
           def run
-            thor.say "Chef::Server::Bootstrap: environments = #{environments}", :yellow
-
-            chef_server_environments = {}
-            environments.each do |environment|
-              chef_server = select_chef_server(environment, @chef_server)
-              chef_server_environments[chef_server] = chef_server_environments[chef_server] || []
-              chef_server_environments[chef_server] << environment
-            end
-
-            chef_server_environments.each do |server, environments|
-              sync_roles({:environments => environments, :server => server}) unless @skip_roles
-              sync_data_bags({:environments => environments, :server => server}) unless @skip_data_bags
-              sync_environments({:environments => environments, :server => server}) unless @skip_environments
-              sync_cookbooks({:environments => environments, :server => server}) unless @skip_cookbooks
+            thor.say "Chef::Server::Bootstrap: environments = #{environments}, server = #{chef_server}", :yellow
+            with_chef_server do
+              repos.each do |repo|
+                repo_path = repo_dir(repo)
+
+                upload_patterns = []
+                upload_patterns << '/roles' unless @skip_roles
+                upload_patterns << '/nodes' unless @skip_nodes
+                upload_patterns |= environments.collect { |env| ["/environments/#{env}*", "/data_bags/environments/#{env}*"] } unless @skip_environments
+                upload_patterns |= data_bag_pattern_for(repo_path, environments) unless @skip_data_bags
+                upload_patterns.flatten!
+
+                knife_upload(repo_path, chef_server, upload_patterns)
+              end
+              knife_upload_cookbooks(chef_server)
             end
           end
 
           private
 
-          def sync_roles(options)
-            sync_items('roles', options) do |local_repo, sync_items|
-              sync_items.each do |item|
-                unless item =~ /roles\/dev*/
-                  run_knife_cmd("role from file #{File.join(local_repo, item)}", options[:server])
-                end
-              end
-            end
-          end
-
-          def sync_data_bags(options, bag_created_cache=[])
-            requires_resync = false
-            sync_items('data_bags', options) do |local_repo, sync_items|
-              sync_items.each do |item|
-                if include_data_bag_item?(item, options)
-                  tmp, bag, bag_item = item.split('/')
-                  if bag
-                    unless bag_created_cache.include? bag
-                      output = run_knife_cmd("data bag create #{bag}", options[:server])
-                      requires_resync = requires_resync || output =~ /Created/
-                      bag_created_cache << bag
-                    end
-                    if bag_item
-                      run_knife_cmd("data bag from file #{bag} #{File.join(local_repo, item)}", options[:server])
-                    end
-                  end
+          def data_bag_pattern_for(repo_path, environments)
+            pattern = []
+            data_bags_path = File.join(repo_path, 'data_bags')
+            if Dir.exists? data_bags_path
+              Dir[File.join(data_bags_path, '*')].map { |a| File.basename(a) }.each do |name|
+                if include_data_bag?(name, environments)
+                  pattern << "/data_bags/#{name}"
                 end
               end
             end
-            sync_data_bags(options, bag_created_cache) if requires_resync
+            pattern
           end
 
-          def include_data_bag_item?(item, options)
+          def include_data_bag?(name, environments)
            include = true
-            environments = options[:environments]
-            if environments
-              if item =~ /nagios_/
-                #As usual nagios requires the most work Argghh
-                nagios_data_bag_name = item.split('/')[1]
-                #Default nagios data bags will only be 2 in length, check everything else
-                if nagios_data_bag_name.split('_').length > 2
-                  allowed = ['fheng', 'production']
-                  allowed |= environments
-                  allowed |= environments.collect { |env| env.gsub('-', '_') } # Prod data bags are named incorrectly for some reason
-                  include = allowed.any? { |val| nagios_data_bag_name =~ /#{val}/ }
-                end
-              elsif item =~ /environments\//
-                include = environments.any? { |val| item =~ /#{val}/ }
+            if name =~ /nagios_/
+              #Default nagios data bags will only be 2 in length, check everything else
+              if name.split('_').length > 2
+                allowed = ['fheng', 'production']
+                allowed |= environments
+                allowed |= environments.collect { |env| env.gsub('-', '_') } # Prod data bags are named incorrectly for some reason
+                include = allowed.any? { |val| name =~ /#{val}/ }
              end
+            elsif name =~ /environments/
+              include = false
            end
            include
          end
 
-          def sync_environments(options)
-            environments = options[:environments]
-            if environments
-              sync_items('environments', options) do |local_repo, sync_items|
-                sync_items.each do |item|
-                  if environments.any? { |val| item =~ /#{val}/ }
-                    run_knife_cmd("environment from file #{File.join(local_repo, item)}", options[:server])
-                  end
-                end
-              end
-            else
-              #thor.say "No environments specified!!"
-            end
-          end
-
-          def sync_cookbooks(options)
-            #sync_items('cookbooks', options) do |local_repo, sync_items|
-            #  modified_cookbooks = sync_items.collect do |cookbook|
-            #    cookbook.split('/')[1]
-            #  end.uniq
-            #  run_knife_cmd("cookbook upload #{modified_cookbooks.join(' ')} --force", options[:server]) unless modified_cookbooks.empty?
-            #end
-            run_knife_cmd("cookbook upload -a --force", options[:server], !verbose)
-          end
-
-          def sync_items(item, options)
-            repo_paths.each do |local_repo|
-              #thor.say "Syncing #{item} in #{local_repo} based on #{@diff_strategy} diff", :yellow
-              if @diff_strategy == 'git'
-                item = %w{cookbooks site-cookbooks wrapper-cookbooks} if item == 'cookbooks'
-                yield local_repo, git_diff(item, local_repo)
-              else
-                yield local_repo, knife_diff(item, local_repo, options[:server])
-              end
-            end
-            #thor.say "All #{item} synced successfully!"
-          end
-
-          def git_diff(item, local_repo)
-            result = ''
-            Dir.chdir local_repo do
-              paths = item.is_a?(Array) ? item : [item]
-              paths.each do |path|
-                if Dir.exists? File.join(local_repo, path)
-                  result += `git diff --name-only origin/master -- #{path}`
-                end
-              end
-            end
-            result.split
-          end
-
-          def knife_diff(item, local_repo, server)
-            result = ''
-            if Dir.exists? File.join(local_repo, item)
-              thor.inside local_repo do
-                cmd = "knife diff #{item} --chef-repo-path #{local_repo} --repo-mode static --name-only --diff-filter=[AM] --config #{knife_config_file_for(server)}"
-                result = thor.run(cmd, :capture => true)
-                thor.say result if verbose
-              end
-            else
-              #thor.say "No #{item} in #{local_repo}"
+          def knife_upload_cookbooks(server)
+            knife_config_file = knife_config_file_for(server)
+            thor.say " * uploading cookbooks from #{knife_config_file} ..."
+            suppress_stdout(!options[:verbose], !options[:verbose]) do
+              ::Chef::Knife::CookbookUpload.load_deps
+              knife_command = ::Chef::Knife::CookbookUpload.new
+              knife_command.config[:config_file] = knife_config_file
+              knife_command.configure_chef
+              knife_command.config[:all] = true
+              knife_command.run
             end
-            result.split
           end
 
         end

data/lib/fhcap/tasks/chef/server/info.rb
@@ -1,27 +1,28 @@
-require 'fhcap/tasks/chef/chef_task_base'
+require 'fhcap/tasks/chef/chef_server_task'
 
 module Fhcap
   module Tasks
     module Chef
       module Server
-        class Info < ChefTaskBase
+        class Info < ChefServerTask
 
-          attr_reader :environments, :node_names, :roles, :chef_server, :urls
+          attr_reader :environments, :node_names, :roles, :urls
 
           def initialize(options)
             super
             @environments = options[:environments]
             @node_names = options[:nodes]
             @roles = options[:roles]
-            @chef_server = options[:chef_server]
             @urls = {}
           end
 
           def run
             thor.say "Chef::Server::Info: environments = #{environments}", :yellow
-            environments.each do |env|
-              ip_addresses(env) unless options[:only] && options[:only] != 'ips'
-              management_dashboards(env) unless options[:only] && options[:only] != 'urls'
+            with_chef_server do
+              environments.each do |env|
+                ip_addresses(env) unless options[:only] && options[:only] != 'ips'
+                management_dashboards(env) unless options[:only] && options[:only] != 'urls'
+              end
             end
           end
 

data/lib/fhcap/tasks/chef/server/provision.rb
@@ -1,10 +1,10 @@
-require 'fhcap/tasks/chef/chef_task_base'
+require 'fhcap/tasks/chef/chef_server_task'
 
 module Fhcap
   module Tasks
     module Chef
       module Server
-        class Provision < ChefTaskBase
+        class Provision < ChefServerTask
 
           attr_reader :environments, :node_names, :roles
 
@@ -13,27 +13,27 @@ module Fhcap
             @environments = options[:environments]
             @node_names = options[:nodes]
             @roles = options[:roles]
-            @chef_server = options[:chef_server]
           end
 
           def run
             thor.say "Chef::Server::Provision: environments = #{environments}", :yellow
 
-            chef_server_environments = {}
-            environments.each do |environment|
-              chef_server = select_chef_server(environment, @chef_server)
-              chef_server_environments[chef_server] = chef_server_environments[chef_server] || []
-              chef_server_environments[chef_server] << environment
-            end
-
-            chef_server_environments.each do |server, environments|
+            with_chef_server do
+              chef_server_environments = {}
               environments.each do |environment|
-                query = ["chef_environment:#{environment}"]
-                query << "AND name:#{node_names}" if node_names
-                query << "AND roles:#{roles}" if roles
-                query = query.join(' ')
-                cmd = "sudo chef-client"
-                run_knife_ssh_cmd(query, cmd, server, options)
+                chef_server_environments[chef_server] = chef_server_environments[chef_server] || []
+                chef_server_environments[chef_server] << environment
+              end
+
+              chef_server_environments.each do |server, environments|
+                environments.each do |environment|
+                  query = ["chef_environment:#{environment}"]
+                  query << "AND name:#{node_names}" if node_names
+                  query << "AND roles:#{roles}" if roles
+                  query = query.join(' ')
+                  cmd = "sudo chef-client"
+                  run_knife_ssh_cmd(query, cmd, server, options)
+                end
              end
            end
          end

data/lib/fhcap/tasks/cluster/chef_provisioning_task.rb
@@ -0,0 +1,16 @@
+require 'fhcap/tasks/cluster/cluster_task_base'
+require 'fhcap/tasks/chef/provisioning/chef_provisioning_task'
+
+module Fhcap
+  module Tasks
+    module Cluster
+      class ChefProvisioningTask < ClusterTaskBase
+
+        def run
+          Tasks::Chef::Provisioning::ChefProvisioningTask.new(@options.dup.merge(chef_task_options)).run
+        end
+
+      end
+    end
+  end
+end

data/lib/fhcap/tasks/cluster/cluster_task_base.rb
@@ -23,6 +23,14 @@ module Fhcap
           end
         end
 
+        def chef_task_options
+          {
+            :'chef-server' => cluster_config[:chef_server],
+            :'chef-repo' => cluster_config[:repo],
+            :repos => ['fhcap', cluster_config[:repo]].uniq
+          }
+        end
+
         def template_as_object(template_file, config)
           template = File.read(template_file)
           eruby = Erubis::Eruby.new(template)

data/lib/fhcap/tasks/cluster/create.rb
@@ -5,7 +5,7 @@ require 'fhcap/tasks/chef/environments/promote_cookbooks'
 require 'fhcap/tasks/chef/environments/create'
 require 'fhcap/tasks/chef/server/bootstrap'
 require 'fhcap/tasks/repo/checkout'
-require "fhcap/tasks/misc/create_dns_record"
+require "fhcap/tasks/dns/create_record"
 
 module Fhcap
   module Tasks
@@ -30,6 +30,7 @@ module Fhcap
           @skip_server_bootstrap = options[:'skip-server-bootstrap']
           @skip_dns_record = options[:'skip-dns-record']
           @skip_create = options[:'skip-create']
+          @skip_provision = options[:'skip-provision']
 
           @cluster_config = {
             id: @name,
@@ -66,10 +67,10 @@ module Fhcap
           Chef::Environments::PromoteCookbooks.new(@options.dup.merge({:environments => cluster_environments})).run unless @skip_cookbook_promote
 
           #Bootstrap Server
-          Chef::Server::Bootstrap.new(@options.dup.merge({:environments => cluster_environments, :chef_server => @cluster_config[:chef_server]})).run unless @skip_server_bootstrap
+          Chef::Server::Bootstrap.new(@options.dup.merge(chef_task_options.merge({:environments => cluster_environments}))).run unless @skip_server_bootstrap
 
           #Provisioning create
-          Chef::Provisioning::Create.new(options).run unless @skip_create
+          Chef::Provisioning::Create.new(@options.dup.merge(chef_task_options)).run unless @skip_create
 
           create_dns_record unless @skip_dns_record
         end
@@ -188,7 +189,7 @@ module Fhcap
              hosted_zone_id: elb.canonical_hosted_zone_name_id
            }
          }
-          Misc::CreateDNSRecord.new(@options.dup.merge(dns_record_cfg)).run
+          Dns::CreateRecord.new(@options.dup.merge(dns_record_cfg)).run
        rescue Aws::ElasticLoadBalancing::Errors::LoadBalancerNotFound => e
          thor.say_status 'error', "LoadBalancer #{lb} not found, unable to create DNS Entry", :red
        rescue Aws::ElasticLoadBalancing::Errors::ServiceError => e
@@ -216,7 +217,7 @@ module Fhcap
            domain: "*.#{env_cfg[:domain]}",
            ipaddress: lb_node['cloud.public_ipv4']
          }
-          Misc::CreateDNSRecord.new(@options.dup.merge(dns_record_cfg)).run
+          Dns::CreateRecord.new(@options.dup.merge(dns_record_cfg)).run
        else
          thor.say "Found lb node '#{lb_node['name']}', but was unable to retrieve it's IP!!}"
        end

data/lib/fhcap/tasks/cluster/destroy.rb
@@ -1,6 +1,7 @@
 require 'fhcap/tasks/cluster/cluster_task_base'
 require 'fhcap/tasks/chef/provisioning/destroy'
 require 'fhcap/tasks/chef/environments/destroy'
+require 'fhcap/tasks/dns/delete_record'
 
 module Fhcap
   module Tasks
@@ -8,20 +9,30 @@ module Fhcap
       class Destroy < ClusterTaskBase
 
        def initialize(options)
-          super(options, false)
+          super
          @skip_destroy = options[:'skip-destroy']
        end
 
        def run
          thor.say "Cluster::Destroy: name = #{@name}", :yellow
-          Chef::Provisioning::Destroy.new(options).run unless @skip_destroy
+          Chef::Provisioning::Destroy.new(@options.dup.merge(chef_task_options)).run unless @skip_destroy
 
-          if thor.yes? "Delete all local repo files?"
+          if thor.yes? "Delete all local repo files? (y/n)"
            cluster_environments.each do |env_name, cfg|
-              Chef::Environments::Destroy.new(@options.dup.merge({:name => env_name, :chef_server => @cluster_config[:chef_server], :repo => @cluster_config[:repo]})).run
+              Chef::Environments::Destroy.new(@options.dup.merge({:name => env_name}.merge(chef_task_options))).run
            end
            thor.remove_file find_cluster(@name)
          end
+
+          if thor.yes? "Delete all DNS entries? (y/n)"
+            @cluster_config[:environments].each do |env_name, cfg|
+              if cfg[:domain]
+                domain = ['*', cfg[:domain]].join('.')
+                Dns::DeleteRecord.new(@options.dup.merge({:domain => domain})).run
+              end
+            end
+          end
+
        end
 
      end

data/lib/fhcap/tasks/cluster/destroy_environment.rb
@@ -19,7 +19,7 @@ module Fhcap
          cluster_config[:environments].delete environment_name.to_sym
          thor.create_file(cluster_file, JSON.pretty_generate(cluster_config), force: true)
 
-          Chef::Environments::Destroy.new(@options.dup.merge({:name => environment_name, :chef_server => cluster_config[:chef_server]})).run
+          Chef::Environments::Destroy.new(@options.dup.merge({:name => environment_name}.merge(chef_task_options))).run
        end
 
      end

data/lib/fhcap/tasks/cluster/info.rb
@@ -26,7 +26,7 @@ module Fhcap
 
        def run
          thor.say "Cluster::Info: name = #{@name}", :yellow
-          info_task = Chef::Server::Info.new(@options.dup.merge({:environments => cluster_environments, :chef_server => cluster_config[:chef_server]}))
+          info_task = Chef::Server::Info.new(@options.dup.merge({:environments => cluster_environments}.merge(chef_task_options)))
          info_task.run
          cluster_environments.each do |env|
            passwords(env) unless options[:only] && options[:only] != 'passwords'

data/lib/fhcap/tasks/cluster/provision.rb
@@ -28,12 +28,12 @@ module Fhcap
          thor.say "Cluster::Provision: name = #{name}, strategy = #{strategy}", :yellow
          checkout_repos unless @skip_repo_checkout
          Chef::Environments::PromoteCookbooks.new(@options.dup.merge({:environments => cluster_environments})).run unless @skip_cookbook_promote
-          Chef::Server::Bootstrap.new(@options.dup.merge({:environments => cluster_environments, :chef_server => cluster_config[:chef_server]})).run unless @skip_server_bootstrap
+          Chef::Server::Bootstrap.new(@options.dup.merge({:environments => cluster_environments}.merge(chef_task_options))).run unless @skip_server_bootstrap
          unless @skip_provision
            if strategy == 'chef'
-              Chef::Provisioning::Provision.new(@options).run
+              Chef::Provisioning::Provision.new(@options.dup.merge(chef_task_options)).run
            elsif strategy == 'knife'
-              Chef::Server::Provision.new(@options.dup.merge({:environments => cluster_environments, :chef_server => cluster_config[:chef_server]})).run
+              Chef::Server::Provision.new(@options.dup.merge({:environments => cluster_environments}.merge(chef_task_options))).run
            else
              thor.say_status :error, "Unsupported provision strategy #{strategy}"
            end

data/lib/fhcap/tasks/cluster/status.rb
@@ -8,7 +8,7 @@ module Fhcap
 
        def run
          thor.say "Cluster::Status: name = #{@name}", :yellow
-          Tasks::Chef::Provisioning::ChefProvisioningTask.new(@options.dup.merge({:'run-list' => 'recipe[provision::cluster_status]'})).run
+          Tasks::Chef::Provisioning::ChefProvisioningTask.new(@options.dup.merge({:'run-list' => 'recipe[provision::cluster_status]'}.merge(chef_task_options))).run
        end
 
      end