wombat-cli 0.6.1 → 0.6.2
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitignore +23 -23
- data/.travis.yml +22 -27
- data/CHANGELOG.md +438 -423
- data/DESIGN.md +49 -49
- data/Gemfile +5 -5
- data/README.md +146 -146
- data/Rakefile +26 -26
- data/bin/wombat +24 -24
- data/generator_files/Vagrantfile +120 -120
- data/generator_files/cookbooks/automate/.gitignore +16 -16
- data/generator_files/cookbooks/automate/.kitchen.ec2.yml +34 -34
- data/generator_files/cookbooks/automate/.kitchen.yml +24 -24
- data/generator_files/cookbooks/automate/Berksfile +6 -6
- data/generator_files/cookbooks/automate/README.md +4 -4
- data/generator_files/cookbooks/automate/chefignore +102 -102
- data/generator_files/cookbooks/automate/libraries/_helper.rb +52 -52
- data/generator_files/cookbooks/automate/libraries/delivery_api.rb +204 -204
- data/generator_files/cookbooks/automate/libraries/delivery_project.rb +31 -31
- data/generator_files/cookbooks/automate/libraries/dsl.rb +4 -4
- data/generator_files/cookbooks/automate/metadata.rb +11 -11
- data/generator_files/cookbooks/automate/recipes/default.rb +118 -124
- data/generator_files/cookbooks/automate/recipes/update-users.rb +48 -48
- data/generator_files/cookbooks/automate/templates/delivery.erb +6 -6
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/chef.crt +25 -25
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/metadata.rb +3 -3
- data/generator_files/cookbooks/automate/test/fixtures/cookbooks/mock_data/recipes/default.rb +27 -27
- data/generator_files/cookbooks/automate/test/integration/default/automate_spec.rb +56 -56
- data/generator_files/cookbooks/build_node/.gitignore +16 -16
- data/generator_files/cookbooks/build_node/.kitchen.ec2.yml +37 -37
- data/generator_files/cookbooks/build_node/.kitchen.yml +23 -23
- data/generator_files/cookbooks/build_node/Berksfile +8 -8
- data/generator_files/cookbooks/build_node/README.md +4 -4
- data/generator_files/cookbooks/build_node/chefignore +102 -102
- data/generator_files/cookbooks/build_node/metadata.rb +12 -12
- data/generator_files/cookbooks/build_node/recipes/default.rb +38 -38
- data/generator_files/cookbooks/build_node/templates/client.erb +2 -2
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/chef.crt +25 -25
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/metadata.rb +2 -2
- data/generator_files/cookbooks/build_node/test/fixtures/cookbooks/mock_data/recipes/default.rb +18 -18
- data/generator_files/cookbooks/build_node/test/integration/default/build-node_spec.rb +40 -40
- data/generator_files/cookbooks/chef_server/.gitignore +16 -16
- data/generator_files/cookbooks/chef_server/.kitchen.ec2.yml +34 -34
- data/generator_files/cookbooks/chef_server/.kitchen.yml +24 -24
- data/generator_files/cookbooks/chef_server/Berksfile +6 -6
- data/generator_files/cookbooks/chef_server/README.md +4 -4
- data/generator_files/cookbooks/chef_server/chefignore +102 -102
- data/generator_files/cookbooks/chef_server/metadata.rb +11 -11
- data/generator_files/cookbooks/chef_server/recipes/bootstrap_users.rb +91 -91
- data/generator_files/cookbooks/chef_server/recipes/default.rb +113 -113
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/chef.crt +25 -25
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/metadata.rb +2 -2
- data/generator_files/cookbooks/chef_server/test/fixtures/cookbooks/mock_data/recipes/default.rb +23 -23
- data/generator_files/cookbooks/chef_server/test/integration/default/chef_server_spec.rb +50 -50
- data/generator_files/cookbooks/compliance/.gitignore +16 -16
- data/generator_files/cookbooks/compliance/.kitchen.ec2.yml +34 -34
- data/generator_files/cookbooks/compliance/.kitchen.yml +24 -24
- data/generator_files/cookbooks/compliance/Berksfile +7 -7
- data/generator_files/cookbooks/compliance/README.md +4 -4
- data/generator_files/cookbooks/compliance/chefignore +102 -102
- data/generator_files/cookbooks/compliance/metadata.rb +11 -11
- data/generator_files/cookbooks/compliance/recipes/default.rb +57 -57
- data/generator_files/cookbooks/compliance/spec/spec_helper.rb +2 -2
- data/generator_files/cookbooks/compliance/spec/unit/recipes/default_spec.rb +20 -20
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/chef.crt +25 -25
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/metadata.rb +4 -4
- data/generator_files/cookbooks/compliance/test/fixtures/cookbooks/mock_data/recipes/default.rb +21 -21
- data/generator_files/cookbooks/compliance/test/integration/default/compliance.rb +28 -28
- data/generator_files/cookbooks/infranodes/.gitignore +16 -16
- data/generator_files/cookbooks/infranodes/.kitchen.ec2.yml +48 -48
- data/generator_files/cookbooks/infranodes/.kitchen.yml +21 -21
- data/generator_files/cookbooks/infranodes/Berksfile +6 -6
- data/generator_files/cookbooks/infranodes/README.md +4 -4
- data/generator_files/cookbooks/infranodes/attributes/default.rb +2 -2
- data/generator_files/cookbooks/infranodes/chefignore +102 -102
- data/generator_files/cookbooks/infranodes/metadata.rb +13 -13
- data/generator_files/cookbooks/infranodes/recipes/default.rb +57 -57
- data/generator_files/cookbooks/infranodes/spec/spec_helper.rb +2 -2
- data/generator_files/cookbooks/infranodes/spec/unit/recipes/default_spec.rb +20 -20
- data/generator_files/cookbooks/infranodes/templates/default/client.rb.erb +5 -5
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/chef.crt +25 -25
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/metadata.rb +3 -3
- data/generator_files/cookbooks/infranodes/test/fixtures/cookbooks/mock_data/recipes/default.rb +27 -27
- data/generator_files/cookbooks/infranodes/test/integration/default/infranodes_spec.rb +22 -22
- data/generator_files/cookbooks/infranodes/test/integration/helpers/serverspec/spec_helper.rb +8 -8
- data/generator_files/cookbooks/wombat/.gitignore +16 -16
- data/generator_files/cookbooks/wombat/.kitchen.yml +43 -43
- data/generator_files/cookbooks/wombat/Berksfile +5 -5
- data/generator_files/cookbooks/wombat/README.md +4 -4
- data/generator_files/cookbooks/wombat/attributes/default.rb +79 -80
- data/generator_files/cookbooks/wombat/attributes/packer.rb +18 -18
- data/generator_files/cookbooks/wombat/chefignore +102 -102
- data/generator_files/cookbooks/wombat/metadata.rb +13 -13
- data/generator_files/cookbooks/wombat/recipes/authorized-keys.rb +20 -20
- data/generator_files/cookbooks/wombat/recipes/default.rb +111 -111
- data/generator_files/cookbooks/wombat/recipes/etc-hosts.rb +51 -51
- data/generator_files/cookbooks/workstation/.gitignore +16 -16
- data/generator_files/cookbooks/workstation/.kitchen.azure.yml +45 -0
- data/generator_files/cookbooks/workstation/.kitchen.ec2.yml +46 -30
- data/generator_files/cookbooks/workstation/.kitchen.yml +42 -22
- data/generator_files/cookbooks/workstation/Berksfile +7 -7
- data/generator_files/cookbooks/workstation/README.md +3 -3
- data/generator_files/cookbooks/workstation/chefignore +106 -102
- data/generator_files/cookbooks/workstation/files/atom.apm.list +10 -7
- data/generator_files/cookbooks/workstation/files/atom.config.cson +6 -3
- data/generator_files/cookbooks/workstation/{templates/default/ise_profile.ps1.erb → files/ise_profile.ps1} +11 -11
- data/generator_files/cookbooks/workstation/libraries/home.rb +4 -4
- data/generator_files/cookbooks/workstation/metadata.rb +14 -14
- data/generator_files/cookbooks/workstation/recipes/browser.rb +53 -58
- data/generator_files/cookbooks/workstation/recipes/certs-keys.rb +41 -45
- data/generator_files/cookbooks/workstation/recipes/chef.rb +29 -28
- data/generator_files/cookbooks/workstation/recipes/default.rb +24 -21
- data/generator_files/cookbooks/workstation/recipes/dotnet.rb +19 -17
- data/generator_files/cookbooks/workstation/recipes/editor.rb +46 -18
- data/generator_files/cookbooks/workstation/recipes/profile.rb +14 -41
- data/generator_files/cookbooks/workstation/recipes/terminal.rb +11 -11
- data/generator_files/cookbooks/workstation/templates/default/bookmarks.html.erb +23 -23
- data/generator_files/cookbooks/workstation/templates/default/data_collector.rb.erb +2 -2
- data/generator_files/cookbooks/workstation/templates/default/knife.rb.erb +10 -10
- data/generator_files/cookbooks/workstation/templates/default/master_preferences.json.erb +28 -28
- data/generator_files/cookbooks/workstation/templates/default/ssh_config.erb +16 -16
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/automate.crt +26 -26
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/automate.key +27 -27
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/chef.crt +26 -26
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/chef.key +27 -27
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/compliance.crt +26 -26
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/compliance.key +27 -27
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/private.pem +27 -27
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/files/public.pub +1 -1
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/metadata.rb +2 -2
- data/generator_files/cookbooks/workstation/test/fixtures/cookbooks/mock_data/recipes/default.rb +21 -21
- data/generator_files/cookbooks/workstation/test/integration/default/workstation_spec.rb +77 -37
- data/generator_files/packer/automate.json +136 -136
- data/generator_files/packer/build-node.json +142 -142
- data/generator_files/packer/chef-server.json +137 -137
- data/generator_files/packer/compliance.json +133 -133
- data/generator_files/packer/infranodes-windows.json +143 -143
- data/generator_files/packer/infranodes.json +134 -134
- data/generator_files/packer/scripts/PreSysprep.ps1 +9 -0
- data/generator_files/packer/workstation.json +160 -142
- data/generator_files/templates/arm.md.json.erb +754 -754
- data/generator_files/templates/arm.vhd.json.erb +630 -630
- data/generator_files/templates/bootstrap-aws.erb +39 -39
- data/generator_files/templates/cfn.json.erb +675 -674
- data/generator_files/wombat.yml +75 -74
- data/lib/wombat/aws.rb +67 -67
- data/lib/wombat/build.rb +392 -392
- data/lib/wombat/cli.rb +254 -254
- data/lib/wombat/common.rb +420 -420
- data/lib/wombat/crypto.rb +65 -65
- data/lib/wombat/delete.rb +67 -67
- data/lib/wombat/deploy.rb +128 -128
- data/lib/wombat/init.rb +32 -32
- data/lib/wombat/latest.rb +27 -27
- data/lib/wombat/output.rb +101 -101
- data/lib/wombat/update.rb +20 -20
- data/lib/wombat/version.rb +3 -3
- data/lib/wombat.rb +8 -8
- data/spec/functional/common_spec.rb +26 -26
- data/spec/spec_helper.rb +103 -103
- data/spec/unit/common_spec.rb +116 -116
- data/terraform/README.md +13 -13
- data/terraform/templates/terraform.tfvars.erb +12 -12
- data/terraform/wombat.tf +328 -328
- data/wombat-cli.gemspec +36 -36
- metadata +6 -4
data/lib/wombat/build.rb
CHANGED
@@ -1,392 +1,392 @@
All 392 lines are removed and re-added; in this rendering the removed and re-added text is identical, so the underlying change is not visible here (likely a whitespace or line-ending difference). The file content is therefore shown once below rather than duplicated on both sides of the diff.

```ruby
require 'wombat/common'
require 'wombat/crypto'
require 'mixlib/shellout'
require 'parallel'
require 'ms_rest_azure'
require 'azure_mgmt_resources'
require 'azure_mgmt_storage'
require 'azure/storage'
require 'uri'

module Wombat
  class BuildRunner
    include Wombat::Common
    include Wombat::Crypto

    attr_reader :templates, :builder, :parallel, :storage_access_key

    def initialize(opts)
      @templates = opts.templates.nil? ? calculate_templates : opts.templates
      @builder = opts.builder.nil? ? "amazon-ebs" : opts.builder
      @parallel = opts.parallel
      @wombat_yml = opts.wombat_yml unless opts.wombat_yml.nil?
      @debug = opts.debug
      @no_vendor = opts.vendor
    end

    def start
      if which('packer').nil?
        raise "packer binary not found in path, exiting..."
      end
      banner("Generating certs (if necessary)")
      wombat['certs'].each do |hostname|
        gen_x509_cert(hostname)
      end
      banner("Generating SSH keypair (if necessary)")
      gen_ssh_key

      # If running on azure ensure that the resource group and storage account exist
      prepare_azure if builder == "azure-arm"

      time = Benchmark.measure do
        banner("Starting build for templates: #{templates}")
        aws_region_check if builder == 'amazon-ebs'
        templates.each do |t|
          vendor_cookbooks(t) unless @no_vendor
        end

        if parallel.nil?
          build_hash.each do |k, v|
            build(v['template'], v['options'])
          end
        else
          build_parallel(templates)
        end
      end

      # Copy the images to the correct location if running Azure builder
      azure_copy_images if builder == "azure-arm"

      shell_out_command("say -v fred \"Wombat has made an #{build_hash.keys}\" for you") if audio?
      banner("Build finished in #{duration(time.real)}.")
    end

    private

    def prepare_azure()

      # Ensure that a storage acocunt has been specified, if it has not error
      if wombat['azure']['storage_account'].nil?
        puts "\nA storage account name must be specified in wombat.yml, e.g.\n openssl rand -base64 12\nEnsure all lowercase and no special characters"
        exit
      end

      # Using environment variables connect to azure
      subscription_id = ENV['AZURE_SUBSCRIPTION_ID']
      tenant_id = ENV['AZURE_TENANT_ID']
      client_id = ENV['AZURE_CLIENT_ID']
      client_secret = ENV['AZURE_CLIENT_SECRET']

      token_provider = MsRestAzure::ApplicationTokenProvider.new(tenant_id, client_id, client_secret)
      azure_conn = MsRest::TokenCredentials.new(token_provider)

      # Create a resource to create the resource group if it does not exist
      resource_management_client = Azure::ARM::Resources::ResourceManagementClient.new(azure_conn)
      resource_management_client.subscription_id = subscription_id

      # Create a storage account client to create the stoarge account if it does not exist
      storage_management_client = Azure::ARM::Storage::StorageManagementClient.new(azure_conn)
      storage_management_client.subscription_id = subscription_id

      # Create the resource group
      create_resource_group(resource_management_client,
                            wombat['name'],
                            wombat['azure']['location'],
                            wombat['owner'],
                            wombat['azure']['tags'])

      # Check to see if the storage account already exists
      banner(format("Checking for storage account: %s", wombat['azure']['storage_account']))

      # Create the storage account in the resource group
      # NOTE: This should have a test to see if the storage account exists and it available however the
      # Azure Ruby SDK has an issue with the check_name_availability method and comes back with an error
      # This would normally be done through an ARM template, but in this case needs to exist before Packer can run
      storage_account = Azure::ARM::Storage::Models::StorageAccountCreateParameters.new
      storage_account.location = wombat['azure']['location']
      sku = Azure::ARM::Storage::Models::Sku.new
      sku.name = 'Standard_LRS'
      storage_account.sku = sku
      storage_account.kind = Azure::ARM::Storage::Models::Kind::Storage

      storage_management_client.storage_accounts.create(wombat['name'], wombat['azure']['storage_account'], storage_account)

      # Get the keys from the storage management client so that the container that the images will be moved into
      # can be checked for and created if required
      # Once Packer uses the MD features in the GO library this can be removed
      # ------------------------------------------------------------------------
      keys = storage_management_client.storage_accounts.list_keys(wombat['name'], wombat['azure']['storage_account'])
      @storage_access_key = keys.keys[0].value

      # Use the key to configure the storage library
      Azure::Storage.setup(:storage_account_name => wombat['azure']['storage_account'], :storage_access_key => storage_access_key)
      blobs = Azure::Storage::Blob::BlobService.new

      # Get all the containers to determine if the one that is required already exists
      container_names = []
      blobs.list_containers().each do |container|
        container_names.push(container.name)
      end

      # create the container if it does not exist
      container_name = "mdimages"
      if !container_names.include?(container_name)
        info("Creating storage container")
        container = blobs.create_container(container_name)
      end
      # ------------------------------------------------------------------------

    end

    # Packer does not put custom images into a location that is supported by Managed Disks
    # So to be able to use the MD feature of Azure, the images have to be copied to a location that
    # does work. This method is responsible for doing this work.
    #
    # @author Russell Seymour
    def azure_copy_images()

      container_name = "mdimages"

      Azure::Storage.setup(:storage_account_name => wombat['azure']['storage_account'], :storage_access_key => storage_access_key)
      blobs = Azure::Storage::Blob::BlobService.new

      # Read the logs for azure
      path = "#{wombat['conf']['log_dir']}/azure*.log"
      logs = Dir.glob(path).reject { |l| !l.match(wombat['linux']) }

      # iterate around the log files and get the image location
      time = Benchmark.measure do
        logs.each do |log|

          # get the image uri
          url = File.read(log).split("\n").grep(/OSDiskUri:/) {|x| x.split[1]}.last

          next if url.nil?

          # Use the storage library to copy the image from source to destination
          uri = URI(url)

          blob_name = uri.path.split(/\//).last

          info "Copying: #{blob_name}"

          status = blobs.copy_blob_from_uri(container_name, blob_name, url)

          # Append the new location for the image to the log file
          append_text = format("\nManagedDiskOSDiskUri: https://%s.blob.core.windows.net/%s/%s", wombat['azure']['storage_account'], container_name, blob_name)
          File.open(log, 'a') { |f| f.write(append_text) }

        end
      end

      info (format("Images copied in %s", duration(time.real)))

    end

    def build(template, options)
      bootstrap_aws if options['os'] == 'windows'
      shell_out_command(packer_build_cmd(template, builder, options))
    end

    def build_parallel(templates)
      Parallel.map(build_hash.keys, in_threads: build_hash.count) do |name|
        build(build_hash[name]['template'], build_hash[name]['options'])
      end
    end

    def build_hash
      proc_hash = {}
      templates.each do |template_name|
        if template_name =~ /infranodes/
          infranodes.each do |name, _rl|
            next if name.empty?
            proc_hash[name] = {
              'template' => template_name,
              'options' => {
                'node-name' => name,
                'os' => wombat['infranodes'][name]['platform']
              }
            }
          end
        elsif template_name =~ /build-node/
          build_nodes.each do |name, num|
            proc_hash[name] = {
              'template' => template_name,
              'options' => {
                'node-number' => num
              }
            }
          end
        elsif template_name =~ /workstation/
          workstations.each do |name, num|
            proc_hash[name] = {
              'template' => template_name,
              'options' => {
                'os' => wombat['workstations']['platform'],
                'workstation-number' => num
              }
            }
          end
        else
          proc_hash[template_name] = {
            'template' => template_name,
            'options' => {}
          }
        end
      end
      proc_hash
    end

    def a_to_s(*args)
      clean_array(*args).join(" ")
    end

    def clean_array(*args)
      args.flatten.reject { |i| i.nil? || i == "" }.map(&:to_s)
    end

    def b_to_c(builder)
      case builder
      when 'amazon-ebs'
        'aws'
      when 'googlecompute'
        'gce'
      when 'azure-arm'
        'azure'
      end
    end

    def shell_out_command(command)
      cmd = Mixlib::ShellOut.new(a_to_s(command), :timeout => conf['timeout'], live_stream: STDOUT)
      cmd.run_command
      cmd
    end

    def aws_region_check
      if ENV['AWS_REGION']
        banner("Region set by environment: #{ENV['AWS_REGION']}")
      else
        banner("$AWS_REGION not set, setting to #{wombat['aws']['region']}")
        ENV['AWS_REGION'] = wombat['aws']['region']
      end
    end

    def vendor_cookbooks(template)
      banner "Vendoring cookbooks for #{template}"

      if template =~ /.*-windows/
        base = template.split('-')[0]
      else
        base = template.split('.json')[0].tr('-', '_')
      end
      rm_cmd = "rm -rf #{conf['cookbook_dir']}/#{base}/Berksfile.lock vendored-cookbooks/#{base}"
      shell_out_command(rm_cmd)
      vendor_cmd = "berks vendor -q -b #{conf['cookbook_dir']}/#{base}/Berksfile vendored-cookbooks/#{base}"
      shell_out_command(vendor_cmd)
    end

    def log(template, builder, options)
      cloud = b_to_c(builder)
      case template
      when /automate/
        log_name = "#{cloud}-automate-#{linux}"
      when /chef-server/
        log_name = "#{cloud}-chef-server-#{linux}"
      when /compliance/
        log_name = "#{cloud}-compliance-#{linux}"
      when /build-node/
        log_name = "#{cloud}-build-node-#{options['node-number']}-#{linux}"
      when /workstation/
        log_name = "#{cloud}-workstation-#{options['workstation-number']}-#{linux}"
      when /infranodes/
        if options['os'] =~ /windows/
          log_name = "#{cloud}-infranodes-#{options['node-name']}-windows"
        else
          log_name = "#{cloud}-infranodes-#{options['node-name']}-#{linux}"
        end
      end
      log_file = "#{conf['log_dir']}/#{log_name}.log"
    end

    def which(cmd)
      exts = ENV['PATHEXT'] ? ENV['PATHEXT'].split(';') : ['']
      ENV['PATH'].split(File::PATH_SEPARATOR).each do |path|
        exts.each { |ext|
          exe = File.join(path, "#{cmd}#{ext}")
          return exe if File.executable?(exe) && !File.directory?(exe)
        }
      end
      return nil
    end

    def base_image(template, builder, options)
      cloud = b_to_c(builder)
      if template =~ /workstation/
        wombat[cloud]['source_image']['windows']
      elsif template =~ /infranodes/
        if options['os'] == 'windows'
          wombat[cloud]['source_image']['windows']
        else
          wombat[cloud]['source_image'][linux]
        end
      else
        wombat[cloud]['source_image'][linux]
      end
    end

    def packer_build_cmd(template, builder, options)
      create_infranodes_json
      Dir.mkdir(conf['log_dir'], 0755) unless File.exist?(conf['log_dir'])

      cmd = %W(packer build #{conf['packer_dir']}/#{template}.json | tee #{log(template, builder, options)})
      cmd.insert(2, "--only #{builder}")
      cmd.insert(2, "--var org=#{wombat['org']}")
      cmd.insert(2, "--var domain=#{wombat['domain']}")
      cmd.insert(2, "--var domain_prefix=#{wombat['domain_prefix']}")
      cmd.insert(2, "--var enterprise=#{wombat['enterprise']}")
      cmd.insert(2, "--var chefdk=#{wombat['products']['chefdk']}")
      cmd.insert(2, "--var chef_ver=#{wombat['products']['chef'].split('-')[1]}")
      cmd.insert(2, "--var chef_channel=#{wombat['products']['chef'].split('-')[0]}")
      cmd.insert(2, "--var automate=#{wombat['products']['automate']}")
      cmd.insert(2, "--var compliance=#{wombat['products']['compliance']}")
      cmd.insert(2, "--var chef-server=#{wombat['products']['chef-server']}")
      cmd.insert(2, "--var push-jobs-server=#{wombat['products']['push-jobs-server']}")
      cmd.insert(2, "--var manage=#{wombat['products']['manage']}")
      cmd.insert(2, "--var node-name=#{options['node-name']}") if template =~ /infranodes/
      cmd.insert(2, "--var node-number=#{options['node-number']}") if template =~ /build-node/
      cmd.insert(2, "--var build-nodes=#{wombat['build-nodes']['count']}")
      cmd.insert(2, "--var winrm_password=#{wombat['workstations']['password']}")
      cmd.insert(2, "--var winrm_username=Administrator")
      cmd.insert(2, "--var workstation-number=#{options['workstation-number']}") if template =~ /workstation/
      cmd.insert(2, "--var workstations=#{wombat['workstations']['count']}")
      cmd.insert(2, "--var aws_source_ami=#{base_image(template, builder, options)}") if builder =~ /amazon-ebs/
      cmd.insert(2, "--var gce_source_image=#{base_image(template, builder, options)}") if builder =~ /googlecompute/
      cmd.insert(2, "--var azure_location=#{wombat['azure']['location']}")
      cmd.insert(2, "--var ssh_username=#{linux}")
      cmd.insert(2, "--debug") if @debug

      # If running with the azure-arm builder add the necessary arguments
      if builder =~ /azure-arm/

        # Get the information about the base image to use
        base_image = base_image(template, builder, options)

        if !base_image.nil?
          # This is a URN so it needs to be split out using : as delimiters
          base_image_parts = base_image.split(/:/)

          cmd.insert(2, "--var azure_image_publisher=#{base_image_parts[0]}")
          cmd.insert(2, "--var azure_image_offer=#{base_image_parts[1]}")
          cmd.insert(2, "--var azure_image_sku=#{base_image_parts[2]}")
          cmd.insert(2, "--var azure_image_version=#{base_image_parts[3]}") if base_image_parts.length == 4
        end

        cmd.insert(2, "--var azure_resource_group=#{wombat['name']}")
        cmd.insert(2, "--var azure_storage_account=#{wombat['azure']['storage_account']}")
      end

      cmd.join(' ')
    end
  end
end
```
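As a reading aid only (the snippet below is not part of the package), here is a minimal sketch of how the repeated `cmd.insert(2, ...)` calls in `packer_build_cmd` order the flags: every insert lands at index 2, so the flag inserted last sits closest to `packer build`, while `--only <builder>` (inserted first) ends up immediately before the template path. All paths and values are hypothetical placeholders.

```ruby
# Minimal sketch, not taken from wombat-cli: reproduces the insert-at-index-2
# pattern used by packer_build_cmd. Paths and values are placeholders.
cmd = %W(packer build templates/automate.json | tee logs/aws-automate.log)
cmd.insert(2, "--only amazon-ebs")      # inserted first, ends up last, just before the path
cmd.insert(2, "--var org=example-org")  # hypothetical --var flag
cmd.insert(2, "--debug")                # inserted last, ends up right after "build"
puts cmd.join(' ')
# => packer build --debug --var org=example-org --only amazon-ebs templates/automate.json | tee logs/aws-automate.log
```

In the gem, the resulting string is handed to `shell_out_command`, which runs it via `Mixlib::ShellOut`.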