bosh_cli 0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README +4 -0
- data/Rakefile +55 -0
- data/bin/bosh +17 -0
- data/lib/cli.rb +76 -0
- data/lib/cli/cache.rb +44 -0
- data/lib/cli/changeset_helper.rb +142 -0
- data/lib/cli/command_definition.rb +52 -0
- data/lib/cli/commands/base.rb +245 -0
- data/lib/cli/commands/biff.rb +300 -0
- data/lib/cli/commands/blob.rb +125 -0
- data/lib/cli/commands/cloudcheck.rb +169 -0
- data/lib/cli/commands/deployment.rb +147 -0
- data/lib/cli/commands/job.rb +42 -0
- data/lib/cli/commands/job_management.rb +117 -0
- data/lib/cli/commands/log_management.rb +81 -0
- data/lib/cli/commands/maintenance.rb +131 -0
- data/lib/cli/commands/misc.rb +240 -0
- data/lib/cli/commands/package.rb +112 -0
- data/lib/cli/commands/property_management.rb +125 -0
- data/lib/cli/commands/release.rb +469 -0
- data/lib/cli/commands/ssh.rb +271 -0
- data/lib/cli/commands/stemcell.rb +184 -0
- data/lib/cli/commands/task.rb +213 -0
- data/lib/cli/commands/user.rb +28 -0
- data/lib/cli/commands/vms.rb +53 -0
- data/lib/cli/config.rb +154 -0
- data/lib/cli/core_ext.rb +145 -0
- data/lib/cli/dependency_helper.rb +62 -0
- data/lib/cli/deployment_helper.rb +263 -0
- data/lib/cli/deployment_manifest_compiler.rb +28 -0
- data/lib/cli/director.rb +633 -0
- data/lib/cli/director_task.rb +64 -0
- data/lib/cli/errors.rb +48 -0
- data/lib/cli/event_log_renderer.rb +351 -0
- data/lib/cli/job_builder.rb +226 -0
- data/lib/cli/package_builder.rb +254 -0
- data/lib/cli/packaging_helper.rb +248 -0
- data/lib/cli/release.rb +176 -0
- data/lib/cli/release_builder.rb +215 -0
- data/lib/cli/release_compiler.rb +178 -0
- data/lib/cli/release_tarball.rb +272 -0
- data/lib/cli/runner.rb +771 -0
- data/lib/cli/stemcell.rb +83 -0
- data/lib/cli/task_log_renderer.rb +40 -0
- data/lib/cli/templates/help_message.erb +75 -0
- data/lib/cli/validation.rb +42 -0
- data/lib/cli/version.rb +7 -0
- data/lib/cli/version_calc.rb +48 -0
- data/lib/cli/versions_index.rb +126 -0
- data/lib/cli/yaml_helper.rb +62 -0
- data/spec/assets/biff/bad_gateway_config.yml +28 -0
- data/spec/assets/biff/good_simple_config.yml +63 -0
- data/spec/assets/biff/good_simple_golden_config.yml +63 -0
- data/spec/assets/biff/good_simple_template.erb +69 -0
- data/spec/assets/biff/multiple_subnets_config.yml +40 -0
- data/spec/assets/biff/network_only_template.erb +34 -0
- data/spec/assets/biff/no_cc_config.yml +27 -0
- data/spec/assets/biff/no_range_config.yml +27 -0
- data/spec/assets/biff/no_subnet_config.yml +16 -0
- data/spec/assets/biff/ok_network_config.yml +30 -0
- data/spec/assets/biff/properties_template.erb +6 -0
- data/spec/assets/deployment.MF +0 -0
- data/spec/assets/plugins/bosh/cli/commands/echo.rb +43 -0
- data/spec/assets/plugins/bosh/cli/commands/ruby.rb +24 -0
- data/spec/assets/release/jobs/cacher.tgz +0 -0
- data/spec/assets/release/jobs/cacher/config/file1.conf +0 -0
- data/spec/assets/release/jobs/cacher/config/file2.conf +0 -0
- data/spec/assets/release/jobs/cacher/job.MF +6 -0
- data/spec/assets/release/jobs/cacher/monit +1 -0
- data/spec/assets/release/jobs/cleaner.tgz +0 -0
- data/spec/assets/release/jobs/cleaner/job.MF +4 -0
- data/spec/assets/release/jobs/cleaner/monit +1 -0
- data/spec/assets/release/jobs/sweeper.tgz +0 -0
- data/spec/assets/release/jobs/sweeper/config/test.conf +1 -0
- data/spec/assets/release/jobs/sweeper/job.MF +5 -0
- data/spec/assets/release/jobs/sweeper/monit +1 -0
- data/spec/assets/release/packages/mutator.tar.gz +0 -0
- data/spec/assets/release/packages/stuff.tgz +0 -0
- data/spec/assets/release/release.MF +17 -0
- data/spec/assets/release_invalid_checksum.tgz +0 -0
- data/spec/assets/release_invalid_jobs.tgz +0 -0
- data/spec/assets/release_no_name.tgz +0 -0
- data/spec/assets/release_no_version.tgz +0 -0
- data/spec/assets/stemcell/image +1 -0
- data/spec/assets/stemcell/stemcell.MF +6 -0
- data/spec/assets/stemcell_invalid_mf.tgz +0 -0
- data/spec/assets/stemcell_no_image.tgz +0 -0
- data/spec/assets/valid_release.tgz +0 -0
- data/spec/assets/valid_stemcell.tgz +0 -0
- data/spec/spec_helper.rb +25 -0
- data/spec/unit/base_command_spec.rb +66 -0
- data/spec/unit/biff_spec.rb +135 -0
- data/spec/unit/cache_spec.rb +36 -0
- data/spec/unit/cli_commands_spec.rb +481 -0
- data/spec/unit/config_spec.rb +139 -0
- data/spec/unit/core_ext_spec.rb +77 -0
- data/spec/unit/dependency_helper_spec.rb +52 -0
- data/spec/unit/deployment_manifest_compiler_spec.rb +63 -0
- data/spec/unit/director_spec.rb +511 -0
- data/spec/unit/director_task_spec.rb +48 -0
- data/spec/unit/event_log_renderer_spec.rb +171 -0
- data/spec/unit/hash_changeset_spec.rb +73 -0
- data/spec/unit/job_builder_spec.rb +454 -0
- data/spec/unit/package_builder_spec.rb +567 -0
- data/spec/unit/release_builder_spec.rb +65 -0
- data/spec/unit/release_spec.rb +66 -0
- data/spec/unit/release_tarball_spec.rb +33 -0
- data/spec/unit/runner_spec.rb +140 -0
- data/spec/unit/ssh_spec.rb +78 -0
- data/spec/unit/stemcell_spec.rb +17 -0
- data/spec/unit/version_calc_spec.rb +27 -0
- data/spec/unit/versions_index_spec.rb +132 -0
- metadata +338 -0
data/lib/cli/release_compiler.rb
ADDED

@@ -0,0 +1,178 @@

# Copyright (c) 2009-2012 VMware, Inc.

module Bosh::Cli
  # Compiles release tarball based on manifest
  class ReleaseCompiler

    attr_writer :tarball_path

    def self.compile(manifest_file, blobstore)
      new(manifest_file, blobstore).compile
    end

    def initialize(manifest_file, blobstore,
                   remote_release = nil, release_dir = nil)
      @build_dir = Dir.mktmpdir
      @jobs_dir = File.join(@build_dir, "jobs")
      @packages_dir = File.join(@build_dir, "packages")
      @blobstore = blobstore
      @release_dir = release_dir || Dir.pwd

      at_exit { FileUtils.rm_rf(@build_dir) }

      FileUtils.mkdir_p(@jobs_dir)
      FileUtils.mkdir_p(@packages_dir)

      @manifest_file = File.expand_path(manifest_file, @release_dir)
      @manifest = load_yaml_file(manifest_file)

      if remote_release
        @remote_packages = remote_release["packages"].map do |pkg|
          OpenStruct.new(pkg)
        end
        @remote_jobs = remote_release["jobs"].map do |job|
          OpenStruct.new(job)
        end
      else
        @remote_packages = []
        @remote_jobs = []
      end

      @name = @manifest["name"]
      @version = @manifest["version"]
      @packages = @manifest["packages"].map { |pkg| OpenStruct.new(pkg) }
      @jobs = @manifest["jobs"].map { |job| OpenStruct.new(job) }
    end

    def compile
      if exists?
        quit("You already have this version in `#{tarball_path.green}'")
      end

      FileUtils.cp(@manifest_file,
                   File.join(@build_dir, "release.MF"),
                   :preserve => true)

      header("Copying packages")
      @packages.each do |package|
        say("#{package.name} (#{package.version})".ljust(30), " ")
        if remote_object_exists?(@remote_packages, package)
          say("SKIP".yellow)
          next
        end
        package_filename = find_package(package)
        if package_filename.nil?
          err("Cannot find package `#{package.name} (#{package.version})'")
        end
        FileUtils.cp(package_filename,
                     File.join(@packages_dir, "#{package.name}.tgz"),
                     :preserve => true)
      end

      header("Copying jobs")
      @jobs.each do |job|
        say("#{job.name} (#{job.version})".ljust(30), " ")
        if remote_object_exists?(@remote_jobs, job)
          say("SKIP".yellow)
          next
        end
        job_filename = find_job(job)
        if job_filename.nil?
          err("Cannot find job `#{job.name} (#{job.version})")
        end
        FileUtils.cp(job_filename,
                     File.join(@jobs_dir, "#{job.name}.tgz"),
                     :preserve => true)
      end

      header("Building tarball")
      Dir.chdir(@build_dir) do
        tar_out = `tar -czf #{tarball_path} . 2>&1`
        unless $?.exitstatus == 0
          raise InvalidRelease, "Cannot create release tarball: #{tar_out}"
        end
        say("Generated #{tarball_path.green}")
        say("Release size: #{pretty_size(tarball_path).green}")
      end
    end

    def exists?
      File.exists?(tarball_path)
    end

    def tarball_path
      @tarball_path || File.join(File.dirname(@manifest_file),
                                 "#{@name}-#{@version}.tgz")
    end

    def find_package(package)
      final_index = VersionsIndex.new(
        File.join(@release_dir, ".final_builds", "packages", package.name))
      dev_index = VersionsIndex.new(
        File.join(@release_dir, ".dev_builds", "packages", package.name))
      find_in_indices(final_index, dev_index, package)
    end

    def find_job(job)
      final_index = VersionsIndex.new(
        File.join(@release_dir, ".final_builds", "jobs", job.name))
      dev_index = VersionsIndex.new(
        File.join(@release_dir, ".dev_builds", "jobs", job.name))
      find_in_indices(final_index, dev_index, job)
    end

    def find_in_indices(final_index, dev_index, object)
      desc = "#{object.name} (#{object.version})"

      index = final_index
      build_data = index.find_by_checksum(object.sha1)

      if build_data.nil?
        index = dev_index
        build_data = index.find_by_checksum(object.sha1)
      end

      if build_data.nil?
        say("MISSING".red)
        err("Cannot find object with given checksum")
      end

      version = build_data["version"]
      sha1 = build_data["sha1"]
      blobstore_id = build_data["blobstore_id"]
      filename = index.filename(version)

      if File.exists?(filename)
        say("FOUND LOCAL".green)
        if Digest::SHA1.file(filename) != sha1
          err("#{desc} is corrupted locally")
        end
      elsif blobstore_id
        say("FOUND REMOTE".yellow)
        say("Downloading #{blobstore_id.to_s.green}...")

        payload = @blobstore.get(blobstore_id)

        if Digest::SHA1.hexdigest(payload) == sha1
          File.open(filename, "w") { |f| f.write(payload) }
        else
          err("#{desc} is corrupted in blobstore (id=#{blobstore_id})")
        end
      end

      File.exists?(filename) ? filename : nil

    rescue Bosh::Blobstore::BlobstoreError => e
      raise BlobstoreError, "Blobstore error: #{e}"
    end

    def remote_object_exists?(collection, local_object)
      collection.any? do |remote_object|
        remote_object.name == local_object.name &&
          remote_object.version.to_s == local_object.version.to_s
      end
    end

  end

end
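For orientation, a minimal usage sketch of the class above; the manifest path and the blobstore client are placeholders for illustration, not part of the gem's published examples:

  # Hypothetical invocation: compile a tarball from a dev release manifest.
  # `blobstore` stands in for any client responding to #get(blobstore_id),
  # such as the one the rest of the CLI constructs elsewhere.
  compiler = Bosh::Cli::ReleaseCompiler.new("dev_releases/appcloud-42.1-dev.yml",
                                            blobstore)
  compiler.tarball_path = "/tmp/appcloud-42.1-dev.tgz"  # optional override via attr_writer
  compiler.compile unless compiler.exists?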
data/lib/cli/release_tarball.rb
ADDED

@@ -0,0 +1,272 @@

module Bosh::Cli
  class ReleaseTarball
    include Validation
    include DependencyHelper

    attr_reader :release_name, :jobs, :packages, :version
    attr_reader :skipped # Mostly for tests

    def initialize(tarball_path)
      @tarball_path = File.expand_path(tarball_path, Dir.pwd)
      @unpack_dir = Dir.mktmpdir
      @jobs = []
      @packages = []
    end

    # Unpacks tarball to @unpack_dir, returns true if succeeded, false if failed
    def unpack
      return @unpacked unless @unpacked.nil?
      `tar -C #{@unpack_dir} -xzf #{@tarball_path} 2>&1`
      @unpacked = $?.exitstatus == 0
    end

    def exists?
      File.exists?(@tarball_path) && File.readable?(@tarball_path)
    end

    # Repacks tarball according to the structure of remote release
    # Return path to repackaged tarball or nil if repack has failed
    def repack(remote_release)
      return nil unless valid?
      unpack

      tmpdir = Dir.mktmpdir
      repacked_path = File.join(tmpdir, "release-repack.tgz")

      at_exit { FileUtils.rm_rf(tmpdir) }

      manifest = load_yaml_file(File.join(@unpack_dir, "release.MF"))

      local_packages = manifest["packages"]
      local_jobs = manifest["jobs"]
      remote_packages = remote_release["packages"]
      remote_jobs = remote_release["jobs"]

      @skipped = 0

      Dir.chdir(@unpack_dir) do
        local_packages.each do |package|
          say("#{package['name']} (#{package['version']})".ljust(30), " ")
          if remote_packages.any? { |rp| package["name"] == rp["name"] &&
              package["version"].to_s == rp["version"].to_s }
            say("SKIP".green)
            @skipped += 1
            FileUtils.rm_rf(File.join("packages", "#{package['name']}.tgz"))
          else
            say("UPLOAD".red)
          end
        end

        local_jobs.each do |job|
          say("#{job['name']} (#{job['version']})".ljust(30), " ")
          if remote_jobs.any? { |rj| job["name"] == rj["name"] &&
              job["version"].to_s == rj["version"].to_s }
            say("SKIP".green)
            @skipped += 1
            FileUtils.rm_rf(File.join("jobs", "#{job['name']}.tgz"))
          else
            say("UPLOAD".red)
          end
        end

        return nil if @skipped == 0
        `tar -czf #{repacked_path} . 2>&1`
        return repacked_path if $? == 0
      end
    end

    # If sparse release is allowed we bypass the requirement of having all jobs
    # and packages in place when we do validation. However for jobs and packages
    # that are present we still need to validate checksums
    def perform_validation(options = {})
      # CLEANUP this syntax
      allow_sparse = options.has_key?(:allow_sparse) ?
        !!options[:allow_sparse] :
        false

      step("File exists and readable",
           "Cannot find release file #{@tarball_path}", :fatal) do
        exists?
      end

      step("Extract tarball",
           "Cannot extract tarball #{@tarball_path}", :fatal) do
        unpack
      end

      manifest_file = File.expand_path("release.MF", @unpack_dir)

      step("Manifest exists", "Cannot find release manifest", :fatal) do
        File.exists?(manifest_file)
      end

      manifest = load_yaml_file(manifest_file)

      step("Release name/version",
           "Manifest doesn't contain release name and/or version") do
        manifest.is_a?(Hash) &&
          manifest.has_key?("name") &&
          manifest.has_key?("version")
      end

      @release_name = manifest["name"]
      @version = manifest["version"].to_s

      # Check packages
      total_packages = manifest["packages"].size
      available_packages = {}

      manifest["packages"].each_with_index do |package, i|
        @packages << package
        name, version = package['name'], package['version']

        package_file = File.expand_path(name + ".tgz",
                                        @unpack_dir + "/packages")
        package_exists = File.exists?(package_file)

        step("Read package '%s' (%d of %d)" % [name, i+1, total_packages],
             "Missing package '#{name}'") do
          package_exists || allow_sparse
        end

        if package_exists
          available_packages[name] = true
          step("Package '#{name}' checksum",
               "Incorrect checksum for package '#{name}'") do
            Digest::SHA1.file(package_file).hexdigest == package["sha1"]
          end
        end
      end

      # Check package dependencies
      # Note that we use manifest["packages"] here; manifest contains
      # all packages even if release is sparse, so we can detect problems
      # even in sparse release tarball.
      if total_packages > 0
        step("Package dependencies",
             "Package dependencies couldn't be resolved") do
          begin
            tsort_packages(manifest["packages"].inject({}) { |h, p|
              h[p["name"]] = p["dependencies"] || []; h })
            true
          rescue Bosh::Cli::CircularDependency,
              Bosh::Cli::MissingDependency => e
            errors << e.message
            false
          end
        end
      end

      # Check jobs
      total_jobs = manifest["jobs"].size

      step("Checking jobs format",
           "Jobs are not versioned, please re-create release " +
           "with current CLI version (or any CLI >= 0.4.4)", :fatal) do
        total_jobs > 0 && manifest["jobs"][0].is_a?(Hash)
      end

      manifest["jobs"].each_with_index do |job, i|
        @jobs << job
        name = job["name"]
        version = job["version"]

        job_file = File.expand_path(name + ".tgz", @unpack_dir + "/jobs")
        job_exists = File.exists?(job_file)

        step("Read job '%s' (%d of %d), version %s" % [name, i+1, total_jobs,
                                                       version],
             "Job '#{name}' not found") do
          job_exists || allow_sparse
        end

        if job_exists
          step("Job '#{name}' checksum",
               "Incorrect checksum for job '#{name}'") do
            Digest::SHA1.file(job_file).hexdigest == job["sha1"]
          end

          job_tmp_dir = Dir.mktmpdir
          FileUtils.mkdir_p(job_tmp_dir)
          `tar -C #{job_tmp_dir} -xzf #{job_file} 2>&1`
          job_extracted = $?.exitstatus == 0

          step("Extract job '#{name}'", "Cannot extract job '#{name}'") do
            job_extracted
          end

          if job_extracted
            job_manifest_file = File.expand_path("job.MF", job_tmp_dir)
            if File.exists?(job_manifest_file)
              job_manifest = load_yaml_file(job_manifest_file)
            end
            job_manifest_valid = job_manifest.is_a?(Hash)

            step("Read job '#{name}' manifest",
                 "Invalid job '#{name}' manifest") do
              job_manifest_valid
            end

            if job_manifest_valid && job_manifest["templates"]
              job_manifest["templates"].each_key do |template|
                step("Check template '#{template}' for '#{name}'",
                     "No template named '#{template}' for '#{name}'") do
                  File.exists?(File.expand_path(template,
                                                job_tmp_dir + "/templates"))
                end
              end
            end

            if job_manifest_valid && job_manifest["packages"]
              job_manifest["packages"].each do |package_name|
                step("Job '#{name}' needs '#{package_name}' package",
                     "Job '#{name}' references missing package " +
                     "'#{package_name}'") do
                  available_packages[package_name] || allow_sparse
                end
              end
            end

            step("Monit file for '#{name}'",
                 "Monit script missing for job '#{name}'") do
              File.exists?(File.expand_path("monit", job_tmp_dir)) ||
                Dir.glob("#{job_tmp_dir}/*.monit").size > 0
            end
          end
        end
      end

      print_info(manifest)
    end

    def print_info(manifest)
      say("\nRelease info")
      say("------------")

      say("Name: #{manifest["name"]}")
      say("Version: #{manifest["version"]}")

      say("\nPackages")

      if manifest["packages"].empty?
        say("  - none")
      end

      for package in manifest["packages"]
        say("  - #{package["name"]} (#{package["version"]})")
      end

      say("\nJobs")

      if manifest["jobs"].empty?
        say("  - none")
      end

      for job in manifest["jobs"]
        say("  - #{job["name"]} (#{job["version"]})")
      end
    end

  end
end
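A sketch of how validation is typically driven, assuming the Validation mixin included above exposes the usual validate/valid?/errors trio (its use of valid? and errors in this file suggests exactly that); the tarball name is a placeholder:

  tarball = Bosh::Cli::ReleaseTarball.new("appcloud-42.tgz")  # hypothetical file
  tarball.validate(:allow_sparse => true)  # assumed to call perform_validation
  if tarball.valid?
    puts "#{tarball.release_name} #{tarball.version}: " +
         "#{tarball.packages.size} packages, #{tarball.jobs.size} jobs"
  else
    puts tarball.errors.join("\n")
  end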
data/lib/cli/runner.rb
ADDED
@@ -0,0 +1,771 @@

# Copyright (c) 2009-2012 VMware, Inc.

module Bosh::Cli
  class ParseTreeNode < Hash
    attr_accessor :command
  end

  class Runner
    COMMANDS = { }
    ALL_KEYWORDS = []

    attr_reader :usage
    attr_reader :namespace
    attr_reader :action
    attr_reader :args
    attr_reader :options

    # The runner is an instance of the command type that the user issued,
    # such as a Deployment instance. This is an accessor for testing.
    # @return [Bosh::Cli::Command::<type>] Instance of the command instance.
    attr_accessor :runner

    def self.run(args)
      new(args).run
    end

    def initialize(args)
      trap("SIGINT") {
        handle_ctrl_c
      }
      define_commands
      @args = args
      @options = {
        :director_checks => true,
        :colorize => true,
      }
    end

    ##
    # When user issues ctrl-c it asks if they really want to quit. If so
    # then it will cancel the current running task if it exists.
    def handle_ctrl_c
      if !@runner.task_running?
        exit(1)
      elsif kill_current_task?
        @runner.cancel_current_task
        exit(1)
      end
    end

    ##
    # Asks user if they really want to quit and returns the boolean answer.
    #
    # @return [Boolean] Whether the user wants to quit or not.
    def kill_current_task?
      # Use say and stdin.gets instead of ask because of 2 bugs in Highline.
      # The bug makes it so that if something else has called ask and was in
      # the middle of waiting for a response then ctrl-c is issued and it
      # calls ask again then highline will re-issue the first question again.
      # If the input is a newline character then highline will choke.
      say("\nAre you sure you'd like to cancel running tasks? [yN]")
      $stdin.gets.chomp.downcase == "y"
    end

    def prepare
      define_commands
      define_plugin_commands
      build_parse_tree
      add_shortcuts
      parse_options!

      Config.interactive = !@options[:non_interactive]
      Config.colorize = @options.delete(:colorize)
      Config.output ||= STDOUT unless @options[:quiet]
    end

    def run
      prepare
      dispatch unless @namespace && @action

      if @namespace && @action
        ns_class_name = @namespace.to_s.gsub(/(?:_|^)(.)/) { $1.upcase }
        klass = eval("Bosh::Cli::Command::#{ns_class_name}")
        @runner = klass.new(@options)
        @runner.usage = @usage

        action_arity = @runner.method(@action.to_sym).arity
        n_required_args = action_arity >= 0 ? action_arity : -action_arity - 1

        if n_required_args > @args.size
          err("Not enough arguments, correct usage is: bosh #{@usage}")
        end
        if action_arity >= 0 && n_required_args < @args.size
          err("Too many arguments, correct usage is: bosh #{@usage}")
        end

        @runner.send(@action.to_sym, *@args)
      elsif @args.empty? || @args == ["help"]
        say(help_message)
        say(plugin_help_message) if @plugins
      elsif @args[0] == "help"
        cmd_args = @args[1..-1]
        suggestions = command_suggestions(cmd_args).map do |cmd|
          command_usage(cmd, 0)
        end
        if suggestions.empty?
          unknown_command(cmd_args.join(" "))
        else
          say(suggestions.uniq.join("\n"))
        end
      else
        unknown_command(@args.join(" "))

        suggestions = command_suggestions(@args).map do |cmd|
          "bosh #{cmd.usage}"
        end

        if suggestions.size > 0
          say("Did you mean any of these?")
          say("\n" + suggestions.uniq.join("\n"))
        end
        exit(1)
      end

    rescue OptionParser::InvalidOption => e
      say(e.message.red + "\n" + basic_usage)
      exit(1)
    rescue Bosh::Cli::GracefulExit => e
      # Redirected bosh commands end up
      # generating this exception (kind of goto)
    rescue Bosh::Cli::CliExit, Bosh::Cli::DirectorError => e
      say(e.message.red)
      exit(e.exit_code)
    rescue Bosh::Cli::CliError => e
      say("Error #{e.error_code}: #{e.message}".red)
      exit(e.exit_code)
    rescue => e
      if @options[:debug]
        raise e
      else
        save_exception(e)
        exit(1)
      end
    end

    def command(name, &block)
      cmd_def = CommandDefinition.new
      cmd_def.instance_eval(&block)
      COMMANDS[name] = cmd_def
      ALL_KEYWORDS.push(*cmd_def.keywords)
    end

    def find_command(name)
      COMMANDS[name] || raise("Unknown command definition: #{name}")
    end

    def dispatch(command = nil)
      command ||= search_parse_tree(@parse_tree)
      command = try_alias if command.nil? && Config.interactive
      return if command.nil?
      @usage = command.usage

      case command.route
      when Array
        @namespace, @action = command.route
      when Proc
        @namespace, @action = command.route.call(@args)
      else
        raise "Command definition is invalid, " +
              "route should be an Array or Proc"
      end
    end

    def define_commands
      command :version do
        usage "version"
        desc "Show version"
        route :misc, :version
      end

      command :alias do
        usage "alias <name> <command>"
        desc "Create an alias <name> for command <command>"
        route :misc, :set_alias
      end

      command :target do
        usage "target [<name>] [<alias>]"
        desc "Choose director to talk to (optionally creating an alias). " +
             "If no arguments given, show currently targeted director"
        route do |args|
          (args.size > 0) ? [:misc, :set_target] : [:misc, :show_target]
        end
      end

      command :deployment do
        usage "deployment [<name>]"
        desc "Choose deployment to work with " +
             "(it also updates current target)"
        route do |args|
          if args.size > 0
            [:deployment, :set_current]
          else
            [:deployment, :show_current]
          end
        end
      end

      command :deploy do
        usage "deploy"
        desc "Deploy according to the currently selected " +
             "deployment manifest"
        option "--recreate", "recreate all VMs in deployment"
        route :deployment, :perform
      end

      command :ssh do
        usage "ssh <job> [index] [<options>] [command]"
        desc "Given a job, execute the given command or " +
             "start an interactive session"
        option "--public_key <file>"
        option "--gateway_host <host>"
        option "--gateway_user <user>"
        option "--default_password", "Use default ssh password. Not recommended."
        route :ssh, :shell
      end

      command :ssh_cleanup do
        usage "ssh_cleanup <job> [index]"
        desc "Cleanup SSH artifacts"
        route :ssh, :cleanup
      end

      command :scp do
        usage "scp <job> [index] (--upload|--download) [options]" +
              "/path/to/source /path/to/destination"
        desc "upload/download the source file to the given job. " +
             "Note: for dowload /path/to/destination is a directory"
        option "--public_key <file>"
        option "--gateway_host <host>"
        option "--gateway_user <user>"
        route :ssh, :scp
      end

      command :scp do
        usage "scp <job> <--upload | --download> [options] " +
              "/path/to/source /path/to/destination"
        desc "upload/download the source file to the given job. " +
             "Note: for dowload /path/to/destination is a directory"
        option "--index <job_index>"
        option "--public_key <file>"
        option "--gateway_host <host>"
        option "--gateway_user <user>"
        route :ssh, :scp
      end

      command :status do
        usage "status"
        desc "Show current status (current target, " +
             "user, deployment info etc.)"
        route :misc, :status
      end

      command :login do
        usage "login [<name>] [<password>]"
        desc "Provide credentials for the subsequent interactions " +
             "with targeted director"
        route :misc, :login
      end

      command :logout do
        usage "logout"
        desc "Forget saved credentials for targeted director"
        route :misc, :logout
      end

      command :purge do
        usage "purge"
        desc "Purge local manifest cache"
        route :misc, :purge_cache
      end

      command :create_release do
        usage "create release"
        desc "Create release (assumes current directory " +
             "to be a release repository)"
        route :release, :create
        option "--force", "bypass git dirty state check"
        option "--final", "create production-ready release " +
               "(stores artefacts in blobstore, bumps final version)"
        option "--with-tarball", "create full release tarball" +
               "(by default only manifest is created)"
        option "--dry-run", "stop before writing release " +
               "manifest (for diagnostics)"
      end

      command :create_user do
        usage "create user [<name>] [<password>]"
        desc "Create user"
        route :user, :create
      end

      command :create_package do
        usage "create package <name>|<path>"
        desc "Build a single package"
        route :package, :create
      end

      command :start_job do
        usage "start <job> [<index>]"
        desc "Start job/instance"
        route :job_management, :start_job

        power_option "--force"
      end

      command :stop_job do
        usage "stop <job> [<index>]"
        desc "Stop job/instance"
        route :job_management, :stop_job
        option "--soft", "stop process only"
        option "--hard", "power off VM"

        power_option "--force"
      end

      command :restart_job do
        usage "restart <job> [<index>]"
        desc "Restart job/instance (soft stop + start)"
        route :job_management, :restart_job

        power_option "--force"
      end

      command :recreate_job do
        usage "recreate <job> [<index>]"
        desc "Recreate job/instance (hard stop + start)"
        route :job_management, :recreate_job

        power_option "--force"
      end

      command :fetch_logs do
        usage "logs <job> <index>"
        desc "Fetch job (default) or agent (if option provided) logs"
        route :log_management, :fetch_logs
        option "--agent", "fetch agent logs"
        option "--only <filter1>[...]", "only fetch logs that satisfy " +
               "given filters (defined in job spec)"
        option "--all", "fetch all files in the job or agent log directory"
      end

      command :set_property do
        usage "set property <name> <value>"
        desc "Set deployment property"
        route :property_management, :set
      end

      command :get_property do
        usage "get property <name>"
        desc "Get deployment property"
        route :property_management, :get
      end

      command :unset_property do
        usage "unset property <name>"
        desc "Unset deployment property"
        route :property_management, :unset
      end

      command :list_properties do
        usage "properties"
        desc "List current deployment properties"
        route :property_management, :list
        option "--terse", "easy to parse output"
      end

      command :init_release do
        usage "init release [<path>]"
        desc "Initialize release directory"
        route :release, :init
        option "--git", "initialize git repository"
      end

      command :generate_package do
        usage "generate package <name>"
        desc "Generate package template"
        route :package, :generate
      end

      command :generate_job do
        usage "generate job <name>"
        desc "Generate job template"
        route :job, :generate
      end

      command :upload_stemcell do
        usage "upload stemcell <path>"
        desc "Upload the stemcell"
        route :stemcell, :upload
      end

      command :upload_release do
        usage "upload release [<path>]"
        desc "Upload release (<path> can point to tarball or manifest, " +
             "defaults to the most recently created release)"
        route :release, :upload
      end

      command :verify_stemcell do
        usage "verify stemcell <path>"
        desc "Verify stemcell"
        route :stemcell, :verify
      end

      command :verify_release do
        usage "verify release <path>"
        desc "Verify release"
        route :release, :verify
      end

      command :delete_deployment do
        usage "delete deployment <name>"
        desc "Delete deployment"
        route :deployment, :delete
        option "--force", "ignore all errors while deleting parts " +
               "of the deployment"
      end

      command :delete_stemcell do
        usage "delete stemcell <name> <version>"
        desc "Delete the stemcell"
        route :stemcell, :delete
      end

      command :delete_release do
        usage "delete release <name> [<version>]"
        desc "Delete release (or a particular release version)"
        route :release, :delete
        option "--force", "ignore errors during deletion"
      end

      command :reset_release do
        usage "reset release"
        desc "Reset release development environment " +
             "(deletes all dev artifacts)"
        route :release, :reset
      end

      command :cancel_task do
        usage "cancel task <id>"
        desc "Cancel task once it reaches the next cancel checkpoint"
        route :task, :cancel
      end

      command :track_task do
        usage "task [<task_id>|last]"
        desc "Show task status and start tracking its output"
        route :task, :track
        option "--no-cache", "don't cache output locally"
        option "--event|--soap|--debug", "different log types to track"
        option "--raw", "don't beautify log"
      end

      command :list_stemcells do
        usage "stemcells"
        desc "Show the list of available stemcells"
        route :stemcell, :list
      end

      command :list_public_stemcells do
        usage "public stemcells"
        desc "Show the list of publicly available stemcells for download."
        route :stemcell, :list_public
      end

      command :download_public_stemcell do
        usage "download public stemcell <stemcell_name>"
        desc "Downloads a stemcell from the public blobstore."
        route :stemcell, :download_public
      end

      command :list_releases do
        usage "releases"
        desc "Show the list of available releases"
        route :release, :list
      end

      command :list_deployments do
        usage "deployments"
        desc "Show the list of available deployments"
        route :deployment, :list
      end

      command :diff do
        usage "diff [<template_file>]"
        desc "Diffs your current BOSH deployment configuration against " +
             "the specified BOSH deployment configuration template so that " +
             "you can keep your deployment configuration file up to date. " +
             "A dev template can be found in deployments repos."
        route :biff, :biff
      end

      command :list_running_tasks do
        usage "tasks"
        desc "Show the list of running tasks"
        route :task, :list_running
      end

      command :list_recent_tasks do
        usage "tasks recent [<number>]"
        desc "Show <number> recent tasks"
        route :task, :list_recent
      end

      command :list_vms do
        usage "vms [<deployment>]"
        desc "List all VMs that supposed to be in a deployment"
        route :vms, :list
      end

      command :cleanup do
        usage "cleanup"
        desc "Remove all but several recent stemcells and releases " +
             "from current director " +
             "(stemcells and releases currently in use are NOT deleted)"
        route :maintenance, :cleanup
      end

      command :cloudcheck do
        usage "cloudcheck"
        desc "Cloud consistency check and interactive repair"
        option "--auto", "resolve problems automatically " +
               "(not recommended for production)"
        option "--report", "generate report only, " +
               "don't attempt to resolve problems"
        route :cloud_check, :perform
      end

      command :upload_blob do
        usage "upload blob <blobs>"
        desc "Upload given blob to the blobstore"
        option "--force", "bypass duplicate checking"
        route :blob, :upload_blob
      end

      command :sync_blobs do
        usage "sync blobs"
        desc "Sync blob with the blobstore"
        option "--force", "overwrite all local copies with the remote blob"
        route :blob, :sync_blobs
      end

      command :blobs do
        usage "blobs"
        desc "Print blob status"
        route :blob, :blobs_info
      end

      def define_plugin_commands
        Gem.find_files("bosh/cli/commands/*.rb", true).each do |file|
          class_name = File.basename(file, ".rb").capitalize

          next if Bosh::Cli::Command.const_defined?(class_name)

          load file

          plugin = Bosh::Cli::Command.const_get(class_name)

          plugin.commands.each do |name, block|
            command(name, &block)
          end

          @plugins ||= {}
          @plugins[class_name] = plugin
        end
      end

    end

    def parse_options!
      opts_parser = OptionParser.new do |opts|
        opts.on("-c", "--config FILE") { |file| @options[:config] = file }
        opts.on("--cache-dir DIR") { |dir| @options[:cache_dir] = dir }
        opts.on("--verbose") { @options[:verbose] = true }
        opts.on("--no-color") { @options[:colorize] = false }
        opts.on("-q", "--quiet") { @options[:quiet] = true }
        opts.on("-s", "--skip-director-checks") do
          @options[:director_checks] = false
        end
        opts.on("-n", "--non-interactive") do
          @options[:non_interactive] = true
          @options[:colorize] = false
        end
        opts.on("-d", "--debug") { @options[:debug] = true }
        opts.on("-v", "--version") { dispatch(find_command(:version)); }
      end

      @args = opts_parser.order!(@args)
    end

    def build_parse_tree
      @parse_tree = ParseTreeNode.new

      COMMANDS.each_pair do |id, command|
        p = @parse_tree
        n_kw = command.keywords.size

        keywords = command.keywords.each_with_index do |kw, i|
          p[kw] ||= ParseTreeNode.new
          p = p[kw]
          p.command = command if i == n_kw - 1
        end
      end
    end

    def add_shortcuts
      { "st" => "status",
        "props" => "properties",
        "cck" => "cloudcheck" }.each do |short, long|
        @parse_tree[short] = @parse_tree[long]
      end
    end

    def basic_usage
      <<-OUT.gsub(/^\s{10}/, "")
          usage: bosh [--verbose] [--config|-c <FILE>] [--cache-dir <DIR]
                 [--force] [--no-color] [--skip-director-checks] [--quiet]
                 [--non-interactive]
                 command [<args>]
      OUT
    end

    def command_usage(cmd, margin = nil)
      command = cmd.is_a?(Symbol) ? find_command(cmd) : cmd
      usage = command.usage

      margin ||= 2
      usage_width = 25
      desc_width = 43
      option_width = 10

      output = " " * margin
      output << usage.ljust(usage_width) + " "
      char_count = usage.size > usage_width ? 100 : 0

      command.description.to_s.split(/\s+/).each do |word|
        if char_count + word.size + 1 > desc_width # +1 accounts for space
          char_count = 0
          output << "\n" + " " * (margin + usage_width + 1)
        end
        char_count += word.size
        output << word << " "
      end

      command.options.each do |name, value|
        output << "\n" + " " * (margin + usage_width + 1)
        output << name.ljust(option_width) + " "
        # Long option name eats the whole line,
        # short one gives space to description
        char_count = name.size > option_width ? 100 : 0

        value.to_s.split(/\s+/).each do |word|
          if char_count + word.size + 1 > desc_width - option_width
            char_count = 0
            output << "\n" + " " * (margin + usage_width + option_width + 2)
          end
          char_count += word.size
          output << word << " "
        end
      end

      output
    end

    def help_message
      template = File.join(File.dirname(__FILE__),
                           "templates", "help_message.erb")
      ERB.new(File.read(template), 4).result(binding.taint)
    end

    def plugin_help_message
      help = ['']

      @plugins.each do |class_name, plugin|
        help << class_name
        plugin.commands.keys.each do |name|
          help << command_usage(name)
        end
      end

      help.join("\n")
    end

    def search_parse_tree(node)
      return nil if node.nil?
      arg = @args.shift

      longer_command = search_parse_tree(node[arg])

      if longer_command.nil?
        @args.unshift(arg) if arg # backtrack if needed
        node.command
      else
        longer_command
      end
    end

    def try_alias
      # Tries to find best match among aliases (possibly multiple words),
      # then unwinds it onto the remaining args and searches parse tree again.
      # Not the most effective algorithm but does the job.
      config = Bosh::Cli::Config.new(
        @options[:config] || Bosh::Cli::DEFAULT_CONFIG_PATH)
      candidate = []
      best_match = nil
      save_args = @args.dup

      while arg = @args.shift
        candidate << arg
        resolved = config.resolve_alias(:cli, candidate.join(" "))
        if best_match && resolved.nil?
          @args.unshift(arg)
          break
        end
        best_match = resolved
      end

      if best_match.nil?
        @args = save_args
        return
      end

      best_match.split(/\s+/).reverse.each do |arg|
        @args.unshift(arg)
      end

      search_parse_tree(@parse_tree)
    end

    def command_suggestions(args)
      non_keywords = args - ALL_KEYWORDS

      COMMANDS.values.select do |cmd|
        (args & cmd.keywords).size > 0 && args - cmd.keywords == non_keywords
      end
    end

    def unknown_command(cmd)
      say("Command `#{cmd}' not found.")
      say("Please use `bosh help' to get the list of bosh commands.")
    end

    def save_exception(e)
      say("BOSH CLI Error: #{e.message}".red)
      begin
        errfile = File.expand_path("~/.bosh_error")
        File.open(errfile, "w") do |f|
          f.write(e.message)
          f.write("\n")
          f.write(e.backtrace.join("\n"))
        end
        say("Error information saved in #{errfile}")
      rescue => e
        say("Error information couldn't be saved: #{e.message}")
      end
    end

  end

end
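The data/bin/bosh executable itself is not reproduced in this section; a plausible minimal entry point, shown only as a sketch and not as the actual bin script, would simply hand ARGV to the runner defined above:

  #!/usr/bin/env ruby
  # Sketch of an entry point (assumed, not taken from the gem): load the CLI,
  # then let Runner parse global options, walk the parse tree built in
  # build_parse_tree, and dispatch to the matching Bosh::Cli::Command class.
  require "cli"
  Bosh::Cli::Runner.run(ARGV)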