bosh_cli 0.16
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/README +4 -0
- data/Rakefile +55 -0
- data/bin/bosh +17 -0
- data/lib/cli.rb +76 -0
- data/lib/cli/cache.rb +44 -0
- data/lib/cli/changeset_helper.rb +142 -0
- data/lib/cli/command_definition.rb +52 -0
- data/lib/cli/commands/base.rb +245 -0
- data/lib/cli/commands/biff.rb +300 -0
- data/lib/cli/commands/blob.rb +125 -0
- data/lib/cli/commands/cloudcheck.rb +169 -0
- data/lib/cli/commands/deployment.rb +147 -0
- data/lib/cli/commands/job.rb +42 -0
- data/lib/cli/commands/job_management.rb +117 -0
- data/lib/cli/commands/log_management.rb +81 -0
- data/lib/cli/commands/maintenance.rb +131 -0
- data/lib/cli/commands/misc.rb +240 -0
- data/lib/cli/commands/package.rb +112 -0
- data/lib/cli/commands/property_management.rb +125 -0
- data/lib/cli/commands/release.rb +469 -0
- data/lib/cli/commands/ssh.rb +271 -0
- data/lib/cli/commands/stemcell.rb +184 -0
- data/lib/cli/commands/task.rb +213 -0
- data/lib/cli/commands/user.rb +28 -0
- data/lib/cli/commands/vms.rb +53 -0
- data/lib/cli/config.rb +154 -0
- data/lib/cli/core_ext.rb +145 -0
- data/lib/cli/dependency_helper.rb +62 -0
- data/lib/cli/deployment_helper.rb +263 -0
- data/lib/cli/deployment_manifest_compiler.rb +28 -0
- data/lib/cli/director.rb +633 -0
- data/lib/cli/director_task.rb +64 -0
- data/lib/cli/errors.rb +48 -0
- data/lib/cli/event_log_renderer.rb +351 -0
- data/lib/cli/job_builder.rb +226 -0
- data/lib/cli/package_builder.rb +254 -0
- data/lib/cli/packaging_helper.rb +248 -0
- data/lib/cli/release.rb +176 -0
- data/lib/cli/release_builder.rb +215 -0
- data/lib/cli/release_compiler.rb +178 -0
- data/lib/cli/release_tarball.rb +272 -0
- data/lib/cli/runner.rb +771 -0
- data/lib/cli/stemcell.rb +83 -0
- data/lib/cli/task_log_renderer.rb +40 -0
- data/lib/cli/templates/help_message.erb +75 -0
- data/lib/cli/validation.rb +42 -0
- data/lib/cli/version.rb +7 -0
- data/lib/cli/version_calc.rb +48 -0
- data/lib/cli/versions_index.rb +126 -0
- data/lib/cli/yaml_helper.rb +62 -0
- data/spec/assets/biff/bad_gateway_config.yml +28 -0
- data/spec/assets/biff/good_simple_config.yml +63 -0
- data/spec/assets/biff/good_simple_golden_config.yml +63 -0
- data/spec/assets/biff/good_simple_template.erb +69 -0
- data/spec/assets/biff/multiple_subnets_config.yml +40 -0
- data/spec/assets/biff/network_only_template.erb +34 -0
- data/spec/assets/biff/no_cc_config.yml +27 -0
- data/spec/assets/biff/no_range_config.yml +27 -0
- data/spec/assets/biff/no_subnet_config.yml +16 -0
- data/spec/assets/biff/ok_network_config.yml +30 -0
- data/spec/assets/biff/properties_template.erb +6 -0
- data/spec/assets/deployment.MF +0 -0
- data/spec/assets/plugins/bosh/cli/commands/echo.rb +43 -0
- data/spec/assets/plugins/bosh/cli/commands/ruby.rb +24 -0
- data/spec/assets/release/jobs/cacher.tgz +0 -0
- data/spec/assets/release/jobs/cacher/config/file1.conf +0 -0
- data/spec/assets/release/jobs/cacher/config/file2.conf +0 -0
- data/spec/assets/release/jobs/cacher/job.MF +6 -0
- data/spec/assets/release/jobs/cacher/monit +1 -0
- data/spec/assets/release/jobs/cleaner.tgz +0 -0
- data/spec/assets/release/jobs/cleaner/job.MF +4 -0
- data/spec/assets/release/jobs/cleaner/monit +1 -0
- data/spec/assets/release/jobs/sweeper.tgz +0 -0
- data/spec/assets/release/jobs/sweeper/config/test.conf +1 -0
- data/spec/assets/release/jobs/sweeper/job.MF +5 -0
- data/spec/assets/release/jobs/sweeper/monit +1 -0
- data/spec/assets/release/packages/mutator.tar.gz +0 -0
- data/spec/assets/release/packages/stuff.tgz +0 -0
- data/spec/assets/release/release.MF +17 -0
- data/spec/assets/release_invalid_checksum.tgz +0 -0
- data/spec/assets/release_invalid_jobs.tgz +0 -0
- data/spec/assets/release_no_name.tgz +0 -0
- data/spec/assets/release_no_version.tgz +0 -0
- data/spec/assets/stemcell/image +1 -0
- data/spec/assets/stemcell/stemcell.MF +6 -0
- data/spec/assets/stemcell_invalid_mf.tgz +0 -0
- data/spec/assets/stemcell_no_image.tgz +0 -0
- data/spec/assets/valid_release.tgz +0 -0
- data/spec/assets/valid_stemcell.tgz +0 -0
- data/spec/spec_helper.rb +25 -0
- data/spec/unit/base_command_spec.rb +66 -0
- data/spec/unit/biff_spec.rb +135 -0
- data/spec/unit/cache_spec.rb +36 -0
- data/spec/unit/cli_commands_spec.rb +481 -0
- data/spec/unit/config_spec.rb +139 -0
- data/spec/unit/core_ext_spec.rb +77 -0
- data/spec/unit/dependency_helper_spec.rb +52 -0
- data/spec/unit/deployment_manifest_compiler_spec.rb +63 -0
- data/spec/unit/director_spec.rb +511 -0
- data/spec/unit/director_task_spec.rb +48 -0
- data/spec/unit/event_log_renderer_spec.rb +171 -0
- data/spec/unit/hash_changeset_spec.rb +73 -0
- data/spec/unit/job_builder_spec.rb +454 -0
- data/spec/unit/package_builder_spec.rb +567 -0
- data/spec/unit/release_builder_spec.rb +65 -0
- data/spec/unit/release_spec.rb +66 -0
- data/spec/unit/release_tarball_spec.rb +33 -0
- data/spec/unit/runner_spec.rb +140 -0
- data/spec/unit/ssh_spec.rb +78 -0
- data/spec/unit/stemcell_spec.rb +17 -0
- data/spec/unit/version_calc_spec.rb +27 -0
- data/spec/unit/versions_index_spec.rb +132 -0
- metadata +338 -0

data/lib/cli/commands/biff.rb
@@ -0,0 +1,300 @@
+module Bosh::Cli::Command
+  class Biff < Base
+
+    # Takes your current deployment configuration and uses some of its
+    # configuration to populate the template file. The Network information is
+    # used and then IPs for each job are automatically set. Once the template
+    # file has been used to generate a new config, the old config and new config
+    # are diff'd and the user can choose to keep the new config.
+    # @param [String] template The string path to the template that should be
+    #     used.
+    def biff(template)
+      begin
+        setup(template)
+
+        template_to_fill = ERB.new(File.read(@template_file), 0, "%<>-")
+        @template_output = template_to_fill.result(binding)
+
+        if @errors == 0
+          print_string_diff(File.read(@deployment_file), @template_output)
+          keep_new_file unless @no_differences
+        else
+          say("There were " + "#{@errors} errors.".red)
+        end
+      ensure
+        delete_temp_diff_files
+      end
+    end
+
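
The biff command above works by evaluating the ERB template against the command object's own binding, so helper methods defined later in this class (find, ip, ip_range, and so on) are callable from inside the template's <%= %> tags; the rendered result is then diffed against the current deployment file. A rough standalone sketch of that mechanism, not part of the gem (TinyBiff and its data are made up):

require "erb"

class TinyBiff
  def initialize(config)
    @config = config
  end

  # Stand-in for Biff#find: dotted-path lookup into the deployment hash.
  def find(path)
    path.split(".").inject(@config) { |obj, key| obj[key] }
  end

  def fill(template_text)
    # bosh_cli 0.16 calls ERB.new(text, 0, "%<>-"); on Ruby 2.6+ the trim
    # mode is passed as a keyword argument instead.
    ERB.new(template_text, trim_mode: "%<>-").result(binding)
  end
end

config = { "name" => "dev", "director_uuid" => "abc-123" }
template = "name: <%= find('name') %>\ndirector_uuid: <%= find('director_uuid') %>\n"
puts TinyBiff.new(config).fill(template)
# => name: dev
#    director_uuid: abc-123
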
+    private
+
+    # Unified is so that we get the whole file diff not just sections.
+    DIFF_COMMAND = "diff --unified=1000"
+
+    KEEP_NEW_VERSION_TEXT = "Would you like to keep the new version? [yn]"
+
+    DIFF_FAILED_KEEP_NEW_TEXT =
+        "Would you like the new version copied to '%s'? [yn]"
+
+    # Accessor for testing purposes.
+    attr_accessor :ip_helper, :template_output
+
+    # Deletes the temporary files that were used.
+    def delete_temp_diff_files
+      # File.exists works for both files and directories. Must use for 1.8
+      # compat.
+      if @dir_name && File.exists?(@dir_name)
+        FileUtils.remove_entry_secure(@dir_name)
+      end
+    end
+
+    # Takes two strings and prints the diff of them.
+    # @param [String] str1 The first string to diff.
+    # @param [String] str2 The string to diff against.
+    def print_string_diff(str1, str2)
+      File.open(@temp_file_path_1, "w") { |f|
+        f.write(str1)
+      }
+      File.open(@temp_file_path_2, "w") { |f|
+        f.write(str2)
+      }
+
+      @diff_works = true
+      cmd = "#{DIFF_COMMAND} #{@temp_file_path_1} #{@temp_file_path_2} 2>&1"
+      output = `#{cmd}`
+      if $?.exitstatus == 2
+        say("'#{cmd}' did not work.")
+        say("Failed, saying: '#{output}'.")
+        @diff_works = false
+        return
+      end
+
+      if output.empty?
+        output = "No differences."
+        @no_differences = true
+      end
+      say(output)
+    end
+
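
For context on the $?.exitstatus check in print_string_diff: GNU diff exits 0 when the inputs are identical, 1 when they differ, and 2 when it could not run the comparison at all, which is why only status 2 is treated as a failure. A minimal sketch of the same pattern (assumes a Unix-like system with diff on the PATH):

require "tempfile"

a = Tempfile.new("a"); a.write("x\n"); a.close
b = Tempfile.new("b"); b.write("y\n"); b.close

output = `diff --unified=1000 #{a.path} #{b.path} 2>&1`
case $?.exitstatus
when 0 then puts "No differences."
when 1 then puts output                 # the normal case: print the unified diff
else        puts "diff could not run: #{output}"
end
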
+    # Alias for find. It is used to find within a given object, not the default
+    # deployment_obj
+    # @param [String] path The path to the object that the template wants to
+    #     retrieve from the user's config and substitute in.
+    # @param [Object] obj Either a hash or array which is the user's deployment
+    #     config to be looked through.
+    # @return [Object] The found object.
+    def find_in(path, obj)
+      find(path, obj)
+    end
+
+    # Finds a path in the user's deployment configuration object. The reason we
+    # use this is to make the paths used in the template file better on the
+    # eyes. Instead of having find('jobs[4].static_ips') we have
+    # find('jobs.debian_nfs_server.static_ips'). Find will look through the jobs
+    # array and find the object that has name=debian_nfs_server. If jobs were a
+    # hash then find would get the hash key debian_nfs_server.
+    # @param [String] path The path to the object that the template wants to
+    #     retrieve from the user's config and substitute in.
+    # @param [Object] obj Either a hash or array which is the user's deployment
+    #     config to be looked through.
+    # @return [Object] The found object.
+    def find(path, obj = @deployment_obj)
+      starting_obj = obj
+      path_split = path.split(".")
+      found_so_far = []
+      path_split.each do |path_part|
+        found = false
+        if obj.is_a?(Array)
+          obj.each do |data_val|
+            if data_val["name"] == path_part
+              obj = data_val
+              found = true
+            end
+          end
+        elsif obj[path_part]
+          obj = obj[path_part]
+          found = true
+        end
+
+        unless found
+          @errors += 1
+          say("Could not find #{path.red}.")
+          say("'#{@template_file}' has it but '#{@deployment_file}' does not.")
+          #say("\nIt should exist in \n#{obj.to_yaml}\n")
+          if starting_obj == @deployment_obj
+            # To cut down on complexity, we don't print out the section of code
+            # from the template YAML that the user needs if the find method was
+            # called with any other starting object other than deployment_obj.
+            # The reason for this is because we'd have to recursively find the
+            # path to the starting object so that it can be found in the
+            # template.
+            print_the_template_path(path.split('.'), found_so_far)
+          end
+          break
+        end
+        found_so_far << path_part
+      end
+      obj
+    end
+
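
A standalone illustration of the lookup rule that find implements (error counting and the template hints are omitted here): each dotted path segment is either a hash key or, when the current object is an array, the element whose "name" field matches the segment. The deployment data below is made up:

# Simplified dotted-path lookup mirroring Biff#find's traversal rule.
def lookup(path, obj)
  path.split(".").inject(obj) do |current, part|
    if current.is_a?(Array)
      current.find { |item| item["name"] == part }   # arrays are searched by "name"
    else
      current[part]                                  # hashes are indexed directly
    end
  end
end

deployment = {
  "jobs" => [
    { "name" => "debian_nfs_server", "static_ips" => ["10.0.0.2"] },
    { "name" => "ccdb",              "static_ips" => ["10.0.0.3"] }
  ]
}

p lookup("jobs.debian_nfs_server.static_ips", deployment)  # => ["10.0.0.2"]
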
+    # Used by print_the_template_path so that it can prettily print just the
+    # section of the template that the user is missing. E.x. if the user is
+    # missing the job 'ccdb' then we want to not just print out 'ccdb' and
+    # everything in it -- we also want to print out it's heirarchy, aka the fact
+    # that it is under jobs. So, we delete everything else in jobs.
+    # @param [Object] obj Either a Hash or Array that is supposed to have
+    #     everything deleted out of it except for a key or object with
+    #     name = key depending on if it is a hash or array respectively.
+    # @param [String] name They key to keep.
+    # @return [Object] The original containing object with only the named object
+    #     in it.
+    def delete_all_except(obj, name)
+      each_method = obj.is_a?(Hash) ? "each_key" : "each_index"
+      obj.send(each_method) do |key|
+        if key == name ||
+            (obj[key].is_a?(Hash) && obj[key]["name"] == name)
+          return_obj = nil
+          if (obj.is_a?(Hash))
+            return_obj = {}
+            return_obj[name] = obj[key]
+          else
+            return_obj = [obj[key]]
+          end
+          return return_obj
+        end
+      end
+    end
+
+    # Tries to print out some helpful output from the template to let the user
+    # know what they're missing. For instance, if the user doesn't have a job
+    # and the template needs to pull some data from the job then it will print
+    # out what the job looks like in the template. This method can't be used if
+    # the path is a relative path. A relative path is when find_in was used.
+    # @param [Array] looking_for_path The path that is being looked for in the
+    #     user's deployment config but does not exist.
+    # @param [Array] users_farthest_found_path The farthest that 'find' got in
+    #     finding the looking_for_path.
+    def print_the_template_path(looking_for_path, users_farthest_found_path)
+      delete_all_except_name =
+          (looking_for_path - users_farthest_found_path).first
+      path = users_farthest_found_path.join('.')
+      what_we_need = find(path, @template_obj)
+      what_we_need = delete_all_except(what_we_need, delete_all_except_name)
+      say("Add this to '#{path}':".red + "\n#{what_we_need.to_yaml}\n\n")
+    end
+
+    # Loads the template file as YAML. First, it replaces all of the ruby
+    # syntax. This file is used so that when there is an error, biff can report
+    # what the user's deployment needs according to this template.
+    # @return [String] The loaded template file as a ruby object.
+    def load_template_as_yaml
+      temp_data = File.read(@template_file)
+      temp_data.gsub!(/<%=.*%>/, "INSERT_DATA_HERE")
+      temp_data.gsub!(/[ ]*<%.*%>[ ]*\n/, "")
+      YAML::load(temp_data)
+    end
+
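
A small sketch of what load_template_as_yaml does to a template before parsing it: <%= %> value tags are replaced with a placeholder and whole <% %> control-flow lines are stripped, leaving plain YAML. The template below is illustrative, not the one shipped with the gem:

require "yaml"

template = <<-ERB
name: <%= find("name") %>
jobs:
<% find("jobs").each do |job| %>
  - name: <%= job["name"] %>
<% end %>
ERB

yamlish = template.dup
yamlish.gsub!(/<%=.*%>/, "INSERT_DATA_HERE")   # value tags become placeholders
yamlish.gsub!(/[ ]*<%.*%>[ ]*\n/, "")          # control-flow lines are dropped entirely
p YAML.load(yamlish)
# => {"name"=>"INSERT_DATA_HERE", "jobs"=>[{"name"=>"INSERT_DATA_HERE"}]}
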
+    # Gets the network's network/mask for configuring things such as the
+    # nfs_server properties. E.x. 192.168.1.0/22
+    # @param [String] netw_name The name of the network to get the network/mast
+    #     from.
+    # @return [String] The network/mask.
+    def get_network_and_mask(netw_name)
+      netw_cidr = @ip_helper[netw_name]
+      "#{netw_cidr.network}#{netw_cidr.netmask}"
+    end
+
+    # Used by the template to specify IPs for jobs. It uses the CIDR tool to get
+    # them.
+    # @param [Integer] ip_num The nth IP number to get.
+    # @param [String] netw_name The name of the network to get the IP from.
+    # @return [String] An IP in the network.
+    def ip(ip_num, netw_name)
+      ip_range((ip_num..ip_num), netw_name)
+    end
+
+    # Used by the template to specify IP ranges for jobs. It uses the CIDR tool
+    # to get them. Accepts negative ranges.
+    # @param [Range] range The range of IPs to return, such as 10..24
+    # @param [String] netw_name The name of the network to get the IPs from.
+    # @return [String] An IP return in the network.
+    def ip_range(range, netw_name)
+      netw_cidr = @ip_helper[netw_name]
+      first = (range.first >= 0) ? range.first :
+          netw_cidr.size + range.first
+      last = (range.last >= 0) ? range.last :
+          netw_cidr.size + range.last
+
+      first == last ? "#{netw_cidr.nth(first)}" :
+          "#{netw_cidr.nth(first)} - #{netw_cidr.nth(last)}"
+    end
+
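
A rough sketch of what ip and ip_range produce for a /24 network, assuming the netaddr 1.x gem (the series that provides the NetAddr::CIDR class used above); negative indices count back from the end of the network:

require "netaddr"

cidr = NetAddr::CIDR.create("192.168.1.0/24")

puts cidr.nth(2)                          # => 192.168.1.2, what ip(2, ...) resolves to
puts "#{cidr.nth(10)} - #{cidr.nth(24)}"  # what ip_range(10..24, ...) resolves to

# Negative indices count back from the end of the network:
size = cidr.size                          # 256 addresses in a /24
puts "#{cidr.nth(size - 3)} - #{cidr.nth(size - 2)}"  # ip_range(-3..-2, ...)
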
+    # Creates the helper hash. Keys are the network name, values are the CIDR
+    # tool for generating IPs for jobs in that network.
+    # @return [Hash] The helper hash that has a CIDR instance for each network.
+    def create_ip_helper
+      helper = {}
+      netw_arr = find("networks")
+      if netw_arr.nil?
+        raise "Must have a network section."
+      end
+      netw_arr.each do |netw|
+        subnets = netw["subnets"]
+        check_valid_network_config(netw, subnets)
+        helper[netw["name"]] = NetAddr::CIDR.create(subnets.first["range"])
+      end
+      helper
+    end
+
+    # Raises errors if there is something wrong with the user's deployment
+    # network configuration, since it's used to populate the rest of the
+    # template.
+    # @param [Hash] netw The user's network configuration as a ruby hash.
+    # @param [Array] subnets The subnets in the network.
+    def check_valid_network_config(netw, subnets)
+      if subnets.nil?
+        raise "You must have subnets in #{netw["name"]}"
+      end
+      unless subnets.length == 1
+        raise "Biff doesn't know how to deal with anything other than one " +
+            "subnet in #{netw["name"]}"
+      end
+      if subnets.first["range"].nil? || subnets.first["dns"].nil?
+        raise "Biff requires each network to have range and dns entries."
+      end
+      if subnets.first["gateway"] && subnets.first["gateway"].match(/.*\.1$/).nil?
+        raise "Biff only supports configurations where the gateway is the " +
+            "first IP (e.g. 172.31.196.1)."
+      end
+    end
+
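
A hypothetical parsed "networks" section (illustrative values only) that satisfies check_valid_network_config: one network with exactly one subnet, range and dns present, and the gateway on the .1 address:

networks = [
  {
    "name"    => "default",
    "subnets" => [
      {
        "range"   => "192.168.1.0/24",
        "dns"     => ["192.168.1.2"],
        "gateway" => "192.168.1.1"
      }
    ]
  }
]

# create_ip_helper keys the helper hash by network name and hands the single
# subnet's range to NetAddr::CIDR.create:
networks.each do |netw|
  puts "#{netw["name"]} => CIDR built from #{netw["subnets"].first["range"]}"
end
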
+    # Asks if the user would like to keep the new template and copies it over
+    # their existing template if yes. This is its own function for testing.
+    def keep_new_file
+      copy_to_file = @diff_works ? @deployment_file : @deployment_file + ".new"
+      agree_text = @diff_works ?
+          KEEP_NEW_VERSION_TEXT : (DIFF_FAILED_KEEP_NEW_TEXT % copy_to_file)
+      if agree(agree_text)
+        say("New version copied to '#{copy_to_file}'")
+        FileUtils.cp(@temp_file_path_2, copy_to_file)
+      end
+    end
+
+    # Sets up a few instance variables.
+    # @param [String] template The string path to the template that should be
+    #     used.
+    def setup(template)
+      @template_file = template
+      @deployment_file = deployment
+      raise "Deployment not set." if @deployment_file.nil?
+      @deployment_obj = load_yaml_file(@deployment_file)
+      @template_obj = load_template_as_yaml
+      @ip_helper = create_ip_helper
+      @errors = 0
+      @dir_name = Dir.mktmpdir
+      @temp_file_path_1 = "#{@dir_name}/bosh_biff_1"
+      @temp_file_path_2 = "#{@dir_name}/bosh_biff_2"
+    end
+
+  end
+end

data/lib/cli/commands/blob.rb
@@ -0,0 +1,125 @@
+# Copyright (c) 2009-2012 VMware, Inc.
+
+module Bosh::Cli::Command
+  class Blob < Base
+
+    def upload_blob(*params)
+      check_if_blobs_supported
+      force = !params.delete("--force").nil?
+
+      blobs = params.map{ |param| get_blob_name(param) }
+      total = blobs.size
+      blob_index = get_blobs_index
+
+      blobs.each_with_index do |blob_name, idx|
+        count = idx + 1
+        blob_file = File.join(BLOBS_DIR, blob_name)
+        blob_sha = Digest::SHA1.file(blob_file).hexdigest
+
+        if blob_index[blob_name] && !force
+          # We already have this binary on record
+          if blob_index[blob_name]["sha"] == blob_sha
+            say("[#{count}/#{total}] Skipping #{blob_name}".green)
+            next
+          end
+          # Local copy is different from the remote copy
+          if interactive?
+            confirm = ask("\nBlob #{blob_name} changed, " +
+                          "do you want to update the binary [yN]: ")
+            if confirm.empty? || !(confirm =~ /y(es)?$/i)
+              say("[#{count}/#{total}] Skipping #{blob_name}".green)
+              next
+            end
+          end
+        end
+
+        # TODO: We could use the sha and try to avoid
+        # uploading duplicated objects.
+        say("[#{count}/#{total}] Uploading #{blob_name}".green)
+        blob_id = blobstore.create(File.open(blob_file, "r"))
+        blob_index[blob_name] = { "object_id" => blob_id, "sha" => blob_sha }
+      end
+
+      # update the index file
+      index_file = Tempfile.new("tmp_blob_index")
+      dump_yaml_to_file(blob_index, index_file)
+      index_file.close
+      FileUtils.mv(index_file.path, File.join(work_dir, BLOBS_INDEX_FILE))
+    end
+
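
A rough sketch (not the gem's code) of the per-blob decision upload_blob makes: the blobs index maps a blob's path relative to the blobs directory to the blobstore object id and the SHA1 of the uploaded file, and a blob is re-uploaded only when it is new, forced, or its checksum changed (the interactive confirmation is left out here). The names and ids below are made up:

require "digest"

blob_index = {
  "nginx/nginx-1.0.15.tar.gz" => {
    "object_id" => "d3b07384-d9a0-4c1e-8f6e-000000000000",  # illustrative id
    "sha"       => "f572d396fae9206628714fb2ce00f72e94f2258f"
  }
}

def needs_upload?(index, name, local_sha, force)
  return true if force
  return true unless index[name]        # never uploaded before
  index[name]["sha"] != local_sha       # re-upload only if the contents changed
end

local_sha = Digest::SHA1.hexdigest("file contents here")
puts needs_upload?(blob_index, "nginx/nginx-1.0.15.tar.gz", local_sha, false)
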
+    def sync_blobs(*options)
+      check_if_blobs_supported
+      force = options.include?("--force")
+
+      blob_index = get_blobs_index
+      total = blob_index.size
+      count = 0
+
+      blob_index.each_pair do |name, blob_info|
+        count += 1
+        blob_file = File.join(work_dir, BLOBS_DIR, name)
+
+        # check if we have conflicting blobs
+        if File.file?(blob_file) && !force
+          blob_sha = Digest::SHA1.file(blob_file).hexdigest
+          if blob_sha == blob_info["sha"]
+            say("[#{count}/#{total}] Skipping blob #{name}".green)
+            next
+          end
+
+          if interactive?
+            confirm = ask("\nLocal blob (#{name}) conflicts with " +
+                          "remote object, overwrite local copy? [yN]: ")
+            if confirm.empty? || !(confirm =~ /y(es)?$/i)
+              say("[#{count}/#{total}] Skipping blob #{name}".green)
+              next
+            end
+          end
+        end
+        say("[#{count}/#{total}] Updating #{blob_file}".green)
+        fetch_blob(blob_file, blob_info)
+      end
+    end
+
+    def blobs_info
+      blob_status(true)
+    end
+
+    private
+
+    # Sanity check the input file and returns the blob_name
+    def get_blob_name(file)
+      err("Invalid file #{file}") unless File.file?(file)
+      blobs_dir = File.join(realpath(work_dir), "#{BLOBS_DIR}/")
+      file_path = realpath(File.expand_path(file))
+
+      if file_path[0..blobs_dir.length - 1] != blobs_dir
+        err("#{file_path} is NOT under #{blobs_dir}")
+      end
+      file_path[blobs_dir.length..file_path.length]
+    end
+
+    # Download the blob (blob_info) into dst_file
+    def fetch_blob(dst_file, blob_info)
+      object_id = blob_info["object_id"]
+
+      # fetch the blob
+      new_blob = Tempfile.new("new_blob_file")
+      blobstore.get(object_id, new_blob)
+      new_blob.close
+
+      if blob_info["sha"] != Digest::SHA1.file(new_blob.path).hexdigest
+        err("Fatal error: " +
+            "Inconsistent checksum for object #{blob_info["object_id"]}")
+      end
+
+      FileUtils.mkdir_p(File.dirname(dst_file))
+      FileUtils.chmod(0644, new_blob.path)
+      FileUtils.mv(new_blob.path, dst_file)
+    end
+
+    def realpath(path)
+      Pathname.new(path).realpath.to_s
+    end
+  end
+end
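
A standalone sketch of the path check in get_blob_name: both the blobs directory and the candidate file are resolved with Pathname#realpath, the file must start with the blobs directory prefix, and the value returned is the path relative to that directory. The paths below are created in a throwaway temp directory purely for illustration:

require "pathname"
require "fileutils"
require "tmpdir"

work_dir  = Dir.mktmpdir
blobs_dir = File.join(Pathname.new(work_dir).realpath.to_s, "blobs/")
FileUtils.mkdir_p(File.join(blobs_dir, "nginx"))
file = File.join(blobs_dir, "nginx", "nginx.tar.gz")
File.write(file, "dummy")

file_path = Pathname.new(File.expand_path(file)).realpath.to_s
if file_path[0..blobs_dir.length - 1] == blobs_dir
  puts file_path[blobs_dir.length..-1]   # => "nginx/nginx.tar.gz"
else
  abort "#{file_path} is NOT under #{blobs_dir}"
end
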

data/lib/cli/commands/cloudcheck.rb
@@ -0,0 +1,169 @@
+# Copyright (c) 2009-2012 VMware, Inc.
+
+module Bosh::Cli::Command
+  class CloudCheck < Base
+    include Bosh::Cli::DeploymentHelper
+
+    def perform(*options)
+      auth_required
+
+      @auto_mode = options.delete("--auto")
+      @report_mode = options.delete("--report")
+
+      if non_interactive? && !@report_mode
+        err ("Cloudcheck cannot be run in non-interactive mode\n" +
+             "Please use `--auto' flag if you want automated resolutions")
+      end
+
+      if options.size > 0
+        err("Unknown options: #{options.join(", ")}")
+      end
+
+      if @auto_mode && @report_mode
+        err("Can't use --auto and --report mode together")
+      end
+
+      say("Performing cloud check...")
+
+      manifest = prepare_deployment_manifest
+      deployment_name = manifest["name"]
+
+      status, body = director.perform_cloud_scan(deployment_name)
+      scan_failed(status, body) if status != :done
+
+      say("Scan is complete, checking if any problems found...")
+      @problems = director.list_problems(deployment_name)
+
+      verify_problems
+      nl
+      say("Found #{pluralize(@problems.size, "problem")}".yellow)
+      nl
+
+      @resolutions = {}
+
+      @problems.each_with_index do |problem, index|
+        description = problem["description"].to_s.chomp(".") + "."
+        say("Problem #{index+1} of #{@problems.size}: #{description}".yellow)
+        next if @report_mode
+        if @auto_mode
+          @resolutions[problem["id"]] = {
+            "name" => nil,
+            "plan" => "apply default resolution"
+          }
+        else
+          @resolutions[problem["id"]] = get_resolution(problem)
+        end
+        nl
+      end
+
+      if @report_mode
+        exit(@problems.empty? ? 0 : 1)
+      end
+
+      confirm_resolutions unless @auto_mode
+      say("Applying resolutions...")
+
+      action_map = @resolutions.inject({}) do |hash, (id, resolution)|
+        hash[id] = resolution["name"]
+        hash
+      end
+
+      status, body = director.apply_resolutions(deployment_name, action_map)
+      resolution_failed(status, body) if status != :done
+      say("Cloudcheck is finished".green)
+    end
+
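
A small illustration of the inject in perform: the director call only needs each problem id mapped to a resolution name, so the richer name/plan records collected from the user are flattened before apply_resolutions is called. The data below is made up:

resolutions = {
  "17" => { "name" => "recreate_vm", "plan" => "Recreate VM using last known apply spec" },
  "18" => { "name" => nil,           "plan" => "apply default resolution" }
}

action_map = resolutions.inject({}) do |hash, (id, resolution)|
  hash[id] = resolution["name"]   # keep only the resolution name per problem id
  hash
end

p action_map  # => {"17"=>"recreate_vm", "18"=>nil}
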
+    private
+
+    def scan_failed(status, response)
+      responses = {
+        :non_trackable => "Unable to track cloud scan progress, " +
+                          "please update your director",
+        :track_timeout => "Timed out while tracking cloud scan progress",
+        :error => "Cloud scan error",
+        :invalid => "Invalid cloud scan request"
+      }
+
+      err(responses[status] || "Cloud scan failed: #{response}")
+    end
+
+    def resolution_failed(status, response)
+      responses = {
+        :non_trackable => "Unable to track problem resolution progress, " +
+                          "please update your director",
+        :track_timeout => "Timed out while tracking problem resolution progress",
+        :error => "Problem resolution error",
+        :invalid => "Invalid problem resolution request"
+      }
+
+      err(responses[status] || "Problem resolution failed: #{response}")
+    end
+
+    def verify_problems
+      err("Invalid problem list format") unless @problems.kind_of?(Enumerable)
+
+      if @problems.empty?
+        say("No problems found".green)
+        quit
+      end
+
+      @problems.each do |problem|
+        unless problem.is_a?(Hash) && problem["id"] && problem["description"] &&
+            problem["resolutions"].kind_of?(Enumerable)
+          err("Invalid problem list format received from director")
+        end
+
+        problem["resolutions"].each do |resolution|
+          if resolution["name"].blank? || resolution["plan"].blank?
+            err("Some problem resolutions received from director " +
+                "have an invalid format")
+          end
+        end
+      end
+    end
+
+    def get_resolution(problem)
+      resolutions = problem["resolutions"]
+
+      resolutions.each_with_index do |resolution, index|
+        say("  #{index+1}. #{resolution["plan"]}")
+      end
+
+      choice = nil
+      loop do
+        choice = ask("Please choose a resolution [1 - #{resolutions.size}]: ")
+        if choice =~ /^\s*\d+\s*$/ &&
+            choice.to_i >= 1 &&
+            choice.to_i <= resolutions.size
+          break
+        end
+        say("Please enter a number between 1 and #{resolutions.size}".red)
+      end
+
+      resolutions[choice.to_i-1] # -1 accounts for 0-based indexing
+    end
+
+    def confirm_resolutions
+      say("Below is the list of resolutions you've provided".yellow)
+      say("Please make sure everything is fine and confirm your changes".yellow)
+      nl
+
+      @problems.each_with_index do |problem, index|
+        description = problem["description"]
+        plan = @resolutions[problem["id"]]["plan"]
+        padding = " " * ((index+1).to_s.size + 4)
+        say("  #{index+1}. #{problem["description"]}")
+        say("#{padding}#{plan.to_s.yellow}")
+        nl
+      end
+
+      # TODO: allow editing resolutions?
+      cancel unless confirmed?("Apply resolutions?")
+    end
+
+    def cancel
+      err("Canceled cloudcheck")
+    end
+
+  end
+end