hammer_cli_import 0.10.21
- checksums.yaml +7 -0
- data/LICENSE +674 -0
- data/README.md +115 -0
- data/channel_data_pretty.json +10316 -0
- data/config/import/config_macros.yml +16 -0
- data/config/import/interview_answers.yml +13 -0
- data/config/import/role_map.yml +10 -0
- data/config/import.yml +2 -0
- data/lib/hammer_cli_import/activationkey.rb +156 -0
- data/lib/hammer_cli_import/all.rb +253 -0
- data/lib/hammer_cli_import/asynctasksreactor.rb +187 -0
- data/lib/hammer_cli_import/autoload.rb +27 -0
- data/lib/hammer_cli_import/base.rb +585 -0
- data/lib/hammer_cli_import/configfile.rb +392 -0
- data/lib/hammer_cli_import/contenthost.rb +243 -0
- data/lib/hammer_cli_import/contentview.rb +198 -0
- data/lib/hammer_cli_import/csvhelper.rb +68 -0
- data/lib/hammer_cli_import/deltahash.rb +86 -0
- data/lib/hammer_cli_import/fixtime.rb +27 -0
- data/lib/hammer_cli_import/hostcollection.rb +52 -0
- data/lib/hammer_cli_import/import.rb +31 -0
- data/lib/hammer_cli_import/importtools.rb +351 -0
- data/lib/hammer_cli_import/organization.rb +110 -0
- data/lib/hammer_cli_import/persistentmap.rb +225 -0
- data/lib/hammer_cli_import/repository.rb +91 -0
- data/lib/hammer_cli_import/repositoryenable.rb +250 -0
- data/lib/hammer_cli_import/templatesnippet.rb +67 -0
- data/lib/hammer_cli_import/user.rb +155 -0
- data/lib/hammer_cli_import/version.rb +25 -0
- data/lib/hammer_cli_import.rb +53 -0
- metadata +117 -0
data/lib/hammer_cli_import/configfile.rb
@@ -0,0 +1,392 @@
#
# Copyright (c) 2014 Red Hat Inc.
#
# This file is part of hammer-cli-import.
#
# hammer-cli-import is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# hammer-cli-import is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hammer-cli-import. If not, see <http://www.gnu.org/licenses/>.
#

require 'hammer_cli'
#require 'hammer_cli_katello'
require 'apipie-bindings'
require 'open3'

module HammerCLIImport
  class ImportCommand
    class ConfigFileImportCommand < BaseCommand
      command_name 'config-file'
      reportname = 'config-files-latest'
      desc "Create puppet-modules from Configuration Channel content (from spacewalk-report #{reportname})."

      option ['--generate-only'], :flag,
             'Create and fill puppet-modules, but DO NOT upload anything',
             :default => false

      option ['--macro-mapping'], 'FILE_NAME',
             'Mapping of Satellite-5 config-file-macros to puppet facts',
             :default => '/etc/hammer/cli.modules.d/import/config_macros.yml'

      option ['--working-directory'], 'FILE_NAME',
             'Location for building puppet modules (will be created if it doesn\'t exist)',
             :default => File.join(File.expand_path('~'), 'puppet_work_dir')

      option ['--answers-file'], 'FILE_NAME',
             'Answers to the puppet-generate-module interview questions',
             :default => '/etc/hammer/cli.modules.d/import/interview_answers.yml' \
      do |answers_file|
        raise ArgumentError, "File #{answers_file} does not exist" unless option_delete? || File.exist?(answers_file)
        answers_file
      end

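      # A sketch of an answers file as consumed above (values hypothetical).
      # Keys line up with @interview_questions below (the trailing 'Y' answers
      # the final confirmation prompt and is filled in by first_time_only);
      # 'description' is spliced into metadata.json after generation, and the
      # literal token '#{module_name}' is replaced per-module:
      #
      #   version: '0.1.0'
      #   author: sat5admin
      #   license: GPL-3.0
      #   summary: 'Configuration channel #{module_name}'
      #   srcrepo: 'https://github.com/Katello/hammer-cli-import'
      #   learnmore: 'https://github.com/Katello/hammer-cli-import'
      #   fileissues: 'https://github.com/Katello/hammer-cli-import/issues'
      #   description: 'Files imported from configuration channel #{module_name}'
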
      csv_columns 'org_id', 'channel', 'channel_id', 'channel_type', 'path', 'file_type', 'file_id',
                  'revision', 'is_binary', 'contents', 'delim_start', 'delim_end', 'username',
                  'groupname', 'filemode', 'symbolic_link', 'selinux_ctx'

      persistent_maps :organizations, :products, :puppet_repositories

      class << self; attr_accessor :interview_questions end
      @interview_questions = %w(version author license summary srcrepo learnmore fileissues Y)

      # Load the macro-mapping and interview-answers ONLY once-per-run
      def first_time_only
        unless option_delete?
          # Load interview-answers
          @interview_answers = YAML.load_file(option_answers_file)
          @interview_answers['Y'] = 'Y'

          # Load macro-mappings
          if File.exist? option_macro_mapping
            @macros = YAML.load_file(option_macro_mapping)
          else
            @macros = {}
            warn "Macro-mapping file #{option_macro_mapping} not found, no puppet-facts will be assigned"
          end

          # Create the puppet-working-directory
          Dir.mkdir option_working_directory unless File.directory? option_working_directory
        end
        return 'loaded'
      end

      # puppet-module-names are username-classname
      # usernames can only be alphanumeric
      # classnames can only be alphanumeric and '_'
      def build_module_name(data)
        owning_org = lookup_entity_in_cache(:organizations,
                                            {'id' => get_translated_id(:organizations, data['org_id'].to_i)})
        org_name = owning_org['name'].gsub(/[^0-9a-zA-Z]*/, '').downcase
        chan_name = data['channel'].gsub(/[^0-9a-zA-Z_]/, '_').downcase
        return org_name + '-' + chan_name
      end

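      # For example (hypothetical names): an org 'Red Hat, Inc.' becomes
      # 'redhatinc' and a channel 'web servers (prod)' becomes
      # 'web_servers__prod_', so build_module_name returns
      # 'redhatinc-web_servers__prod_'.
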
      # Return a mapped puppet-fact for a macro, if there is one
      # Otherwise, leave the macro in place
      #
      # NOTE: rhn.system.net_interface* macros are special - they have the form
      # rhn.system.net_interface.THING(INTERFACE) (eg, rhn.system.net_interface.netmask(eth0))
      # We need to look them up from config_macros.yml as rhn.system.net_interface.THING(eth_device),
      # and in the matching value (if there is one), replace '{NETWORK INTERFACE}' with the
      # specified INTERFACE.
      # Ew.
      # TODO: Make this less hard-coded, then beg for forgiveness.
      def map_macro(macro)
        debug ">>> macro #{macro}"
        if @macros.key? macro
          return @macros[macro]
        elsif /^rhn\.system\.net_interface\.(.*)\((.*)\)/.match(macro)
          # Magic net_interface assumptions - hold onto your hats...
          net_interface_key = "rhn.system.net_interface.#{Regexp.last_match[1]}(eth_device)"
          # If the constructed key can't be found, shrug and move along
          return macro unless @macros.key? net_interface_key
          # We found a key we can use
          puppet_value = @macros[net_interface_key]
          # Bolt if we don't have a value
          return puppet_value if puppet_value.nil?
          # Return the value with the Magic String replaced with what the user specified
          return puppet_value.sub('{NETWORK INTERFACE}', Regexp.last_match[2])
        else
          return macro
        end
      end

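      # A stand-alone sketch of the net_interface special case, assuming a
      # hypothetical config_macros.yml entry:
      #
      #   @macros = {'rhn.system.net_interface.ip_address(eth_device)' =>
      #              'ipaddress_{NETWORK INTERFACE}'}
      #   map_macro 'rhn.system.net_interface.ip_address(eth0)'
      #   # => 'ipaddress_eth0'
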
      # If module 'name' has been generated,
      # throw away its filesystem existence
      def clean_module(name)
        path = File.join(option_working_directory, name)
        debug "Removing #{path}"
        system("rm -rf #{path}")
      end

      include Open3
      # Create a puppet module-template on the filesystem,
      # inside of working-directory
      def generate_module_template_for(name)
        module_name = name
        Dir.chdir(option_working_directory)
        gen_cmd = "puppet module generate #{name}"
        Open3.popen3(gen_cmd) do |stdin, stdout, _stderr|
          stdout.sync = true
          ConfigFileImportCommand.interview_questions.each do |q|
            rd = ''
            until rd.include? '?'
              rd = stdout.readline
              #debug "Read #{rd}"
            end
            answer = @interview_answers[q].gsub('#{module_name}', module_name)
            stdin.puts(answer)
          end
          rd = ''
          begin
            rd = stdout.readline while rd
          rescue EOFError
            debug 'Done reading'
          end
        end

        # Now that we have generated the module, add a 'description' to the
        # metadata.json file found at option_working_dir/<name>/metadata.json
        metadata_path = File.join(File.join(option_working_directory, name), 'metadata.json')
        answer = @interview_answers['description'].gsub('#{module_name}', module_name)
        sed_cmd = "sed -i '\/\"summary\":\/a \\ \\ \"description\": \"#{answer}\",' #{metadata_path}"
        debug "About to issue #{sed_cmd}"
        system sed_cmd
        report_summary :wrote, :puppet_modules
      end

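      # The interview loop above assumes each 'puppet module generate' prompt
      # ends in a line containing '?'. The same pattern against a toy
      # interactive command, for illustration:
      #
      #   Open3.popen3(%q(ruby -e 'puts "version?"; gets; puts "ok"')) do |stdin, stdout, _stderr|
      #     rd = ''
      #     rd = stdout.readline until rd.include? '?'
      #     stdin.puts '0.1.0'
      #   end
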
      def build_puppet_module(module_name)
        module_dir = File.join(option_working_directory, module_name)
        Dir.chdir(module_dir)
        gen_cmd = 'puppet module build'
        Open3.popen3(gen_cmd) do |_stdin, stdout, _stderr|
          rd = ''
          begin
            rd = stdout.readline while rd
            debug rd
          rescue EOFError
            debug 'Done reading'
          end
        end
        return module_dir
      end

      # If we haven't seen this module-name before,
      # arrange to do 'puppet generate module' for it
      def generate_module(module_name)
        return if @modules.key? module_name

        @modules[module_name] = []
        clean_module(module_name)
        generate_module_template_for(module_name)
      end

      def file_data(data)
        # Everybody gets a name, which is 'path' with '/' chgd to '_'
        data['name'] = data['path'].gsub('/', '_')

        # If we're not type='file', done - return data
        return data unless data['file_type'] == 'file'

        # If we're not a binary-file, check for macros
        if data['is_binary'] == 'N'
          sdelim = data['delim_start']
          edelim = data['delim_end']
          cstr = data['contents']
          matched = false
          data['contents'] = cstr.gsub(/(#{Regexp.escape(sdelim)})(.*)(#{Regexp.escape(edelim)})/) do |_match|
            matched = true
            "<%= #{map_macro Regexp.last_match[2].strip} %>"
          end if cstr
          # If we replaced any macros, we're now type='template'
          data['file_type'] = 'template' if matched
        else
          # If we're binary, base64-decode contents
          debug 'decoding'
          data['contents'] = data['contents'].unpack('m').first
        end

        return data
      end

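      # Example: with delim_start '{|', delim_end '|}', and a macro-mapping of
      # 'rhn.system.hostname' => 'fqdn', a non-binary config-file line
      #   ServerName {| rhn.system.hostname |}
      # comes back from file_data as
      #   ServerName <%= fqdn %>
      # and the row's file_type is promoted from 'file' to 'template'.
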
      def mk_product_hash(data, product_name)
        {
          :name => product_name,
          :organization_id => get_translated_id(:organizations, data['org_id'].to_i)
        }
      end

      def mk_repo_hash(data, product_id)
        {
          :name => data['channel'],
          :product_id => product_id,
          :content_type => 'puppet'
        }
      end

      # Store all files into a hash keyed by module-name
      def import_single_row(data)
        @first_time ||= first_time_only
        @modules ||= {}

        mname = build_module_name(data)
        generate_module(mname)
        file_hash = file_data(data)
        debug "name #{data['name']}, path #{file_hash['path']}, type #{file_hash['file_type']}"
        @modules[mname] << file_hash
      end

      def delete_single_row(data)
        # repo maps to channel_id
        composite_id = [data['org_id'].to_i, data['channel_id'].to_i]
        unless @pm[:puppet_repositories][composite_id]
          info "#{to_singular(:puppet_repositories).capitalize} with id #{composite_id} wasn't imported. Skipping deletion."
          return
        end

        # find out product id
        repo_id = get_translated_id(:puppet_repositories, composite_id)
        product_id = lookup_entity(:puppet_repositories, repo_id)['product']['id']
        # delete repo
        delete_entity(:puppet_repositories, composite_id)
        # delete its product, if it's not associated with any other repositories
        product = lookup_entity(:products, product_id, true)

        delete_entity_by_import_id(:products, product_id) if product['repository_count'] == 0
      end

      def write_file(dir, name, content)
        File.open(File.join(dir, name), 'w') do |f|
          f.syswrite(content)
        end
      end

      # For each module, write file-content to <module>/files or <module>/templates,
      # and fill <module>/manifests/init.pp with appropriate metadata
      def export_files
        progress 'Writing converted files'
        @modules.each do |mname, files|
          info "Found module #{mname}"
          dsl = ''

          module_dir = File.join(option_working_directory, mname)
          fdir = File.join(module_dir, 'files')
          Dir.mkdir(fdir)
          tdir = File.join(module_dir, 'templates')
          Dir.mkdir(tdir)
          class_name = mname.partition('-').last

          files.each do |a_file|
            debug "...file #{a_file['name']}"

            dsl += "file { '#{a_file['name']}':\n"
            dsl += "  path => '#{a_file['path']}',\n"

            case a_file['file_type']
            when 'file'
              write_file(fdir, a_file['name'], a_file['contents'])
              dsl += "  source => 'puppet:///modules/#{mname}/#{a_file['name']}',\n"
              dsl += "  group => '#{a_file['groupname']}',\n"
              dsl += "  owner => '#{a_file['username']}',\n"
              dsl += "  ensure => 'file',\n"
              dsl += "  mode => '#{a_file['filemode']}',\n"
              dsl += "}\n\n"
            when 'template'
              write_file(tdir, a_file['name'] + '.erb', a_file['contents'])
              dsl += "  group => '#{a_file['groupname']}',\n"
              dsl += "  owner => '#{a_file['username']}',\n"
              dsl += "  ensure => 'file',\n"
              dsl += "  mode => '#{a_file['filemode']}',\n"
              dsl += "  content => template('#{mname}/#{a_file['name']}.erb'),\n"
              dsl += "}\n\n"
            when 'directory'
              dsl += "  group => '#{a_file['groupname']}',\n"
              dsl += "  owner => '#{a_file['username']}',\n"
              dsl += "  ensure => 'directory',\n"
              dsl += "  mode => '#{a_file['filemode']}',\n"
              dsl += "}\n\n"
            when 'symlink'
              dsl += "  target => '#{a_file['symbolic_link']}',\n"
              dsl += "  ensure => 'link',\n"
              dsl += "}\n\n"
            end
            report_summary :wrote, :puppet_files
          end
          export_manifest(mname, class_name, dsl)
        end
      end

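      # For a 'file'-type row with path '/etc/motd', export_files writes the
      # contents to <module>/files/_etc_motd and appends a resource like
      # (module name hypothetical, attribute values from the CSV row):
      #
      #   file { '_etc_motd':
      #     path => '/etc/motd',
      #     source => 'puppet:///modules/redhatinc-base_channel/_etc_motd',
      #     group => 'root',
      #     owner => 'root',
      #     ensure => 'file',
      #     mode => '644',
      #   }
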
      def export_manifest(mname, channel_name, dsl)
        debug "Exporting manifest #{option_working_directory}/#{mname}/manifests/init.pp"
        module_dir = File.join(option_working_directory, mname)
        mdir = File.join(module_dir, 'manifests')
        File.open(File.join(mdir, 'init.pp'), 'w') do |f|
          f.puts "class #{channel_name} {"
          f.puts dsl
          f.puts '}'
        end
      end

      # We're going to build a product-per-org, with a repo-per-channel
      # and upload the built-puppet-module, one-per-repo
      #
      # We're using the hammer-repository-upload subcommand to do this,
      # because the direct-API-route is 'touchy' and repo-upload already
      # does all the Right Stuff
      def build_and_upload
        progress 'Building and uploading puppet modules'
        prod_name = 'Imported Satellite5 Configuration Files'
        @modules.each do |mname, files|
          data = files[0]

          # Build the puppet-module for upload
          module_dir = build_puppet_module(mname)

          # Build/find the product
          product_hash = mk_product_hash(data, prod_name)
          composite_id = [data['org_id'].to_i, prod_name]
          product_id = create_entity(:products, product_hash, composite_id)['id']

          # Build the repo
          repo_hash = mk_repo_hash data, product_id
          # Try creating a repo in the product, skip if it fails
          repo = create_entity(:puppet_repositories, repo_hash,
                               [data['org_id'].to_i, data['channel_id'].to_i])

          # Find the built-module .tar.gz
          built_module_path = File.join(File.join(module_dir, 'pkg'),
                                        "#{mname}-#{@interview_answers['version']}.tar.gz")
          info "Uploading #{built_module_path}"

          # Ask hammer repository upload to Do Its Thing
          require 'hammer_cli_katello/repository'
          ucc = HammerCLIKatello::Repository::UploadContentCommand.new('', context)
          rc = ucc.run(%W(--id #{repo['id']} --path #{built_module_path}))

          # If hammer fails us, record it and move on
          if rc == 0
            report_summary :uploaded, :puppet_modules
          else
            report_summary :failed, :puppet_modules
          end
        end
      end

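      # Typical runs (CSV path hypothetical):
      #   hammer import config-file --csv-file config-files-latest.csv --generate-only
      #   hammer import config-file --csv-file config-files-latest.csv
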
      def post_import(_csv)
        return unless @modules
        export_files
        build_and_upload unless option_generate_only?
      end
    end
  end
end
# vim: autoindent tabstop=2 shiftwidth=2 expandtab softtabstop=2 filetype=ruby
data/lib/hammer_cli_import/contenthost.rb
@@ -0,0 +1,243 @@
#
# Copyright (c) 2014 Red Hat Inc.
#
# This file is part of hammer-cli-import.
#
# hammer-cli-import is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# hammer-cli-import is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with hammer-cli-import. If not, see <http://www.gnu.org/licenses/>.
#

require 'hammer_cli'
require 'set'
require 'socket'

module HammerCLIImport
  class ImportCommand
    class ContentHostImportCommand < BaseCommand
      include ImportTools::ContentView::Include

      command_name 'content-host'
      reportname = 'system-profiles'
      desc "Import Content Hosts (from spacewalk-report #{reportname})."

      csv_columns 'server_id', 'profile_name', 'hostname', 'description',
                  'organization_id', 'architecture', 'release',
                  'base_channel_id', 'child_channel_id', 'system_group_id',
                  'virtual_host', 'virtual_guest',
                  'base_channel_label'

      persistent_maps :organizations, :content_views, :redhat_content_views, :system_content_views,
                      :host_collections, :systems

      option ['--export-directory'], 'DIR_PATH', 'Directory to export rpmbuild structure'

      validate_options do
        any(:option_export_directory, :option_delete).required
      end

      def _build_composite_cv_label(data, cvs)
        label = ''
        label += data['base_channel_label'] + '_' if data['base_channel_label']
        label += cvs.sort.join('_')
        label.gsub!(/[^0-9a-z_-]/i, '_')
        return label
      end

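      # For example, a base channel label 'rhel-x86_64-server-6' plus content
      # views [12, 7] yields 'rhel-x86_64-server-6_7_12'; any character outside
      # [0-9a-z_-] is squashed to '_'.
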
      def mk_profile_hash(data, cv_id)
        hcollections = split_multival(data['system_group_id']).collect do |sg_id|
          get_translated_id(:host_collections, sg_id)
        end
        {
          :name => data['profile_name'],
          :description => "#{data['description']}\nsat5_system_id: #{data['server_id']}",
          :facts => {'release' => data['release'], 'architecture' => data['architecture']},
          :type => 'system',
          # :guest_ids => [],
          :organization_id => get_translated_id(:organizations, data['organization_id'].to_i),
          :content_view_id => cv_id,
          :host_collection_ids => hcollections
        }
      end

      def import_single_row(data)
        @vguests ||= {}
        @map ||= Set.new
        cvs = (split_multival(data['base_channel_id']) + split_multival(data['child_channel_id'])).collect do |channel_id|
          begin
            get_translated_id(:redhat_content_views, [data['organization_id'].to_i, channel_id])
          rescue HammerCLIImport::MissingObjectError
            get_translated_id(:content_views, channel_id)
          end
        end
        cv_id = create_composite_content_view(
          :system_content_views,
          get_translated_id(:organizations, data['organization_id'].to_i),
          _build_composite_cv_label(data, cvs),
          'Composite content view for content hosts',
          cvs)
        profile = mk_profile_hash data, cv_id
        c_host = create_entity(:systems, profile, data['server_id'].to_i)
        # store processed system profiles to a set according to the organization
        @map << {
          :org_id => data['organization_id'].to_i,
          :system_id => data['server_id'].to_i,
          :uuid => c_host['uuid']}
        # associate virtual guests in post_import to make sure all the guests
        # are already imported (and known to sat6)
        @vguests[data['server_id'].to_i] = split_multival(data['virtual_guest']) if data['virtual_host'] == data['server_id']
        debug "vguests: #{@vguests[data['server_id'].to_i].inspect}" if @vguests[data['server_id'].to_i]
      end

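      # Each imported profile contributes one record to @map, which post_import
      # flattens into the transition CSV, e.g. (values hypothetical):
      #   {:org_id => 1, :system_id => 1000010000, :uuid => 'c1a2b3d4-...'}
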
      def post_import(_file)
        @vguests.each do |system_id, guest_ids|
          handle_missing_and_supress "setting guests for #{system_id}" do
            uuid = get_translated_id(:systems, system_id)
            vguest_uuids = guest_ids.collect do |id|
              get_translated_id(:systems, id)
            end if guest_ids
            debug "Setting virtual guests for #{uuid}: #{vguest_uuids.inspect}"
            update_entity(
              :systems,
              uuid,
              {:guest_ids => vguest_uuids}
            ) if uuid && vguest_uuids
          end
        end
        return if @map.empty?
        # create rpmbuild directories
        create_rpmbuild_structure
        # create mapping files
        version = '0.0.1'
        now = Time.now
        rpm_name = "system-profile-transition-#{Socket.gethostname}-#{now.to_i}"
        tar_name = "#{rpm_name}-#{version}"
        dir_name = File.join(option_export_directory, tar_name)
        # create SOURCES id_to_uuid.map file
        FileUtils.rm_rf(dir_name) if File.directory?(dir_name)
        Dir.mkdir dir_name
        CSVHelper.csv_write_hashes(
          File.join(dir_name, "system-id_to_uuid-#{now.to_i}.map"),
          [:system_id, :uuid, :org_id],
          @map.sort_by { |x| [x[:org_id], x[:system_id], x[:uuid]] })

        sources_dir = File.join(option_export_directory, 'SOURCES')
        # debug("tar -C #{option_export_directory} -czf #{sources_dir}/#{tar_name}.tar.gz #{tar_name}")
        system("tar -C #{option_export_directory} -czf #{sources_dir}/#{tar_name}.tar.gz #{tar_name}")
        FileUtils.rm_rf(dir_name)
        # store spec file
        File.open(
          File.join(option_export_directory, 'SPECS', "#{tar_name}.spec"), 'w') do |file|
          file.write(rpm_spec(rpm_name, version, now))
        end
        abs_export_directory = File.expand_path(option_export_directory)
        progress ''
        progress 'To build the system-profile-transition rpm, run:'
        progress ''
        progress "\tcd #{abs_export_directory}/SPECS && "
        progress "\t rpmbuild -ba --define \"_topdir #{abs_export_directory}\" #{tar_name}.spec"
        progress ''
        progress "Then find your #{rpm_name} package"
        progress "\tin #{File.join(abs_export_directory, 'RPMS/noarch/')} directory."
      end

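      # The map file packaged under SOURCES is plain CSV, e.g. (values
      # hypothetical):
      #   system_id,uuid,org_id
      #   1000010000,c1a2b3d4-e5f6-...,1
      #   1000010001,9f8e7d6c-5b4a-...,1
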
      def delete_single_row(data)
        @composite_cvs ||= Set.new
        profile_id = data['server_id'].to_i
        unless @pm[:systems][profile_id]
          info "#{to_singular(:systems).capitalize} with id #{profile_id} wasn't imported. Skipping deletion."
          return
        end
        profile = get_cache(:systems)[@pm[:systems][profile_id]]
        cv = get_cache(:content_views)[profile['content_view_id']]
        @composite_cvs << cv['id'] if cv['composite']
        delete_entity_by_import_id(:systems, get_translated_id(:systems, profile_id), 'uuid')
      end

      def post_delete(_file)
        # let's 'try' to delete the system content views -
        # there's no way to find out whether some other content hosts are associated with them
        @composite_cvs.each do |cv_id|
          silently do
            delete_content_view(cv_id, :system_content_views)
          end
        end
      end

      def _create_dir(dir_name)
        Dir.mkdir(dir_name) unless File.directory?(dir_name)
      end

      def create_rpmbuild_structure
        _create_dir option_export_directory
        _create_dir File.join(option_export_directory, 'SPECS')
        _create_dir File.join(option_export_directory, 'SOURCES')
      end

      def rpm_spec(rpm_name, version, date)
        "
Name: #{rpm_name}
Version: #{version}
Release: 1
Summary: System profile transition data

Group: Applications/Productivity
License: GPLv3
URL: https://github.com/Katello/hammer-cli-import
Source0: #{rpm_name}-#{version}.tar.gz
BuildRoot: %(mktemp -ud %{_tmppath}/%{name}-%{version}-%{release}-XXXXXX)
BuildArch: noarch

%define _binary_filedigest_algorithm 1
%define _binary_payload w9.gzdio

%define debug_package %{nil}

%description
This package contains mapping information describing how system profiles managed by Red Hat Satellite 5
are translated to content hosts on Red Hat Satellite 6.

%prep
%setup -q


%build


%install
install -m 755 -d $RPM_BUILD_ROOT/%{_datarootdir}/rhn/transition
install -m 644 system-id_to_uuid-#{date.to_i}.map $RPM_BUILD_ROOT/%{_datarootdir}/rhn/transition/


%post
# run register here

%clean
rm -rf %{buildroot}


%files
%defattr(-,root,root,-)
/usr/share/rhn/transition/
/usr/share/rhn/transition/system-id_to_uuid-#{date.to_i}.map
%doc


%changelog
* #{date.strftime('%a %b %e %Y')} root <root@localhost> initial package build
- using system profile to content host mapping data
"
      end
    end
  end
end
# vim: autoindent tabstop=2 shiftwidth=2 expandtab softtabstop=2 filetype=ruby
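# Typical runs of the content-host import above (paths hypothetical):
#   hammer import content-host --csv-file system-profiles.csv --export-directory ./transition-rpm
#   hammer import content-host --csv-file system-profiles.csv --delete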