tpkg 1.16.2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/lib/tpkg.rb ADDED
@@ -0,0 +1,3966 @@
1
+ ##############################################################################
2
+ # tpkg package management system library
3
+ # Copyright 2009, AT&T Interactive
4
+ # License: MIT (http://www.opensource.org/licenses/mit-license.php)
5
+ ##############################################################################
6
+
7
+ STDOUT.sync = STDERR.sync = true # All outputs/prompts to the kernel ASAP
8
+
9
+ # When we build the tpkg packages we put this file in
10
+ # /usr/lib/ruby/site_ruby/1.8/ or similar and then the rest of the ruby
11
+ # files (versiontype.rb, deployer.rb, etc) into
12
+ # /usr/lib/ruby/site_ruby/1.8/tpkg/
13
+ # We need to tell Ruby to search that tpkg subdirectory.
14
+ # The alternative is to specify the subdirectory in the require
15
+ # (require 'tpkg/versiontype' for example), but tpkg is also the name
16
+ # of the executable script so we can't create a subdirectory here named
17
+ # tpkg. If we put the subdir in the require lines then users couldn't
18
+ # run tpkg directly from an svn working copy.
19
+ tpkglibdir = File.join(File.dirname(__FILE__), 'tpkg')
20
+ if File.directory?(tpkglibdir)
21
+ $:.unshift(tpkglibdir)
22
+ end
23
+
24
+ begin
25
+ # Try loading facter w/o gems first so that we don't introduce a
26
+ # dependency on gems if it is not needed.
27
+ require 'facter' # Facter
28
+ rescue LoadError
29
+ require 'rubygems'
30
+ require 'facter'
31
+ end
32
+ require 'digest/sha2' # Digest::SHA256#hexdigest, etc.
33
+ require 'uri' # URI
34
+ require 'net/http' # Net::HTTP
35
+ require 'net/https' # Net::HTTP#use_ssl, etc.
36
+ require 'time' # Time#httpdate
37
+ require 'rexml/document' # REXML::Document
38
+ require 'fileutils' # FileUtils.cp, rm, etc.
39
+ require 'tempfile' # Tempfile
40
+ require 'find' # Find
41
+ require 'etc' # Etc.getpwnam, getgrnam
42
+ require 'openssl' # OpenSSL
43
+ require 'open3' # Open3
44
+ require 'versiontype' # Version
45
+ require 'deployer'
46
+ require 'set'
47
+ require 'metadata'
48
+
49
+ class Tpkg
50
+
51
+ VERSION = '1.16.2'
52
+ CONFIGDIR = '/etc'
53
+
54
+ POSTINSTALL_ERR = 2
55
+ POSTREMOVE_ERR = 3
56
+ INITSCRIPT_ERR = 4
57
+
58
+ attr_reader :installed_directory
59
+
60
+ #
61
+ # Class methods
62
+ #
63
+
64
+ @@debug = false
65
+ def self.set_debug(debug)
66
+ @@debug = debug
67
+ end
68
+
69
+ @@prompt = true
70
+ def self.set_prompt(prompt)
71
+ @@prompt = prompt
72
+ end
73
+
74
+ # Find GNU tar or bsdtar in ENV['PATH']
75
+ # Raises an exception if a suitable tar cannot be found
76
+ @@tar = nil
77
+ TARNAMES = ['tar', 'gtar', 'gnutar', 'bsdtar']
78
+ def self.find_tar
79
+ if !@@tar
80
+ catch :tar_found do
81
+ ENV['PATH'].split(':').each do |path|
82
+ TARNAMES.each do |tarname|
83
+ if File.executable?(File.join(path, tarname))
84
+ IO.popen("#{File.join(path, tarname)} --version 2>/dev/null") do |pipe|
85
+ pipe.each_line do |line|
86
+ if line.include?('GNU tar') || line.include?('bsdtar')
87
+ @@tar = File.join(path, tarname)
88
+ throw :tar_found
89
+ end
90
+ end
91
+ end
92
+ end
93
+ end
94
+ end
95
+ # Raise an exception if we didn't find a suitable tar
96
+ raise "Unable to find GNU tar or bsdtar in PATH"
97
+ end
98
+ end
99
+ @@tar.dup
100
+ end
101
+ def self.clear_cached_tar
102
+ @@tar = nil
103
+ end
104
+
105
+ # Encrypts the given file in-place (the plaintext file is replaced by the
106
+ # encrypted file). The resulting file is compatible with openssl's 'enc'
107
+ # utility.
108
+ # Algorithm from http://www.ruby-forum.com/topic/101936#225585
109
+ MAGIC = 'Salted__'
110
+ SALT_LEN = 8
111
+ @@passphrase = nil
112
+ def self.encrypt(pkgname, filename, passphrase, cipher='aes-256-cbc')
113
+ # passphrase can be a callback Proc, call it if that's the case
114
+ pass = nil
115
+ if @@passphrase
116
+ pass = @@passphrase
117
+ elsif passphrase.kind_of?(Proc)
118
+ pass = passphrase.call(pkgname)
119
+ @@passphrase = pass
120
+ else
121
+ pass = passphrase
122
+ end
123
+
124
+ salt = OpenSSL::Random::random_bytes(SALT_LEN)
125
+ c = OpenSSL::Cipher::Cipher.new(cipher)
126
+ c.encrypt
127
+ c.pkcs5_keyivgen(pass, salt, 1)
128
+ tmpfile = Tempfile.new(File.basename(filename), File.dirname(filename))
129
+ # Match permissions and ownership of plaintext file
130
+ st = File.stat(filename)
131
+ File.chmod(st.mode & 07777, tmpfile.path)
132
+ begin
133
+ File.chown(st.uid, st.gid, tmpfile.path)
134
+ rescue Errno::EPERM
135
+ raise if Process.euid == 0
136
+ end
137
+ tmpfile.write(MAGIC)
138
+ tmpfile.write(salt)
139
+ tmpfile.write(c.update(IO.read(filename)) + c.final)
140
+ tmpfile.close
141
+ File.rename(tmpfile.path, filename)
142
+ end
143
+ # Decrypt the given file in-place.
144
+ def self.decrypt(pkgname, filename, passphrase, cipher='aes-256-cbc')
145
+ # passphrase can be a callback Proc, call it if that's the case
146
+ pass = nil
147
+ if @@passphrase
148
+ pass = @@passphrase
149
+ elsif passphrase.kind_of?(Proc)
150
+ pass = passphrase.call(pkgname)
151
+ @@passphrase = pass
152
+ else
153
+ pass = passphrase
154
+ end
155
+
156
+ file = File.open(filename)
157
+ if (buf = file.read(MAGIC.length)) != MAGIC
158
+ raise "Unrecognized encrypted file #{filename}"
159
+ end
160
+ salt = file.read(SALT_LEN)
161
+ c = OpenSSL::Cipher::Cipher.new(cipher)
162
+ c.decrypt
163
+ c.pkcs5_keyivgen(pass, salt, 1)
164
+ tmpfile = Tempfile.new(File.basename(filename), File.dirname(filename))
165
+ # Match permissions and ownership of encrypted file
166
+ st = File.stat(filename)
167
+ File.chmod(st.mode & 07777, tmpfile.path)
168
+ begin
169
+ File.chown(st.uid, st.gid, tmpfile.path)
170
+ rescue Errno::EPERM
171
+ raise if Process.euid == 0
172
+ end
173
+ tmpfile.write(c.update(file.read) + c.final)
174
+ tmpfile.close
175
+ File.rename(tmpfile.path, filename)
176
+ end
177
+ def self.verify_precrypt_file(filename)
178
+ # This currently just verifies that the file seems to start with the
179
+ # right bits. Any further verification would require the passphrase
180
+ # and cipher so we could decrypt the file, but that would preclude
181
+ # folks from including precrypt files for which they don't have the
182
+ # passphrase in a package. In some environments it might be desirable
183
+ # for folks to be able to build the package even if they couldn't
184
+ # install it.
185
+ file = File.open(filename)
186
+ if (buf = file.read(MAGIC.length)) != MAGIC
187
+ raise "Unrecognized encrypted file #{filename}"
188
+ end
189
+ true
190
+ end
191
+
192
+ # Makes a package from a directory containing the files to put into the package
193
+ REQUIRED_FIELDS = ['name', 'version', 'maintainer']
194
+ def self.make_package(pkgsrcdir, passphrase=nil)
195
+ pkgfile = nil
196
+
197
+ # Make a working directory
198
+ workdir = nil
199
+ # dirname('.') returns '.', which screws things up. So in cases
200
+ # where the user passed us a directory that doesn't have enough
201
+ # parts that we can get the parent directory we use a working
202
+ # directory in the system's temp area. As an alternative we could
203
+ # use Pathname.realpath to convert whatever the user passed us into
204
+ # an absolute path.
205
+ if File.dirname(pkgsrcdir) == pkgsrcdir
206
+ workdir = tempdir('tpkg')
207
+ else
208
+ workdir = tempdir('tpkg', File.dirname(pkgsrcdir))
209
+ end
210
+
211
+ begin
212
+ # Make the 'tpkg' directory for storing the package contents
213
+ tpkgdir = File.join(workdir, 'tpkg')
214
+ Dir.mkdir(tpkgdir)
215
+
216
+ # A package really shouldn't be partially relocatable, warn the user if
217
+ # they're creating such a scourge.
218
+ if (File.exist?(File.join(pkgsrcdir, 'root')) && File.exist?(File.join(pkgsrcdir, 'reloc')))
219
+ warn 'Warning: Your source directory should contain either a "root" or "reloc" directory, but not both.'
220
+ end
221
+
222
+ # Copy the package contents into that directory
223
+ # I tried to use FileUtils.cp_r but it doesn't handle symlinks properly
224
+ # And on further reflection it makes sense to only have one chunk of
225
+ # code (tar) ever touch the user's files.
226
+ system("#{find_tar} -C #{pkgsrcdir} -cf - . | #{find_tar} -C #{tpkgdir} -xpf -") || raise("Package content copy failed")
227
+
228
+ if File.exists?(File.join(tpkgdir, 'tpkg.yml'))
229
+ metadata_text = File.read(File.join(tpkgdir, 'tpkg.yml'))
230
+ metadata = Metadata.new(metadata_text, 'yml')
231
+ elsif File.exists?(File.join(tpkgdir, 'tpkg.xml'))
232
+ metadata_text = File.read(File.join(tpkgdir, 'tpkg.xml'))
233
+ metadata = Metadata.new(metadata_text, 'xml')
234
+ else
235
+ raise 'Your source directory does not contain the metadata configuration file.'
236
+ end
237
+
238
+ metadata.verify_required_fields
239
+
240
+ # file_metadata.yml holds information about the files that are installed
241
+ # by the package. For example, checksum, path, relocatable or not, etc.
242
+ File.open(File.join(tpkgdir, "file_metadata.bin"), "w") do |file|
243
+ filemetadata = get_filemetadata_from_directory(tpkgdir)
244
+ Marshal::dump(filemetadata.hash, file)
245
+ # YAML::dump(filemetadata.hash, file)
246
+ end
247
+
248
+ # Check that all the files specified in the metadata config file are present
249
+ metadata[:files][:files].each do |tpkgfile|
250
+ tpkg_path = tpkgfile[:path]
251
+ working_path = nil
252
+ if tpkg_path[0,1] == File::SEPARATOR
253
+ working_path = File.join(tpkgdir, 'root', tpkg_path)
254
+ else
255
+ working_path = File.join(tpkgdir, 'reloc', tpkg_path)
256
+ end
257
+ # Raise an exception if any files listed in tpkg.yml can't be found
258
+ if !File.exist?(working_path) && !File.symlink?(working_path)
259
+ raise "File #{tpkg_path} referenced in tpkg.yml but not found"
260
+ end
261
+
262
+ # Encrypt any files marked for encryption
263
+ if tpkgfile[:encrypt]
264
+ if tpkgfile[:encrypt] == 'precrypt'
265
+ verify_precrypt_file(working_path)
266
+ else
267
+ if passphrase.nil?
268
+ raise "Package requires encryption but supplied passphrase is nil"
269
+ end
270
+ encrypt(metadata[:name], working_path, passphrase)
271
+ end
272
+ end
273
+ end unless metadata[:files].nil? or metadata[:files][:files].nil?
274
+
275
+ package_filename = metadata.generate_package_filename
276
+ package_directory = File.join(workdir, package_filename)
277
+ Dir.mkdir(package_directory)
278
+ pkgfile = File.join(File.dirname(pkgsrcdir), package_filename + '.tpkg')
279
+ if File.exist?(pkgfile) || File.symlink?(pkgfile)
280
+ if @@prompt
281
+ print "Package file #{pkgfile} already exists, overwrite? [y/N]"
282
+ response = $stdin.gets
283
+ if response !~ /^y/i
284
+ return
285
+ end
286
+ end
287
+ File.delete(pkgfile)
288
+ end
289
+
290
+ # Tar up the tpkg directory
291
+ tpkgfile = File.join(package_directory, 'tpkg.tar')
292
+ system("#{find_tar} -C #{workdir} -cf #{tpkgfile} tpkg") || raise("tpkg.tar creation failed")
293
+
294
+ # Checksum the tarball
295
+ # Older Ruby versions don't support this
296
+ # digest = Digest::SHA256.file(tpkgfile).hexdigest
297
+ digest = Digest::SHA256.hexdigest(File.read(tpkgfile))
298
+
299
+ # Create checksum.xml
300
+ File.open(File.join(package_directory, 'checksum.xml'), 'w') do |csx|
301
+ csx.puts('<tpkg_checksums>')
302
+ csx.puts(' <checksum>')
303
+ csx.puts(' <algorithm>SHA256</algorithm>')
304
+ csx.puts(" <digest>#{digest}</digest>")
305
+ csx.puts(' </checksum>')
306
+ csx.puts('</tpkg_checksums>')
307
+ end
308
+
309
+ # Tar up checksum.xml and the main tarball
310
+ system("#{find_tar} -C #{workdir} -cf #{pkgfile} #{package_filename}") || raise("Final package creation failed")
311
+ ensure
312
+ # Remove our working directory
313
+ FileUtils.rm_rf(workdir)
314
+ end
315
+
316
+ # Return the filename of the package
317
+ pkgfile
318
+ end
319
+
320
+ def self.package_toplevel_directory(package_file)
321
+ # This assumes the first entry in the tarball is the top level directory.
322
+ # I think that is a safe assumption.
323
+ toplevel = nil
324
+ # FIXME: This is so lame, to read the whole package to get the
325
+ # first filename. Blech.
326
+ IO.popen("#{find_tar} -tf #{package_file}") do |pipe|
327
+ toplevel = pipe.gets.chomp
328
+ # Avoid SIGPIPE; if we don't sink the rest of the output from tar
329
+ # then tar ends up getting SIGPIPE when it tries to write to the
330
+ # closed pipe and exits with error, which causes us to throw an
331
+ # exception down below here when we check the exit status.
332
+ pipe.read
333
+ end
334
+ if !$?.success?
335
+ raise "Error reading top level directory from #{package_file}"
336
+ end
337
+ # Strip off the trailing slash
338
+ toplevel.sub!(Regexp.new("#{File::SEPARATOR}$"), '')
339
+ if toplevel.include?(File::SEPARATOR)
340
+ raise "Package directory structure of #{package_file} unexpected, top level is more than one directory deep"
341
+ end
342
+ toplevel
343
+ end
344
+
345
+ def self.get_filemetadata_from_directory(tpkgdir)
346
+ filemetadata = {}
347
+ root_dir = File.join(tpkgdir, "root")
348
+ reloc_dir = File.join(tpkgdir, "reloc")
349
+ files = []
350
+
351
+ Find.find(root_dir, reloc_dir) do |f|
352
+ next if !File.exist?(f)
353
+ relocatable = false
354
+
355
+ # check if it's from root dir or reloc dir
356
+ if f =~ /^#{root_dir}/
357
+ short_fn = f[root_dir.length ..-1]
358
+ else
359
+ short_fn = f[reloc_dir.length + 1..-1]
360
+ relocatable = true
361
+ end
362
+
363
+ next if short_fn.nil? or short_fn.empty?
364
+
365
+ file = {}
366
+ file[:path] = short_fn
367
+ file[:relocatable] = relocatable
368
+
369
+ # only do checksum for file
370
+ if File.file?(f)
371
+ digest = Digest::SHA256.hexdigest(File.read(f))
372
+ file[:checksum] = {:algorithm => "SHA256", :digests => [{:value => digest}]}
373
+ end
374
+ files << file
375
+ end
376
+ filemetadata['files'] = files
377
+ #return FileMetadata.new(YAML::dump(filemetadata),'yml')
378
+ return FileMetadata.new(Marshal::dump(filemetadata),'bin')
379
+ end
380
+
381
+ def self.get_xml_filemetadata_from_directory(tpkgdir)
382
+ filemetadata_xml = REXML::Document.new
383
+ filemetadata_xml << REXML::Element.new('files')
384
+
385
+ # create file_metadata.xml that stores the list of files and their checksums;
386
+ # it will be used later on to check whether installed files have been changed
387
+ root_dir = File.join(tpkgdir, "root")
388
+ reloc_dir = File.join(tpkgdir, "reloc")
389
+ Find.find(root_dir, reloc_dir) do |f|
390
+ next if !File.exist?(f)
391
+ relocatable = "false"
392
+
393
+ # check if it's from root dir or reloc dir
394
+ if f =~ /^#{root_dir}/
395
+ short_fn = f[root_dir.length ..-1]
396
+ else
397
+ short_fn = f[reloc_dir.length + 1..-1]
398
+ relocatable = "true"
399
+ end
400
+
401
+ next if short_fn.nil? or short_fn.empty?
402
+
403
+ file_ele = filemetadata_xml.root.add_element("file", {"relocatable" => relocatable})
404
+ path_ele = file_ele.add_element("path")
405
+ path_ele.add_text(short_fn)
406
+
407
+ # only do checksum for file
408
+ if File.file?(f)
409
+ # this doesn't work on older Ruby versions
410
+ #digest = Digest::SHA256.file(f).hexdigest
411
+ digest = Digest::SHA256.hexdigest(File.read(f))
412
+ chksum_ele = file_ele.add_element("checksum")
413
+ alg_ele = chksum_ele.add_element("algorithm")
414
+ alg_ele.add_text("SHA256")
415
+ digest_ele = chksum_ele.add_element("digest")
416
+ digest_ele.add_text(digest)
417
+ end
418
+ end
419
+ return filemetadata_xml
420
+ end
421
+
422
+ def self.verify_package_checksum(package_file)
423
+ topleveldir = package_toplevel_directory(package_file)
424
+ # Extract checksum.xml from the package
425
+ checksum_xml = nil
426
+ IO.popen("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'checksum.xml')}") do |pipe|
427
+ checksum_xml = REXML::Document.new(pipe.read)
428
+ end
429
+ if !$?.success?
430
+ raise "Error extracting checksum.xml from #{package_file}"
431
+ end
432
+
433
+ # Verify checksum.xml
434
+ checksum_xml.elements.each('/tpkg_checksums/checksum') do |checksum|
435
+ digest = nil
436
+ algorithm = checksum.elements['algorithm'].text
437
+ digest_from_package = checksum.elements['digest'].text
438
+ case algorithm
439
+ when 'SHA224'
440
+ digest = Digest::SHA224.new
441
+ when 'SHA256'
442
+ digest = Digest::SHA256.new
443
+ when 'SHA384'
444
+ digest = Digest::SHA384.new
445
+ when 'SHA512'
446
+ digest = Digest::SHA512.new
447
+ else
448
+ raise("Unrecognized checksum algorithm #{checksum.elements['algorithm']}")
449
+ end
450
+ # Extract tpkg.tar from the package and digest it
451
+ IO.popen("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')}") do |pipe|
452
+ # Package files can be quite large, so we digest the package in
453
+ # chunks. A survey of the Internet turns up someone who tested
454
+ # various chunk sizes on various platforms and found 4k to be
455
+ # consistently the best. I'm too lazy to do my own testing.
456
+ # http://groups.google.com/group/comp.lang.ruby/browse_thread/thread/721d304fc8a5cc71
457
+ while buf = pipe.read(4096)
458
+ digest << buf
459
+ end
460
+ end
461
+ if !$?.success?
462
+ raise "Error extracting tpkg.tar from #{package_file}"
463
+ end
464
+ if digest != digest_from_package
465
+ raise "Checksum mismatch for #{algorithm}, #{digest} != #{digest_from_package}"
466
+ end
467
+ end
468
+ end
469
+
470
+ # Extracts and returns the metadata from a package file
471
+ def self.metadata_from_package(package_file)
472
+ topleveldir = package_toplevel_directory(package_file)
473
+ # Verify checksum
474
+ verify_package_checksum(package_file)
475
+ # Extract and parse tpkg.xml
476
+ metadata = nil
477
+ ['yml','xml'].each do |format|
478
+ file = File.join('tpkg', "tpkg.#{format}")
479
+
480
+ # use popen3 instead of popen because popen displays stderr when there's an error such as
482
+ # tpkg.yml not being there, which is something we want to ignore since old tpkg packages
483
+ # don't have a tpkg.yml file
483
+ stdin, stdout, stderr = Open3.popen3("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{find_tar} -xf - -O #{file}")
484
+ filecontent = stdout.read
485
+ if filecontent.nil? or filecontent.empty?
486
+ next
487
+ else
488
+ metadata = Metadata.new(filecontent, format)
489
+ break
490
+ end
491
+ end
492
+ unless metadata
493
+ raise "Failed to extract metadata from #{package_file}"
494
+ end
495
+
496
+ # Insert an attribute on the root element with the package filename
497
+ metadata[:filename] = File.basename(package_file)
498
+ return metadata
499
+ end
500
+
501
+ # TODO: To be deprecated
502
+ # Extracts and returns the metadata from a package file
503
+ def self.xml_metadata_from_package(package_file)
504
+ topleveldir = package_toplevel_directory(package_file)
505
+ # Verify checksum
506
+ verify_package_checksum(package_file)
507
+ # Extract and parse tpkg.xml
508
+ tpkg_xml = nil
509
+ IO.popen("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{find_tar} -xf - -O #{File.join('tpkg', 'tpkg.xml')}") do |pipe|
510
+ tpkg_xml = REXML::Document.new(pipe.read)
511
+ end
512
+ if !$?.success?
513
+ raise "Extracting tpkg.xml from #{package_file} failed"
514
+ end
515
+
516
+ # Insert an attribute on the root element with the package filename
517
+ tpkg_xml.root.attributes['filename'] = File.basename(package_file)
518
+
519
+ # Return
520
+ return tpkg_xml
521
+ end
522
+
523
+ # TODO: To be deprecated
524
+ # Extracts and returns the metadata from a directory of package files
525
+ def self.xml_metadata_from_directory(directory)
526
+ metadata = []
527
+ # if metadata.xml already exists, then go ahead and
528
+ # parse it
529
+ existing_metadata_file = File.join(directory, 'metadata.xml')
530
+ existing_metadata = {}
531
+ if File.exists?(existing_metadata_file)
532
+ tpkg_metadata_xml = REXML::Document.new(File.open(existing_metadata_file))
533
+
534
+ tpkg_metadata_xml.root.elements.each do | metadata_xml |
535
+ existing_metadata[metadata_xml.attributes['filename']] = metadata_xml
536
+ end
537
+ end
538
+
539
+ # Populate the metadata array with metadata for all of the packages
540
+ # in the given directory. Reuse existing metadata if possible.
541
+ Dir.glob(File.join(directory, '*.tpkg')) do |pkg|
542
+ if existing_metadata[File.basename(pkg)]
543
+ metadata << existing_metadata[File.basename(pkg)]
544
+ else
545
+ xml = xml_metadata_from_package(pkg)
546
+ metadata << xml.root
547
+ end
548
+ end
549
+
550
+ return metadata
551
+ end
552
+
553
+ # Extracts and returns the metadata from a directory of package files
554
+ def self.metadata_from_directory(directory)
555
+ metadata = []
556
+
557
+ # if metadata.yml already exists, then go ahead and
558
+ # parse it
559
+ existing_metadata_file = File.join(directory, 'metadata.yml')
560
+ existing_metadata = {}
561
+
562
+ if File.exists?(existing_metadata_file)
563
+ metadata_contents = File.read(File.join(directory, 'metadata.yml'))
564
+ Metadata::get_pkgs_metadata_from_yml_doc(metadata_contents, existing_metadata)
565
+ end
566
+
567
+ # Populate the metadata array with metadata for all of the packages
568
+ # in the given directory. Reuse existing metadata if possible.
569
+ Dir.glob(File.join(directory, '*.tpkg')) do |pkg|
570
+ if existing_metadata[File.basename(pkg)]
571
+ metadata << existing_metadata[File.basename(pkg)]
572
+ else
573
+ metadata_yml = metadata_from_package(pkg)
574
+ metadata << metadata_yml
575
+ end
576
+ end
577
+
578
+ return metadata
579
+ end
580
+
581
+ # Extracts the metadata from a directory of package files and saves it
582
+ # to metadata.yml (and metadata.xml for backward compatibility) in that directory
583
+ def self.extract_metadata(directory, dest=nil)
584
+ dest = directory if dest.nil?
585
+ backward_compatible = true
586
+
587
+ # If we still want to support metadata.xml
588
+ if backward_compatible
589
+ metadata_xml = xml_metadata_from_directory(directory)
590
+ # Combine all of the individual metadata files into one XML document
591
+ metadata = REXML::Document.new
592
+ metadata << REXML::Element.new('tpkg_metadata')
593
+ metadata_xml.each do |md|
594
+ metadata.root << md
595
+ end
596
+ # And write that out to metadata.xml
597
+ metadata_tmpfile = Tempfile.new('metadata.xml', dest)
598
+ metadata.write(metadata_tmpfile)
599
+ metadata_tmpfile.close
600
+ File.chmod(0644, metadata_tmpfile.path)
601
+ File.rename(metadata_tmpfile.path, File.join(dest, 'metadata.xml'))
602
+ end
603
+
604
+ metadata = metadata_from_directory(directory)
605
+ # And write that out to metadata.yml
606
+ metadata_tmpfile = Tempfile.new('metadata.yml', dest)
607
+ metadata.each do | metadata |
608
+ YAML::dump(metadata.hash, metadata_tmpfile)
609
+ end
610
+ metadata_tmpfile.close
611
+ File.chmod(0644, metadata_tmpfile.path)
612
+ File.rename(metadata_tmpfile.path, File.join(dest, 'metadata.yml'))
613
+ end
614
+
615
+ # Haven't found a Ruby method for creating temporary directories,
616
+ # so create a temporary file and replace it with a directory.
617
+ def self.tempdir(basename, tmpdir=Dir::tmpdir)
618
+ tmpfile = Tempfile.new(basename, tmpdir)
619
+ tmpdir = tmpfile.path
620
+ tmpfile.close!
621
+ Dir.mkdir(tmpdir)
622
+ tmpdir
623
+ end
624
+
625
+ @@arch = nil
626
+ def self.get_arch
627
+ if !@@arch
628
+ Facter.loadfacts
629
+ @@arch = Facter['hardwaremodel'].value
630
+ end
631
+ @@arch.dup
632
+ end
633
+
634
+ # Returns a string representing the OS of this box of the form:
635
+ # "OSname-OSmajorversion". The OS name is currently whatever facter
636
+ # returns for the 'operatingsystem' fact. The major version is a bit
637
+ # messier, as we try on a per-OS basis to come up with something that
638
+ # represents the major version number of the OS, where binaries are
639
+ # expected to be compatible across all versions of the OS with that
640
+ # same major version number. Examples include RedHat-5, CentOS-5,
641
+ # FreeBSD-7, Darwin-10.5, and Solaris-5.10
642
+ @@os = nil
643
+ def self.get_os
644
+ if !@@os
645
+ # Tell facter to load everything, otherwise it tries to dynamically
646
+ # load the individual fact libraries using a very broken mechanism
647
+ Facter.loadfacts
648
+
649
+ operatingsystem = Facter['operatingsystem'].value
650
+ osver = nil
651
+ if Facter['lsbmajdistrelease'] &&
652
+ Facter['lsbmajdistrelease'].value &&
653
+ !Facter['lsbmajdistrelease'].value.empty?
654
+ osver = Facter['lsbmajdistrelease'].value
655
+ elsif Facter['kernel'] &&
656
+ Facter['kernel'].value == 'Darwin' &&
657
+ Facter['macosx_productversion'] &&
658
+ Facter['macosx_productversion'].value &&
659
+ !Facter['macosx_productversion'].value.empty?
660
+ macver = Facter['macosx_productversion'].value
661
+ # Extract 10.5 from 10.5.6, for example
662
+ osver = macver.split('.')[0,2].join('.')
663
+ elsif Facter['operatingsystem'] &&
664
+ Facter['operatingsystem'].value == 'FreeBSD'
665
+ # Extract 7 from 7.1-RELEASE, for example
666
+ fbver = Facter['operatingsystemrelease'].value
667
+ osver = fbver.split('.').first
668
+ elsif Facter['operatingsystemrelease'] &&
669
+ Facter['operatingsystemrelease'].value &&
670
+ !Facter['operatingsystemrelease'].value.empty?
671
+ osver = Facter['operatingsystemrelease'].value
672
+ else
673
+ raise "Unable to determine proper OS value on this platform"
674
+ end
675
+ @@os = "#{operatingsystem}-#{osver}"
676
+ end
677
+ @@os.dup
678
+ end
679
+
680
+ # Given an array of pkgs, determine if any of those packages
681
+ # satisfy the requirement specified by req
682
+ def self.packages_meet_requirement?(pkgs, req)
683
+ pkgs.each do | pkg |
684
+ return true if Tpkg::package_meets_requirement?(pkg, req)
685
+ end
686
+ return false
687
+ end
688
+
689
+ # pkg is a standard Hash format used in the library to represent an
690
+ # available package
691
+ # req is a standard Hash format used in the library to represent package
692
+ # requirements
693
+ def self.package_meets_requirement?(pkg, req)
694
+ result = true
695
+ puts "pkg_meets_req checking #{pkg.inspect} against #{req.inspect}" if @@debug
696
+ metadata = pkg[:metadata]
697
+ if req[:type] == :native && pkg[:source] != :native_installed && pkg[:source] != :native_available
698
+ # A req for a native package must be satisfied by a native package
699
+ puts "Package fails native requirement" if @@debug
700
+ result = false
701
+ elsif (!req[:type] || req[:type] == :tpkg) &&
702
+ (pkg[:source] == :native_installed || pkg[:source] == :native_available)
703
+ # Likewise a req for a tpkg must be satisfied by a tpkg
704
+ puts "Package fails non-native requirement" if @@debug
705
+ result = false
706
+ elsif metadata[:name] == req[:name]
707
+ same_min_ver_req = false
708
+ same_max_ver_req = false
709
+ if req[:allowed_versions]
710
+ version = metadata[:version]
711
+ version = "#{version}-#{metadata[:package_version]}" if metadata[:package_version]
712
+ if !File.fnmatch(req[:allowed_versions], version)
713
+ puts "Package fails version requirement.)" if @@debug
714
+ result = false
715
+ end
716
+ end
717
+ if req[:minimum_version]
718
+ pkgver = Version.new(metadata[:version])
719
+ reqver = Version.new(req[:minimum_version])
720
+ if pkgver < reqver
721
+ puts "Package fails minimum_version (#{pkgver} < #{reqver})" if @@debug
722
+ result = false
723
+ elsif pkgver == reqver
724
+ same_min_ver_req = true
725
+ end
726
+ end
727
+ if req[:maximum_version]
728
+ pkgver = Version.new(metadata[:version])
729
+ reqver = Version.new(req[:maximum_version])
730
+ if pkgver > reqver
731
+ puts "Package fails maximum_version (#{pkgver} > #{reqver})" if @@debug
732
+ result = false
733
+ elsif pkgver == reqver
734
+ same_max_ver_req = true
735
+ end
736
+ end
737
+ if same_min_ver_req && req[:minimum_package_version]
738
+ pkgver = Version.new(metadata[:package_version])
739
+ reqver = Version.new(req[:minimum_package_version])
740
+ if pkgver < reqver
741
+ puts "Package fails minimum_package_version (#{pkgver} < #{reqver})" if @@debug
742
+ result = false
743
+ end
744
+ end
745
+ if same_max_ver_req && req[:maximum_package_version]
746
+ pkgver = Version.new(metadata[:package_version])
747
+ reqver = Version.new(req[:maximum_package_version])
748
+ if pkgver > reqver
749
+ puts "Package fails maximum_package_version (#{pkgver} > #{reqver})" if @@debug
750
+ result = false
751
+ end
752
+ end
753
+ # The empty? check ensures that a package with no operatingsystem
754
+ # field matches all clients.
755
+ if metadata[:operatingsystem] &&
756
+ !metadata[:operatingsystem].empty? &&
757
+ !metadata[:operatingsystem].include?(get_os) &&
758
+ !metadata[:operatingsystem].any?{|os| get_os =~ /#{os}/}
759
+ puts "Package fails operatingsystem" if @@debug
760
+ result = false
761
+ end
762
+ # Same deal with empty? here
763
+ if metadata[:architecture] &&
764
+ !metadata[:architecture].empty? &&
765
+ !metadata[:architecture].include?(get_arch) &&
766
+ !metadata[:architecture].any?{|arch| get_arch =~ /#{arch}/}
767
+ puts "Package fails architecture" if @@debug
768
+ result = false
769
+ end
770
+ else
771
+ puts "Package fails name" if @@debug
772
+ result = false
773
+ end
774
+ result
775
+ end
776
+
777
+ # Define a block for sorting packages in order of desirability
778
+ # Suitable for passing to Array#sort as array.sort(&SORT_PACKAGES)
779
+ SORT_PACKAGES = lambda do |a,b|
780
+ #
781
+ # We first prepare all of the values we wish to compare
782
+ #
783
+
784
+ # Name
785
+ aname = a[:metadata][:name]
786
+ bname = b[:metadata][:name]
787
+ # Currently installed
788
+ # Conflicted about whether this belongs here or not, not sure if all
789
+ # potential users of this sorting system would want to prefer currently
790
+ # installed packages.
791
+ acurrentinstall = 0
792
+ if (a[:source] == :currently_installed || a[:source] == :native_installed) && a[:prefer] == true
793
+ acurrentinstall = 1
794
+ end
795
+ bcurrentinstall = 0
796
+ if (b[:source] == :currently_installed || b[:source] == :native_installed) && b[:prefer] == true
797
+ bcurrentinstall = 1
798
+ end
799
+ # Version
800
+ aversion = Version.new(a[:metadata][:version])
801
+ bversion = Version.new(b[:metadata][:version])
802
+ # Package version
803
+ apkgver = Version.new(0)
804
+ if a[:metadata][:package_version]
805
+ apkgver = Version.new(a[:metadata][:package_version])
806
+ end
807
+ bpkgver = Version.new(0)
808
+ if b[:metadata][:package_version]
809
+ bpkgver = Version.new(b[:metadata][:package_version])
810
+ end
811
+ # OS
812
+ # Fewer OSs is better, but zero is least desirable because zero means
813
+ # the package works on all OSs (i.e. it is the most generic package).
814
+ # We prefer packages tuned to a particular set of OSs over packages
815
+ # that work everywhere on the assumption that the package that works
816
+ # on only a few platforms was tuned more specifically for those
817
+ # platforms. We remap 0 to a big number so that the sorting works
818
+ # properly.
819
+ aoslength = 0
820
+ aoslength = a[:metadata][:operatingsystem].length if a[:metadata][:operatingsystem]
821
+ if aoslength == 0
822
+ # See comments above
823
+ aoslength = 1000
824
+ end
825
+ boslength = 0
826
+ boslength = b[:metadata][:operatingsystem].length if b[:metadata][:operatingsystem]
827
+ if boslength == 0
828
+ boslength = 1000
829
+ end
830
+ # Architecture
831
+ # Same deal here, fewer architectures is better but zero is least desirable
832
+ aarchlength = 0
833
+ aarchlength = a[:metadata][:architecture].length if a[:metadata][:architecture]
834
+ if aarchlength == 0
835
+ aarchlength = 1000
836
+ end
837
+ barchlength = 0
838
+ barchlength = b[:metadata][:architecture].length if b[:metadata][:architecture]
839
+ if barchlength == 0
840
+ barchlength = 1000
841
+ end
842
+ # Prefer a currently installed package over an otherwise identical
843
+ # not installed package even if :prefer==false as a last deciding
844
+ # factor.
845
+ acurrentinstallnoprefer = 0
846
+ if a[:source] == :currently_installed || a[:source] == :native_installed
847
+ acurrentinstallnoprefer = 1
848
+ end
849
+ bcurrentinstallnoprefer = 0
850
+ if b[:source] == :currently_installed || b[:source] == :native_installed
851
+ bcurrentinstallnoprefer = 1
852
+ end
853
+
854
+ #
855
+ # Then compare
856
+ #
857
+
858
+ # The mixture of a's and b's in these two arrays may seem odd at first,
859
+ # but for some fields bigger is better (versions) while for other fields
860
+ # smaller is better.
861
+ [aname, bcurrentinstall, bversion, bpkgver, aoslength,
862
+ aarchlength, bcurrentinstallnoprefer] <=>
863
+ [bname, acurrentinstall, aversion, apkgver, boslength,
864
+ barchlength, acurrentinstallnoprefer]
865
+ end
866
+
867
+ def self.files_in_package(package_file)
868
+ files = {}
869
+ files[:root] = []
870
+ files[:reloc] = []
871
+ topleveldir = package_toplevel_directory(package_file)
872
+ IO.popen("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{find_tar} -tf -") do |pipe|
873
+ pipe.each do |file|
874
+ file.chomp!
875
+ if file =~ Regexp.new(File.join('tpkg', 'root'))
876
+ files[:root] << file.sub(Regexp.new(File.join('tpkg', 'root')), '')
877
+ elsif file =~ Regexp.new(File.join('tpkg', 'reloc', '.'))
878
+ files[:reloc] << file.sub(Regexp.new(File.join('tpkg', 'reloc', '')), '')
879
+ end
880
+ end
881
+ end
882
+ if !$?.success?
883
+ raise "Extracting file list from #{package_file} failed"
884
+ end
885
+ files
886
+ end
887
+
888
+ def self.lookup_uid(user)
889
+ uid = nil
890
+ if user =~ /^\d+$/
891
+ # If the user was specified as a numeric UID, use it directly.
892
+ uid = user
893
+ else
894
+ # Otherwise attempt to look up the username to get a UID.
895
+ # Default to UID 0 if the username can't be found.
896
+ # TODO: Should we cache this info somewhere?
897
+ begin
898
+ pw = Etc.getpwnam(user)
899
+ uid = pw.uid
900
+ rescue ArgumentError
901
+ puts "Package requests user #{user}, but that user can't be found. Using UID 0."
902
+ uid = 0
903
+ end
904
+ end
905
+
906
+ uid.to_i
907
+ end
908
+
909
+ def self.lookup_gid(group)
910
+ gid = nil
911
+ if group =~ /^\d+$/
912
+ # If the group was specified as a numeric GID, use it directly.
913
+ gid = group
914
+ else
915
+ # Otherwise attempt to look up the group to get a GID. Default
916
+ # to GID 0 if the group can't be found.
917
+ # TODO: Should we cache this info somewhere?
918
+ begin
919
+ gr = Etc.getgrnam(group)
920
+ gid = gr.gid
921
+ rescue ArgumentError
922
+ puts "Package requests group #{group}, but that group can't be found. Using GID 0."
923
+ gid = 0
924
+ end
925
+ end
926
+
927
+ gid.to_i
928
+ end
929
+
930
+ def self.gethttp(uri)
931
+ if uri.scheme != 'http' && uri.scheme != 'https'
932
+ # It would be possible to add support for FTP and possibly
933
+ # other things if anyone cares
934
+ raise "Only http/https URIs are supported, got: '#{uri}'"
935
+ end
936
+ http = Net::HTTP.new(uri.host, uri.port)
937
+ if uri.scheme == 'https'
938
+ # Eliminate the OpenSSL "using default DH parameters" warning
939
+ if File.exist?(File.join(CONFIGDIR, 'tpkg', 'dhparams'))
940
+ dh = OpenSSL::PKey::DH.new(IO.read(File.join(CONFIGDIR, 'tpkg', 'dhparams')))
941
+ Net::HTTP.ssl_context_accessor(:tmp_dh_callback)
942
+ http.tmp_dh_callback = proc { dh }
943
+ end
944
+ http.use_ssl = true
945
+ if File.exist?(File.join(CONFIGDIR, 'tpkg', 'ca.pem'))
946
+ http.ca_file = File.join(CONFIGDIR, 'tpkg', 'ca.pem')
947
+ http.verify_mode = OpenSSL::SSL::VERIFY_PEER
948
+ elsif File.directory?(File.join(CONFIGDIR, 'tpkg', 'ca'))
949
+ http.ca_path = File.join(CONFIGDIR, 'tpkg', 'ca')
950
+ http.verify_mode = OpenSSL::SSL::VERIFY_PEER
951
+ end
952
+ end
953
+ http.start
954
+ http
955
+ end
956
+
957
+ # foo
958
+ # foo=1.0
959
+ # foo=1.0=1
960
+ # foo-1.0-1.tpkg
961
+ def self.parse_request(request, installed_dir = nil)
962
+ # FIXME: Add support for <, <=, >, >=
963
+ req = {}
964
+ parts = request.split('=')
965
+
966
+ # upgrade/remove/query options should take package filenames
967
+ # First, look inside installed dir to see if we can find the requested package. This is to support
968
+ # requests that use a package filename rather than a package name
969
+ if installed_dir && File.exists?(File.join(installed_dir, request))
970
+ metadata = Tpkg::metadata_from_package(File.join(installed_dir, request))
971
+ req[:name] = metadata[:name]
972
+ req[:minimum_version] = metadata[:version].to_s
973
+ req[:maximum_version] = metadata[:version].to_s
974
+ if metadata[:package_version] && !metadata[:package_version].to_s.empty?
975
+ req[:minimum_package_version] = metadata[:package_version].to_s
976
+ req[:maximum_package_version] = metadata[:package_version].to_s
977
+ end
978
+ elsif parts.length > 2 && parts[-2] =~ /^[\d\.]/ && parts[-1] =~ /^[\d\.]/
979
+ package_version = parts.pop
980
+ version = parts.pop
981
+ req[:name] = parts.join('-')
982
+ req[:minimum_version] = version
983
+ req[:maximum_version] = version
984
+ req[:minimum_package_version] = package_version
985
+ req[:maximum_package_version] = package_version
986
+ elsif parts.length > 1 && parts[-1] =~ /^[\d\.]/
987
+ version = parts.pop
988
+ req[:name] = parts.join('-')
989
+ req[:minimum_version] = version
990
+ req[:maximum_version] = version
991
+ else
992
+ req[:name] = parts.join('-')
993
+ end
994
+ req
995
+ end
996
+
997
+ # deploy_options is used for configuring the deployer. It is a map of option_names => option_values. Possible
998
+ # options are: use-ssh-key, deploy-as, worker-count, abort-on-fail
999
+ #
1000
+ # deploy_params is an array that holds the list of parameters used when invoking tpkg on the remote
1001
+ # servers we want to deploy to.
1002
+ #
1003
+ # servers is an array or a callback that lists the remote servers we want to deploy to
1004
+ def self.deploy(deploy_params, deploy_options, servers)
1005
+ deployer = Deployer.new(deploy_options)
1006
+ deployer.deploy(deploy_params, servers)
1007
+ end
1008
+
1009
+ # Given a pid, check if it is running
1010
+ def self.process_running?(pid)
1011
+ return false if pid.nil? or pid == ""
1012
+ begin
1013
+ Process.kill(0, pid.to_i)
1014
+ rescue Errno::ESRCH
1015
+ return false
1016
+ rescue => e
1017
+ puts e
1018
+ return true
1019
+ end
1020
+ end
1021
+
1022
+ # Prompt user to confirm yes or no. Default to yes if user just hit enter without any input.
1023
+ def self.confirm
1024
+ while true
1025
+ print "Confirm? [Y/n] "
1026
+ response = $stdin.gets
1027
+ if response =~ /^n/i
1028
+ return false
1029
+ elsif response =~ /^y|^\s$/i
1030
+ return true
1031
+ end
1032
+ end
1033
+ end
1034
+
1035
+ def self.extract_tpkgxml(package_file)
1036
+ result = ""
1037
+ workdir = ""
1038
+ begin
1039
+ topleveldir = Tpkg::package_toplevel_directory(package_file)
1040
+ workdir = Tpkg::tempdir(topleveldir)
1041
+ system("#{find_tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{find_tar} -C #{workdir} -xpf -")
1042
+
1043
+ if !File.exist?(File.join(workdir,"tpkg", "tpkg.xml"))
1044
+ raise "#{package_file} does not contain tpkg.xml"
1045
+ else
1046
+ File.open(File.join(workdir,"tpkg", "tpkg.xml"), "r") do | f |
1047
+ result = f.read
1048
+ end
1049
+ end
1050
+ rescue
1051
+ puts "Failed to extract package."
1052
+ ensure
1053
+ FileUtils.rm_rf(workdir) if workdir
1054
+ end
1055
+ return result
1056
+ end
1057
+
1058
+ #
1059
+ # Instance methods
1060
+ #
1061
+
1062
+ DEFAULT_BASE = '/home/t'
1063
+
1064
+ def initialize(options)
1065
+ # Options
1066
+ @base = options[:base]
1067
+ # An array of filenames or URLs which point to individual package files
1068
+ # or directories containing packages and extracted metadata.
1069
+ @sources = []
1070
+ if options[:sources]
1071
+ @sources = options[:sources]
1072
+ # Clean up any URI sources by ensuring they have a trailing slash
1073
+ # so that they are compatible with URI::join
1074
+ @sources.map! do |source|
1075
+ if !File.exist?(source) && source !~ %r{/$}
1076
+ source << '/'
1077
+ end
1078
+ source
1079
+ end
1080
+ end
1081
+ @report_server = nil
1082
+ if options[:report_server]
1083
+ @report_server = options[:report_server]
1084
+ end
1085
+ @lockforce = false
1086
+ if options.has_key?(:lockforce)
1087
+ @lockforce = options[:lockforce]
1088
+ end
1089
+ @force = false
1090
+ if options.has_key?(:force)
1091
+ @force = options[:force]
1092
+ end
1093
+
1094
+ @file_system_root = '/' # Not sure if this needs to be more portable
1095
+ # This option is only intended for use by the test suite
1096
+ if options[:file_system_root]
1097
+ @file_system_root = options[:file_system_root]
1098
+ @base = File.join(@file_system_root, @base)
1099
+ end
1100
+
1101
+ # Various external scripts that we run might need to adjust things for
1102
+ # relocatable packages based on the base directory. Set $TPKG_HOME so
1103
+ # those scripts know what base directory is being used.
1104
+ ENV['TPKG_HOME'] = @base
1105
+
1106
+ # Other instance variables
1107
+ @metadata = {}
1108
+ @available_packages = {}
1109
+ @available_native_packages = {}
1110
+ @var_directory = File.join(@base, 'var', 'tpkg')
1111
+ if !File.exist?(@var_directory)
1112
+ begin
1113
+ FileUtils.mkdir_p(@var_directory)
1114
+ rescue Errno::EACCES
1115
+ raise if Process.euid == 0
1116
+ rescue Errno::EIO => e
1117
+ if Tpkg::get_os =~ /Darwin/
1118
+ # Try to help our Mac OS X users, otherwise this could be
1119
+ # rather confusing.
1120
+ warn "\nNote: /home is controlled by the automounter by default on Mac OS X.\n" +
1121
+ "You'll either need to disable that in /etc/auto_master or configure\n" +
1122
+ "tpkg to use a different base via tpkg.conf.\n"
1123
+ end
1124
+ raise e
1125
+ end
1126
+ end
1127
+ @installed_directory = File.join(@var_directory, 'installed')
1128
+ if !File.exist?(@installed_directory)
1129
+ begin
1130
+ FileUtils.mkdir_p(@installed_directory)
1131
+ rescue Errno::EACCES
1132
+ raise if Process.euid == 0
1133
+ end
1134
+ end
1135
+ @metadata_directory = File.join(@installed_directory, 'metadata')
1136
+ if !File.exist?(@metadata_directory)
1137
+ begin
1138
+ FileUtils.mkdir_p(@metadata_directory)
1139
+ rescue Errno::EACCES
1140
+ raise if Process.euid == 0
1141
+ end
1142
+ end
1143
+ @sources_directory = File.join(@var_directory, 'sources')
1144
+ if !File.exist?(@sources_directory)
1145
+ begin
1146
+ FileUtils.mkdir_p(@sources_directory)
1147
+ rescue Errno::EACCES
1148
+ raise if Process.euid == 0
1149
+ end
1150
+ end
1151
+ @external_directory = File.join(@var_directory, 'externals')
1152
+ if !File.exist?(@external_directory)
1153
+ begin
1154
+ FileUtils.mkdir_p(@external_directory)
1155
+ rescue Errno::EACCES
1156
+ raise if Process.euid == 0
1157
+ end
1158
+ end
1159
+ @tmp_directory = File.join(@var_directory, 'tmp')
1160
+ if !File.exist?(@tmp_directory)
1161
+ begin
1162
+ FileUtils.mkdir_p(@tmp_directory)
1163
+ rescue Errno::EACCES
1164
+ raise if Process.euid == 0
1165
+ end
1166
+ end
1167
+ @tar = Tpkg::find_tar
1168
+ @lock_directory = File.join(@var_directory, 'lock')
1169
+ @lock_pid_file = File.join(@lock_directory, 'pid')
1170
+ @locks = 0
1171
+ @installed_metadata = {}
1172
+ end
1173
+
1174
+ def source_to_local_directory(source)
1175
+ source_as_directory = source.gsub(/[^a-zA-Z0-9]/, '')
1176
+ File.join(@sources_directory, source_as_directory)
1177
+ end
1178
+
1179
+ # One-time operations related to loading information about available
1180
+ # packages
1181
+ def prep_metadata
1182
+ if @metadata.empty?
1183
+ metadata = {}
1184
+ @sources.each do |source|
1185
+ if File.file?(source)
1186
+ metadata_yml = Tpkg::metadata_from_package(source)
1187
+ metadata_yml.source = source
1188
+ name = metadata_yml[:name]
1189
+ metadata[name] = [] if !metadata[name]
1190
+ metadata[name] << metadata_yml
1191
+ elsif File.directory?(source)
1192
+ if !File.exists?(File.join(source, 'metadata.yml'))
1193
+ warn "Warning: the source directory #{source} has no metadata.yml file. Try running tpkg -x #{source} first."
1194
+ next
1195
+ end
1196
+
1197
+ metadata_contents = File.read(File.join(source, 'metadata.yml'))
1198
+ Metadata::get_pkgs_metadata_from_yml_doc(metadata_contents, metadata, source)
1199
+ else
1200
+ uri = http = localdate = remotedate = localdir = localpath = nil
1201
+
1202
+ ['metadata.yml', 'metadata.xml'].each do | metadata_file |
1203
+ uri = URI.join(source, metadata_file)
1204
+ http = Tpkg::gethttp(uri)
1205
+
1206
+ # Calculate the path to the local copy of the metadata for this URI
1207
+ localdir = source_to_local_directory(source)
1208
+ localpath = File.join(localdir, metadata_file)
1209
+ localdate = nil
1210
+ if File.exist?(localpath)
1211
+ localdate = File.mtime(localpath)
1212
+ end
1213
+
1214
+ # For now, we always have to hit the repo once to determine if
1215
+ # it has metadata.yml or metadata.xml. In the future,
1216
+ # we will only support metadata.yml
1217
+ response = http.head(uri.path)
1218
+ case response
1219
+ when Net::HTTPSuccess
1220
+ remotedate = Time.httpdate(response['Date'])
1221
+ break
1222
+ else
1223
+ puts "Error fetching metadata from #{uri}: #{response.body}"
1224
+ next
1225
+ end
1226
+ end
1227
+
1228
+ # Fetch the metadata if necessary
1229
+ metadata_contents = nil
1230
+ if !localdate || remotedate != localdate
1231
+ response = http.get(uri.path)
1232
+ case response
1233
+ when Net::HTTPSuccess
1234
+ metadata_contents = response.body
1235
+ remotedate = Time.httpdate(response['Date'])
1236
+ # Attempt to save a local copy, might not work if we're not
1237
+ # running with sufficient privileges
1238
+ begin
1239
+ if !File.exist?(localdir)
1240
+ FileUtils.mkdir_p(localdir)
1241
+ end
1242
+ File.open(localpath, 'w') do |file|
1243
+ file.puts(response.body)
1244
+ end
1245
+ File.utime(remotedate, remotedate, localpath)
1246
+ rescue Errno::EACCES
1247
+ raise if Process.euid == 0
1248
+ end
1249
+ else
1250
+ puts "Error fetching metadata from #{uri}: #{response.body}"
1251
+ response.error! # Throws an exception
1252
+ end
1253
+ else
1254
+ metadata_contents = IO.read(localpath)
1255
+ end
1256
+
1257
+ if uri.path =~ /yml/
1258
+ Metadata::get_pkgs_metadata_from_yml_doc(metadata_contents, metadata, source)
1259
+ else
1260
+ # At this stage we just break up the metadata.xml document into
1261
+ # per-package chunks and save them for further parsing later.
1262
+ # This allows us to parse the whole metadata.xml just once, and
1263
+ # saves us from having to further parse and convert the
1264
+ # per-package chunks until if/when they are needed.
1265
+ tpkg_metadata = REXML::Document.new(metadata_contents)
1266
+ tpkg_metadata.elements.each('/tpkg_metadata/tpkg') do |metadata_xml|
1267
+ name = metadata_xml.elements['name'].text
1268
+ metadata[name] = [] if !metadata[name]
1269
+ metadata[name] << Metadata.new(metadata_xml.to_s, 'xml', source)
1270
+ end
1271
+ end
1272
+ end
1273
+ end
1274
+ @metadata = metadata
1275
+ if @@debug
1276
+ @sources.each do |source|
1277
+ count = metadata.inject(0) do |memo,m|
1278
+ # metadata is a hash of pkgname => array of metadata
1279
+ # hashes
1280
+ # Thus m is a 2 element array of [pkgname, array of
1281
+ # metadata hashes] And thus m[1] is the array of
1282
+ # metadata hashes. And metadata hashes are themselves
1283
+ # a hash of XML metadata and source.
1284
+ memo + m[1].select{|mh| mh[:source] == source}.length
1285
+ end
1286
+ puts "Found #{count} packages from #{source}"
1287
+ end
1288
+ end
1289
+ end
1290
+ end
1291
+
1292
+ # Populate our list of available packages for a given package name
1293
+ def load_available_packages(name=nil)
1294
+ prep_metadata
1295
+
1296
+ if name
1297
+ if !@available_packages[name]
1298
+ packages = []
1299
+ if @metadata[name]
1300
+ @metadata[name].each do |metadata_obj|
1301
+ packages << { :metadata => metadata_obj,
1302
+ :source => metadata_obj.source }
1303
+ end
1304
+ end
1305
+ @available_packages[name] = packages
1306
+
1307
+ if @@debug
1308
+ puts "Loaded #{@available_packages[name].size} available packages for #{name}"
1309
+ end
1310
+ end
1311
+ else
1312
+ # Load all packages
1313
+ @metadata.each do |pkgname, metadata_objs|
1314
+ if !@available_packages[pkgname]
1315
+ packages = []
1316
+ metadata_objs.each do |metadata_obj|
1317
+ packages << { :metadata => metadata_obj,
1318
+ :source => metadata_obj.source }
1319
+ end
1320
+ @available_packages[pkgname] = packages
1321
+ end
1322
+ end
1323
+ end
1324
+ end
1325
+
1326
+ # Used by load_available_native_packages to stuff all the info about a
1327
+ # native package into a hash to match the structure we pass around
1328
+ # internally for tpkgs
1329
+ def pkg_for_native_package(name, version, package_version, source)
1330
+ metadata = {}
1331
+ metadata[:name] = name
1332
+ metadata[:version] = version
1333
+ metadata[:package_version] = package_version if package_version
1334
+ pkg = { :metadata => metadata, :source => source }
1335
+ if source == :native_installed
1336
+ pkg[:prefer] = true
1337
+ end
1338
+ pkg
1339
+ end
1340
+
1341
+ def load_available_native_packages(pkgname)
1342
+ if !@available_native_packages[pkgname]
1343
+ native_packages = []
1344
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/
1345
+ [ {:arg => 'installed', :header => 'Installed', :source => :native_installed},
1346
+ {:arg => 'available', :header => 'Available', :source => :native_available} ].each do |yum|
1347
+ puts "available_native_packages running 'yum list #{yum[:arg]} #{pkgname}'" if @@debug
1348
+ stderr_first_line = nil
1349
+ Open3.popen3("yum list #{yum[:arg]} #{pkgname}") do |stdin, stdout, stderr|
1350
+ stdin.close
1351
+ read_packages = false
1352
+ stdout.each_line do |line|
1353
+ if line =~ /#{yum[:header]} Packages/
1354
+ # Skip the header lines until we get to this line
1355
+ read_packages = true
1356
+ elsif read_packages
1357
+ name_and_arch, ver_and_release, repo = line.split
1358
+ # In the end we ignore the architecture. Anything that
1359
+ # shows up in yum should be installable on this box, and
1360
+ # the chance of a mismatch between facter's idea of the
1361
+ # architecture and RPM's idea is high. I.e. i386 vs i686
1362
+ # or i32e vs x86_64 or whatever.
1363
+ name, arch = name_and_arch.split('.')
1364
+ # This is prone to error, as both the version and release
1365
+ # (what we call package version) could contain '-', so
1366
+ # there's no reliable way to parse the combined value.
1367
+ # RPM can show them separately, but seemingly not yum.
1368
+ # We could use rpm to list installed packages, but we
1369
+ # have to use yum to get available packages so we're
1370
+ # stuck with the problem.
1371
+ verparts = ver_and_release.split('-')
1372
+ package_version = verparts.pop
1373
+ version = verparts.join('-')
1374
+ # Create the pkg structure
1375
+ pkg = pkg_for_native_package(name, version, package_version, yum[:source])
1376
+ native_packages << pkg
1377
+ end
1378
+ end
1379
+ stderr_first_line = stderr.gets
1380
+ end
1381
+ if !$?.success?
1382
+ # Ignore 'no matching packages', raise anything else
1383
+ if stderr_first_line != "Error: No matching Packages to list\n"
1384
+ raise "available_native_packages error running yum"
1385
+ end
1386
+ end
1387
+ end
1388
+ elsif Tpkg::get_os =~ /Debian|Ubuntu/
1389
+ # The default 'dpkg -l' format has an optional third column for
1390
+ # errors, which makes it hard to parse reliably.
1391
+ puts "available_native_packages running dpkg-query -W -f='${Package} ${Version} ${Status}\n' #{pkgname}" if @@debug
1392
+ stderr_first_line = nil
1393
+ Open3.popen3("dpkg-query -W -f='${Package} ${Version} ${Status}\n' #{pkgname}") do |stdin, stdout, stderr|
1394
+ stdin.close
1395
+ stdout.each_line do |line|
1396
+ name, debversion, status = line.split(' ', 3)
1397
+ # Seems to be Debian convention that if the package has a
1398
+ # package version you separate that from the upstream version
1399
+ # with a hyphen.
1400
+ version = nil
1401
+ package_version = nil
1402
+ if debversion =~ /-/
1403
+ version, package_version = debversion.split('-', 2)
1404
+ else
1405
+ version = debversion
1406
+ end
1407
+ if status =~ /installed/
1408
+ pkg = pkg_for_native_package(name, version, package_version, :native_installed)
1409
+ native_packages << pkg
1410
+ end
1411
+ end
1412
+ stderr_first_line = stderr.gets
1413
+ end
1414
+ if !$?.success?
1415
+ # Ignore 'no matching packages', raise anything else
1416
+ if stderr_first_line !~ /No packages found matching/
1417
+ raise "available_native_packages error running dpkg-query"
1418
+ end
1419
+ end
1420
+ puts "available_native_packages running 'apt-cache show #{pkgname}'" if @@debug
1421
+ IO.popen("apt-cache show #{pkgname}") do |pipe|
1422
+ name = nil
1423
+ version = nil
1424
+ package_version = nil
1425
+ pipe.each_line do |line|
1426
+ if line =~ /^Package: (.*)/
1427
+ name = $1
1428
+ version = nil
1429
+ package_version = nil
1430
+ elsif line =~ /^Version: (.*)/
1431
+ debversion = $1
1432
+ # Seems to be Debian convention that if the package has a
1433
+ # package version you separate that from the upstream version
1434
+ # with a hyphen.
1435
+ if debversion =~ /-/
1436
+ version, package_version = debversion.split('-', 2)
1437
+ else
1438
+ version = debversion
1439
+ end
1440
+ pkg = pkg_for_native_package(name, version, package_version, :native_available)
1441
+ native_packages << pkg
1442
+ end
1443
+ end
1444
+ end
1445
+ if !$?.success?
1446
+ raise "available_native_packages error running apt-cache"
1447
+ end
1448
+ elsif Tpkg::get_os =~ /Solaris/
1449
+ # Example of pkginfo -x output:
1450
+ # SUNWzfsu ZFS (Usr)
1451
+ # (i386) 11.10.0,REV=2006.05.18.01.46
1452
+ puts "available_native_packages running 'pkginfo -x #{pkgname}'" if @@debug
1453
+ IO.popen("pkginfo -x #{pkgname}") do |pipe|
1454
+ name = nil
1455
+ version = nil
1456
+ package_version = nil
1457
+ pipe.each_line do |line|
1458
+ if line =~ /^\w/
1459
+ name = line.split(' ').first
1460
+ version = nil
1461
+ package_version = nil
1462
+ else
1463
+ arch, solversion = line.split(' ')
1464
+ # Lots of Sun and some third party packages (including CSW)
1465
+ # seem to use this REV= convention in the version. I've
1466
+ # never seen it documented, but since it seems to be a
1467
+ # widely used convention we'll go with it.
1468
+ if solversion =~ /,REV=/
1469
+ version, package_version = solversion.split(',REV=')
1470
+ else
1471
+ version = solversion
1472
+ end
1473
+ pkg = pkg_for_native_package(name, version, package_version, :native_installed)
1474
+ native_packages << pkg
1475
+ end
1476
+ end
1477
+ end
1478
+ if !$?.success?
1479
+ raise "available_native_packages error running pkginfo"
1480
+ end
1481
+ if File.exist?('/opt/csw/bin/pkg-get')
1482
+ puts "available_native_packages running '/opt/csw/bin/pkg-get -a'" if @@debug
1483
+ IO.popen('/opt/csw/bin/pkg-get -a') do |pipe|
1484
+ pipe.each_line do |line|
1485
+ next if line =~ /^#/ # Skip comments
1486
+ name, solversion = line.split
1487
+ # pkg-get doesn't have an option to only show available
1488
+ # packages matching a specific name, so we have to look over
1489
+ # all available packages and pick out the ones that match.
1490
+ next if name != pkgname
1491
+ # Lots of Sun and some third party packages (including CSW)
1492
+ # seem to use this REV= convention in the version. I've
1493
+ # never seen it documented, but since it seems to be a
1494
+ # widely used convention we'll go with it.
1495
+ version = nil
1496
+ package_version = nil
1497
+ if solversion =~ /,REV=/
1498
+ version, package_version = solversion.split(',REV=')
1499
+ else
1500
+ version = solversion
1501
+ end
1502
+ pkg = pkg_for_native_package(name, version, package_version, :native_available)
1503
+ native_packages << pkg
1504
+ end
1505
+ end
1506
+ end
1507
+ elsif Tpkg::get_os =~ /FreeBSD/
1508
+ puts "available_native_packages running 'pkg_info #{pkgname}'" if @@debug
1509
+ IO.popen("pkg_info #{pkgname}") do |pipe|
1510
+ pipe.each_line do |line|
1511
+ name_and_version = line.split(' ', 3).first
1512
+ nameparts = name_and_version.split('-')
1513
+ fbversion = nameparts.pop
1514
+ name = nameparts.join('-')
1515
+ # Seems to be FreeBSD convention that if the package has a
1516
+ # package version you separate that from the upstream version
1517
+ # with an underscore.
1518
+ version = nil
1519
+ package_version = nil
1520
+ if fbversion =~ /_/
1521
+ version, package_version = fbversion.split('_', 2)
1522
+ else
1523
+ version = fbversion
1524
+ end
1525
+ pkg = pkg_for_native_package(name, version, package_version, :native_installed)
1526
+ native_packages << pkg
1527
+ end
1528
+ end
1529
+ if !$?.success?
1530
+ raise "available_native_packages error running pkg_info"
1531
+ end
1532
+ # FIXME: FreeBSD available packages
1533
+ # We could either poke around in the ports tree (if installed), or
1534
+ # try to recreate the URL "pkg_add -r" would use and pull a
1535
+ # directory listing.
1536
+ elsif Tpkg::get_os =~ /Darwin/
1537
+ if File.exist?('/opt/local/bin/port')
1538
+ puts "available_native_packages running '/opt/local/bin/port installed #{pkgname}'" if @@debug
1539
+ IO.popen("/opt/local/bin/port installed #{pkgname}") do |pipe|
1540
+ pipe.each_line do |line|
1541
+ next if line =~ /The following ports are currently installed/
1542
+ next if line =~ /None of the specified ports are installed/
1543
+ next if line !~ /\(active\)/
1544
+ name, version = line.split(' ')
1545
+ version.sub!(/^@/, '')
1546
+ # Remove variant names
1547
+ version.sub!(/\+.*/, '')
1548
+ # Remove the _number that is always listed on installed ports,
1549
+ # presumably some sort of differentiator if multiple copies of
1550
+ # the same port version are installed.
1551
+ version.sub!(/_\d+$/, '')
1552
+ package_version = nil
1553
+ pkg = pkg_for_native_package(name, version, package_version, :native_installed)
1554
+ native_packages << pkg
1555
+ end
1556
+ end
1557
+ if !$?.success?
1558
+ raise "available_native_packages error running port"
1559
+ end
1560
+ puts "available_native_packages running '/opt/local/bin/port list #{pkgname}'" if @@debug
1561
+ IO.popen("/opt/local/bin/port list #{pkgname}") do |pipe|
1562
+ pipe.each_line do |line|
1563
+ name, version = line.split(' ')
1564
+ version.sub!(/^@/, '')
1565
+ package_version = nil
1566
+ pkg = pkg_for_native_package(name, version, package_version, :native_available)
1567
+ native_packages << pkg
1568
+ end
1569
+ end
1570
+ if !$?.success?
1571
+ raise "available_native_packages error running port"
1572
+ end
1573
+ else
1574
+ # Fink support would be nice
1575
+ raise "No supported native package tool available on #{Tpkg::get_os}"
1576
+ end
1577
+ else
1578
+ puts "Unknown value for OS: #{Tpkg::get_os}"
1579
+ end
1580
+ @available_native_packages[pkgname] = native_packages
1581
+ if @@debug
1582
+ nicount = native_packages.select{|pkg| pkg[:source] == :native_installed}.length
1583
+ nacount = native_packages.select{|pkg| pkg[:source] == :native_available}.length
1584
+ puts "Found #{nicount} installed native packages for #{pkgname}"
1585
+ puts "Found #{nacount} available native packages for #{pkgname}"
1586
+ end
1587
+ end
1588
+ end
1589
+
1590
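As an aside for readers, the native version conventions handled above (Debian's hyphen, Solaris' ",REV=", FreeBSD's underscore) all come down to a simple string split. The standalone sketch below is illustrative only and not part of the package; the helper name is made up.

# Illustrative only: mirrors the version-splitting conventions used above.
def split_native_version(raw, os)
  case os
  when /Debian|Ubuntu/ then raw.include?('-')     ? raw.split('-', 2)     : [raw, nil]
  when /Solaris/       then raw.include?(',REV=') ? raw.split(',REV=', 2) : [raw, nil]
  when /FreeBSD/       then raw.include?('_')     ? raw.split('_', 2)     : [raw, nil]
  else [raw, nil]
  end
end

p split_native_version('2.6.32-5', 'Debian')                       # => ["2.6.32", "5"]
p split_native_version('11.10.0,REV=2006.05.18.01.46', 'Solaris')  # => ["11.10.0", "2006.05.18.01.46"]
p split_native_version('1.4.1_2', 'FreeBSD')                       # => ["1.4.1", "2"]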
+ # Returns an array of the tpkg.xml metadata for installed packages
1591
+ def metadata_for_installed_packages
1592
+ metadata = {}
1593
+ if File.directory?(@installed_directory)
1594
+ Dir.foreach(@installed_directory) do |entry|
1595
+ next if entry == '.' || entry == '..' || entry == 'metadata'
1596
+ # Check the timestamp on the file to see if it is new or has
1597
+ # changed since we last loaded data
1598
+ timestamp = File.mtime(File.join(@installed_directory, entry))
1599
+ if @installed_metadata[entry] &&
1600
+ timestamp == @installed_metadata[entry][:timestamp]
1601
+ puts "Using cached installed metadata for #{entry}" if @@debug
1602
+ metadata[entry] = @installed_metadata[entry]
1603
+ else
1604
+ puts "Loading installed metadata from disk for #{entry}" if @@debug
1605
+ # Check to see if we already have a saved copy of the metadata
1606
+ # Originally tpkg just stored a copy of the package file in
1607
+ # @installed_directory and we had to extract the metadata
1608
+ # from the package file every time we needed it. That was
1609
+ # determined to be too slow, so we now cache a copy of the
1610
+ # metadata separately. However we may encounter installs by
1611
+ # old copies of tpkg and need to extract and cache the
1612
+ # metadata.
1613
+ package_metadata_dir =
1614
+ File.join(@metadata_directory,
1615
+ File.basename(entry, File.extname(entry)))
1616
+ metadata_file = File.join(package_metadata_dir, "tpkg.yml")
1617
+ m = nil
1618
+ if File.exists?(metadata_file)
1619
+ metadata_text = File.read(metadata_file)
1620
+ m = Metadata.new(metadata_text, 'yml')
1621
+ elsif File.exists?(File.join(package_metadata_dir, "tpkg.xml"))
1622
+ metadata_text = File.read(File.join(package_metadata_dir, "tpkg.xml"))
1623
+ m = Metadata.new(metadata_text, 'xml')
1624
+ # No cached metadata found, we have to extract it ourselves
1625
+ # and save it for next time
1626
+ else
1627
+ m = Tpkg::metadata_from_package(
1628
+ File.join(@installed_directory, entry))
1629
+ begin
1630
+ FileUtils.mkdir_p(package_metadata_dir)
1631
+ File.open(metadata_file, "w") do |file|
1632
+ YAML::dump(m.hash, file)
1633
+ end
1634
+ rescue Errno::EACCES
1635
+ raise if Process.euid == 0
1636
+ end
1637
+ end
1638
+ metadata[entry] = { :timestamp => timestamp,
1639
+ :metadata => m }
1640
+ end
1641
+ end
1642
+ end
1643
+ @installed_metadata = metadata
1644
+ # FIXME: dup the array we return?
1645
+ @installed_metadata.collect { |im| im[1][:metadata] }
1646
+ end
1647
+
1648
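The mtime comparison above is a plain cache-invalidation pattern: reparse the metadata only when the file on disk has changed. A standalone sketch of the same idea follows; the function and variable names are hypothetical and not tpkg API.

require 'yaml'

# Illustrative only: cache parsed YAML keyed by file mtime, reparsing only
# when the timestamp changes.
def load_with_mtime_cache(cache, path)
  timestamp = File.mtime(path)
  entry = cache[path]
  return entry[:data] if entry && entry[:timestamp] == timestamp
  data = YAML.load_file(path)
  cache[path] = { :timestamp => timestamp, :data => data }
  data
end

# cache = {}
# load_with_mtime_cache(cache, 'tpkg.yml')  # parses the file
# load_with_mtime_cache(cache, 'tpkg.yml')  # served from cache until the mtime changes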
+ # Convert metadata_for_installed_packages into pkg hashes
1649
+ def installed_packages
1650
+ instpkgs = []
1651
+ metadata_for_installed_packages.each do |metadata|
1652
+ instpkgs << { :metadata => metadata,
1653
+ :source => :currently_installed,
1654
+ # It seems reasonable for this to default to true
1655
+ :prefer => true }
1656
+ end
1657
+ instpkgs
1658
+ end
1659
+
1660
+ # Returns a hash of file_metadata for installed packages
1661
+ def file_metadata_for_installed_packages
1662
+ ret = {}
1663
+
1664
+ if File.directory?(@metadata_directory)
1665
+ Dir.foreach(@metadata_directory) do |entry|
1666
+ next if entry == '.' || entry == '..'
1667
+ if File.exists?(File.join(@metadata_directory, entry, "file_metadata.bin"))
1668
+ file = File.join(@metadata_directory, entry, "file_metadata.bin")
1669
+ file_metadata = FileMetadata.new(File.read(file), 'bin')
1670
+ elsif File.exists?(File.join(@metadata_directory, entry, "file_metadata.yml"))
1671
+ file = File.join(@metadata_directory, entry, "file_metadata.yml")
1672
+ file_metadata = FileMetadata.new(File.read(file), 'yml')
1673
+ elsif File.exists?(File.join(@metadata_directory, entry, "file_metadata.xml"))
1674
+ file = File.join(@metadata_directory, entry, "file_metadata.xml")
1675
+ file_metadata = FileMetadata.new(File.read(file), 'xml')
1676
+ end
1677
+ ret[file_metadata[:package_file]] = file_metadata
1678
+ end
1679
+ end
1680
+ ret
1681
+ end
1682
+
1683
+ # Returns an array of packages which meet the given requirement
1684
+ def available_packages_that_meet_requirement(req=nil)
1685
+ pkgs = []
1686
+ puts "avail_pkgs_that_meet_req checking for #{req.inspect}" if @@debug
1687
+ if req
1688
+ if req[:type] == :native
1689
+ load_available_native_packages(req[:name])
1690
+ @available_native_packages[req[:name]].each do |pkg|
1691
+ if Tpkg::package_meets_requirement?(pkg, req)
1692
+ pkgs << pkg
1693
+ end
1694
+ end
1695
+ else
1696
+ load_available_packages(req[:name])
1697
+ @available_packages[req[:name]].each do |pkg|
1698
+ if Tpkg::package_meets_requirement?(pkg, req)
1699
+ pkgs << pkg
1700
+ end
1701
+ end
1702
+ # There's a weird dichotomy here where @available_packages contains
1703
+ # available tpkg and native packages, and _installed_ native
1704
+ # packages, but not installed tpkgs. That's somewhat intentional,
1705
+ # as we don't want to cache the installed state since that might
1706
+ # change during a run. We probably should be consistent, and not
1707
+ # cache installed native packages either. However, we do have
1708
+ # some intelligent caching of the installed tpkg state which would
1709
+ # be hard to replicate for native packages, and this method gets
1710
+ # called a lot so re-running the native package query commands
1711
+ # frequently would not be acceptable. So maybe we have the right
1712
+ # design, and this just serves as a note that it is not obvious.
1713
+ pkgs.concat(installed_packages_that_meet_requirement(req))
1714
+ end
1715
+ else
1716
+ # We return everything available if given a nil requirement
1717
+ # We do not include native packages
1718
+ load_available_packages
1719
+ # @available_packages is a hash of pkgname => array of pkgs
1720
+ # Thus m is a 2 element array of [pkgname, array of pkgs]
1721
+ # And thus m[1] is the array of packages
1722
+ pkgs = @available_packages.collect{|m| m[1]}.flatten
1723
+ end
1724
+ pkgs
1725
+ end
1726
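For orientation, a requirement throughout this file is just a hash; the keys below are the ones the surrounding code checks (:type => :native routes the lookup to the native package list, the version bounds are evaluated by Tpkg::package_meets_requirement?). The package names and values are hypothetical.

# Illustrative requirement hashes only, not part of the package.
req_tpkg   = { :name => 'mytool', :minimum_version => '1.0', :maximum_version => '2.0' }
req_native = { :name => 'openssl', :type => :native }
# tpkg = Tpkg.new(...)   # assumed instance
# tpkg.available_packages_that_meet_requirement(req_tpkg)
# tpkg.installed_packages_that_meet_requirement(req_native)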
+ def installed_packages_that_meet_requirement(req=nil)
1727
+ pkgs = []
1728
+ if req && req[:type] == :native
1729
+ load_available_native_packages(req[:name])
1730
+ @available_native_packages[req[:name]].each do |pkg|
1731
+ if pkg[:source] == :native_installed &&
1732
+ Tpkg::package_meets_requirement?(pkg, req)
1733
+ pkgs << pkg
1734
+ end
1735
+ end
1736
+ else
1737
+ installed_packages.each do |pkg|
1738
+ if req
1739
+ if Tpkg::package_meets_requirement?(pkg, req)
1740
+ pkgs << pkg
1741
+ end
1742
+ else
1743
+ pkgs << pkg
1744
+ end
1745
+ end
1746
+ end
1747
+ pkgs
1748
+ end
1749
+ # Takes a files structure as returned by files_in_package. Inserts
1750
+ # a new entry in the structure with the combined relocatable and
1751
+ # non-relocatable file lists normalized to their full paths.
1752
+ def normalize_paths(files)
1753
+ files[:normalized] = []
1754
+ files[:root].each do |rootfile|
1755
+ files[:normalized] << File.join(@file_system_root, rootfile)
1756
+ end
1757
+ files[:reloc].each do |relocfile|
1758
+ files[:normalized] << File.join(@base, relocfile)
1759
+ end
1760
+ end
1761
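A quick illustration of what normalize_paths adds, assuming a file system root of '/' and a base of '/opt/tpkg' (both values are made up for the example):

# Illustrative only: root entries join against the filesystem root,
# relocatable entries join against the base directory.
file_system_root = '/'
base = '/opt/tpkg'
files = { :root => ['etc/myapp.conf'], :reloc => ['bin/myapp'] }
files[:normalized] =
  files[:root].map  { |f| File.join(file_system_root, f) } +
  files[:reloc].map { |f| File.join(base, f) }
p files[:normalized]  # => ["/etc/myapp.conf", "/opt/tpkg/bin/myapp"]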
+ def files_for_installed_packages(package_files=nil)
1762
+ files = {}
1763
+ if !package_files
1764
+ package_files = []
1765
+ metadata_for_installed_packages.each do |metadata|
1766
+ package_files << metadata[:filename]
1767
+ end
1768
+ end
1769
+ metadata_for_installed_packages.each do |metadata|
1770
+ package_file = metadata[:filename]
1771
+ if package_files.include?(package_file)
1772
+ fip = Tpkg::files_in_package(File.join(@installed_directory, package_file))
1773
+ normalize_paths(fip)
1774
+ fip[:metadata] = metadata
1775
+ files[package_file] = fip
1776
+ end
1777
+ end
1778
+ files
1779
+ end
1780
+
1781
+ # Returns the best solution that meets the given requirements. Some
1782
+ # or all packages may be optionally pre-selected and specified via the
1783
+ # packages parameter, otherwise packages are picked from the set of
1784
+ # available packages. The packages parameter is in the form of a hash
1785
+ # with package names as keys pointing to arrays of package specs (our
1786
+ # standard hash of package metadata and source). The return value
1787
+ # will be an array of package specs.
1788
+ MAX_POSSIBLE_SOLUTIONS_TO_CHECK = 10000
1789
+ def best_solution(requirements, packages, core_packages)
1790
+ # Dup objects passed to us so that resolve_dependencies is free to
1791
+ # change them without potentially messing up our caller
1792
+ result = resolve_dependencies(requirements.dup, packages.dup, core_packages.dup)
1793
+ if @@debug
1794
+ if result[:solution]
1795
+ puts "bestsol picks: #{result[:solution].inspect}" if @@debug
1796
+ else
1797
+ puts "bestsol checked #{result[:number_of_possible_solutions_checked]} possible solutions, none worked"
1798
+ end
1799
+ end
1800
+ result[:solution]
1801
+ end
1802
+
1803
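To make the argument shapes concrete, here is a hedged sketch of what a caller might hand to best_solution; package entries follow the usual metadata/source hash convention used in this file, and the names, versions, and URL are hypothetical.

# Illustrative only: shapes of best_solution's arguments.
requirements  = [{ :name => 'myapp' },
                 { :name => 'mylib', :minimum_version => '2.0' }]
packages      = { 'mylib' => [{ :metadata => { :name => 'mylib', :version => '2.1' },
                                :source   => 'http://pkgserver.example.com/mylib-2.1-1.tpkg' }] }
# 'myapp' is not pre-selected, so resolve_dependencies will populate it from
# available_packages_that_meet_requirement.
core_packages = ['myapp', 'mylib']
# solution = tpkg.best_solution(requirements, packages, core_packages)  # assumed instance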
+ # Recursive method used by best_solution
1804
+ def resolve_dependencies(requirements, packages, core_packages, number_of_possible_solutions_checked=0)
1805
+ # Make sure we have populated package lists for all requirements.
1806
+ # Filter the package lists against the requirements and
1807
+ # ensure we can at least satisfy the initial requirements.
1808
+ requirements.each do |req|
1809
+ if !packages[req[:name]]
1810
+ puts "resolvedeps initializing packages for #{req.inspect}" if @@debug
1811
+ packages[req[:name]] =
1812
+ available_packages_that_meet_requirement(req)
1813
+ else
1814
+ # Loop over packages and eliminate ones that don't work for
1815
+ # this requirement
1816
+ puts "resolvedeps filtering packages for #{req.inspect}" if @@debug
1817
+ packages[req[:name]] =
1818
+ packages[req[:name]].select do |pkg|
1819
+ # When this method is called recursively there might be a
1820
+ # nil entry inserted into packages by the sorting code
1821
+ # below. We need to skip those.
1822
+ if pkg != nil
1823
+ Tpkg::package_meets_requirement?(pkg, req)
1824
+ end
1825
+ end
1826
+ end
1827
+ if packages[req[:name]].empty?
1828
+ if @@debug
1829
+ puts "No packages matching #{req.inspect}"
1830
+ end
1831
+ return {:number_of_possible_solutions_checked => number_of_possible_solutions_checked}
1832
+ end
1833
+ end
1834
+ # Sort the packages
1835
+ packages.each do |pkgname, pkgs|
1836
+ pkgs.sort!(&SORT_PACKAGES)
1837
+ # Only currently installed packages are allowed to score 0.
1838
+ # Anything else can score 1 at best. This ensures
1839
+ # that we prefer the solution which leaves the most
1840
+ # currently installed packages alone.
1841
+ if pkgs[0][:source] != :currently_installed &&
1842
+ pkgs[0][:source] != :native_installed
1843
+ pkgs.unshift(nil)
1844
+ end
1845
+ end
1846
+
1847
+ if @@debug
1848
+ puts "Packages after initial population and filtering:"
1849
+ puts packages.inspect
1850
+ end
1851
+
1852
+ # Here's an example of the possible solution sets we should come
1853
+ # up with and the proper ordering. Sets with identical averages
1854
+ # are equivalent, the order they appear in does not matter.
1855
+ #
1856
+ # packages: [a0, a1, a2], [b0, b1, b2], [c0, c1, c2]
1857
+ # core_packages: a, b
1858
+ #
1859
+ # [a0, b0, c0] (core avg 0) (avg 0)
1860
+ # [a0, b0, c1] (avg .33)
1861
+ # [a0, b0, c2] (avg .66)
1862
+ # [a0, b1, c0] (core avg .5) (avg .33)
1863
+ # [a1, b0, c0]
1864
+ # [a0, b1, c1] (avg .66)
1865
+ # [a1, b0, c1]
1866
+ # [a0, b1, c2] (avg 1)
1867
+ # [a1, b0, c2]
1868
+ # [a1, b1, c0] (core avg 1) (avg .66)
1869
+ # [a0, b2, c0]
1870
+ # [a2, b0, c0]
1871
+ # [a1, b1, c1] (avg 1)
1872
+ # [a0, b2, c1]
1873
+ # [a2, b0, c1]
1874
+ # [a1, b1, c2] (avg 1.33)
1875
+ # [a0, b2, c2]
1876
+ # [a2, b0, c2]
1877
+ # [a1, b2, c0] (core avg 1.5) (avg 1)
1878
+ # [a2, b1, c0]
1879
+ # [a1, b2, c1] (avg 1.33)
1880
+ # [a2, b1, c1]
1881
+ # [a1, b2, c2] (avg 1.67)
1882
+ # [a2, b1, c2]
1883
+ # [a2, b2, c0] (core avg 2) (avg 1.33)
1884
+ # [a2, b2, c1] (avg 1.67)
1885
+ # [a2, b2, c2] (avg 2)
1886
+
1887
+ # Divide packages into core and non-core packages
1888
+ corepkgs = packages.reject{|pkgname, pkgs| !core_packages.include?(pkgname)}
1889
+ noncorepkgs = packages.reject{|pkgname, pkgs| core_packages.include?(pkgname)}
1890
+
1891
+ # Calculate total package depth, the sum of the lengths (or rather
1892
+ # the max array index) of each array of packages.
1893
+ coretotaldepth = corepkgs.inject(0) {|memo, pkgs| memo + pkgs[1].length - 1}
1894
+ noncoretotaldepth = noncorepkgs.inject(0) {|memo, pkgs| memo + pkgs[1].length - 1}
1895
+ if @@debug
1896
+ puts "resolvedeps coretotaldepth #{coretotaldepth}"
1897
+ puts "resolvedeps noncoretotaldepth #{noncoretotaldepth}"
1898
+ end
1899
+
1900
+ # First pass, combinations of core packages
1901
+ (0..coretotaldepth).each do |coredepth|
1902
+ puts "resolvedeps checking coredepth: #{coredepth}" if @@debug
1903
+ core_solutions = [{:remaining_coredepth => coredepth, :pkgs => []}]
1904
+ corepkgs.each do |pkgname, pkgs|
1905
+ puts "resolvedeps corepkg #{pkgname}: #{pkgs.inspect}" if @@debug
1906
+ new_core_solutions = []
1907
+ core_solutions.each do |core_solution|
1908
+ remaining_coredepth = core_solution[:remaining_coredepth]
1909
+ puts "resolvedeps :remaining_coredepth: #{remaining_coredepth}" if @@debug
1910
+ (0..[remaining_coredepth, pkgs.length-1].min).each do |corepkgdepth|
1911
+ puts "resolvedeps corepkgdepth: #{corepkgdepth}" if @@debug
1912
+ # We insert a nil entry in some situations (see the sort
1913
+ # step earlier), so skip nil entries in the pkgs array.
1914
+ if pkgs[corepkgdepth] != nil
1915
+ coresol = core_solution.dup
1916
+ # Hash#dup doesn't dup each key/value, so we need to
1917
+ # explicitly dup :pkgs so that each copy has an
1918
+ # independent array that we can modify.
1919
+ coresol[:pkgs] = core_solution[:pkgs].dup
1920
+ coresol[:remaining_coredepth] -= corepkgdepth
1921
+ coresol[:pkgs] << pkgs[corepkgdepth]
1922
+ new_core_solutions << coresol
1923
+ # If this is a complete combination of core packages then
1924
+ # proceed to the next step
1925
+ puts "resolvedeps coresol[:pkgs] #{coresol[:pkgs].inspect}" if @@debug
1926
+ if coresol[:pkgs].length == corepkgs.length
1927
+ puts "resolvedeps complete core pkg set: #{coresol.inspect}" if @@debug
1928
+ # Solutions with remaining depth are duplicates of
1929
+ # solutions we already checked at lower depth levels
1930
+ # I.e. at coredepth==0 we'd have:
1931
+ # {:pkgs=>{a0, b0}, :remaining_coredepth=0}
1932
+ # And at coredepth==1:
1933
+ # {:pkgs=>{a0,b0}, :remaining_coredepth=1}
1934
+ # Whereas at coredepth==1 this is new and needs to be checked:
1935
+ # {:pkgs=>{a1,b0}, :remaining_coredepth=0}
1936
+ if coresol[:remaining_coredepth] == 0
1937
+ # Second pass, add combinations of non-core packages
1938
+ if noncorepkgs.empty?
1939
+ puts "resolvedeps noncorepkgs empty, checking solution" if @@debug
1940
+ result = check_solution(coresol, requirements, packages, core_packages, number_of_possible_solutions_checked)
1941
+ if result[:solution]
1942
+ return result
1943
+ else
1944
+ number_of_possible_solutions_checked = result[:number_of_possible_solutions_checked]
1945
+ end
1946
+ else
1947
+ (0..noncoretotaldepth).each do |noncoredepth|
1948
+ puts "resolvedeps noncoredepth: #{noncoredepth}" if @@debug
1949
+ coresol[:remaining_noncoredepth] = noncoredepth
1950
+ solutions = [coresol]
1951
+ noncorepkgs.each do |ncpkgname, ncpkgs|
1952
+ puts "resolvedeps noncorepkg #{ncpkgname}: #{ncpkgs.inspect}" if @@debug
1953
+ new_solutions = []
1954
+ solutions.each do |solution|
1955
+ remaining_noncoredepth = solution[:remaining_noncoredepth]
1956
+ puts "resolvedeps :remaining_noncoredepth: #{remaining_noncoredepth}" if @@debug
1957
+ (0..[remaining_noncoredepth, ncpkgs.length-1].min).each do |ncpkgdepth|
1958
+ puts "resolvedeps ncpkgdepth: #{ncpkgdepth}" if @@debug
1959
+ # We insert a nil entry in some situations (see the sort
1960
+ # step earlier), so skip nil entries in the pkgs array.
1961
+ if ncpkgs[ncpkgdepth] != nil
1962
+ sol = solution.dup
1963
+ # Hash#dup doesn't dup each key/value, so we need to
1964
+ # explicitly dup :pkgs so that each copy has an
1965
+ # independent array that we can modify.
1966
+ sol[:pkgs] = solution[:pkgs].dup
1967
+ sol[:remaining_noncoredepth] -= ncpkgdepth
1968
+ sol[:pkgs] << ncpkgs[ncpkgdepth]
1969
+ new_solutions << sol
1970
+ # If this is a complete combination of packages then
1971
+ # proceed to the next step
1972
+ puts "resolvedeps sol[:pkgs] #{sol[:pkgs].inspect}" if @@debug
1973
+ if sol[:pkgs].length == packages.length
1974
+ puts "resolvedeps complete pkg set: #{sol.inspect}" if @@debug
1975
+ # Solutions with remaining depth are duplicates of
1976
+ # solutions we already checked at lower depth levels
1977
+ if sol[:remaining_noncoredepth] == 0
1978
+ result = check_solution(sol, requirements, packages, core_packages, number_of_possible_solutions_checked)
1979
+ if result[:solution]
1980
+ return result
1981
+ else
1982
+ number_of_possible_solutions_checked = result[:number_of_possible_solutions_checked]
1983
+ end
1984
+ end
1985
+ end
1986
+ end
1987
+ end
1988
+ end
1989
+ solutions = new_solutions
1990
+ end
1991
+ end
1992
+ end
1993
+ end
1994
+ end
1995
+ end
1996
+ end
1997
+ end
1998
+ core_solutions = new_core_solutions
1999
+ end
2000
+ end
2001
+ # No solutions found
2002
+ return {:number_of_possible_solutions_checked => number_of_possible_solutions_checked}
2003
+ end
2004
+
2005
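The nested depth loops above amount to enumerating index combinations in order of increasing total depth (the sum of the indices), so combinations that keep more of the highest-ranked packages are tried first. A standalone sketch of that ordering for two package lists, illustrative only:

# Illustrative only: walk [i, j] index pairs in order of increasing i + j,
# the same ordering resolve_dependencies uses for its solution candidates.
a = ['a0', 'a1', 'a2']
b = ['b0', 'b1', 'b2']
max_depth = (a.length - 1) + (b.length - 1)
(0..max_depth).each do |depth|
  (0..[depth, a.length - 1].min).each do |i|
    j = depth - i
    next if j >= b.length
    puts "depth #{depth}: #{a[i]}, #{b[j]}"
  end
end
# depth 0: a0, b0
# depth 1: a0, b1 / a1, b0
# depth 2: a0, b2 / a1, b1 / a2, b0 ...and so on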
+ # Used by resolve_dependencies
2006
+ def check_solution(solution, requirements, packages, core_packages, number_of_possible_solutions_checked)
2007
+ number_of_possible_solutions_checked += 1
2008
+ # Probably should give the user a way to override this
2009
+ if number_of_possible_solutions_checked > MAX_POSSIBLE_SOLUTIONS_TO_CHECK
2010
+ raise "Checked #{MAX_POSSIBLE_SOLUTIONS_TO_CHECK} possible solutions to requirements and dependencies, no solution found"
2011
+ end
2012
+
2013
+ if @@debug
2014
+ puts "checksol checking #{solution.inspect}"
2015
+ end
2016
+
2017
+ # Extract dependencies from each package in the solution
2018
+ newreqs = []
2019
+ solution[:pkgs].each do |pkg|
2020
+ puts "checksol pkg #{pkg.inspect}" if @@debug
2021
+ if pkg[:metadata][:dependencies]
2022
+ pkg[:metadata][:dependencies].each do |depreq|
2023
+ if !requirements.include?(depreq) && !newreqs.include?(depreq)
2024
+ puts "checksol new depreq #{depreq.inspect}" if @@debug
2025
+ newreqs << depreq
2026
+ end
2027
+ end
2028
+ end
2029
+ end
2030
+
2031
+ if newreqs.empty?
2032
+ # No additional requirements, this is a complete solution
2033
+ puts "checksol no newreqs, complete solution" if @@debug
2034
+ return {:solution => solution[:pkgs]}
2035
+ else
2036
+ newreqs_that_need_packages = []
2037
+ newreqs.each do |newreq|
2038
+ puts "checksol checking newreq: #{newreq.inspect}" if @@debug
2039
+ if packages[newreq[:name]]
2040
+ pkg = solution[:pkgs].find{|solpkg| solpkg[:metadata][:name] == newreq[:name]}
2041
+ puts "checksol newreq pkg: #{pkg.inspect}" if @@debug
2042
+ if Tpkg::package_meets_requirement?(pkg, newreq)
2043
+ # No change to solution needed
2044
+ else
2045
+ # Solution no longer works
2046
+ puts "checksol solution no longer works" if @@debug
2047
+ return {:number_of_possible_solutions_checked => number_of_possible_solutions_checked}
2048
+ end
2049
+ else
2050
+ puts "checksol newreq needs packages" if @@debug
2051
+ newreqs_that_need_packages << newreq
2052
+ end
2053
+ end
2054
+ if newreqs_that_need_packages.empty?
2055
+ # None of the new requirements changed the solution, so the solution is complete
2056
+ puts "checksol no newreqs that need packages, complete solution" if @@debug
2057
+ return {:solution => solution[:pkgs]}
2058
+ else
2059
+ puts "checksol newreqs need packages, calling resolvedeps" if @@debug
2060
+ result = resolve_dependencies(requirements+newreqs_that_need_packages, packages.dup, core_packages, number_of_possible_solutions_checked)
2061
+ if result[:solution]
2062
+ return result
2063
+ else
2064
+ number_of_possible_solutions_checked = result[:number_of_possible_solutions_checked]
2065
+ end
2066
+ end
2067
+ end
2068
+ return {:number_of_possible_solutions_checked => number_of_possible_solutions_checked}
2069
+ end
2070
+
2071
+ def download(source, path, downloaddir = nil)
2072
+ http = Tpkg::gethttp(URI.parse(source))
2073
+ localdir = source_to_local_directory(source)
2074
+ localpath = File.join(localdir, File.basename(path))
2075
+
2076
+ # Don't download again if file is already there from previous installation
2077
+ # and still has valid checksum
2078
+ if File.file?(localpath)
2079
+ begin
2080
+ Tpkg::verify_package_checksum(localpath)
2081
+ return localpath
2082
+ rescue RuntimeError, NoMethodError
2083
+ # Previous download is bad (which can happen for a variety of
2084
+ # reasons like an interrupted download or a bad package on the
2085
+ # server). Delete it and we'll try to grab it again.
2086
+ File.delete(localpath)
2087
+ end
2088
+ else
2089
+ # If downloaddir is specified, then download to that directory. Otherwise,
2090
+ # download to default source directory
2091
+ localdir = downloaddir || localdir
2092
+ if !File.exist?(localdir)
2093
+ FileUtils.mkdir_p(localdir)
2094
+ end
2095
+ localpath = File.join(localdir, File.basename(path))
2096
+ end
2097
+ uri = URI.join(source, path)
2098
+ tmpfile = Tempfile.new(File.basename(localpath), File.dirname(localpath))
2099
+ http.request_get(uri.path) do |response|
2100
+ # Package files can be quite large, so we transfer the package to a
2101
+ # local file in chunks
2102
+ response.read_body do |chunk|
2103
+ tmpfile.write(chunk)
2104
+ end
2105
+ remotedate = Time.httpdate(response['Date'])
2106
+ File.utime(remotedate, remotedate, tmpfile.path)
2107
+ end
2108
+ tmpfile.close
2109
+
2110
+ begin
2111
+ Tpkg::verify_package_checksum(tmpfile.path)
2112
+ File.chmod(0644, tmpfile.path)
2113
+ File.rename(tmpfile.path, localpath)
2114
+ rescue
2115
+ raise "Unable to download and/or verify the package."
2116
+ end
2117
+
2118
+ localpath
2119
+ end
2120
+
2121
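The reuse-or-refetch logic above hinges on the checksum check. Below is a self-contained sketch of the same pattern using a plain SHA-256 comparison; tpkg's real check is Tpkg::verify_package_checksum, which reads the expected digest out of the package itself, so the helper and its arguments are hypothetical.

require 'digest/sha2'

# Illustrative only: keep a previously downloaded file if its digest still
# matches, otherwise delete it so the caller fetches a fresh copy.
def reusable_download?(localpath, expected_sha256)
  return false unless File.file?(localpath)
  if Digest::SHA256.hexdigest(File.read(localpath)) == expected_sha256
    true
  else
    File.delete(localpath)
    false
  end
end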
+ # Given a package's metadata return a hash of init scripts in the
2122
+ # package and the entry for that file from the metadata
2123
+ def init_scripts(metadata)
2124
+ init_scripts = {}
2125
+ # don't do anything unless we have to
2126
+ unless metadata[:files] && metadata[:files][:files]
2127
+ return init_scripts
2128
+ end
2129
+ metadata[:files][:files].each do |tpkgfile|
2130
+ if tpkgfile[:init]
2131
+ tpkg_path = tpkgfile[:path]
2132
+ installed_path = nil
2133
+ if tpkg_path[0,1] == File::SEPARATOR
2134
+ installed_path = File.join(@file_system_root, tpkg_path)
2135
+ else
2136
+ installed_path = File.join(@base, tpkg_path)
2137
+ end
2138
+ init_scripts[installed_path] = tpkgfile
2139
+ end
2140
+ end
2141
+ init_scripts
2142
+ end
2143
+
2144
+ # Given a package's metadata return a hash of init scripts in the
2145
+ # package and where they need to be linked to on the system
2146
+ def init_links(metadata)
2147
+ links = {}
2148
+ init_scripts(metadata).each do |installed_path, tpkgfile|
2149
+ # SysV-style init
2150
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/ ||
2151
+ Tpkg::get_os =~ /Debian|Ubuntu/ ||
2152
+ Tpkg::get_os =~ /Solaris/
2153
+ start = '99'
2154
+ if tpkgfile[:init][:start]
2155
+ start = tpkgfile[:init][:start]
2156
+ end
2157
+ levels = nil
2158
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/ ||
2159
+ Tpkg::get_os =~ /Debian|Ubuntu/
2160
+ levels = ['2', '3', '4', '5']
2161
+ elsif Tpkg::get_os =~ /Solaris/
2162
+ levels = ['2', '3']
2163
+ end
2164
+ if tpkgfile[:init][:levels]
2165
+ levels = tpkgfile[:init][:levels]
2166
+ end
2167
+ init_directory = nil
2168
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/
2169
+ init_directory = File.join(@file_system_root, 'etc', 'rc.d')
2170
+ elsif Tpkg::get_os =~ /Debian|Ubuntu/ ||
2171
+ Tpkg::get_os =~ /Solaris/
2172
+ init_directory = File.join(@file_system_root, 'etc')
2173
+ end
2174
+ levels.each do |level|
2175
+ links[File.join(init_directory, "rc#{level}.d", 'S' + start.to_s + File.basename(installed_path))] = installed_path
2176
+ end
2177
+ elsif Tpkg::get_os =~ /FreeBSD/
2178
+ init_directory = File.join(@file_system_root, 'usr', 'local', 'etc', 'rc.d')
2179
+ if tpkgfile[:init][:levels] && tpkgfile[:init][:levels].empty?
2180
+ # User doesn't want the init script linked in to auto-start
2181
+ else
2182
+ links[File.join(init_directory, File.basename(installed_path))] = installed_path
2183
+ end
2184
+ else
2185
+ raise "No init script support for #{Tpkg::get_os}"
2186
+ end
2187
+ end
2188
+ links
2189
+ end
2190
+
2191
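A hedged example of the hash init_links builds on a Red Hat style system, assuming a base of '/opt/tpkg', the default start number 99, and run levels 2 through 5 (all paths and names are illustrative):

# Illustrative only: link name => installed init script path.
links = {
  '/etc/rc.d/rc2.d/S99myapp' => '/opt/tpkg/etc/init.d/myapp',
  '/etc/rc.d/rc3.d/S99myapp' => '/opt/tpkg/etc/init.d/myapp',
  '/etc/rc.d/rc4.d/S99myapp' => '/opt/tpkg/etc/init.d/myapp',
  '/etc/rc.d/rc5.d/S99myapp' => '/opt/tpkg/etc/init.d/myapp'
}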
+ # Given a package's metadata return a hash of crontabs in the
2192
+ # package and where they need to be installed on the system
2193
+ def crontab_destinations(metadata)
2194
+ destinations = {}
2195
+
2196
+ # Don't do anything unless we have to
2197
+ unless metadata[:files] && metadata[:files][:files]
2198
+ return destinations
2199
+ end
2200
+
2201
+ metadata[:files][:files].each do |tpkgfile|
2202
+ if tpkgfile[:crontab]
2203
+ tpkg_path = tpkgfile[:path]
2204
+ installed_path = nil
2205
+ if tpkg_path[0,1] == File::SEPARATOR
2206
+ installed_path = File.join(@file_system_root, tpkg_path)
2207
+ else
2208
+ installed_path = File.join(@base, tpkg_path)
2209
+ end
2210
+ destinations[installed_path] = {}
2211
+
2212
+ # Decide whether we're going to add the file to a per-user
2213
+ # crontab or link it into a directory of misc. crontabs. If the
2214
+ # system only supports per-user crontabs we have to go the
2215
+ # per-user route. If the system supports both we decide based on
2216
+ # whether the package specifies a user for the crontab.
2217
+ # Systems that only support per-user style
2218
+ if Tpkg::get_os =~ /FreeBSD/ ||
2219
+ Tpkg::get_os =~ /Solaris/ ||
2220
+ Tpkg::get_os =~ /Darwin/
2221
+ if tpkgfile[:crontab][:user]
2222
+ user = tpkgfile[:crontab][:user]
2223
+ if Tpkg::get_os =~ /FreeBSD/
2224
+ destinations[installed_path][:file] = File.join(@file_system_root, 'var', 'cron', 'tabs', user)
2225
+ elsif Tpkg::get_os =~ /Solaris/
2226
+ destinations[installed_path][:file] = File.join(@file_system_root, 'var', 'spool', 'cron', 'crontabs', user)
2227
+ elsif Tpkg::get_os =~ /Darwin/
2228
+ destinations[installed_path][:file] = File.join(@file_system_root, 'usr', 'lib', 'cron', 'tabs', user)
2229
+ end
2230
+ else
2231
+ raise "No user specified for crontab in #{metadata[:filename]}"
2232
+ end
2233
+ # Systems that support cron.d style
2234
+ elsif Tpkg::get_os =~ /RedHat|CentOS|Fedora/ ||
2235
+ Tpkg::get_os =~ /Debian|Ubuntu/
2236
+ # If a user is specified go the per-user route
2237
+ if tpkgfile[:crontab][:user]
2238
+ user = tpkgfile[:crontab][:user]
2239
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/
2240
+ destinations[installed_path][:file] = File.join(@file_system_root, 'var', 'spool', 'cron', user)
2241
+ elsif Tpkg::get_os =~ /Debian|Ubuntu/
2242
+ destinations[installed_path][:file] = File.join(@file_system_root, 'var', 'spool', 'cron', 'crontabs', user)
2243
+ end
2244
+ # Otherwise go the cron.d route
2245
+ else
2246
+ destinations[installed_path][:link] = File.join(@file_system_root, 'etc', 'cron.d', File.basename(installed_path))
2247
+ end
2248
+ else
2249
+ raise "No crontab support for #{Tpkg::get_os}"
2250
+ end
2251
+ end
2252
+ end
2253
+ destinations
2254
+ end
2255
+
2256
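For reference, crontab_destinations produces one of two destination styles per crontab file: a :file entry when the content will be appended to a per-user crontab, or a :link entry when the file can simply be symlinked into a cron.d directory. A hedged example with made-up paths and user name:

# Illustrative only: installed crontab path => destination description.
destinations = {
  # Debian-style, a user was specified in the metadata
  '/opt/tpkg/etc/myapp.crontab'   => { :file => '/var/spool/cron/crontabs/myuser' },
  # Red Hat-style, no user specified, so it is linked into cron.d
  '/opt/tpkg/etc/cleanup.crontab' => { :link => '/etc/cron.d/cleanup.crontab' }
}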
+ def run_external(pkgfile, operation, name, data)
2257
+ externalpath = File.join(@external_directory, name)
2258
+ if !File.executable?(externalpath)
2259
+ raise "External #{externalpath} does not exist or is not executable"
2260
+ end
2261
+ case operation
2262
+ when :install
2263
+ IO.popen("#{externalpath} '#{pkgfile}' install", 'w') do |pipe|
2264
+ pipe.write(data)
2265
+ end
2266
+ when :remove
2267
+ IO.popen("#{externalpath} '#{pkgfile}' remove", 'w') do |pipe|
2268
+ pipe.write(data)
2269
+ end
2270
+ else
2271
+ raise "Bug, unknown external operation #{operation}"
2272
+ end
2273
+ end
2274
+
2275
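The external protocol above is small: tpkg runs the handler in the externals directory with the package file name and the operation ('install' or 'remove') as arguments, and writes the external's data to its stdin. Externals are user-supplied scripts, so the handler below is purely a hypothetical sketch of one.

#!/usr/bin/env ruby
# Illustrative external handler: ARGV[0] is the package file, ARGV[1] is the
# operation, and the external's data arrives on standard input.
pkgfile, operation = ARGV
data = STDIN.read
record = "/var/tmp/#{File.basename(pkgfile)}.external"
case operation
when 'install'
  File.open(record, 'w') { |f| f.write(data) }
when 'remove'
  File.delete(record) if File.exist?(record)
end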
+ # Unpack the files from a package into place, decrypt as necessary, set
2276
+ # permissions and ownership, etc. Does not check for conflicting
2277
+ # files or packages, etc. Those checks (if desired) must be done before
2278
+ # calling this method.
2279
+ def unpack(package_file, passphrase=nil, options={})
2280
+ ret_val = 0
2281
+ metadata = Tpkg::metadata_from_package(package_file)
2282
+
2283
+ # Unpack files in a temporary directory
2284
+ # I'd prefer to unpack on the fly so that the user doesn't need to
2285
+ # have disk space to hold three copies of the package (the package
2286
+ # file itself, this temporary unpack, and the final copy of the
2287
+ # files). However, I haven't figured out a way to get that to work,
2288
+ # since we need to strip several layers of directories out of the
2289
+ # directory structure in the package.
2290
+ topleveldir = Tpkg::package_toplevel_directory(package_file)
2291
+ workdir = Tpkg::tempdir(topleveldir, @tmp_directory)
2292
+ system("#{@tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{@tar} -C #{workdir} -xpf -")
2293
+ files_info = {} # store perms, uid, gid, etc. for files
2294
+ checksums_of_decrypted_files = {}
2295
+ root_dir = File.join(workdir, 'tpkg', 'root')
2296
+ reloc_dir = File.join(workdir, 'tpkg', 'reloc')
2297
+ rel_root_dir = File.join('tpkg', 'root')
2298
+ rel_reloc_dir = File.join('tpkg', 'reloc')
2299
+
2300
+ # Get list of conflicting files/directories & store their perm/ownership. That way, we can
2301
+ # set them to the correct values later on in order to preserve them.
2302
+ # TODO: verify this command works on all platforms
2303
+ files = `#{@tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{@tar} -tf -`
2304
+ files = files.split("\n")
2305
+ conflicting_files = {}
2306
+ files.each do | file |
2307
+ if file =~ /^#{rel_root_dir}/
2308
+ possible_conflicting_file = "#{@file_system_root}/#{file[rel_root_dir.length ..-1]}"
2309
+ elsif file =~ /^#{rel_reloc_dir}/
2310
+ possible_conflicting_file = "#{@base}/#{file[rel_reloc_dir.length + 1..-1]}"
2311
+ end
2312
+ if possible_conflicting_file && (File.exists?(possible_conflicting_file) && !File.symlink?(possible_conflicting_file))
2313
+ conflicting_files[File.join(workdir, file)] = File.stat(possible_conflicting_file)
2314
+ end
2315
+ end
2316
+
2317
+ # Run preinstall script
2318
+ if File.exist?(File.join(workdir, 'tpkg', 'preinstall'))
2319
+ pwd = Dir.pwd
2320
+ # chdir into the working directory so that the user can specify a
2321
+ # relative path to their file/script.
2322
+ Dir.chdir(File.join(workdir, 'tpkg'))
2323
+
2324
+ # Warn the user about non-executable files, as system will just
2325
+ # silently fail and exit if that's the case.
2326
+ if !File.executable?(File.join(workdir, 'tpkg', 'preinstall'))
2327
+ warn "Warning: preinstall script for #{File.basename(package_file)} is not executable, execution will likely fail"
2328
+ end
2329
+ if @force
2330
+ system(File.join(workdir, 'tpkg', 'preinstall')) || warn("Warning: preinstall for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
2331
+ else
2332
+ system(File.join(workdir, 'tpkg', 'preinstall')) || raise("Error: preinstall for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
2333
+ end
2334
+ # Switch back to our previous directory
2335
+ Dir.chdir(pwd)
2336
+ end
2337
+
2338
+ # Run any externals
2339
+ metadata[:externals].each do |external|
2340
+ # If the external references a datafile or datascript then read/run it
2341
+ # now that we've unpacked the package contents and have the file/script
2342
+ # available. This will get us the data for the external.
2343
+ if external[:datafile] || external[:datascript]
2344
+ pwd = Dir.pwd
2345
+ # chdir into the working directory so that the user can specify a
2346
+ # relative path to their file/script.
2347
+ Dir.chdir(File.join(workdir, 'tpkg'))
2348
+ if external[:datafile]
2349
+ # Read the file
2350
+ external[:data] = IO.read(external[:datafile])
2351
+ # Drop the datafile key so that we don't waste time re-reading the
2352
+ # datafile again in the future.
2353
+ external.delete(:datafile)
2354
+ elsif external[:datascript]
2355
+ # Run the script
2356
+ IO.popen(external[:datascript]) do |pipe|
2357
+ external[:data] = pipe.read
2358
+ end
2359
+ # Drop the datascript key so that we don't waste time re-running the
2360
+ # datascript again in the future.
2361
+ external.delete(:datascript)
2362
+ end
2363
+ # Switch back to our previous directory
2364
+ Dir.chdir(pwd)
2365
+ end
2366
+ if !options[:externals_to_skip] || !options[:externals_to_skip].include?(external)
2367
+ run_external(metadata[:filename], :install, external[:name], external[:data])
2368
+ end
2369
+ end if metadata[:externals]
2370
+
2371
+ # Since we're stuck with unpacking to a temporary folder, take
2372
+ # advantage of that to handle permissions, ownership and decryption
2373
+ # tasks before moving the files into their final location.
2374
+
2375
+ # Handle any default permissions and ownership
2376
+ default_uid = 0
2377
+ default_gid = 0
2378
+ default_perms = nil
2379
+
2380
+ if metadata[:files] && metadata[:files][:file_defaults]
2381
+ if metadata[:files][:file_defaults][:posix]
2382
+ if metadata[:files][:file_defaults][:posix][:owner]
2383
+ default_uid = Tpkg::lookup_uid(metadata[:files][:file_defaults][:posix][:owner])
2384
+ end
2385
+ if metadata[:files][:file_defaults][:posix][:group]
2386
+ default_gid = Tpkg::lookup_gid(metadata[:files][:file_defaults][:posix][:group])
2387
+ end
2388
+ if metadata[:files][:file_defaults][:posix][:perms]
2389
+ default_perms = metadata[:files][:file_defaults][:posix][:perms]
2390
+ end
2391
+ end
2392
+ end
2393
+
2394
+ # Set default dir uid/gid to be same as for file.
2395
+ default_dir_uid = default_uid
2396
+ default_dir_gid = default_gid
2397
+ default_dir_perms = 0755
2398
+
2399
+ if metadata[:files] && metadata[:files][:dir_defaults]
2400
+ if metadata[:files][:dir_defaults][:posix]
2401
+ if metadata[:files][:dir_defaults][:posix][:owner]
2402
+ default_dir_uid = Tpkg::lookup_uid(metadata[:files][:dir_defaults][:posix][:owner])
2403
+ end
2404
+ if metadata[:files][:dir_defaults][:posix][:group]
2405
+ default_dir_gid = Tpkg::lookup_gid(metadata[:files][:dir_defaults][:posix][:group])
2406
+ end
2407
+ if metadata[:files][:dir_defaults][:posix][:perms]
2408
+ default_dir_perms = metadata[:files][:dir_defaults][:posix][:perms]
2409
+ end
2410
+ end
2411
+ end
2412
+
2413
+ Find.find(root_dir, reloc_dir) do |f|
2414
+ # If the package doesn't contain either of the top level
2415
+ # directories we need to skip them, find will pass them to us
2416
+ # even if they don't exist.
2417
+ next if !File.exist?(f)
2418
+
2419
+ begin
2420
+ if File.directory?(f)
2421
+ File.chown(default_dir_uid, default_dir_gid, f)
2422
+ else
2423
+ File.chown(default_uid, default_gid, f)
2424
+ end
2425
+ rescue Errno::EPERM
2426
+ raise if Process.euid == 0
2427
+ end
2428
+ if File.file?(f) && !File.symlink?(f)
2429
+ if default_perms
2430
+ File.chmod(default_perms, f)
2431
+ end
2432
+ elsif File.directory?(f) && !File.symlink?(f)
2433
+ File.chmod(default_dir_perms, f)
2434
+ end
2435
+ end
2436
+
2437
+ # Reset the permission/ownership of the conflicting files as how they were before.
2438
+ # This needs to be done after the default permission/ownership is applied, but before
2439
+ # the handling of ownership/permissions on specific files
2440
+ conflicting_files.each do | file, stat |
2441
+ File.chmod(stat.mode, file)
2442
+ File.chown(stat.uid, stat.gid, file)
2443
+ end
2444
+
2445
+ # Handle any decryption and ownership/permissions on specific files
2446
+ metadata[:files][:files].each do |tpkgfile|
2447
+ tpkg_path = tpkgfile[:path]
2448
+ working_path = nil
2449
+ if tpkg_path[0,1] == File::SEPARATOR
2450
+ working_path = File.join(workdir, 'tpkg', 'root', tpkg_path)
2451
+ else
2452
+ working_path = File.join(workdir, 'tpkg', 'reloc', tpkg_path)
2453
+ end
2454
+ if !File.exist?(working_path) && !File.symlink?(working_path)
2455
+ raise "tpkg.xml for #{File.basename(package_file)} references file #{tpkg_path} but that file is not in the package"
2456
+ end
2457
+
2458
+ # Set permissions and ownership for specific files
2459
+ # We do this before the decryption stage so that permissions and
2460
+ # ownership designed to protect private file contents are in place
2461
+ # prior to decryption. The decrypt method preserves the permissions
2462
+ # and ownership of the encrypted file on the decrypted file.
2463
+ if tpkgfile[:posix]
2464
+ if tpkgfile[:posix][:owner] || tpkgfile[:posix][:group]
2465
+ uid = nil
2466
+ if tpkgfile[:posix][:owner]
2467
+ uid = Tpkg::lookup_uid(tpkgfile[:posix][:owner])
2468
+ end
2469
+ gid = nil
2470
+ if tpkgfile[:posix][:group]
2471
+ gid = Tpkg::lookup_gid(tpkgfile[:posix][:group])
2472
+ end
2473
+ begin
2474
+ File.chown(uid, gid, working_path)
2475
+ rescue Errno::EPERM
2476
+ raise if Process.euid == 0
2477
+ end
2478
+ end
2479
+ if tpkgfile[:posix][:perms]
2480
+ perms = tpkgfile[:posix][:perms]
2481
+ File.chmod(perms, working_path)
2482
+ end
2483
+ end
2484
+
2485
+ # Decrypt any files marked for decryption
2486
+ if tpkgfile[:encrypt]
2487
+ if passphrase.nil?
2488
+ # If the user didn't supply a passphrase then just remove the
2489
+ # encrypted file. This allows users to install packages that
2490
+ # contain encrypted files for which they don't have the
2491
+ # passphrase. They end up with just the non-encrypted files,
2492
+ # potentially useful for development or QA environments.
2493
+ File.delete(working_path)
2494
+ else
2495
+ (1..3).each do | i |
2496
+ begin
2497
+ Tpkg::decrypt(metadata[:name], working_path, passphrase)
2498
+ break
2499
+ rescue OpenSSL::CipherError
2500
+ @@passphrase = nil
2501
+ if i == 3
2502
+ raise "Incorrect passphrase."
2503
+ else
2504
+ puts "Incorrect passphrase. Try again."
2505
+ end
2506
+ end
2507
+ end
2508
+
2509
+ #digest = Digest::SHA256.file(working_path).hexdigest
2510
+ digest = Digest::SHA256.hexdigest(File.read(working_path))
2511
+ # get checksum for the decrypted file. Will be used for creating file_metadata.xml
2512
+ checksums_of_decrypted_files[File.expand_path(tpkg_path)] = digest
2513
+ end
2514
+ end
2515
+ end if metadata[:files] && metadata[:files][:files]
2516
+
2517
+ # We should get the perms, gid, uid stuff here since all the files
2518
+ # have been set up correctly
2519
+ Find.find(root_dir, reloc_dir) do |f|
2520
+ # If the package doesn't contain either of the top level
2521
+ # directories we need to skip them, find will pass them to us
2522
+ # even if they don't exist.
2523
+ next if !File.exist?(f)
2524
+ next if File.symlink?(f)
2525
+
2526
+ # check if it's from root dir or reloc dir
2527
+ if f =~ /^#{root_dir}/
2528
+ short_fn = f[root_dir.length ..-1]
2529
+ else
2530
+ short_fn = f[reloc_dir.length + 1..-1]
2531
+ relocatable = "true"
2532
+ end
2533
+
2534
+ acl = {}
2535
+ acl["gid"] = File.stat(f).gid
2536
+ acl["uid"] = File.stat(f).uid
2537
+ acl["perms"] = File.stat(f).mode.to_s(8)
2538
+ files_info[short_fn] = acl
2539
+ end
2540
+
2541
+ # Move files into place
2542
+ # If we implement any of the ACL permissions features we'll have to be
2543
+ # careful here that tar preserves those permissions. Otherwise we'll
2544
+ # need to apply them after moving the files into place.
2545
+ if File.directory?(File.join(workdir, 'tpkg', 'root'))
2546
+ system("#{@tar} -C #{File.join(workdir, 'tpkg', 'root')} -cf - . | #{@tar} -C #{@file_system_root} -xpf -")
2547
+ end
2548
+ if File.directory?(File.join(workdir, 'tpkg', 'reloc'))
2549
+ system("#{@tar} -C #{File.join(workdir, 'tpkg', 'reloc')} -cf - . | #{@tar} -C #{@base} -xpf -")
2550
+ end
2551
+
2552
+ # Install any init scripts
2553
+ init_links(metadata).each do |link, init_script|
2554
+ # We don't have to do anything if there's already a symlink to our init script.
2555
+ # This can happen if the user removes a package manually without removing the
2556
+ # init symlink
2557
+ next if File.symlink?(link) && File.readlink(link) == init_script
2558
+ begin
2559
+ if !File.exist?(File.dirname(link))
2560
+ FileUtils.mkdir_p(File.dirname(link))
2561
+ end
2562
+ begin
2563
+ File.symlink(init_script, link)
2564
+ rescue Errno::EEXIST
2565
+ # The link name that init_links provides is not guaranteed to
2566
+ # be unique. It might collide with a base system init script
2567
+ # or an init script from another tpkg. If the link name
2568
+ # supplied by init_links results in EEXIST then try appending
2569
+ # a number to the end of the link name.
2570
+ catch :init_link_done do
2571
+ 1.upto(9) do |i|
2572
+ begin
2573
+ File.symlink(init_script, link + i.to_s)
2574
+ throw :init_link_done
2575
+ rescue Errno::EEXIST
2576
+ end
2577
+ end
2578
+ # If we get here (i.e. we never reached the throw) then we
2579
+ # failed to create any of the possible link names.
2580
+ raise "Failed to install init script #{init_script} -> #{link} for #{File.basename(package_file)}"
2581
+ end
2582
+ end
2583
+ rescue Errno::EPERM
2584
+ # If creating the link fails due to permission problems and
2585
+ # we're not running as root just warn the user, allowing folks
2586
+ # to run tpkg as a non-root user with reduced functionality.
2587
+ if Process.euid == 0
2588
+ raise
2589
+ else
2590
+ warn "Failed to install init script for #{File.basename(package_file)}, probably due to lack of root privileges"
2591
+ end
2592
+ end
2593
+ end
2594
+
2595
+ # Install any crontabs
2596
+ crontab_destinations(metadata).each do |crontab, destination|
2597
+ begin
2598
+ if destination[:link]
2599
+ next if File.symlink?(destination[:link]) && File.readlink(destination[:link]) == crontab
2600
+ if !File.exist?(File.dirname(destination[:link]))
2601
+ FileUtils.mkdir_p(File.dirname(destination[:link]))
2602
+ end
2603
+ begin
2604
+ File.symlink(crontab, destination[:link])
2605
+ rescue Errno::EEXIST
2606
+ # The link name that crontab_destinations provides is not
2607
+ # guaranteed to be unique. It might collide with a base
2608
+ # system crontab or a crontab from another tpkg. If the
2609
+ # link name supplied by crontab_destinations results in
2610
+ # EEXIST then try appending a number to the end of the link
2611
+ # name.
2612
+ catch :crontab_link_done do
2613
+ 1.upto(9) do |i|
2614
+ begin
2615
+ File.symlink(crontab, destination[:link] + i.to_s)
2616
+ throw :crontab_link_done
2617
+ rescue Errno::EEXIST
2618
+ end
2619
+ end
2620
+ # If we get here (i.e. we never reached the throw) then we
2621
+ # failed to create any of the possible link names.
2622
+ raise "Failed to install crontab #{crontab} -> #{destination[:link]} for #{File.basename(package_file)}"
2623
+ end
2624
+ end
2625
+ elsif destination[:file]
2626
+ if !File.exist?(File.dirname(destination[:file]))
2627
+ FileUtils.mkdir_p(File.dirname(destination[:file]))
2628
+ end
2629
+ tmpfile = Tempfile.new(File.basename(destination[:file]), File.dirname(destination[:file]))
2630
+ if File.exist?(destination[:file])
2631
+ # Match permissions and ownership of current crontab
2632
+ st = File.stat(destination[:file])
2633
+ File.chmod(st.mode & 07777, tmpfile.path)
2634
+ File.chown(st.uid, st.gid, tmpfile.path)
2635
+ # Insert the contents of the current crontab file
2636
+ File.open(destination[:file]) { |file| tmpfile.write(file.read) }
2637
+ end
2638
+ # Insert a header line so we can find this section to remove later
2639
+ tmpfile.puts "### TPKG START - #{@base} - #{File.basename(package_file)}"
2640
+ # Insert the package crontab contents
2641
+ crontab_contents = IO.read(crontab)
2642
+ tmpfile.write(crontab_contents)
2643
+ # Insert a newline if the crontab doesn't end with one
2644
+ if crontab_contents.chomp == crontab_contents
2645
+ tmpfile.puts
2646
+ end
2647
+ # Insert a footer line
2648
+ tmpfile.puts "### TPKG END - #{@base} - #{File.basename(package_file)}"
2649
+ tmpfile.close
2650
+ File.rename(tmpfile.path, destination[:file])
2651
+ # FIXME: On Solaris we should bounce cron or use the crontab
2652
+ # command, otherwise cron won't pick up the changes
2653
+ end
2654
+ rescue Errno::EPERM
2655
+ # If installing the crontab fails due to permission problems and
2656
+ # we're not running as root just warn the user, allowing folks
2657
+ # to run tpkg as a non-root user with reduced functionality.
2658
+ if Process.euid == 0
2659
+ raise
2660
+ else
2661
+ warn "Failed to install crontab for #{File.basename(package_file)}, probably due to lack of root privileges"
2662
+ end
2663
+ end
2664
+ end
2665
+
2666
+ # Run postinstall script
2667
+ if File.exist?(File.join(workdir, 'tpkg', 'postinstall'))
2668
+ pwd = Dir.pwd
2669
+ # chdir into the working directory so that the user can specify a
2670
+ # relative path to their file/script.
2671
+ Dir.chdir(File.join(workdir, 'tpkg'))
2672
+
2673
+ # Warn the user about non-executable files, as system will just
2674
+ # silently fail and exit if that's the case.
2675
+ if !File.executable?(File.join(workdir, 'tpkg', 'postinstall'))
2676
+ warn "Warning: postinstall script for #{File.basename(package_file)} is not executable, execution will likely fail"
2677
+ end
2678
+ # Note this only warns the user if the postinstall fails, it does
2679
+ # not raise an exception like we do if preinstall fails. Raising
2680
+ # an exception would leave the package's files installed but the
2681
+ # package not registered as installed, which does not seem
2682
+ # desirable. We could remove the package's files and raise an
2683
+ # exception, but simply warning seems the best approach to me.
2684
+ system(File.join(workdir, 'tpkg', 'postinstall')) || warn("Warning: postinstall for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
2685
+ ret_val = POSTINSTALL_ERR if $?.exitstatus > 0
2686
+
2687
+ # Switch back to our previous directory
2688
+ Dir.chdir(pwd)
2689
+ end
2690
+
2691
+ # Save metadata for this pkg
2692
+ package_name = File.basename(package_file, File.extname(package_file))
2693
+ package_metadata_dir = File.join(@metadata_directory, package_name)
2694
+ FileUtils.mkdir_p(package_metadata_dir)
2695
+ metadata_file = File.new(File.join(package_metadata_dir, "tpkg.yml"), "w")
2696
+ metadata.write(metadata_file)
2697
+ metadata_file.close
2698
+
2699
+ # Save file_metadata.yml for this pkg
2700
+ if File.exist?(File.join(workdir, 'tpkg', 'file_metadata.bin'))
2701
+ file_metadata = FileMetadata.new(File.read(File.join(workdir, 'tpkg', 'file_metadata.bin')), 'bin')
2702
+ elsif File.exist?(File.join(workdir, 'tpkg', 'file_metadata.yml'))
2703
+ file_metadata = FileMetadata.new(File.read(File.join(workdir, 'tpkg', 'file_metadata.yml')), 'yml')
2704
+ elsif File.exists?(File.join(workdir, 'tpkg', 'file_metadata.xml'))
2705
+ file_metadata = FileMetadata.new(File.read(File.join(workdir, 'tpkg', 'file_metadata.xml')), 'xml')
2706
+ end
2707
+ if file_metadata
2708
+ file_metadata[:package_file] = File.basename(package_file)
2709
+ file_metadata[:files].each do |file|
2710
+ acl = files_info[file[:path]]
2711
+ file.merge!(acl) unless acl.nil?
2712
+ digest = checksums_of_decrypted_files[File.expand_path(file[:path])]
2713
+ if digest
2714
+ digests = file[:checksum][:digests]
2715
+ digests[0][:encrypted] = true
2716
+ digests[1] = {:decrypted => true, :value => digest}
2717
+ end
2718
+ end
2719
+
2720
+ file = File.open(File.join(package_metadata_dir, "file_metadata.bin"), "w")
2721
+ Marshal.dump(file_metadata.hash, file)
2722
+ file.close
2723
+ else
2724
+ warn "Warning: package #{File.basename(package_file)} does not include file_metadata information."
2725
+ end
2726
+
2727
+ # Copy the package file to the directory for installed packages
2728
+ FileUtils.cp(package_file, @installed_directory)
2729
+
2730
+ # Cleanup
2731
+ FileUtils.rm_rf(workdir)
2732
+ return ret_val
2733
+ end
2734
+
2735
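As a small aside on the crontab handling inside unpack above: for per-user crontabs the package's crontab content is appended between matching marker lines, which is what a later removal can search for. A sketch of building that section, with a made-up base, package name, and crontab line:

# Illustrative only: build the section unpack appends to a per-user crontab.
base             = '/opt/tpkg'
package_file     = 'myapp-1.0-1.tpkg'
crontab_contents = "0 * * * * /opt/tpkg/bin/myapp-cleanup\n"

section  = "### TPKG START - #{base} - #{package_file}\n"
section << crontab_contents
section << "### TPKG END - #{base} - #{package_file}\n"
puts section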
+ def requirements_for_currently_installed_package(pkgname=nil)
2736
+ requirements = []
2737
+ metadata_for_installed_packages.each do |metadata|
2738
+ if !pkgname || pkgname == metadata[:name]
2739
+ req = { :name => metadata[:name],
2740
+ :minimum_version => metadata[:version] }
2741
+ if metadata[:package_version]
2742
+ req[:minimum_package_version] = metadata[:package_version]
2743
+ end
2744
+ requirements << req
2745
+ end
2746
+ end
2747
+ requirements
2748
+ end
2749
+
2750
+ # Adds/modifies requirements and packages arguments to add requirements
2751
+ # and package entries for currently installed packages
2752
+ # Note: the requirements and packages arguments are modified by this method
2753
+ def requirements_for_currently_installed_packages(requirements, packages)
2754
+ metadata_for_installed_packages.each do |installed_xml|
2755
+ name = installed_xml[:name]
2756
+ version = installed_xml[:version]
2757
+ # For each currently installed package we insert a requirement for
2758
+ # at least that version of the package
2759
+ req = { :name => name, :minimum_version => version }
2760
+ requirements << req
2761
+ # Initialize the list of possible packages for this req
2762
+ if !packages[name]
2763
+ packages[name] = available_packages_that_meet_requirement(req)
2764
+ end
2765
+ end
2766
+ end
2767
+
2768
+ # Define requirements for requested packages
2769
+ # Takes an array of packages: files, URLs, or basic package specs ('foo' or
2770
+ # 'foo=1.0')
2771
+ # Adds/modifies requirements and packages arguments based on parsing those
2772
+ # requests
2773
+ # Input:
2774
+ # [ 'foo-1.0.tpkg', 'http://server/pkgs/bar-2.3.pkg', 'blat=0.5' ]
2775
+ # Result:
2776
+ # requirements << { :name => 'foo' }, packages['foo'] = { :source => 'foo-1.0.tpkg' }
2777
+ # requirements << { :name => 'bar' }, packages['bar'] = { :source => 'http://server/pkgs/bar-2.3.pkg' }
2778
+ # requirements << { :name => 'blat', :minimum_version => '0.5', :maximum_version => '0.5' }, packages['blat'] populated with available packages meeting that requirement
2779
+ # Note: the requirements and packages arguments are modified by this method
2780
+ def parse_requests(requests, requirements, packages)
2781
+ newreqs = []
2782
+
2783
+ requests.each do |request|
2784
+ puts "parse_requests processing #{request.inspect}" if @@debug
2785
+ if request =~ /^[-\w=<>\d\.]+$/ && !File.file?(request) # basic package specs ('foo' or 'foo=1.0')
2786
+ puts "parse_requests request looks like package spec" if @@debug
2787
+
2788
+ # Tpkg::parse_request is a class method and doesn't know where packages are installed.
2789
+ # So we have to tell it ourselves.
2790
+ req = Tpkg::parse_request(request, @installed_directory)
2791
+ newreqs << req
2792
+
2793
+ # Initialize the list of possible packages for this req
2794
+ if !packages[req[:name]]
2795
+ packages[req[:name]] = available_packages_that_meet_requirement(req)
2796
+ end
2797
+ else # User specified a file or URI
2798
+ req = {}
2799
+ metadata = nil
2800
+ source = nil
2801
+ localpath = nil
2802
+ if File.file?(request)
2803
+ puts "parse_requests treating request as a file" if @@debug
2804
+ localpath = request
2805
+ metadata = Tpkg::metadata_from_package(request)
2806
+ source = request
2807
+ else
2808
+ puts "parse_requests treating request as a URI" if @@debug
2809
+ uri = URI.parse(request) # This just serves as a sanity check
2810
+ # Using these File methods on a URI seems to work but is probably fragile
2811
+ source = File.dirname(request) + '/' # dirname chops off the / at the end, we need it in order to be compatible with URI.join
2812
+ pkgfile = File.basename(request)
2813
+ localpath = download(source, pkgfile, Tpkg::tempdir('download'))
2814
+ metadata = Tpkg::metadata_from_package(localpath)
2815
+ # Cleanup temp download dir
2816
+ FileUtils.rm_rf(localpath)
2817
+ end
2818
+ req[:name] = metadata[:name]
2819
+ pkg = { :metadata => metadata, :source => source }
2820
+
2821
+ newreqs << req
2822
+ # The user specified a particular package, so it is the only package
2823
+ # that can be used to meet the requirement
2824
+ packages[req[:name]] = [pkg]
2825
+ end
2826
+ end
2827
+
2828
+ requirements.concat(newreqs)
2829
+ newreqs
2830
+ end
2831
+
2832
+ # After calling parse_request, we should call this method
2833
+ # to check whether or not we can meet the requirements/dependencies
2834
+ # of the result packages
2835
+ def check_requests(packages)
2836
+ all_requests_satisfied = true # whether or not all requests can be satisfied
2837
+ errors = [""]
2838
+ packages.each do |name, pkgs|
2839
+ if pkgs.empty?
2840
+ errors << ["Unable to find any packages which satisfy #{name}"]
2841
+ all_requests_satisfied = false
2842
+ next
2843
+ end
2844
+
2845
+ request_satisfied = false # whether or not this request can be satisfied
2846
+ possible_errors = []
2847
+ pkgs.each do |pkg|
2848
+ metadata = pkg[:metadata]
2849
+ req = { :name => metadata[:name] }
2850
+ # Quick sanity check that the package can be installed on this machine.
2851
+ if !Tpkg::package_meets_requirement?(pkg, req)
2852
+ possible_errors << " Requested package #{metadata[:filename]} doesn't match this machine's OS or architecture"
2853
+ next
2854
+ end
2855
+ # a sanity check that there is at least one package
2856
+ # available for each dependency of this package
2857
+ dep_satisfied = true
2858
+ metadata[:dependencies].each do |depreq|
2859
+ if available_packages_that_meet_requirement(depreq).empty? && !Tpkg::packages_meet_requirement?(packages.values.flatten, depreq)
2860
+ possible_errors << " Requested package #{metadata[:filename]} depends on #{depreq.inspect}, no packages that satisfy that dependency are available"
2861
+ dep_satisfied = false
2862
+ end
2863
+ end if metadata[:dependencies]
2864
+ request_satisfied = true if dep_satisfied
2865
+ end
2866
+ if !request_satisfied
2867
+ errors << ["Unable to find any packages which satisfy #{name}. Possible error(s):"]
2868
+ errors << possible_errors
2869
+ all_requests_satisfied = false
2870
+ end
2871
+ end
2872
+
2873
+ if !all_requests_satisfied
2874
+ puts errors.join("\n")
2875
+ raise "Unable to satisfy the request(s)"
2876
+ end
2877
+ end
2878
+
2879
+ CHECK_INSTALL = 1
2880
+ CHECK_UPGRADE = 2
2881
+ CHECK_REMOVE = 3
2882
+ def conflicting_files(package_file, mode=CHECK_INSTALL)
2883
+ metadata = Tpkg::metadata_from_package(package_file)
2884
+ pkgname = metadata[:name]
2885
+
2886
+ conflicts = {}
2887
+
2888
+ installed_files = files_for_installed_packages
2889
+
2890
+ # Pull out the normalized paths, skipping appropriate packages based
2891
+ # on the requested mode
2892
+ installed_files_normalized = {}
2893
+ installed_files.each do |pkgfile, files|
2894
+ # Skip packages with the same name if the user is performing an upgrade
2895
+ if mode == CHECK_UPGRADE && files[:metadata][:name] == pkgname
2896
+ next
2897
+ end
2898
+ # Skip packages with the same filename if the user is removing
2899
+ if mode == CHECK_REMOVE && pkgfile == File.basename(package_file)
2900
+ next
2901
+ end
2902
+ installed_files_normalized[pkgfile] = files[:normalized]
2903
+ end
2904
+
2905
+ fip = Tpkg::files_in_package(package_file)
2906
+ normalize_paths(fip)
2907
+
2908
+ fip[:normalized].each do |file|
2909
+ installed_files_normalized.each do |instpkgfile, files|
2910
+ if files.include?(file)
2911
+ if !conflicts[instpkgfile]
2912
+ conflicts[instpkgfile] = []
2913
+ end
2914
+ conflicts[instpkgfile] << file
2915
+ end
2916
+ end
2917
+ end
2918
+
2919
+ # The remove method actually needs !conflicts, so invert in that case
2920
+ if mode == CHECK_REMOVE
2921
+ # Flatten conflicts to an array
2922
+ flatconflicts = []
2923
+ conflicts.each_value { |files| flatconflicts.concat(files) }
2924
+ # And invert
2925
+ conflicts = fip[:normalized] - flatconflicts
2926
+ end
2927
+
2928
+ conflicts
2929
+ end
2930
+
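+ # A minimal sketch of how the mode changes what conflicting_files returns
+ # (package and file names are illustrative):
+ #   conflicting_files('foo-1.0-1.tpkg')
+ #     #=> { 'bar-2.0-1.tpkg' => ['<path also owned by bar>'] }
+ #   conflicting_files('foo-1.0-1.tpkg', CHECK_REMOVE)
+ #     #=> ['<paths owned only by foo, safe to delete>']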
2931
+ def check_for_conflicting_pkgs(pkgs_to_check)
2932
+ # Loop through the packages we're interested in, check their conflict listings,
2933
+ # and see if any of them conflict with one another
2934
+ pkgs_to_check.each do |pkg1|
2935
+ # native packages might not have conflicts defined, so skip them
2936
+ next if pkg1[:metadata][:conflicts].nil?
2937
+ pkg1[:metadata][:conflicts].each do | conflict |
2938
+ pkgs_to_check.each do |pkg2|
2939
+ if Tpkg::package_meets_requirement?(pkg2, conflict)
2940
+ raise "Package conflicts between #{pkg2.inspect} and #{pkg1.inspect}"
2941
+ end
2942
+ end
2943
+ end
2944
+ end
2945
+ end
2946
+
2947
+ def prompt_for_conflicting_files(package_file, mode=CHECK_INSTALL)
2948
+ if !@@prompt
2949
+ return true
2950
+ end
2951
+
2952
+ result = true
2953
+ conflicts = conflicting_files(package_file, mode)
2954
+
2955
+ # We don't want to prompt the user for directories, so strip those out
2956
+ conflicts.each do |pkgfile, files|
2957
+ files.reject! { |file| File.directory?(file) }
2958
+ end
2959
+ conflicts.reject! { |pkgfile, files| files.empty? }
2960
+
2961
+ if !conflicts.empty?
2962
+ puts "File conflicts:"
2963
+ conflicts.each do |pkgfile, files|
2964
+ files.each do |file|
2965
+ puts "#{file} (#{pkgfile})"
2966
+ end
2967
+ end
2968
+ print "Proceed? [y/N] "
2969
+ response = $stdin.gets
2970
+ if response !~ /^y/i
2971
+ result = false
2972
+ end
2973
+ end
2974
+ result
2975
+ end
2976
+
2977
+ def prompt_for_install(pkgs, promptstring)
2978
+ if @@prompt
2979
+ pkgs_to_report = pkgs.select do |pkg|
2980
+ pkg[:source] != :currently_installed &&
2981
+ pkg[:source] != :native_installed
2982
+ end
2983
+ if !pkgs_to_report.empty?
2984
+ puts "The following packages will be #{promptstring}:"
2985
+ pkgs_to_report.sort(&SORT_PACKAGES).each do |pkg|
2986
+ if pkg[:source] == :native_available
2987
+ name = pkg[:metadata][:name]
2988
+ version = pkg[:metadata][:version]
2989
+ package_version = pkg[:metadata][:package_version]
2990
+ puts "Native #{name}=#{version}=#{package_version}"
2991
+ else
2992
+ puts pkg[:metadata][:filename]
2993
+ end
2994
+ end
2995
+ return Tpkg::confirm
2996
+ end
2997
+ end
2998
+ true
2999
+ end
3000
+
3001
+ # See parse_requests for format of requests
3002
+ def install(requests, passphrase=nil)
3003
+ ret_val = 0
3004
+ requirements = []
3005
+ packages = {}
3006
+ lock
3007
+
3008
+ parse_requests(requests, requirements, packages)
3009
+ check_requests(packages)
3010
+ core_packages = []
3011
+ currently_installed_requirements = []
3012
+ requirements.each do |req|
3013
+ core_packages << req[:name] if !core_packages.include?(req[:name])
3014
+ currently_installed_requirements.concat(
3015
+ requirements_for_currently_installed_package(req[:name]))
3016
+ end
3017
+ requirements.concat(currently_installed_requirements).uniq!
3018
+
3019
+ puts "install calling best_solution" if @@debug
3020
+ puts "install requirements: #{requirements.inspect}" if @@debug
3021
+ puts "install packages: #{packages.inspect}" if @@debug
3022
+ puts "install core_packages: #{core_packages.inspect}" if @@debug
3023
+ #solution_packages = best_solution(requirements.dup, packages.dup)
3024
+ solution_packages = best_solution(requirements, packages, core_packages)
3025
+ if !solution_packages
3026
+ raise "Unable to resolve dependencies"
3027
+ end
3028
+
3029
+ check_for_conflicting_pkgs(solution_packages | installed_packages)
3030
+
3031
+ if !prompt_for_install(solution_packages, 'installed')
3032
+ unlock
3033
+ return false
3034
+ end
3035
+
3036
+ # Create array of packages (names) we have installed so far
3037
+ # We will use it later to determine the order in which to install the packages
3038
+ installed_so_far = installed_packages.collect{|pkg| pkg[:metadata][:name]}
3039
+
3040
+ while pkg = solution_packages.shift
3041
+ # get dependencies and make sure we install the packages in the correct order
3042
+ # based on the dependencies
3043
+ dependencies = nil
3044
+ if pkg[:metadata][:dependencies]
3045
+ dependencies = pkg[:metadata][:dependencies].collect { |dep| dep[:name] }.compact
3046
+ # don't install this pkg right now if its dependencies haven't been installed
3047
+ if !dependencies.empty? && !dependencies.to_set.subset?(installed_so_far.to_set)
3048
+ solution_packages.push(pkg)
3049
+ next
3050
+ end
3051
+ end
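+ # A worked example of the ordering (package names illustrative): with
+ # solution_packages = [pkgB, pkgA] where pkgB depends on pkgA, pkgB fails
+ # the subset test above and is pushed to the back of the queue; pkgA is
+ # unpacked and added to installed_so_far, and pkgB is unpacked on the
+ # next pass.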
3052
+
3053
+ if pkg[:source] == :currently_installed ||
3054
+ pkg[:source] == :native_installed
3055
+ # Nothing to do for packages currently installed
3056
+ warn "Skipping #{pkg[:metadata][:name]}, already installed"
3057
+ elsif pkg[:source] == :native_available
3058
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/
3059
+ name = pkg[:metadata][:name]
3060
+ version = pkg[:metadata][:version]
3061
+ package_version = pkg[:metadata][:package_version]
3062
+ # RPMs always have a release/package_version
3063
+ pkgname = "#{name}-#{version}-#{package_version}"
3064
+ puts "Running 'yum -y install #{pkgname}' to install native package" if @@debug
3065
+ system("yum -y install #{pkgname}")
3066
+ elsif Tpkg::get_os =~ /Debian|Ubuntu/
3067
+ name = pkg[:metadata][:name]
3068
+ version = pkg[:metadata][:version]
3069
+ pkgname = "#{name}-#{version}"
3070
+ if pkg[:metadata][:package_version]
3071
+ pkgname << "-#{pkg[:metadata][:package_version]}"
3072
+ end
3073
+ puts "Running 'apt-get -y install #{pkgname}' to install native package" if @@debug
3074
+ system("apt-get -y install #{pkgname}")
3075
+ elsif Tpkg::get_os =~ /Solaris/
3076
+ name = pkg[:metadata][:name]
3077
+ version = pkg[:metadata][:version]
3078
+ pkgname = "#{name}-#{version}"
3079
+ if pkg[:metadata][:package_version]
3080
+ pkgname << ",REV=#{pkg[:metadata][:package_version]}"
3081
+ end
3082
+ if File.exist?('/opt/csw/bin/pkg-get')
3083
+ puts "Running '/opt/csw/bin/pkg-get -i #{pkgname}' to install native package" if @@debug
3084
+ system("/opt/csw/bin/pkg-get -i #{pkgname}")
3085
+ else
3086
+ raise "No native package installation tool available"
3087
+ end
3088
+ elsif Tpkg::get_os =~ /FreeBSD/
3089
+ name = pkg[:metadata][:name]
3090
+ version = pkg[:metadata][:version]
3091
+ pkgname = "#{name}-#{version}"
3092
+ if pkg[:metadata][:package_version]
3093
+ pkgname << "_#{pkg[:metadata][:package_version]}"
3094
+ end
3095
+ puts "Running 'pkg_add -r #{pkgname}' to install native package" if @@debug
3096
+ system("pkg_add -r #{pkgname}")
3097
+ elsif Tpkg::get_os =~ /Darwin/
3098
+ if File.exist?('/opt/local/bin/port')
3099
+ name = pkg[:metadata][:name]
3100
+ # MacPorts doesn't support installing a specific version (AFAIK)
3101
+ if pkg[:metadata][:version]
3102
+ warn "Ignoring version with MacPorts"
3103
+ end
3104
+ # Nor does it have a concept of a package version
3105
+ if pkg[:metadata][:package_version]
3106
+ warn "Ignoring package version with MacPorts"
3107
+ end
3108
+ # Just for consistency with the code for other platforms
3109
+ pkgname = name
3110
+ puts "Running '/opt/local/bin/port install #{pkgname}' to install native package" if @@debug
3111
+ system("/opt/local/bin/port install #{pkgname}")
3112
+ else
3113
+ # Fink support would be nice
3114
+ raise "No supported native package tool available on #{Tpkg::get_os}"
3115
+ end
3116
+ else
3117
+ raise "No native package installation support for #{Tpkg::get_os}"
3118
+ end
3119
+ else # regular tpkg that needs to be installed
3120
+ pkgfile = nil
3121
+ if File.file?(pkg[:source])
3122
+ pkgfile = pkg[:source]
3123
+ elsif File.directory?(pkg[:source])
3124
+ pkgfile = File.join(pkg[:source], pkg[:metadata][:filename])
3125
+ else
3126
+ pkgfile = download(pkg[:source], pkg[:metadata][:filename])
3127
+ end
3128
+ if File.exist?(
3129
+ File.join(@installed_directory, File.basename(pkgfile)))
3130
+ warn "Skipping #{File.basename(pkgfile)}, already installed"
3131
+ else
3132
+ if prompt_for_conflicting_files(pkgfile)
3133
+ ret_val |= unpack(pkgfile, passphrase)
3134
+ end
3135
+ end
3136
+ end
3137
+
3138
+ # If we're down here, it means we have installed the package. So go ahead and
3139
+ # update the list of packages we installed so far
3140
+ installed_so_far << pkg[:metadata][:name]
3141
+ end # end while loop
3142
+
3143
+ send_update_to_server unless @report_server.nil?
3144
+ unlock
3145
+ return ret_val
3146
+ end
3147
+
3148
+ # This method can also be used for doing downgrade
3149
+ def upgrade(requests=nil, passphrase=nil, downgrade=false)
3150
+ ret_val = 0
3151
+ requirements = []
3152
+ packages = {}
3153
+ core_packages = []
3154
+ lock
3155
+ has_updates = false # flags whether or not there was at least one actual package that
3156
+ # gets updated
3157
+
3158
+ # If the user specified some specific packages to upgrade in requests
3159
+ # then we look for upgrades for just those packages (and any necessary
3160
+ # dependency upgrades). If the user did not specify specific packages
3161
+ # then we look for upgrades for all currently installed packages.
3162
+
3163
+ if requests
3164
+ puts "Upgrading requested packages only" if @@debug
3165
+ parse_requests(requests, requirements, packages)
3166
+ check_requests(packages)
3167
+ additional_requirements = []
3168
+ requirements.each do |req|
3169
+ core_packages << req[:name] if !core_packages.include?(req[:name])
3170
+
3171
+ # When doing downgrade, we don't want to include the package being
3172
+ # downgraded in the requirements. Otherwise, we won't be able to downgrade it
3173
+ unless downgrade
3174
+ additional_requirements.concat(
3175
+ requirements_for_currently_installed_package(req[:name]))
3176
+ end
3177
+
3178
+ # Initialize the list of possible packages for this req
3179
+ if !packages[req[:name]]
3180
+ packages[req[:name]] = available_packages_that_meet_requirement(req)
3181
+ end
3182
+ # Remove preference for currently installed package
3183
+ packages[req[:name]].each do |pkg|
3184
+ if pkg[:source] == :currently_installed
3185
+ pkg[:prefer] = false
3186
+ end
3187
+ end
3188
+
3189
+ # Look for pkgs that might depend on the pkg we're upgrading,
3190
+ # and add them to our list of requirements. We need to make sure that we can still
3191
+ # satisfy the dependency requirements if we were to do the upgrade.
3192
+ metadata_for_installed_packages.each do | metadata |
3193
+ metadata[:dependencies].each do | dep |
3194
+ if dep[:name] == req[:name]
3195
+ additional_requirements << metadata.hash
3196
+ end
3197
+ end if metadata[:dependencies]
3198
+ end
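+ # For example (package names illustrative): if installed pkgB declares a
+ # dependency on the pkgA we are upgrading, pkgB itself is added as a
+ # requirement so best_solution can't pick a new pkgA that would leave
+ # pkgB's dependency unsatisfied.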
3199
+ end
3200
+ requirements.concat(additional_requirements)
3201
+ requirements.uniq!
3202
+ else
3203
+ puts "Upgrading all packages" if @@debug
3204
+ requirements_for_currently_installed_packages(requirements, packages)
3205
+ # Remove preference for currently installed packages
3206
+ packages.each do |name, pkgs|
3207
+ core_packages << name if !core_packages.include?(name)
3208
+ pkgs.each do |pkg|
3209
+ if pkg[:source] == :currently_installed
3210
+ pkg[:prefer] = false
3211
+ end
3212
+ end
3213
+ end
3214
+ end
3215
+
3216
+ puts "upgrade calling best_solution" if @@debug
3217
+ puts "upgrade requirements: #{requirements.inspect}" if @@debug
3218
+ puts "upgrade packages: #{packages.inspect}" if @@debug
3219
+ puts "upgrade core_packages: #{core_packages.inspect}" if @@debug
3220
+ #solution_packages = best_solution(requirements.dup, packages.dup)
3221
+ solution_packages = best_solution(requirements, packages, core_packages)
3222
+
3223
+ if solution_packages.nil?
3224
+ raise "Unable to find solution for upgrading. Please verify that you specified the correct package(s) for upgrade."
3225
+ end
3226
+
3227
+ check_for_conflicting_pkgs(solution_packages | installed_packages)
3228
+
3229
+ if downgrade
3230
+ prompt_action = 'downgraded'
3231
+ else
3232
+ prompt_action = 'upgraded'
3233
+ end
3234
+ if !prompt_for_install(solution_packages, prompt_action)
3235
+ unlock
3236
+ return false
3237
+ end
3238
+
3239
+ installed_files = files_for_installed_packages
3240
+ removed_pkgs = [] # keep track of what we removed so far
3241
+ while pkg = solution_packages.shift
3242
+ # solution_packages.each do |pkg|
3243
+ if pkg[:source] == :currently_installed ||
3244
+ pkg[:source] == :native_installed
3245
+ # Nothing to do for packages currently installed
3246
+ elsif pkg[:source] == :native_available
3247
+ if Tpkg::get_os =~ /RedHat|CentOS|Fedora/
3248
+ name = pkg[:metadata][:name]
3249
+ version = pkg[:metadata][:version]
3250
+ package_version = pkg[:metadata][:package_version]
3251
+ # RPMs always have a release/package_version
3252
+ pkgname = "#{name}-#{version}-#{package_version}"
3253
+ puts "Running 'yum -y install #{pkgname}' to upgrade native package" if @@debug
3254
+ system("yum -y install #{pkgname}")
3255
+ has_updates = true
3256
+ elsif Tpkg::get_os =~ /Debian|Ubuntu/
3257
+ name = pkg[:metadata][:name]
3258
+ version = pkg[:metadata][:version]
3259
+ pkgname = "#{name}-#{version}"
3260
+ if pkg[:metadata][:package_version]
3261
+ pkgname << "-#{pkg[:metadata][:package_version]}"
3262
+ end
3263
+ puts "Running 'apt-get -y install #{pkgname}' to upgrade native package" if @@debug
3264
+ system("apt-get -y install #{pkgname}")
3265
+ has_updates = true
3266
+ elsif Tpkg::get_os =~ /Solaris/
3267
+ name = pkg[:metadata][:name]
3268
+ version = pkg[:metadata][:version]
3269
+ pkgname = "#{name}-#{version}"
3270
+ if pkg[:metadata][:package_version]
3271
+ pkgname << ",REV=#{pkg[:metadata][:package_version]}"
3272
+ end
3273
+ if File.exist?('/opt/csw/bin/pkg-get')
3274
+ puts "Running '/opt/csw/bin/pkg-get -i #{pkgname}' to upgrade native package" if @@debug
3275
+ system("/opt/csw/bin/pkg-get -i #{pkgname}")
3276
+ has_updates = true
3277
+ else
3278
+ raise "No native package upgrade tool available"
3279
+ end
3280
+ elsif Tpkg::get_os =~ /FreeBSD/
3281
+ name = pkg[:metadata][:name]
3282
+ version = pkg[:metadata][:version]
3283
+ pkgname = "#{name}-#{version}"
3284
+ if pkg[:metadata][:package_version]
3285
+ pkgname << "_#{pkg[:metadata][:package_version]}"
3286
+ end
3287
+ # This is not ideal. It would be better to download the
3288
+ # new package, and if the download is successful remove the
3289
+ # old package and install the new one. The way we're doing it
3290
+ # here we risk leaving the system with neither version
3291
+ # installed if the download of the new package fails.
3292
+ # However, the FreeBSD package tools don't make it easy to
3293
+ # handle things properly.
3294
+ puts "Running 'pkg_delete #{name}' and 'pkg_add -r #{pkgname}' to upgrade native package" if @@debug
3295
+ system("pkg_delete #{name}")
3296
+ system("pkg_add -r #{pkgname}")
3297
+ has_updates = true
3298
+ elsif Tpkg::get_os =~ /Darwin/
3299
+ if File.exist?('/opt/local/bin/port')
3300
+ name = pkg[:metadata][:name]
3301
+ # MacPorts doesn't support installing a specific version (AFAIK)
3302
+ if pkg[:metadata][:version]
3303
+ warn "Ignoring version with MacPorts"
3304
+ end
3305
+ # Nor does it have a concept of a package version
3306
+ if pkg[:metadata][:package_version]
3307
+ warn "Ignoring package version with MacPorts"
3308
+ end
3309
+ # Just for consistency with the code for other platforms
3310
+ pkgname = name
3311
+ puts "Running '/opt/local/bin/port upgrade #{pkgname}' to upgrade native package" if @@debug
3312
+ system("/opt/local/bin/port upgrade #{pkgname}")
3313
+ else
3314
+ # Fink support would be nice
3315
+ raise "No supported native package tool available on #{Tpkg::get_os}"
3316
+ end
3317
+ else
3318
+ raise "No native package upgrade support for #{Tpkg::get_os}"
3319
+ end
3320
+ else # tpkg
3321
+ pkgfile = nil
3322
+ if File.file?(pkg[:source])
3323
+ pkgfile = pkg[:source]
3324
+ elsif File.directory?(pkg[:source])
3325
+ pkgfile = File.join(pkg[:source], pkg[:metadata][:filename])
3326
+ else
3327
+ pkgfile = download(pkg[:source], pkg[:metadata][:filename])
3328
+ end
3329
+ if prompt_for_conflicting_files(pkgfile, CHECK_UPGRADE)
3330
+ # If the old and new packages have overlapping externals, flag them
3331
+ # to be skipped so that the external isn't removed and then
3332
+ # immediately re-added
3333
+ oldpkgs = installed_packages_that_meet_requirement({:name => pkg[:metadata][:name]})
3334
+ externals_to_skip = []
3335
+ pkg[:metadata][:externals].each do |external|
3336
+ if oldpkgs.all? {|oldpkg| oldpkg[:metadata][:externals].include?(external)}
3337
+ externals_to_skip << external
3338
+ end
3339
+ end if pkg[:metadata][:externals]
3340
+
3341
+ # Remove the old package if we haven't done so
3342
+ unless removed_pkgs.include?(pkg[:metadata][:name])
3343
+ remove([pkg[:metadata][:name]], :upgrade => true, :externals_to_skip => externals_to_skip)
3344
+ removed_pkgs << pkg[:metadata][:name]
3345
+ end
3346
+
3347
+ # determine if we can unpack the new version package now by
3348
+ # looking to see if all of its dependencies have been installed
3349
+ can_unpack = true
3350
+ pkg[:metadata][:dependencies].each do | dep |
3351
+ iptmr = installed_packages_that_meet_requirement(dep)
3352
+ if iptmr.nil? || iptmr.empty?
3353
+ can_unpack = false
3354
+ # Can't unpack yet, so push it back into the solution_packages queue
3355
+ solution_packages.push(pkg)
3356
+ break
3357
+ end
3358
+ end if pkg[:metadata][:dependencies]
3359
+ if can_unpack
3360
+ ret_val |= unpack(pkgfile, passphrase, :externals_to_skip => externals_to_skip)
3361
+ end
3362
+
3363
+ has_updates = true
3364
+ end
3365
+ end
3366
+ end
3367
+
3368
+ if !has_updates
3369
+ puts "No updates available"
3370
+ elsif !@report_server.nil?
3371
+ send_update_to_server
3372
+ end
3373
+
3374
+ unlock
3375
+ return ret_val
3376
+ end
3377
+
3378
+ def remove(requests=nil, options={})
3379
+ ret_val = 0
3380
+ lock
3381
+
3382
+ packages_to_remove = nil
3383
+ if requests
3384
+ packages_to_remove = []
3385
+ requests.each do |request|
3386
+ req = Tpkg::parse_request(request, @installed_directory)
3387
+ packages_to_remove.concat(installed_packages_that_meet_requirement(req))
3388
+ end
3389
+ else
3390
+ packages_to_remove = installed_packages_that_meet_requirement
3391
+ end
3392
+
3393
+ if packages_to_remove.empty?
3394
+ puts "No matching packages"
3395
+ unlock
3396
+ return false
3397
+ end
3398
+
3399
+ # If the user wants to remove all the dependent pkgs, then go ahead
3400
+ # and include them in our array of things to remove
3401
+ if options[:remove_all_dep]
3402
+ packages_to_remove |= get_dependents(packages_to_remove)
3403
+ elsif options[:remove_all_prereq]
3404
+ puts "Attemping to remove #{packages_to_remove.map do |pkg| pkg[:metadata][:filename] end} and all prerequisites."
3405
+ # Get list of dependency prerequisites
3406
+ ptr = packages_to_remove | get_prerequisites(packages_to_remove)
3407
+ pkg_files_to_remove = ptr.map { |pkg| pkg[:metadata][:filename] }
3408
+
3409
+ # See if any other packages depend on the ones we're about to remove.
3410
+ # If so, we can't remove that package or any of its prerequisites
3411
+ non_removable_pkg_files = []
3412
+ metadata_for_installed_packages.each do |metadata|
3413
+ next if pkg_files_to_remove.include?(metadata[:filename])
3414
+ next if metadata[:dependencies].nil?
3415
+ metadata[:dependencies].each do |req|
3416
+ # We ignore native dependencies because there is no way a removal
3417
+ # can break a native dependency; we don't support removing native
3418
+ # packages.
3419
+ if req[:type] != :native && req[:type] != :native_installed
3420
+ iptmr = installed_packages_that_meet_requirement(req)
3421
+ if iptmr.all? { |pkg| pkg_files_to_remove.include?(pkg[:metadata][:filename]) }
3422
+ non_removable_pkg_files |= iptmr.map{ |pkg| pkg[:metadata][:filename]}
3423
+ non_removable_pkg_files |= get_prerequisites(iptmr).map{ |pkg| pkg[:metadata][:filename]}
3424
+ end
3425
+ end
3426
+ end
3427
+ end
3428
+ # Generate final list of packages that we should remove.
3429
+ packages_to_remove = {}
3430
+ ptr.each do | pkg |
3431
+ next if pkg[:source] == :native or pkg[:source] == :native_installed
3432
+ next if non_removable_pkg_files.include?(pkg[:metadata][:filename])
3433
+ packages_to_remove[pkg[:metadata][:filename]] = pkg
3434
+ end
3435
+ packages_to_remove = packages_to_remove.values
3436
+ if packages_to_remove.empty?
3437
+ raise "Can't remove request package because other packages depend on it."
3438
+ elsif !non_removable_pkg_files.empty?
3439
+ puts "Can't remove #{non_removable_pkg_files.inspect} because other packages depend on them."
3440
+ end
3441
+ # Check that this doesn't leave any dependencies unresolved
3442
+ elsif !options[:upgrade]
3443
+ pkg_files_to_remove = packages_to_remove.map { |pkg| pkg[:metadata][:filename] }
3444
+ metadata_for_installed_packages.each do |metadata|
3445
+ next if pkg_files_to_remove.include?(metadata[:filename])
3446
+ next if metadata[:dependencies].nil?
3447
+ metadata[:dependencies].each do |req|
3448
+ # We ignore native dependencies because there is no way a removal
3449
+ # can break a native dependency; we don't support removing native
3450
+ # packages.
3451
+ # FIXME: Should we also consider :native_installed?
3452
+ if req[:type] != :native
3453
+ if installed_packages_that_meet_requirement(req).all? { |pkg| pkg_files_to_remove.include?(pkg[:metadata][:filename]) }
3454
+ raise "Package #{metadata[:filename]} depends on #{req[:name]}"
3455
+ end
3456
+ end
3457
+ end
3458
+ end
3459
+ end
3460
+
3461
+ # Confirm with the user
3462
+ # upgrade does its own prompting
3463
+ if @@prompt && !options[:upgrade]
3464
+ puts "The following packages will be removed:"
3465
+ packages_to_remove.each do |pkg|
3466
+ puts pkg[:metadata][:filename]
3467
+ end
3468
+ unless Tpkg::confirm
3469
+ unlock
3470
+ return false
3471
+ end
3472
+ end
3473
+
3474
+ # Stop the services if there are init scripts
3475
+ if !options[:upgrade]
3476
+ packages_to_remove.each do |pkg|
3477
+ init_scripts_metadata = init_scripts(pkg[:metadata])
3478
+ if init_scripts_metadata && !init_scripts_metadata.empty?
3479
+ execute_init_for_package(pkg, 'stop')
3480
+ end
3481
+ end
3482
+ end
3483
+
3484
+ # Remove the packages
3485
+ packages_to_remove.each do |pkg|
3486
+ pkgname = pkg[:metadata][:name]
3487
+ package_file = File.join(@installed_directory, pkg[:metadata][:filename])
3488
+
3489
+ topleveldir = Tpkg::package_toplevel_directory(package_file)
3490
+ workdir = Tpkg::tempdir(topleveldir, @tmp_directory)
3491
+ system("#{@tar} -xf #{package_file} -O #{File.join(topleveldir, 'tpkg.tar')} | #{@tar} -C #{workdir} -xpf -")
3492
+
3493
+ # Run preremove script
3494
+ if File.exist?(File.join(workdir, 'tpkg', 'preremove'))
3495
+ pwd = Dir.pwd
3496
+ # chdir into the working directory so that the user can specify a
3497
+ # relative path to their file/script.
3498
+ Dir.chdir(File.join(workdir, 'tpkg'))
3499
+
3500
+ # Warn the user about non-executable files, as system will just
3501
+ # silently fail and exit if that's the case.
3502
+ if !File.executable?(File.join(workdir, 'tpkg', 'preremove'))
3503
+ warn "Warning: preremove script for #{File.basename(package_file)} is not executable, execution will likely fail"
3504
+ end
3505
+ if @force
3506
+ system(File.join(workdir, 'tpkg', 'preremove')) || warn("Warning: preremove for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
3507
+ else
3508
+ system(File.join(workdir, 'tpkg', 'preremove')) || raise("Error: preremove for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
3509
+ end
3510
+
3511
+ # Switch back to our previous directory
3512
+ Dir.chdir(pwd)
3513
+ end
3514
+
3515
+ # Remove any init scripts
3516
+ init_links(pkg[:metadata]).each do |link, init_script|
3517
+ # The link we ended up making when we unpacked the package could
3518
+ # be any of a series (see the code in unpack for the reasoning),
3519
+ # so we need to check them all.
3520
+ links = [link]
3521
+ links.concat((1..9).to_a.map { |i| link + i.to_s })
3522
+ links.each do |l|
3523
+ if File.symlink?(l) && File.readlink(l) == init_script
3524
+ begin
3525
+ File.delete(l)
3526
+ rescue Errno::EPERM
3527
+ if Process.euid == 0
3528
+ raise
3529
+ else
3530
+ warn "Failed to remove init script for #{File.basename(package_file)}, probably due to lack of root privileges"
3531
+ end
3532
+ end
3533
+ end
3534
+ end
3535
+ end
3536
+
3537
+ # Remove any crontabs
3538
+ crontab_destinations(pkg[:metadata]).each do |crontab, destination|
3539
+ begin
3540
+ if destination[:link]
3541
+ # The link we ended up making when we unpacked the package could
3542
+ # be any of a series (see the code in unpack for the reasoning),
3543
+ # so we need to check them all.
3544
+ links = [destination[:link]]
3545
+ links.concat((1..9).to_a.map { |i| destination[:link] + i.to_s })
3546
+ links.each do |l|
3547
+ if File.symlink?(l) && File.readlink(l) == crontab
3548
+ begin
3549
+ File.delete(l)
3550
+ rescue Errno::EPERM
3551
+ if Process.euid == 0
3552
+ raise
3553
+ else
3554
+ warn "Failed to remove crontab for #{File.basename(package_file)}, probably due to lack of root privileges"
3555
+ end
3556
+ end
3557
+ end
3558
+ end
3559
+ elsif destination[:file]
3560
+ if File.exist?(destination[:file])
3561
+ tmpfile = Tempfile.new(File.basename(destination[:file]), File.dirname(destination[:file]))
3562
+ # Match permissions and ownership of current crontab
3563
+ st = File.stat(destination[:file])
3564
+ File.chmod(st.mode & 07777, tmpfile.path)
3565
+ File.chown(st.uid, st.gid, tmpfile.path)
3566
+ # Remove section associated with this package
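+ # The section for a package sits between marker lines, roughly
+ # (base directory and filename are illustrative):
+ #   ### TPKG START - /opt/tpkg - foo-1.0-1.tpkg
+ #   0 * * * * /opt/tpkg/foo/bin/cleanup
+ #   ### TPKG END - /opt/tpkg - foo-1.0-1.tpkg
+ # Lines between the markers are skipped below; everything else is copied
+ # through to the temp file.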
3567
+ skip = false
3568
+ IO.foreach(destination[:file]) do |line|
3569
+ if line == "### TPKG START - #{@base} - #{File.basename(package_file)}\n"
3570
+ skip = true
3571
+ elsif line == "### TPKG END - #{@base} - #{File.basename(package_file)}\n"
3572
+ skip = false
3573
+ elsif !skip
3574
+ tmpfile.write(line)
3575
+ end
3576
+ end
3577
+ tmpfile.close
3578
+ File.rename(tmpfile.path, destination[:file])
3579
+ # FIXME: On Solaris we should bounce cron or use the crontab
3580
+ # command, otherwise cron won't pick up the changes
3581
+ end
3582
+ end
3583
+ rescue Errno::EPERM
3584
+ # If removing the crontab fails due to permission problems and
3585
+ # we're not running as root, just warn the user, allowing folks
3586
+ # to run tpkg as a non-root user with reduced functionality.
3587
+ if Process.euid == 0
3588
+ raise
3589
+ else
3590
+ warn "Failed to remove crontab for #{File.basename(package_file)}, probably due to lack of root privileges"
3591
+ end
3592
+ end
3593
+ end
3594
+
3595
+ # Run any externals
3596
+ pkg[:metadata][:externals].each do |external|
3597
+ if !options[:externals_to_skip] || !options[:externals_to_skip].include?(external)
3598
+ run_external(pkg[:metadata][:filename], :remove, external[:name], external[:data])
3599
+ end
3600
+ end if pkg[:metadata][:externals]
3601
+
3602
+ # Remove files
3603
+ files_to_remove = conflicting_files(package_file, CHECK_REMOVE)
3604
+ # Reverse the order of the files, as directories will appear first
3605
+ # in the listing but we want to remove any files in them before
3606
+ # trying to remove the directory.
3607
+ files_to_remove.reverse.each do |file|
3608
+ begin
3609
+ if !File.directory?(file)
3610
+ File.delete(file)
3611
+ else
3612
+ begin
3613
+ Dir.delete(file)
3614
+ rescue SystemCallError => e
3615
+ # Directory isn't empty
3616
+ #puts e.message
3617
+ end
3618
+ end
3619
+ rescue Errno::ENOENT
3620
+ warn "File #{file} from package #{File.basename(package_file)} missing during remove"
3621
+ end
3622
+ end
3623
+
3624
+ # Run postremove script
3625
+ if File.exist?(File.join(workdir, 'tpkg', 'postremove'))
3626
+ pwd = Dir.pwd
3627
+ # chdir into the working directory so that the user can specify a
3628
+ # relative path to their file/script.
3629
+ Dir.chdir(File.join(workdir, 'tpkg'))
3630
+
3631
+ # Warn the user about non-executable files, as system will just
3632
+ # silently fail and exit if that's the case.
3633
+ if !File.executable?(File.join(workdir, 'tpkg', 'postremove'))
3634
+ warn "Warning: postremove script for #{File.basename(package_file)} is not executable, execution will likely fail"
3635
+ end
3636
+ # Note this only warns the user if the postremove fails; it does
3637
+ # not raise an exception like we do if preremove fails. Raising
3638
+ # an exception would leave the package's files removed but the
3639
+ # package still registered as installed, which does not seem
3640
+ # desirable. We could reinstall the package's files and raise an
3641
+ # exception, but this seems the best approach to me.
3642
+ system(File.join(workdir, 'tpkg', 'postremove')) || warn("Warning: postremove for #{File.basename(package_file)} failed with exit value #{$?.exitstatus}")
3643
+ ret_val = POSTREMOVE_ERR if $?.exitstatus > 0
3644
+
3645
+ # Switch back to our previous directory
3646
+ Dir.chdir(pwd)
3647
+ end
3648
+
3649
+ File.delete(package_file)
3650
+
3651
+ # delete metadata dir of this package
3652
+ package_metadata_dir = File.join(@metadata_directory, File.basename(package_file, File.extname(package_file)))
3653
+ FileUtils.rm_rf(package_metadata_dir)
3654
+
3655
+ # Cleanup
3656
+ FileUtils.rm_rf(workdir)
3657
+ end
3658
+
3659
+ send_update_to_server unless @report_server.nil? || options[:upgrade]
3660
+ unlock
3661
+ return ret_val
3662
+ end
3663
+
3664
+ def verify_file_metadata(requests)
3665
+ results = {}
3666
+ packages = []
3667
+ # parse request to determine what packages the user wants to verify
3668
+ requests.each do |request|
3669
+ req = Tpkg::parse_request(request)
3670
+ packages.concat(installed_packages_that_meet_requirement(req).collect { |pkg| pkg[:metadata][:filename] })
3671
+ end
3672
+
3673
+ # loop through each package, and verify checksum, owner, group and perm of each file that was installed
3674
+ packages.each do | package_file |
3675
+ puts "Verifying #{package_file}"
3676
+ package_full_name = File.basename(package_file, File.extname(package_file))
3677
+
3678
+ # Extract checksum.xml from the package
3679
+ checksum_xml = nil
3680
+
3681
+ # get file_metadata (bin, yml, or xml) from the installed package
3682
+ file_metadata_bin = File.join(@metadata_directory, package_full_name, 'file_metadata.bin')
3683
+ file_metadata_yml = File.join(@metadata_directory, package_full_name, 'file_metadata.yml')
3684
+ file_metadata_xml = File.join(@metadata_directory, package_full_name, 'file_metadata.xml')
3685
+ if File.exist?(file_metadata_bin)
3686
+ file_metadata = FileMetadata.new(File.read(file_metadata_bin), 'bin')
3687
+ elsif File.exist?(file_metadata_yml)
3688
+ file_metadata = FileMetadata.new(File.read(file_metadata_yml), 'yml')
3689
+ elsif File.exist?(file_metadata_xml)
3690
+ file_metadata = FileMetadata.new(File.read(file_metadata_xml), 'xml')
3691
+ else
3692
+ errors = []
3693
+ errors << "Can't find file_metadata.xml or file_metadata.yml file. Most likely this is because the package was created before the verify feature was added"
3694
+ results[package_file] = errors
3695
+ return results
3696
+ end
3697
+
3698
+ # verify installed files match their checksum
3699
+ file_metadata[:files].each do |file|
3700
+ errors = []
3701
+ gid_expected, uid_expected, perms_expected, chksum_expected = nil
3702
+ fp = file[:path]
3703
+
3704
+ # get expected checksum. For files that were encrypted, we're interested in the
3705
+ # checksum of the decrypted version
3706
+ if file[:checksum]
3707
+ chksum_expected = file[:checksum][:digests].first[:value]
3708
+ file[:checksum][:digests].each do | digest |
3709
+ if digest[:decrypted] == true
3710
+ chksum_expected = digest[:value].to_s
3711
+ end
3712
+ end
3713
+ end
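+ # For an encrypted file the digests list might look roughly like this
+ # (values illustrative):
+ #   [{:value => '<digest of encrypted file>'},
+ #    {:decrypted => true, :value => '<digest of decrypted contents>'}]
+ # in which case the :decrypted entry wins.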
3714
+
3715
+ # get expected acl values
3716
+ if file[:uid]
3717
+ uid_expected = file[:uid].to_i
3718
+ end
3719
+ if file[:gid]
3720
+ gid_expected = file[:gid].to_i
3721
+ end
3722
+ if file[:perms]
3723
+ perms_expected = file[:perms].to_s
3724
+ end
3725
+
3726
+ # normalize file path
3727
+ if file[:relocatable] == true
3728
+ fp = File.join(@base, fp)
3729
+ else
3730
+ fp = File.join(@file_system_root, fp)
3731
+ end
3732
+
3733
+ # can't handle symlinks, so skip them
3734
+ if File.symlink?(fp)
3735
+ next
3736
+ end
3737
+
3738
+ # check if the file exists
3739
+ if !File.exists?(fp)
3740
+ errors << "File is missing"
3741
+ else
3742
+ # get actual values
3743
+ #chksum_actual = Digest::SHA256.file(fp).hexdigest if File.file?(fp)
3744
+ chksum_actual = Digest::SHA256.hexdigest(File.read(fp)) if File.file?(fp)
3745
+ uid_actual = File.stat(fp).uid
3746
+ gid_actual = File.stat(fp).gid
3747
+ perms_actual = File.stat(fp).mode.to_s(8)
3748
+ end
3749
+
3750
+ if !chksum_expected.nil? && !chksum_actual.nil? && chksum_expected != chksum_actual
3751
+ errors << "Checksum doesn't match (Expected: #{chksum_expected}, Actual: #{chksum_actual}"
3752
+ end
3753
+
3754
+ if !uid_expected.nil? && !uid_actual.nil? && uid_expected != uid_actual
3755
+ errors << "uid doesn't match (Expected: #{uid_expected}, Actual: #{uid_actual}) "
3756
+ end
3757
+
3758
+ if !gid_expected.nil? && !gid_actual.nil? && gid_expected != gid_actual
3759
+ errors << "gid doesn't match (Expected: #{gid_expected}, Actual: #{gid_actual})"
3760
+ end
3761
+
3762
+ if !perms_expected.nil? && !perms_actual.nil? && perms_expected != perms_actual
3763
+ errors << "perms doesn't match (Expected: #{perms_expected}, Actual: #{perms_actual})"
3764
+ end
3765
+
3766
+ results[fp] = errors
3767
+ end
3768
+ end
3769
+ return results
3770
+ end
3771
+
3772
+ def execute_init(requests, action)
3773
+ ret_val = 0
3774
+ packages_to_execute_on = []
3775
+ if requests.nil?
3776
+ packages_to_execute_on = installed_packages_that_meet_requirement(nil)
3777
+ else
3778
+ requests.each do |request|
3779
+ req = Tpkg::parse_request(request)
3780
+ packages_to_execute_on.concat(installed_packages_that_meet_requirement(req))
3781
+ end
3782
+ end
3783
+
3784
+ packages_to_execute_on.each do |pkg|
3785
+ ret_val |= execute_init_for_package(pkg, action)
3786
+ end
3787
+ return ret_val
3788
+ end
3789
+
3790
+ def execute_init_for_package(pkg, action)
3791
+ ret_val = 0
3792
+ init_scripts_metadata = init_scripts(pkg[:metadata])
3793
+
3794
+ # warn if there's no init script and then return
3795
+ if init_scripts_metadata.nil? || init_scripts_metadata.empty?
3796
+ warn "Warning: There is no init script for #{pkg[:metadata][:name]}"
3797
+ return 1
3798
+ end
3799
+
3800
+ # convert the init scripts metadata to an array of { path => value, start => value}
3801
+ # so that we can order them based on their start value. This is necessary because
3802
+ # we need to execute the init scripts in the correct order.
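+ # For example (values illustrative), scripts with :start values 20 and 80
+ # run as [20, 80] for 'start' and as [80, 20] for 'stop'.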
3803
+ init_scripts = []
3804
+ init_scripts_metadata.each do | installed_path, init_info |
3805
+ init = {}
3806
+ init[:path] = installed_path
3807
+ init[:start] = init_info[:init][:start] || 0
3808
+ init_scripts << init
3809
+ end
3810
+
3811
+ # Reverse order if doing stop.
3812
+ if action == "stop"
3813
+ ordered_init_scripts = init_scripts.sort{ |a,b| b[:start] <=> a[:start] }
3814
+ else
3815
+ ordered_init_scripts = init_scripts.sort{ |a,b| a[:start] <=> b[:start] }
3816
+ end
3817
+
3818
+ ordered_init_scripts.each do |init_script|
3819
+ installed_path = init_script[:path]
3820
+ system("#{installed_path} #{action}")
3821
+ ret_val = INITSCRIPT_ERR if $?.exitstatus > 0
3822
+ end
3823
+ return ret_val
3824
+ end
3825
+
3826
+ # We can't safely calculate a set of dependencies and install the
3827
+ # resulting set of packages if another user is manipulating the installed
3828
+ # packages at the same time. These methods lock and unlock the package
3829
+ # system so that only one user makes changes at a time.
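+ # A minimal usage sketch:
+ #   lock
+ #   # ... manipulate installed packages ...
+ #   unlock
+ # Nested lock calls are counted, so only the outermost unlock removes the
+ # lock directory.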
3830
+ def lock
3831
+ if @locks > 0
3832
+ @locks += 1
3833
+ return
3834
+ end
3835
+ if File.directory?(@lock_directory)
3836
+ if @lockforce
3837
+ warn "Forcing lock removal"
3838
+ FileUtils.rm_rf(@lock_directory)
3839
+ else
3840
+ # Remove old lock files on the assumption that they were left behind
3841
+ # by a previous failed run
3842
+ if File.mtime(@lock_directory) < Time.at(Time.now - 60 * 60 * 2)
3843
+ warn "Lock is more than 2 hours old, removing"
3844
+ FileUtils.rm_rf(@lock_directory)
3845
+ end
3846
+ end
3847
+ end
3848
+ begin
3849
+ Dir.mkdir(@lock_directory)
3850
+ File.open(@lock_pid_file, 'w') { |file| file.puts($$) }
3851
+ @locks = 1
3852
+ rescue Errno::EEXIST
3853
+ lockpid = ''
3854
+ begin
3855
+ File.open(@lock_pid_file) { |file| lockpid = file.gets.chomp }
3856
+ rescue Errno::ENOENT
3857
+ end
3858
+
3859
+ # check that the process is actually running
3860
+ # if not, clean up the old lock and attempt to obtain the lock again
3861
+ if Tpkg::process_running?(lockpid)
3862
+ raise "tpkg repository locked by another process (with PID #{lockpid})"
3863
+ else
3864
+ FileUtils.rm_rf(@lock_directory)
3865
+ lock
3866
+ end
3867
+ end
3868
+ end
3869
+
3870
+ def unlock
3871
+ if @locks == 0
3872
+ warn "unlock called but not locked, that probably shouldn't happen"
3873
+ return
3874
+ end
3875
+ @locks -= 1
3876
+ if @locks == 0
3877
+ FileUtils.rm_rf(@lock_directory)
3878
+ end
3879
+ end
3880
+
3881
+ # TODO: update server side to accept yaml data
3882
+ def send_update_to_server
3883
+ metadata = metadata_for_installed_packages.collect{|metadata| metadata.hash}
3884
+ yml = YAML.dump(metadata)
3885
+ begin
3886
+ update_uri = URI.parse("#{@report_server}")
3887
+ http = Tpkg::gethttp(update_uri)
3888
+ request = {"yml"=>URI.escape(yml), "client"=>Facter['fqdn'].value}
3889
+ post = Net::HTTP::Post.new(update_uri.path)
3890
+ post.set_form_data(request)
3891
+ response = http.request(post)
3892
+
3893
+ case response
3894
+ when Net::HTTPSuccess
3895
+ # puts "Response from server:\n'#{response.body}'"
3896
+ puts "Successfully send update to reporter server"
3897
+ else
3898
+ $stderr.puts response.body
3899
+ #response.error!
3900
+ # just ignore error and give user warning
3901
+ puts "Failed to send update to reporter server"
3902
+ end
3903
+ rescue
3904
+ puts "Failed to send update to reporter server"
3905
+ end
3906
+ end
3907
+
3908
+ # Build a dependency map of currently installed packages
3909
+ # For example, if we have pkgB and pkgC which depends on pkgA, then
3910
+ # the dependency map would look like this:
3911
+ # "pkgA.tpkg" => [{pkgB metadata}, {pkgC metadata}]
3912
+ def get_dependency_mapping
3913
+ dependency_mapping = {}
3914
+ installed_packages.each do | pkg |
3915
+ metadata = pkg[:metadata]
3916
+
3917
+ # Get list of pkgs that this pkg depends on
3918
+ next if metadata[:dependencies].nil?
3919
+ depended_on = []
3920
+ metadata[:dependencies].each do |req|
3921
+ next if req[:type] == :native
3922
+ depended_on |= installed_packages_that_meet_requirement(req)
3923
+ end
3924
+
3925
+ # populate the dependency map
3926
+ depended_on.each do | req_pkg |
3927
+ dependency_mapping[req_pkg[:metadata][:filename]] = [] if dependency_mapping[req_pkg[:metadata][:filename]].nil?
3928
+ dependency_mapping[req_pkg[:metadata][:filename]] << pkg
3929
+ end
3930
+ end
3931
+ return dependency_mapping
3932
+ end
3933
+
3934
+ # Given a list of packages, return a list of dependent packages
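+ # A rough usage sketch (package name illustrative):
+ #   pkgs = installed_packages_that_meet_requirement({:name => 'pkgA'})
+ #   get_dependents(pkgs)  # => installed packages that directly or
+ #                         #    transitively depend on pkgA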
3935
+ def get_dependents(pkgs)
3936
+ dependents = []
3937
+ to_check = pkgs.map { |pkg| pkg[:metadata][:filename] }
3938
+ dependency = get_dependency_mapping
3939
+ while pkgfile = to_check.pop
3940
+ pkgs = dependency[pkgfile.to_s]
3941
+ next if pkgs.nil?
3942
+ dependents |= pkgs
3943
+ to_check |= pkgs.map { |pkg| pkg[:metadata][:filename] }
3944
+ end
3945
+ return dependents
3946
+ end
3947
+
3948
+ # Given a list of packages, return a list of all their prerequisite dependencies
3949
+ # Example: If pkgA depends on pkgB, and pkgB depends on pkgC, then calling this
3950
+ # method on pkgA will return pkgB and pkgC
3951
+ # Assumption: There is no cyclic dependency
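+ # A rough usage sketch continuing the example above (names illustrative):
+ #   get_prerequisites(installed_packages_that_meet_requirement({:name => 'pkgA'}))
+ #     # => the installed pkgB and pkgC packages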
3952
+ def get_prerequisites(pkgs)
3953
+ pre_reqs = []
3954
+ to_check = pkgs.clone
3955
+ while pkg = to_check.pop
3956
+ next if pkg[:metadata][:dependencies].nil?
3957
+ pkg[:metadata][:dependencies].each do | dep |
3958
+ pre_req = installed_packages_that_meet_requirement(dep)
3959
+ pre_reqs |= pre_req
3960
+ to_check |= pre_req
3961
+ end
3962
+ end
3963
+ return pre_reqs
3964
+ end
3965
+ end
3966
+