tpkg 1.21.1 → 1.22.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/Rakefile +1 -1
- data/bin/cpan2tpkg +37 -20
- data/bin/gem2tpkg +8 -19
- data/bin/tpkg +18 -21
- data/lib/tpkg.rb +236 -312
- data/lib/tpkg/deployer.rb +8 -1
- data/lib/tpkg/metadata.rb +10 -2
- data/schema/schema-1.0.5.yml +86 -0
- data/schema/schema.yml +2 -0
- metadata +16 -15
data/Rakefile
CHANGED
@@ -5,7 +5,7 @@ spec = Gem::Specification.new do |s|
   s.add_dependency('facter')
   s.add_dependency('net-ssh')
   s.add_dependency('ddao-kwalify')
-  s.version = '1.
+  s.version = '1.22.1'
   s.authors = ['Darren Dao', 'Jason Heiss']
   s.email = 'tpkg-users@lists.sourceforge.net'
   s.homepage = 'http://tpkg.sourceforge.net'
data/bin/cpan2tpkg
CHANGED
@@ -256,29 +256,46 @@ MODULE: foreach my $name ($extinst->modules)
 }
 PREREQ: foreach my $dep (keys %prereqs)
 {
-
-    my $depmod = CPAN::Shell->expand('Module', $dep);
-    # This is a bit of an indirect way to identify core modules
-    # but the only way I can figure out. Core stuff gets
-    # installed with perl into /home/t/perl-version, CPAN
-    # modules into /home/t/lib/perl5/site_perl.
-    #
-    # What seems like it should be the "right" way is that the "D"
-    # (aka "Development Stage") field in dslip_status has "S" (aka
-    # "Standard, supplied with Perl 5") as one of its possible values,
-    # according to the docs (http://perldoc.perl.org/CPAN.html).
-    # However, that doesn't seem to be set reliably.
-    if ($depmod->inst_file && $depmod->inst_file =~ /$Config{prefix}/)
+    if ($dep eq 'perl')
     {
-
-
+        # We always add a dependency on perl anyway, so the
+        # module's dependency is only relevant if it specifies a
+        # newer version than is running.
+        if ($prereqs{$dep} ne '0')
+        {
+            if ($prereqs{$dep} > $])
+            {
+                die "Module requires perl >= $prereqs{$dep}, this is $]\n";
+            }
+        }
     }
-
-    my $deppkgname = module_to_pkg_name($depmod);
-    $deps{$deppkgname} = {};
-    if ($prereqs{$dep} ne '0')
+    else
     {
-
+        # Skip dependencies on core modules
+        my $depmod = CPAN::Shell->expand('Module', $dep);
+        # This is a bit of an indirect way to identify core
+        # modules but the only way I can figure out. Core stuff
+        # gets installed with perl into /home/t/perl-version,
+        # CPAN modules into /home/t/lib/perl5/site_perl.
+        #
+        # What seems like it should be the "right" way is that
+        # the "D" (aka "Development Stage") field in
+        # dslip_status has "S" (aka "Standard, supplied with
+        # Perl 5") as one of its possible values, according to
+        # the docs (http://perldoc.perl.org/CPAN.html).
+        # However, that doesn't seem to be set reliably.
+        if ($depmod->inst_file && $depmod->inst_file =~ /$Config{prefix}/)
+        {
+            print "Prereq $dep is a core module, skipping\n";
+            next PREREQ;
+        }
+
+        my $deppkgname = module_to_pkg_name($depmod);
+        $deps{$deppkgname} = {};
+        if ($prereqs{$dep} ne '0')
+        {
+            $deps{$deppkgname}{minimum_version} = $prereqs{$dep};
+        }
     }
 }
 }
data/bin/gem2tpkg
CHANGED
@@ -164,7 +164,6 @@ end
 # Create the directory we want gem to install into
 @gemdir = tempdir('gem2tpkg')
 ENV['GEM_HOME'] = @gemdir
-ENV['GEM_PATH'] = @gemdir

 # Install the gem
 geminst = [@gemcmd, 'install', '--no-rdoc', '--no-ri'] | @gems
@@ -186,6 +185,10 @@ if !r
   abort('gem install failed')
 end

+# Now set GEM_PATH so that further operations (particularly `gem list`)
+# only operate against the gems installed in our working directory.
+ENV['GEM_PATH'] = @gemdir
+
 @already_packaged = []
 def package(gem)
   pkgfiles = []
@@ -323,8 +326,6 @@ def package(gem)
         maxver = "#{majorver}.9999.9999"
         deps[depgem][:maximum_version] = maxver
       end
-      # Package the dependency
-      pkgfiles.concat(package(depgem))
     end
   end
   if !$?.success?
@@ -454,28 +455,16 @@ def package(gem)
   pkgfiles
 end

-#
-
+# Package each installed gem
+pkgfiles = []
 IO.popen("#{@gemcmd} list") do |pipe|
   pipe.each_line do |line|
     next if line.include?('***') # Skip header line
     next if line =~ /^\s*$/ # Skip blank lines
-
+    gem, version = line.split
+    pkgfiles |= package(gem)
   end
 end
-if gemcount == 0
-  abort "Zero gems installed according to gem list?"
-end
-
-pkgfiles = []
-@gems.each do | gem |
-  pkgfiles |= package(gem)
-end
-# Make sure the package method made as many packages as there were gems
-# installed
-if pkgfiles.length != gemcount
-  abort "gem count (#{gemcount}) vs pkg count (#{pkgfiles.length}) mismatch"
-end

 # Tell the user what packages were created
 puts 'The following packages were created:'
data/bin/tpkg
CHANGED
@@ -28,8 +28,9 @@ require 'tpkg'
 @servers = nil
 @worker_count = 10
 @rerun_with_sudo = false
-@tpkg_options = {}
-@init_options = {}
+@tpkg_options = {} # options for instantiating Tpkg object
+@init_options = {} # options for how to run init scripts
+@other_options = {}
 @compress = nil


@@ -70,8 +71,9 @@ opts.on('--upgrade', '-u', '=PACKAGES', 'Upgrade one or more packages', Array) d
   @action_value = opt
 end
 opts.on('--downgrade', '=PACKAGES', 'Downgrade one or more packages', Array) do |opt|
+  @other_options[:downgrade] = true
   @rerun_with_sudo = true
-  @action = :
+  @action = :upgrade
   @action_value = opt
 end
 opts.on('--ua', 'Upgrade all packages') do |opt|
@@ -85,12 +87,14 @@ opts.on('--remove', '-r', '=PACKAGES', 'Remove one or more packages', Array) do
 end
 opts.on('--rd', '=PACKAGES', 'Similar to -r but also remove depending packages', Array) do |opt|
   @rerun_with_sudo = true
-  @
+  @other_options[:remove_all_dep] = true
+  @action = :remove
   @action_value = opt
 end
 opts.on('--rp', '=PACKAGES', 'Similar to -r but also remove prerequisites', Array) do |opt|
   @rerun_with_sudo = true
-  @
+  @other_options[:remove_all_prereq] = true
+  @action = :remove
   @action_value = opt
 end
 opts.on('--ra', 'Remove all packages') do |opt|
@@ -229,7 +233,10 @@ end
 opts.on('--lock-force', 'Force the removal of an existing lockfile') do |opt|
   @lockforce = opt
 end
-opts.on('--force', '
+opts.on('--force-replace', 'Replace conflicting pkgs with the new one(s)') do |opt|
+  @other_options[:force_replace] = opt
+end
+opts.on('--force', 'Force the execution of a given task') do |opt|
   @force = opt
 end
 opts.on('--use-ssh-key', 'Use ssh key for deploying instead of password') do |opt|
@@ -240,7 +247,7 @@ opts.on('--deploy-as', '=USERNAME', 'What username to use for deploying to remot
   @deploy_options["deploy-as"] = opt
   @deploy_params = @deploy_params - ['--deploy-as']
 end
-opts.on('--compress', '=[TYPE]', 'Compress files when making packages
+opts.on('--compress', '=[TYPE]', 'Compress files when making packages') do |opt|
   @compress = opt
 end
 opts.on('--debug', 'Print lots of messages about what tpkg is doing') do |opt|
@@ -280,7 +287,7 @@ def instantiate_tpkg(options = {})
   sources = options["sources"] || []
   report_server = nil

-  [File.join(Tpkg::CONFIGDIR, 'tpkg.conf'),
+  [File.join(Tpkg::CONFIGDIR, 'tpkg.conf'), File.join(ENV['HOME'], ".tpkg.conf")].each do |configfile|
     if File.exist?(configfile)
       IO.foreach(configfile) do |line|
         line.chomp!
@@ -355,23 +362,13 @@ when :extract
   Tpkg::extract_metadata(@action_value)
 when :install
   tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.install(@action_value, passphrase_callback)
+  ret_val = tpkg.install(@action_value, passphrase_callback, @other_options)
 when :upgrade
   tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.upgrade(@action_value, passphrase_callback)
-when :downgrade
-  downgrade = true
-  tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.upgrade(@action_value, passphrase_callback, downgrade)
+  ret_val = tpkg.upgrade(@action_value, passphrase_callback, @other_options)
 when :remove
   tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.remove(@action_value)
-when :remove_all_dep
-  tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.remove(@action_value, {:remove_all_dep => true})
-when :remove_all_prereq
-  tpkg = instantiate_tpkg(@tpkg_options)
-  ret_val = tpkg.remove(@action_value, {:remove_all_prereq => true})
+  ret_val = tpkg.remove(@action_value, @other_options)
 when :verify
   result = nil
   # Verify a given .tpkg file
data/lib/tpkg.rb
CHANGED
@@ -56,7 +56,7 @@ require 'kwalify' # for validating yaml

 class Tpkg

-  VERSION = '1.
+  VERSION = '1.22.1'
   CONFIGDIR = '/etc'

   GENERIC_ERR = 1
@@ -131,6 +131,8 @@ class Tpkg
   end
   def self.clear_cached_tar
     @@tar = nil
+    @@taroptions = ""
+    @@tarinfo = {:version => 'unknown'}
   end

   # Encrypts the given file in-place (the plaintext file is replaced by the
@@ -450,47 +452,6 @@ class Tpkg
     #return FileMetadata.new(YAML::dump(filemetadata),'yml')
     return FileMetadata.new(Marshal::dump(filemetadata),'bin')
   end
-
-  def self.get_xml_filemetadata_from_directory(tpkgdir)
-    filemetadata_xml = REXML::Document.new
-    filemetadata_xml << REXML::Element.new('files')
-
-    # create file_metadata.xml that stores list of files and their checksum
-    # will be used later on to check whether installed files have been changed
-    root_dir = File.join(tpkgdir, "root")
-    reloc_dir = File.join(tpkgdir, "reloc")
-    Find.find(root_dir, reloc_dir) do |f|
-      next if !File.exist?(f)
-      relocatable = "false"
-
-      # check if it's from root dir or reloc dir
-      if f =~ /^#{root_dir}/
-        short_fn = f[root_dir.length ..-1]
-      else
-        short_fn = f[reloc_dir.length + 1..-1]
-        relocatable = "true"
-      end
-
-      next if short_fn.nil? or short_fn.empty?
-
-      file_ele = filemetadata_xml.root.add_element("file", {"relocatable" => relocatable})
-      path_ele = file_ele.add_element("path")
-      path_ele.add_text(short_fn)
-
-      # only do checksum for file
-      if File.file?(f)
-        # this doesn't work for older ruby version
-        #digest = Digest::SHA256.file(f).hexdigest
-        digest = Digest::SHA256.hexdigest(File.read(f))
-        chksum_ele = file_ele.add_element("checksum")
-        alg_ele = chksum_ele.add_element("algorithm")
-        alg_ele.add_text("SHA256")
-        digest_ele = chksum_ele.add_element("digest")
-        digest_ele.add_text(digest)
-      end
-    end
-    return filemetadata_xml
-  end

   def self.verify_package_checksum(package_file)
     topleveldir = package_toplevel_directory(package_file)
@@ -552,12 +513,10 @@ class Tpkg
     ['yml','xml'].each do |format|
       file = File.join('tpkg', "tpkg.#{format}")

-      extract_tpkg_tar_command = cmd_to_extract_tpkg_tar(package_file, topleveldir)
-
       # use popen3 instead of popen because popen display stderr when there's an error such as
       # tpkg.yml not being there, which is something we want to ignore since old tpkg doesn't
       # have tpkg.yml file
-
+      extract_tpkg_tar_command = cmd_to_extract_tpkg_tar(package_file, topleveldir)
       stdin, stdout, stderr = Open3.popen3("#{extract_tpkg_tar_command} | #{find_tar} -xf - -O #{file}")
       filecontent = stdout.read
       if filecontent.nil? or filecontent.empty?
@@ -576,60 +535,6 @@ class Tpkg
     return metadata
   end

-  # TODO: To be deprecated
-  # Extracts and returns the metadata from a package file
-  def self.xml_metadata_from_package(package_file)
-    topleveldir = package_toplevel_directory(package_file)
-    # Verify checksum
-    verify_package_checksum(package_file)
-    # Extract and parse tpkg.xml
-    tpkg_xml = nil
-    extract_tpkg_tar_command = cmd_to_extract_tpkg_tar(package_file, topleveldir)
-    IO.popen("#{extract_tpkg_tar_command} | #{find_tar} -xf - -O #{File.join('tpkg', 'tpkg.xml')}") do |pipe|
-      tpkg_xml = REXML::Document.new(pipe.read)
-    end
-    if !$?.success?
-      warn "Warning: Extracting tpkg.xml from #{package_file} failed"
-      return nil
-    end
-
-    # Insert an attribute on the root element with the package filename
-    tpkg_xml.root.attributes['filename'] = File.basename(package_file)
-
-    # Return
-    return tpkg_xml
-  end
-
-  # TODO: To be deprecated
-  # Extracts and returns the metadata from a directory of package files
-  def self.xml_metadata_from_directory(directory)
-    metadata = []
-    # if metadata.xml already exists, then go ahead and
-    # parse it
-    existing_metadata_file = File.join(directory, 'metadata.xml')
-    existing_metadata = {}
-    if File.exists?(existing_metadata_file)
-      tpkg_metadata_xml = REXML::Document.new(File.open(existing_metadata_file))
-
-      tpkg_metadata_xml.root.elements.each do | metadata_xml |
-        existing_metadata[metadata_xml.attributes['filename']] = metadata_xml
-      end
-    end
-
-    # Populate the metadata array with metadata for all of the packages
-    # in the given directory. Reuse existing metadata if possible.
-    Dir.glob(File.join(directory, '*.tpkg')) do |pkg|
-      if existing_metadata[File.basename(pkg)]
-        metadata << existing_metadata[File.basename(pkg)]
-      else
-        xml = xml_metadata_from_package(pkg)
-        metadata << xml.root if xml
-      end
-    end
-
-    return metadata
-  end
-
   # Extracts and returns the metadata from a directory of package files
   def self.metadata_from_directory(directory)
     metadata = []
@@ -667,26 +572,6 @@ class Tpkg
   # to metadata.xml in that directory
   def self.extract_metadata(directory, dest=nil)
     dest = directory if dest.nil?
-    # we're no longer generating metadata.xml
-    backward_compatible = false
-
-    # If we still want to support metadata.xml
-    if backward_compatible
-      metadata_xml = xml_metadata_from_directory(directory)
-      # Combine all of the individual metadata files into one XML document
-      metadata = REXML::Document.new
-      metadata << REXML::Element.new('tpkg_metadata')
-      metadata_xml.each do |md|
-        metadata.root << md
-      end
-      # And write that out to metadata.xml
-      metadata_tmpfile = Tempfile.new('metadata.xml', dest)
-      metadata.write(metadata_tmpfile)
-      metadata_tmpfile.close
-      File.chmod(0644, metadata_tmpfile.path)
-      File.rename(metadata_tmpfile.path, File.join(dest, 'metadata.xml'))
-    end
-
     metadata = metadata_from_directory(directory)
     # And write that out to metadata.yml
     metadata_tmpfile = Tempfile.new('metadata.yml', dest)
@@ -1093,7 +978,7 @@ class Tpkg
   # deploy_params is an array that holds the list of paramters that is used when invoking tpkg on to the remote
   # servers where we want to deploy to.
   #
-  # servers is an array or a callback that list the remote servers where we want to deploy to
+  # servers is an array, a filename or a callback that list the remote servers where we want to deploy to
   def self.deploy(deploy_params, deploy_options, servers)
     servers.uniq!
     deployer = Deployer.new(deploy_options)
@@ -1162,9 +1047,7 @@ class Tpkg
   # 2) the file_defaults settings of the metadata file
   # 3) the explicitly defined settings in the corresponding file section of the metadata file
   def self.predict_file_perms_and_ownership(data)
-    perms = nil
-    uid = nil
-    gid = nil
+    perms = uid = gid = nil

     # get current permission and ownership
     if data[:actual_file]
@@ -1193,7 +1076,8 @@ class Tpkg
     return perms, uid, gid
   end

-  # Given a package file, figure out
+  # Given a package file, figure out if tpkg.tar was compressed
+  # Return what type of compression. If tpkg.tar wasn't compressed, then return nil.
   def self.get_compression(package_file)
     compression = nil
     IO.popen("#{find_tar} -tf #{package_file} #{@@taroptions}") do |pipe|
@@ -1239,6 +1123,16 @@ class Tpkg
     end
     return result
   end
+
+  # Used where we wish to capture an exception and modify the message. This
+  # method returns a new exception with desired message but with the backtrace
+  # from the original exception so that the backtrace info is not lost. This
+  # is necessary because Exception lacks a set_message method.
+  def self.wrap_exception(e, message)
+    eprime = e.exception(message)
+    eprime.set_backtrace(e.backtrace)
+    eprime
+  end

   #
   # Instance methods
@@ -1332,6 +1226,7 @@ class Tpkg
     @lock_pid_file = File.join(@lock_directory, 'pid')
     @locks = 0
     @installed_metadata = {}
+    @available_packages_cache = {}
   end

   def source_to_local_directory(source)
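
For orientation: the wrap_exception helper added in the hunk above can be exercised on its own. The sketch below is illustrative only and not part of the diff; it uses a local copy of the method instead of the Tpkg class and a hypothetical ArgumentError and package name.

  # Rebuild an exception with a new message while keeping the original backtrace.
  def wrap_exception(e, message)
    eprime = e.exception(message)       # same class, new message
    eprime.set_backtrace(e.backtrace)   # preserve where the failure actually occurred
    eprime
  end

  begin
    raise ArgumentError, 'original failure'
  rescue => e
    wrapped = wrap_exception(e, "External foo install for bar-1.0.tpkg: #{e.message}")
    puts wrapped.class    # => ArgumentError
    puts wrapped.message  # message rewritten; backtrace carried over from e
  end

This mirrors how a later hunk in tpkg.rb re-raises external install/remove failures with the external and package names prepended.
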
@@ -1362,30 +1257,24 @@ class Tpkg
       else
         uri = http = localdate = remotedate = localdir = localpath = nil

-
-
-        http = Tpkg::gethttp(uri)
+        uri = URI.join(source, 'metadata.yml')
+        http = Tpkg::gethttp(uri)

-
-
-
-
-
-
-        end
+        # Calculate the path to the local copy of the metadata for this URI
+        localdir = source_to_local_directory(source)
+        localpath = File.join(localdir, 'metadata.yml')
+        if File.exist?(localpath)
+          localdate = File.mtime(localpath)
+        end

-
-
-
-
-
-
-
-
-        else
-          puts "Error fetching metadata from #{uri}: #{response.body}"
-          next
-        end
+        # get last modified time of the metadata file from the server
+        response = http.head(uri.path)
+        case response
+        when Net::HTTPSuccess
+          remotedate = Time.httpdate(response['Date'])
+        else
+          puts "Error fetching metadata from #{uri}: #{response.body}"
+          response.error! # Throws an exception
         end

         # Fetch the metadata if necessary
@@ -1416,22 +1305,9 @@ class Tpkg
         else
           metadata_contents = IO.read(localpath)
         end
-
-
-
-        else
-          # At this stage we just break up the metadata.xml document into
-          # per-package chunks and save them for further parsing later.
-          # This allows us to parse the whole metadata.xml just once, and
-          # saves us from having to further parse and convert the
-          # per-package chunks until if/when they are needed.
-          tpkg_metadata = REXML::Document.new(metadata_contents)
-          tpkg_metadata.elements.each('/tpkg_metadata/tpkg') do |metadata_xml|
-            name = metadata_xml.elements['name'].text
-            metadata[name] = [] if !metadata[name]
-            metadata[name] << Metadata.new(metadata_xml.to_s, 'xml', source)
-          end
-        end
+        # This method will parse the yml doc and populate the metadata variable
+        # with list of pkgs' metadata
+        Metadata::get_pkgs_metadata_from_yml_doc(metadata_contents, metadata, source)
       end
     end
     @metadata = metadata
@@ -1754,7 +1630,7 @@ class Tpkg
       end
     end

-  # Returns an array of
+  # Returns an array of metadata for installed packages
   def metadata_for_installed_packages
     metadata = {}
     if File.directory?(@installed_directory)
@@ -1851,44 +1727,54 @@ class Tpkg

   # Returns an array of packages which meet the given requirement
   def available_packages_that_meet_requirement(req=nil)
-    pkgs =
+    pkgs = nil
     puts "avail_pkgs_that_meet_req checking for #{req.inspect}" if @@debug
-    if req
-
-
-
-
-
+    if @available_packages_cache[req]
+      puts "avail_pkgs_that_meet_req returning cached result" if @@debug
+      pkgs = @available_packages_cache[req]
+    else
+      pkgs = []
+      if req
+        req = req.clone # we're using req as the key for our cache, so it's important
+        # that we clone it here. Otherwise, req can be changed later on from
+        # the calling method and modify our cache inadvertently
+        if req[:type] == :native
+          load_available_native_packages(req[:name])
+          @available_native_packages[req[:name]].each do |pkg|
+            if Tpkg::package_meets_requirement?(pkg, req)
+              pkgs << pkg
+            end
           end
-
-
-
-
-
-
+        else
+          load_available_packages(req[:name])
+          @available_packages[req[:name]].each do |pkg|
+            if Tpkg::package_meets_requirement?(pkg, req)
+              pkgs << pkg
+            end
           end
+          # There's a weird dicotomy here where @available_packages contains
+          # available tpkg and native packages, and _installed_ native
+          # packages, but not installed tpkgs. That's somewhat intentional,
+          # as we don't want to cache the installed state since that might
+          # change during a run. We probably should be consistent, and not
+          # cache installed native packages either. However, we do have
+          # some intelligent caching of the installed tpkg state which would
+          # be hard to replicate for native packages, and this method gets
+          # called a lot so re-running the native package query commands
+          # frequently would not be acceptable. So maybe we have the right
+          # design, and this just serves as a note that it is not obvious.
+          pkgs.concat(installed_packages_that_meet_requirement(req))
         end
-
-    #
-    #
-
-    #
-    #
-    #
-
-    # called a lot so re-running the native package query commands
-    # frequently would not be acceptable. So maybe we have the right
-    # design, and this just serves as a note that it is not obvious.
-    pkgs.concat(installed_packages_that_meet_requirement(req))
+      else
+        # We return everything available if given a nil requirement
+        # We do not include native packages
+        load_available_packages
+        # @available_packages is a hash of pkgname => array of pkgs
+        # Thus m is a 2 element array of [pkgname, array of pkgs]
+        # And thus m[1] is the array of packages
+        pkgs = @available_packages.collect{|m| m[1]}.flatten
       end
-
-    # We return everything available if given a nil requirement
-    # We do not include native packages
-    load_available_packages
-    # @available_packages is a hash of pkgname => array of pkgs
-    # Thus m is a 2 element array of [pkgname, array of pkgs]
-    # And thus m[1] is the array of packages
-    pkgs = @available_packages.collect{|m| m[1]}.flatten
+      @available_packages_cache[req] = pkgs
     end
     pkgs
   end
@@ -1949,6 +1835,16 @@ class Tpkg
       files[:normalized] << File.join(@base, relocfile)
     end
   end
+  def normalize_path(path,root=nil,base=nil)
+    root ||= @file_system_root
+    base ||= @base
+    if path[0,1] == File::SEPARATOR
+      normalized_path = File.join(root, path)
+    else
+      normalized_path = File.join(base, path)
+    end
+    normalized_path
+  end
   def files_for_installed_packages(package_files=nil)
     files = {}
     if !package_files
@@ -2353,12 +2249,7 @@ class Tpkg
     metadata[:files][:files].each do |tpkgfile|
       if tpkgfile[:init]
         tpkg_path = tpkgfile[:path]
-        installed_path =
-        if tpkg_path[0,1] == File::SEPARATOR
-          installed_path = File.join(@file_system_root, tpkg_path)
-        else
-          installed_path = File.join(@base, tpkg_path)
-        end
+        installed_path = normalize_path(tpkg_path)
         init_scripts[installed_path] = tpkgfile
       end
     end
@@ -2431,12 +2322,7 @@ class Tpkg
     metadata[:files][:files].each do |tpkgfile|
       if tpkgfile[:crontab]
         tpkg_path = tpkgfile[:path]
-        installed_path =
-        if tpkg_path[0,1] == File::SEPARATOR
-          installed_path = File.join(@file_system_root, tpkg_path)
-        else
-          installed_path = File.join(@base, tpkg_path)
-        end
+        installed_path = normalize_path(tpkg_path)
         destinations[installed_path] = {}

         # Decide whether we're going to add the file to a per-user
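
For reference: the new normalize_path helper above simply chooses between the file system root (for absolute tpkg paths) and the relocation base (for relative paths). A standalone sketch of that behavior, not part of the diff and using hypothetical example paths:

  # Absolute tpkg paths land under the root, relative ones under the base.
  def normalize_path(path, root, base)
    path[0,1] == File::SEPARATOR ? File.join(root, path) : File.join(base, path)
  end

  puts normalize_path('/etc/app.conf', '/', '/opt/tpkg')  # => "/etc/app.conf"
  puts normalize_path('bin/app', '/', '/opt/tpkg')        # => "/opt/tpkg/bin/app"

The init-script and crontab hunks above, and a later hunk in unpack, replace their duplicated if/else blocks with calls to this helper.
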
@@ -2494,18 +2380,24 @@ class Tpkg
         IO.popen("#{externalpath} '#{pkgfile}' install", 'w') do |pipe|
           pipe.write(data)
         end
+        if !$?.success?
+          raise "Exit value #{$?.exitstatus}"
+        end
       rescue => e
         # Tell the user which external and package were involved, otherwise
         # failures in externals are very hard to debug
-        raise
+        raise Tpkg.wrap_exception(e, "External #{name} #{operation} for #{File.basename(pkgfile)}: " + e.message)
       end
     when :remove
       begin
         IO.popen("#{externalpath} '#{pkgfile}' remove", 'w') do |pipe|
           pipe.write(data)
         end
+        if !$?.success?
+          raise "Exit value #{$?.exitstatus}"
+        end
       rescue => e
-        raise
+        raise Tpkg.wrap_exception(e, "External #{name} #{operation} for #{File.basename(pkgfile)}: " + e.message)
       end
     else
       raise "Bug, unknown external operation #{operation}"
@@ -2520,6 +2412,14 @@ class Tpkg
     ret_val = 0
     metadata = Tpkg::metadata_from_package(package_file)

+    # set env variable to let pre/post install know whether this unpack
+    # is part of an install or upgrade
+    if options[:is_doing_upgrade]
+      ENV['TPKG_ACTION'] = "upgrade"
+    else
+      ENV['TPKG_ACTION'] = "install"
+    end
+
     # Unpack files in a temporary directory
     # I'd prefer to unpack on the fly so that the user doesn't need to
     # have disk space to hold three copies of the package (the package
@@ -2533,22 +2433,19 @@ class Tpkg
     system("#{extract_tpkg_tar_cmd} | #{@tar} #{@@taroptions} -C #{workdir} -xpf -")
     files_info = {} # store perms, uid, gid, etc. for files
     checksums_of_decrypted_files = {}
-
-    reloc_dir = File.join(workdir, 'tpkg', 'reloc')
-    rel_root_dir = File.join('tpkg', 'root')
-    rel_reloc_dir = File.join('tpkg', 'reloc')
-
+
     # Get list of conflicting files/directories & store their perm/ownership. That way, we can
     # set them to the correct values later on in order to preserve them.
-
+    rel_root_dir = File.join('tpkg', 'root')
+    rel_reloc_dir = File.join('tpkg', 'reloc')
     files = `#{extract_tpkg_tar_cmd} | #{@tar} #{@@taroptions} -tf -`
     files = files.split("\n")
     conflicting_files = {}
     files.each do | file |
       if file =~ /^#{rel_root_dir}/
-        possible_conflicting_file =
+        possible_conflicting_file = File.join(@file_system_root, file[rel_root_dir.length ..-1])
       elsif file =~ /^#{rel_reloc_dir}/
-        possible_conflicting_file =
+        possible_conflicting_file = File.join(@base, file[rel_reloc_dir.length + 1..-1])
       end
       if possible_conflicting_file && (File.exists?(possible_conflicting_file) && !File.symlink?(possible_conflicting_file))
         conflicting_files[File.join(workdir, file)] = File.stat(possible_conflicting_file)
@@ -2568,18 +2465,14 @@ class Tpkg
     default_gid = DEFAULT_OWNERSHIP_UID
     default_perms = nil

-    if metadata[:files]
-
-
-
-
-
-
-      if metadata[:files][:file_defaults][:posix][:perms]
-        default_perms = metadata[:files][:file_defaults][:posix][:perms]
-      end
-    end
+    if (metadata[:files][:file_defaults][:posix][:owner] rescue nil)
+      default_uid = Tpkg::lookup_uid(metadata[:files][:file_defaults][:posix][:owner])
+    end
+    if (metadata[:files][:file_defaults][:posix][:group] rescue nil)
+      default_gid = Tpkg::lookup_gid(metadata[:files][:file_defaults][:posix][:group])
+    end
+    if (metadata[:files][:file_defaults][:posix][:perms] rescue nil)
+      default_perms = metadata[:files][:file_defaults][:posix][:perms]
     end

     # Set default dir uid/gid to be same as for file.
@@ -2587,20 +2480,18 @@ class Tpkg
     default_dir_gid = default_gid
     default_dir_perms = 0755

-    if metadata[:files]
-
-
-
-
-
-
-
-      if metadata[:files][:dir_defaults][:posix][:perms]
-        default_dir_perms = metadata[:files][:dir_defaults][:posix][:perms]
-      end
-    end
+    if (metadata[:files][:dir_defaults][:posix][:owner] rescue nil)
+      default_dir_uid = Tpkg::lookup_uid(metadata[:files][:dir_defaults][:posix][:owner])
+    end
+    if (metadata[:files][:dir_defaults][:posix][:group] rescue nil)
+      default_dir_gid = Tpkg::lookup_gid(metadata[:files][:dir_defaults][:posix][:group])
+    end
+    if (metadata[:files][:dir_defaults][:posix][:perms] rescue nil)
+      default_dir_perms = metadata[:files][:dir_defaults][:posix][:perms]
     end

+    root_dir = File.join(workdir, 'tpkg', 'root')
+    reloc_dir = File.join(workdir, 'tpkg', 'reloc')
     Find.find(root_dir, reloc_dir) do |f|
       # If the package doesn't contain either of the top level
       # directories we need to skip them, find will pass them to us
@@ -2636,12 +2527,7 @@ class Tpkg
     # Handle any decryption and ownership/permissions on specific files
     metadata[:files][:files].each do |tpkgfile|
       tpkg_path = tpkgfile[:path]
-      working_path =
-      if tpkg_path[0,1] == File::SEPARATOR
-        working_path = File.join(workdir, 'tpkg', 'root', tpkg_path)
-      else
-        working_path = File.join(workdir, 'tpkg', 'reloc', tpkg_path)
-      end
+      working_path = normalize_path(tpkg_path, File.join(workdir, 'tpkg', 'root'), File.join(workdir, 'tpkg', 'reloc'))
       if !File.exist?(working_path) && !File.symlink?(working_path)
         raise "tpkg.xml for #{File.basename(package_file)} references file #{tpkg_path} but that file is not in the package"
       end
@@ -2698,7 +2584,7 @@ class Tpkg
         end

         digest = Digest::SHA256.hexdigest(File.read(working_path))
-        # get checksum for the decrypted file. Will be used for creating file_metadata
+        # get checksum for the decrypted file. Will be used for creating file_metadata
         checksums_of_decrypted_files[File.expand_path(tpkg_path)] = digest
       end
     end
@@ -3048,35 +2934,41 @@ class Tpkg
     r
   end

-  def run_externals_for_install(metadata, workdir, externals_to_skip)
+  def run_externals_for_install(metadata, workdir, externals_to_skip=[])
     metadata[:externals].each do |external|
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+      if !externals_to_skip || !externals_to_skip.include?(external)
+        # If the external references a datafile or datascript then read/run it
+        # now that we've unpacked the package contents and have the file/script
+        # available. This will get us the data for the external.
+        if external[:datafile] || external[:datascript]
+          pwd = Dir.pwd
+          # chdir into the working directory so that the user can specify a
+          # relative path to their file/script.
+          Dir.chdir(File.join(workdir, 'tpkg'))
+          begin
+            if external[:datafile]
+              # Read the file
+              external[:data] = IO.read(external[:datafile])
+              # Drop the datafile key so that we don't waste time re-reading the
+              # datafile again in the future.
+              external.delete(:datafile)
+            elsif external[:datascript]
+              # Run the script
+              IO.popen(external[:datascript]) do |pipe|
+                external[:data] = pipe.read
+              end
+              if !$?.success?
+                raise "Datascript #{external[:datascript]} for package #{File.basename(metadata[:filename])} had exit value #{$?.exitstatus}"
+              end
+              # Drop the datascript key so that we don't waste time re-running the
+              # datascript again in the future.
+              external.delete(:datascript)
+            end
+          ensure
+            # Switch back to our previous directory
+            Dir.chdir(pwd)
           end
-        # Drop the datascript key so that we don't waste time re-running the
-        # datascript again in the future.
-        external.delete(:datascript)
         end
-      # Switch back to our previous directory
-      Dir.chdir(pwd)
-      end
-      if !externals_to_skip || !externals_to_skip.include?(external)
         run_external(metadata[:filename], :install, external[:name], external[:data])
       end
     end if metadata[:externals]
@@ -3091,7 +2983,7 @@ class Tpkg
     metadata.write(metadata_file)
     metadata_file.close

-    # Save file_metadata
+    # Save file_metadata for this pkg
     if File.exist?(File.join(workdir, 'tpkg', 'file_metadata.bin'))
       file_metadata = FileMetadata.new(File.read(File.join(workdir, 'tpkg', 'file_metadata.bin')), 'bin')
     elsif File.exist?(File.join(workdir, 'tpkg', 'file_metadata.yml'))
@@ -3102,8 +2994,11 @@ class Tpkg
     if file_metadata
       file_metadata[:package_file] = File.basename(package_file)
       file_metadata[:files].each do |file|
+        # update file_metadata with user/group ownership and permission
         acl = files_info[file[:path]]
         file.merge!(acl) unless acl.nil?
+
+        # update file_metadata with the checksums of decrypted files
         digest = checksums_of_decrypted_files[File.expand_path(file[:path])]
         if digest
           digests = file[:checksum][:digests]
@@ -3329,22 +3224,62 @@ class Tpkg
     conflicts
   end

-
-
-
-
+  # This method is called by install and upgrade method to make sure there is
+  # no conflicts between the existing pkgs and the pkgs we're about to install
+  def handle_conflicting_pkgs(installed_pkgs, pkgs_to_install, options ={})
+    conflicting_pkgs = []
+
+    # check if existing pkgs have conflicts with pkgs we're about to install
+    installed_pkgs.each do |pkg1|
+      next if pkg1[:metadata][:conflicts].nil?
+      pkg1[:metadata][:conflicts].each do | conflict |
+        pkgs_to_install.each do |pkg2|
+          if Tpkg::package_meets_requirement?(pkg2, conflict)
+            conflicting_pkgs << pkg1
+          end
+        end
+      end
+    end
+
+    # check if pkgs we're about to install conflict with existing pkgs
+    pkgs_to_install.each do |pkg1|
+      next if pkg1[:metadata][:conflicts].nil?
+      pkg1[:metadata][:conflicts].each do | conflict |
+        conflicting_pkgs |= installed_packages_that_meet_requirement(conflict)
+      end
+    end
+
+    # Check if there are conflicts among the pkgs we're about to install
+    # For these type of conflicts, we can't proceed, so raise exception.
+    pkgs_to_install.each do |pkg1|
       # native package might not have conflicts defined so skip
       next if pkg1[:metadata][:conflicts].nil?
       pkg1[:metadata][:conflicts].each do | conflict |
-
+        pkgs_to_install.each do |pkg2|
           if Tpkg::package_meets_requirement?(pkg2, conflict)
-            raise "Package conflicts between #{pkg2
+            raise "Package conflicts between #{pkg2[:metadata][:filename]} and #{pkg1[:metadata][:filename]}"
           end
         end
       end
     end
+
+    # Report to the users if there are conflicts
+    unless conflicting_pkgs.empty?
+      puts "The package(s) you're trying to install conflict with the following package(s):"
+      conflicting_pkgs = conflicting_pkgs.collect{|pkg|pkg[:metadata][:filename]}
+      puts conflicting_pkgs.join("\n")
+      if options[:force_replace]
+        puts "Attemping to replace the conflicting packages."
+        success = remove(conflicting_pkgs)
+        return success
+      else
+        puts "Try removing the conflicting package(s) first, or rerun tpkg with the --force-replace option."
+        return false
+      end
+    end
+    return true
   end
-
+
   def prompt_for_conflicting_files(package_file, mode=CHECK_INSTALL)
     if !@@prompt
       return true
@@ -3404,29 +3339,18 @@ class Tpkg
   end

   # See parse_requests for format of requests
-  def install(requests, passphrase=nil)
+  def install(requests, passphrase=nil, options={})
     ret_val = 0
     requirements = []
     packages = {}
     lock
-
     parse_requests(requests, requirements, packages)
     check_requests(packages)
+
     core_packages = []
-    #currently_installed_requirements = []
     requirements.each do |req|
       core_packages << req[:name] if !core_packages.include?(req[:name])
-
-      # This was here to ensure that nothing went backwards. But I guess in the
-      # install case (as opposed to upgrade) going backwards can't really happen,
-      # we may just install an older version alongside a newer version, which is
-      # perfectly fine.
-      # currently_installed_requirements.concat(
-      # requirements_for_currently_installed_package(req[:name]))
     end
-    #requirements.concat(currently_installed_requirements).uniq!
-
-

     puts "install calling best_solution" if @@debug
     puts "install requirements: #{requirements.inspect}" if @@debug
@@ -3438,7 +3362,8 @@ class Tpkg
       raise "Unable to resolve dependencies"
     end

-
+    success = handle_conflicting_pkgs(installed_packages, solution_packages, options)
+    return false if !success

     if !prompt_for_install(solution_packages, 'installed')
       unlock
@@ -3564,8 +3489,7 @@ class Tpkg

     # send udpate back to reporting server
     unless @report_server.nil?
-      options = {:newly_installed => newly_installed,
-                 :currently_installed => currently_installed}
+      options = {:newly_installed => newly_installed, :currently_installed => currently_installed}
       send_update_to_server(options)
     end
     unlock
@@ -3573,7 +3497,8 @@ class Tpkg
   end

   # This method can also be used for doing downgrade
-  def upgrade(requests=nil, passphrase=nil,
+  def upgrade(requests=nil, passphrase=nil, options={})
+    downgrade = options[:downgrade] || false
     ret_val = 0
     requirements = []
     packages = {}
@@ -3655,7 +3580,8 @@ class Tpkg
       raise "Unable to find solution for upgrading. Please verify that you specified the correct package(s) for upgrade."
     end

-
+    success = handle_conflicting_pkgs(installed_packages, solution_packages, options)
+    return false if !success

     if downgrade
       prompt_action = 'downgraded'
@@ -3675,7 +3601,6 @@ class Tpkg
     installed_files = files_for_installed_packages
     removed_pkgs = [] # keep track of what we removed so far
     while pkg = solution_packages.shift
-      # solution_packages.each do |pkg|
       if pkg[:source] == :currently_installed ||
          pkg[:source] == :native_installed
         # Nothing to do for packages currently installed
@@ -3798,7 +3723,9 @@ class Tpkg
           end
         end if pkg[:metadata][:dependencies]
         if can_unpack
-
+          is_doing_upgrade = true if removed_pkgs.include?(pkg[:metadata][:name])
+          ret_val |= unpack(pkgfile, :passphrase => passphrase, :externals_to_skip => externals_to_skip,
+                            :is_doing_upgrade => is_doing_upgrade)
         end

         has_updates = true
@@ -3816,8 +3743,7 @@ class Tpkg
     if !has_updates
       puts "No updates available"
     elsif !@report_server.nil?
-      options = {:newly_installed => newly_installed,
-                 :removed => removed,
+      options = {:newly_installed => newly_installed, :removed => removed,
                  :currently_installed => currently_installed}
       send_update_to_server(options)
     end
@@ -4050,8 +3976,7 @@ class Tpkg

     # send update back to reporting server
     unless @report_server.nil? || options[:upgrade]
-      options = {:removed => removed,
-                 :currently_installed => currently_installed}
+      options = {:removed => removed, :currently_installed => currently_installed}
       send_update_to_server(options)
     end

@@ -4088,7 +4013,7 @@ class Tpkg
       file_metadata = FileMetadata.new(File.read(file_metadata_xml), 'xml')
     else
       errors = []
-      errors << "Can't find
+      errors << "Can't find file metadata. Most likely this is because the package was created before the verify feature was added"
       results[package_file] = errors
       return results
     end
@@ -4138,7 +4063,6 @@ class Tpkg
           errors << "File is missing"
         else
           # get actual values
-          #chksum_actual = Digest::SHA256.file(fp).hexdigest if File.file?(fp)
           chksum_actual = Digest::SHA256.hexdigest(File.read(fp)) if File.file?(fp)
           uid_actual = File.stat(fp).uid
           gid_actual = File.stat(fp).gid
@@ -4316,7 +4240,7 @@ class Tpkg

     # populate the depencency map
     depended_on.each do | req_pkg |
-      dependency_mapping[req_pkg[:metadata][:filename]]
+      dependency_mapping[req_pkg[:metadata][:filename]] ||= []
       dependency_mapping[req_pkg[:metadata][:filename]] << pkg
     end
   end
@@ -4360,7 +4284,7 @@ class Tpkg
   # log changes of pkgs that were installed/removed
   def log_changes(options={})
     msg = ""
-    user = Etc.getlogin
+    user = Etc.getlogin || Etc.getpwuid(Process.uid).name
     newly_installed = removed = []
     newly_installed = options[:newly_installed] if options[:newly_installed]
     removed = options[:removed] if options[:removed]
@@ -4379,7 +4303,7 @@ class Tpkg

   def send_update_to_server(options={})
     request = {"client"=>Facter['fqdn'].value}
-    request[:user] = Etc.getlogin
+    request[:user] = Etc.getlogin || Etc.getpwuid(Process.uid).name
     request[:tpkg_home] = ENV['TPKG_HOME']

     if options[:currently_installed]
|