autobuild 1.17.0 → 1.21.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop.yml +107 -0
- data/.travis.yml +3 -2
- data/Gemfile +2 -1
- data/Rakefile +1 -4
- data/autobuild.gemspec +18 -13
- data/bin/autobuild +4 -3
- data/lib/autobuild.rb +4 -5
- data/lib/autobuild/build_logfile.rb +6 -4
- data/lib/autobuild/config.rb +104 -41
- data/lib/autobuild/configurable.rb +32 -18
- data/lib/autobuild/environment.rb +126 -120
- data/lib/autobuild/exceptions.rb +48 -31
- data/lib/autobuild/import/archive.rb +134 -82
- data/lib/autobuild/import/cvs.rb +28 -24
- data/lib/autobuild/import/darcs.rb +13 -16
- data/lib/autobuild/import/git-lfs.rb +37 -30
- data/lib/autobuild/import/git.rb +246 -182
- data/lib/autobuild/import/hg.rb +23 -18
- data/lib/autobuild/import/svn.rb +48 -29
- data/lib/autobuild/importer.rb +534 -499
- data/lib/autobuild/mail_reporter.rb +77 -77
- data/lib/autobuild/package.rb +200 -122
- data/lib/autobuild/packages/autotools.rb +47 -42
- data/lib/autobuild/packages/cmake.rb +77 -65
- data/lib/autobuild/packages/dummy.rb +9 -8
- data/lib/autobuild/packages/genom.rb +1 -1
- data/lib/autobuild/packages/gnumake.rb +74 -31
- data/lib/autobuild/packages/import.rb +2 -6
- data/lib/autobuild/packages/orogen.rb +32 -31
- data/lib/autobuild/packages/pkgconfig.rb +2 -2
- data/lib/autobuild/packages/python.rb +12 -8
- data/lib/autobuild/packages/ruby.rb +22 -17
- data/lib/autobuild/parallel.rb +50 -46
- data/lib/autobuild/pkgconfig.rb +25 -13
- data/lib/autobuild/progress_display.rb +149 -64
- data/lib/autobuild/rake_task_extension.rb +12 -7
- data/lib/autobuild/reporting.rb +51 -26
- data/lib/autobuild/subcommand.rb +72 -65
- data/lib/autobuild/test.rb +9 -7
- data/lib/autobuild/test_utility.rb +12 -10
- data/lib/autobuild/timestamps.rb +28 -23
- data/lib/autobuild/tools.rb +17 -16
- data/lib/autobuild/utility.rb +67 -23
- data/lib/autobuild/version.rb +1 -1
- metadata +53 -37
data/lib/autobuild/import/hg.rb
CHANGED

@@ -10,13 +10,13 @@ class Hg < Importer
         #
         # This importer uses the 'hg' tool to perform the
         # import. It defaults to 'hg' and can be configured by
-        # doing
+        # doing
         # Autobuild.programs['hg'] = 'my_git_tool'
         #
         # @param [String] repository the repository URL
         # @option options [String] :branch (default) the branch to track
         def initialize(repository, options = {})
-            hgopts,
+            hgopts, _common = Kernel.filter_options options,
                 branch: 'default'
             sourceopts, common = Kernel.filter_options options,
                 :repository_id, :source_id
@@ -29,10 +29,12 @@ def initialize(repository, options = {})
         # Changes the repository this importer is pointing to
         def relocate(repository, options = Hash.new)
             @repository = repository
-            @repository_id =
+            @repository_id =
+                options[:repository_id] ||
                 "hg:#{@repository}"
-            @source_id =
-
+            @source_id =
+                options[:source_id] ||
+                "#{repository_id} branch=#{branch}"
         end

         # The remote repository URL.
@@ -44,38 +46,41 @@ def relocate(repository, options = Hash.new)
         # Raises ConfigException if the current directory is not a hg
         # repository
         def validate_importdir(package)
-
-            raise ConfigException.new(package, 'import'),
+            unless File.directory?(File.join(package.importdir, '.hg'))
+                raise ConfigException.new(package, 'import'),
+                      "while importing #{package.name}, "\
+                      "#{package.importdir} is not a hg repository"
             end
         end

         def update(package, options = Hash.new)
             if options[:only_local]
-                package.warn "%s: the Mercurial importer does not support
+                package.warn "%s: the Mercurial importer does not support "\
+                             "local updates, skipping"
                 return false
             end
             validate_importdir(package)
-            package.run(:import, Autobuild.tool('hg'), 'pull',
-
+            package.run(:import, Autobuild.tool('hg'), 'pull',
+                        repository, retry: true, working_directory: package.importdir)
+            package.run(:import, Autobuild.tool('hg'), 'update',
+                        branch, working_directory: package.importdir)
             true # no easy to know if package was updated, keep previous behavior
         end

-        def checkout(package,
+        def checkout(package, _options = Hash.new)
             base_dir = File.expand_path('..', package.importdir)
-
-                FileUtils.mkdir_p base_dir
-            end
+            FileUtils.mkdir_p(base_dir) unless File.directory?(base_dir)

-            package.run(:import, Autobuild.tool('hg'), 'clone',
+            package.run(:import, Autobuild.tool('hg'), 'clone',
+                        '-u', branch, repository, package.importdir, retry: true)
         end
     end

-    # Creates a hg importer which gets the source for the given repository and
-    # URL +source+.
+    # Creates a hg importer which gets the source for the given repository and
+    # branch URL +source+.
     #
     # @param (see Hg#initialize)
     def self.hg(repository, options = {})
        Hg.new(repository, options)
     end
 end
-
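The hg importer keeps the same public entry points; the sketch below shows how the reworked constructor and relocate behave, assuming the autobuild gem is installed. Repository URLs are made up, and the commented results follow the code in the diff above rather than captured output.

    require 'autobuild'

    # Optional: point autobuild at a specific mercurial binary
    Autobuild.programs['hg'] = 'hg'

    # Track the 'stable' branch of a (hypothetical) repository
    importer = Autobuild.hg('https://hg.example.org/myproject', branch: 'stable')

    # relocate now also refreshes repository_id and source_id; source_id
    # includes the tracked branch as of this release
    importer.relocate('https://hg.example.org/myproject-fork')
    puts importer.repository_id # => "hg:https://hg.example.org/myproject-fork"
    puts importer.source_id     # => "hg:https://hg.example.org/myproject-fork branch=stable"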
data/lib/autobuild/import/svn.rb
CHANGED

@@ -10,7 +10,7 @@ class SVN < Importer
         # [:svnco] options to give to 'svn co'
         #
         # This importer uses the 'svn' tool to perform the import. It defaults
-        # to 'svn' and can be configured by doing
+        # to 'svn' and can be configured by doing
         # Autobuild.programs['svn'] = 'my_svn_tool'
         def initialize(svnroot, options = {})
             svnroot = [*svnroot].join("/")
@@ -25,7 +25,9 @@ def initialize(svnroot, options = {})
         # @deprecated use {svnroot} instead
         #
         # @return [String]
-        def source
+        def source
+            svnroot
+        end

         # Returns the SVN root
         #
@@ -63,13 +65,23 @@ def relocate(root, options = Hash.new)
         def svn_revision(package)
             svninfo = svn_info(package)
             revision = svninfo.grep(/^Revision: /).first
-
-                raise ConfigException.new(package, 'import'),
+            unless revision
+                raise ConfigException.new(package, 'import'),
+                      "cannot get SVN information for #{package.importdir}"
             end
             revision =~ /Revision: (\d+)/
             Integer($1)
         end

+        # fingerprint method returns an unique hash to identify this package,
+        # for SVN the revision and URL will be used
+        # @param [Package] package
+        # @return [String]
+        # @raises (see svn_info)
+        def vcs_fingerprint(package)
+            Digest::SHA1.hexdigest(svn_info(package).grep(/^(URL|Revision):/).sort.join("\n"))
+        end
+
         # Returns the URL of the remote SVN repository
         #
         # @param [Package] package
@@ -79,8 +91,9 @@ def svn_revision(package)
         def svn_url(package)
             svninfo = svn_info(package)
             url = svninfo.grep(/^URL: /).first
-
-                raise ConfigException.new(package, 'import'),
+            unless url
+                raise ConfigException.new(package, 'import'),
+                      "cannot get SVN information for #{package.importdir}"
             end
             url.chomp =~ /URL: (.+)/
             $1
@@ -97,10 +110,8 @@ def svn_url(package)
         def has_local_modifications?(package, with_untracked_files = false)
             status = run_svn(package, 'status', '--xml')

-            not_modified = %w
-
-                not_modified << "unversioned"
-            end
+            not_modified = %w[external ignored none normal]
+            not_modified << "unversioned" unless with_untracked_files

             REXML::Document.new(status.join("")).
                 elements.enum_for(:each, '//wc-status').
@@ -126,13 +137,14 @@ def status(package, only_local = false)
             else
                 log = run_svn(package, 'log', '-r', 'BASE:HEAD', '--xml', '.')
                 log = REXML::Document.new(log.join("\n"))
-                missing_revisions = log.elements.enum_for(:each, 'log/logentry').
+                missing_revisions = log.elements.enum_for(:each, 'log/logentry').
+                    map do |l|
+                        rev = l.attributes['revision']
+                        date = l.elements['date'].first.to_s
+                        author = l.elements['author'].first.to_s
+                        msg = l.elements['msg'].first.to_s.split("\n").first
+                        "#{rev} #{DateTime.parse(date)} #{author} #{msg}"
+                    end
                 status.remote_commits = missing_revisions[1..-1]
                 status.status =
                     if missing_revisions.empty?
@@ -146,10 +158,13 @@ def status(package, only_local = false)

         # Helper method to run a SVN command on a package's working copy
         def run_svn(package, *args, &block)
-            options =
-
+            options =
+                if args.last.kind_of?(Hash)
+                    args.pop
+                else
+                    Hash.new
+                end
+
             options, other_options = Kernel.filter_options options,
                 working_directory: package.importdir, retry: true
             options = options.merge(other_options)
@@ -173,7 +188,8 @@ def validate_importdir(package)
         # @raises [ConfigException] if the working copy is not a subversion
         # working copy
         def svn_info(package)
-            old_lang
+            old_lang = ENV['LC_ALL']
+            ENV['LC_ALL'] = 'C'
             begin
                 svninfo = run_svn(package, 'info')
             rescue SubcommandFailed => e
@@ -185,7 +201,7 @@ def svn_info(package)
                 end
             end

-
+            unless svninfo.grep(/is not a working copy/).empty?
                 raise ConfigException.new(package, 'import'),
                     "#{package.importdir} does not appear to be a Subversion working copy"
             end
@@ -196,13 +212,16 @@ def svn_info(package)

         def update(package, options = Hash.new) # :nodoc:
             if options[:only_local]
-                package.warn "%s: the svn importer does not support local updates,
+                package.warn "%s: the svn importer does not support local updates, "\
+                             "skipping"
                 return false
             end

             url = svn_url(package)
             if url != svnroot
-                raise ConfigException.new(package, 'import'), "current checkout
+                raise ConfigException.new(package, 'import'), "current checkout "\
+                    "found at #{package.importdir} is from #{url}, "\
+                    "was expecting #{svnroot}"
             end

             options_up = @options_up.dup
@@ -220,9 +239,10 @@ def update(package, options = Hash.new) # :nodoc:
             true
         end

-        def checkout(package,
-            run_svn(package, 'co', "--non-interactive", *@options_co,
-
+        def checkout(package, _options = Hash.new) # :nodoc:
+            run_svn(package, 'co', "--non-interactive", *@options_co,
+                    svnroot, package.importdir,
+                    working_directory: nil)
         end
     end

@@ -232,4 +252,3 @@ def self.svn(source, options = {})
         SVN.new(source, options)
     end
 end
-
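The new SVN#vcs_fingerprint hashes the URL and Revision lines reported by `svn info`. A minimal sketch of the same computation, run against hypothetical `svn info` output rather than a real working copy:

    require 'digest'

    # Hypothetical output lines of `svn info` for illustration
    svn_info = [
        "URL: https://svn.example.org/repo/trunk",
        "Revision: 4242"
    ]

    # Same recipe as SVN#vcs_fingerprint: keep URL/Revision, sort, join, SHA1
    fingerprint = Digest::SHA1.hexdigest(
        svn_info.grep(/^(URL|Revision):/).sort.join("\n")
    )
    puts fingerprint # stable as long as the URL and revision do not change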
data/lib/autobuild/importer.rb
CHANGED

@@ -5,595 +5,630 @@
 # various RCS into the package source directory. A list of patches to apply
 # after the import can be given in the +:patches+ option.
 module Autobuild
-    class Importer
+    class Importer
+        # call-seq:
+        #   Autobuild::Importer.fallback { |package, importer| ... }
+        #
+        # If called, registers the given block as a fallback mechanism for failing
+        # imports.
+        #
+        # Fallbacks are tried in reverse order with the failing importer object as
+        # argument. The first valid importer object that has been returned will be
+        # used instead.
+        #
+        # It is the responsibility of the fallback handler to make sure that it does
+        # not do infinite recursions and stuff like that.
+        def self.fallback(&block)
+            @fallback_handlers.unshift(block)
+        end
+
+        class << self
+            # The set of handlers registered by Importer.fallback
+            attr_reader :fallback_handlers
+        end
+
+        @fallback_handlers = Array.new
+
+        # Instances of the Importer::Status class represent the status of a current
+        # checkout w.r.t. the remote repository.
+        class Status
+            # Remote and local are at the same point
+            UP_TO_DATE = 0
+            # Local contains all data that remote has, but has new commits
+            ADVANCED = 1
+            # Next update will require a merge
+            NEEDS_MERGE = 2
+            # Next update will be simple (no merge)
+            SIMPLE_UPDATE = 3
+
+            # The update status
+            attr_accessor :status
+            # True if there is code in the working copy that is not committed
+            attr_accessor :uncommitted_code
+            # A list of messages describing differences between the local working
+            # copy and its expected state
+            #
+            # On git, it would for instance mention that currently checked out
+            # branch is not the one autoproj expects
+            #
+            # @return [Array<String>]
+            attr_reader :unexpected_working_copy_state
+
+            # An array of strings that represent commits that are in the remote
+            # repository and not in this one (would be merged by an update)
+            attr_accessor :remote_commits
+            # An array of strings that represent commits that are in the local
+            # repository and not in the remote one (would be pushed by an update)
+            attr_accessor :local_commits
+
+            def initialize(status = -1)
+                @status = status
+                @unexpected_working_copy_state = Array.new
+                @uncommitted_code = false
+                @remote_commits = Array.new
+                @local_commits = Array.new
+            end
+        end
+
+        # The cache directories for the given importer type.
+        #
+        # This is used by some importers to save disk space and/or avoid downloading
+        # the same things over and over again
+        #
+        # The default global cache directory is initialized from the
+        # AUTOBUILD_CACHE_DIR environment variable. Per-importer cache directories
+        # can be overriden by setting AUTOBUILD_{TYPE}_CACHE_DIR (e.g.
+        # AUTOBUILD_GIT_CACHE_DIR)
+        #
+        # The following importers use caches:
+        # - the archive importer saves downloaded files in the cache. They are
+        #   saved under an archives/ subdirectory of the default cache if set, or to
+        #   the value of AUTOBUILD_ARCHIVES_CACHE_DIR
+        # - the git importer uses the cache directories as alternates for the git
+        #   checkouts
+        #
+        # @param [String] type the importer type. If set, it Given a root cache
+        #   directory X, and importer specific cache is setup as a subdirectory of X
+        #   with e.g. X/git or X/archives. The subdirectory name is defined by this
+        #   argument
+        # @return [nil,Array<String>]
+        #
+        # @see .set_cache_dirs .default_cache_dirs .default_cache_dirs=
+        def self.cache_dirs(type)
+            if @cache_dirs[type] || (env = ENV["AUTOBUILD_#{type.upcase}_CACHE_DIR"])
+                @cache_dirs[type] ||= env.split(":")
-            elsif dirs = default_cache_dirs
+            elsif (dirs = default_cache_dirs)
+                dirs.map { |d| File.join(d, type) }
+            end
+        end
+
+        # Returns the default cache directory if there is one
+        #
+        # @return [Array<String>,nil]
+        # @see .cache_dirs
+        def self.default_cache_dirs
+            if @default_cache_dirs
+                @default_cache_dirs
+            elsif (from_env = ENV['AUTOBUILD_CACHE_DIR'])
+                @default_cache_dirs = [from_env]
+            end
+        end
+
+        # Sets the cache directory for a given importer type
+        #
+        # @param [String] type the importer type
+        # @param [String] dir the cache directory
+        # @see .cache_dirs
+        def self.set_cache_dirs(type, *dirs)
+            @cache_dirs[type] = dirs
+        end
+
+        # Sets the default cache directory
+        #
+        # @param [Array<String>,String] the directories
+        # @see .cache_dirs
+        def self.default_cache_dirs=(dirs)
+            @default_cache_dirs = Array(dirs)
+        end
+
+        # Unset all cache directories
+        def self.unset_cache_dirs
+            @cache_dirs = Hash.new
+            @default_cache_dirs = nil
+        end
+
+        unset_cache_dirs
+
+        # @return [Hash] the original option hash as given to #initialize
+        attr_reader :options
+
+        # Creates a new Importer object. The options known to Importer are:
+        # [:patches] a list of patch to apply after import
+        #
+        # More options are specific to each importer type.
+        def initialize(options)
+            @options = options.dup
+            @options[:retry_count] = Integer(@options[:retry_count] || 0)
+            @repository_id = options[:repository_id] || "#{self.class.name}:#{object_id}"
+            @interactive = options[:interactive]
+            @source_id = options[:source_id] || @repository_id
+            @post_hooks = Array.new
+        end
+
+        # Returns a string that identifies the remote repository uniquely
+        #
+        # This can be used to check whether two importers are pointing to the same
+        # repository, regardless of e.g. the access protocol used. For instance,
+        # two git importers that point to the same repository but different branches
+        # would have the same repository_id but different source_id
+        #
+        # @return [String]
+        # @see source_id
+        attr_reader :repository_id
+
+        # Returns a string that identifies the remote source uniquely
+        #
+        # This can be used to check whether two importers are pointing to the same
+        # code base inside the same repository. For instance, two git importers that
+        # point to the same repository but different branches would have the same
+        # repository_id but different source_id
+        #
+        # @return [String]
+        # @see repository_id
+        attr_reader :source_id
+
+        # Whether this importer will need interaction with the user, for instance to
+        # give credentials
-        def interactive?; !!@interactive end
+        def interactive?
+            @interactive
+        end
+
+        # Changes whether this importer is interactive or not
-        def interactive=(value)
-            @interactive = !!value
-        end
+        attr_writer :interactive
+
+        # The number of times update / checkout should be retried before giving up.
+        # The default is 0 (do not retry)
+        #
+        # Set either with #retry_count= or by setting the :retry_count option when
+        # constructing this importer
+        def retry_count
+            @options[:retry_count] || 0
+        end
+
+        # Returns a unique hash representing the state of the imported package
+        # as a whole unit, including its dependencies and patches
+        def fingerprint(package)
+            vcs_fingerprint_string = vcs_fingerprint(package)
+            return unless vcs_fingerprint_string
+
+            patches_fingerprint_string = patches_fingerprint(package)
+            if patches_fingerprint_string
+                Digest::SHA1.hexdigest(vcs_fingerprint_string + patches_fingerprint_string)
+            elsif patches.empty?
+                vcs_fingerprint_string
+            end
+        end
+
+        # basic fingerprint of the package and its dependencies
+        def vcs_fingerprint(package)
+            #each importer type should implement its own
+            Autoproj.warn "Fingerprint in #{package.name} has not been implemented for this type of packages, results should be discarded"
+            return nil
+        end
+
+        # fingerprint for patches associated to this package
+        def patches_fingerprint(package)
+            cur_patches = currently_applied_patches(package)
+            cur_patches.map(&:shift) #leave only level and source information
+            Digest::SHA1.hexdigest(cur_patches.sort.flatten.join("")) if !patches.empty? && cur_patches
+        end
+
+        # Sets the number of times update / checkout should be retried before giving
+        # up. 0 (the default) disables retrying.
+        #
+        # See also #retry_count
+        def retry_count=(count)
+            @options[:retry_count] = Integer(count)
+        end
+
+        def patches
+            patches =
+                if @options[:patches].respond_to?(:to_ary)
+                    @options[:patches]
+                elsif !@options[:patches]
+                    []
+                else
+                    [[@options[:patches], 0]]
+                end
+
+            single_patch = (patches.size == 2 &&
+                patches[0].respond_to?(:to_str) &&
+                patches[1].respond_to?(:to_int))
+
+            patches = [patches] if single_patch
+            patches.map do |obj|
+                if obj.respond_to?(:to_str)
+                    path = obj
+                    level = 0
+                elsif obj.respond_to?(:to_ary)
-                    obj
+                    path, level = obj
+                else
+                    raise Arguments, "wrong patch specification #{obj.inspect}"
-                    obj
+                end
+                [path, level, File.read(path)]
+            end
+        end
-            patches.map do |path, level|
-                [path, level, File.read(path)]
-            end
-        end
+
+        def update_retry_count(original_error, retry_count)
+            return if !original_error.respond_to?(:retry?) || !original_error.retry?
+
+            retry_count += 1
+            retry_count if retry_count <= self.retry_count
+        end
+
+        # A list of hooks that are called after a successful checkout or update
+        #
+        # They are added either at the instance level with {#add_post_hook} or
+        # globally for all importers of a given type with {Importer.add_post_hook}
+        attr_reader :post_hooks
+
+        Hook = Struct.new :always, :callback
+
+        # Define a post-import hook for all instances of this class
+        #
+        # @yieldparam [Importer] importer the importer that finished
+        # @yieldparam [Package] package the package we're acting on
+        # @see Importer#add_post_hook
+        def self.add_post_hook(always: false, &hook)
+            @post_hooks ||= Array.new
+            @post_hooks << Hook.new(always, hook)
+            nil
+        end
+
+        # Enumerate the post-import hooks defined for all instances of this class
+        def self.each_post_hook(error: false)
+            return enum_for(__method__) unless block_given?
+
+            (@post_hooks ||= Array.new).each do |hook|
+                yield(hook.callback) if hook.always || !error
+            end
+        end
+
+        # @api private
+        #
+        # Call the post-import hooks added with {#add_post_hook}
+        def execute_post_hooks(package, error: false)
+            each_post_hook(error: error) do |block|
+                block.call(self, package)
+            end
+        end
+
+        # Add a block that should be called when the import has successfully
+        # finished
+        #
+        # @yieldparam [Importer] importer the importer that finished
+        # @yieldparam [Package] package the package we're acting on
+        # @see Importer.add_post_hook
+        def add_post_hook(always: false, &hook)
+            post_hooks << Hook.new(always, hook)
+        end
+
+        # Enumerate the post-import hooks for this importer
+        def each_post_hook(error: false)
+            return enum_for(__method__, error: false) unless block_given?
+
+            self.class.each_post_hook(error: error) do |callback|
+                yield(callback)
+            end
+
+            post_hooks.each do |hook|
-                if hook.always || !error
-                    yield(hook.callback)
+                yield(hook.callback) if hook.always || !error
+            end
+        end
+
+        def perform_update(package, only_local = false)
+            cur_patches = currently_applied_patches(package)
+            needed_patches = patches
+            patch_changed = cur_patches.map(&:last) != needed_patches.map(&:last)
+            patch(package, []) if patch_changed
+
+            last_error = nil
+            retry_count = 0
+            package.progress_start "updating %s"
+            begin
+                begin
-                    did_update = update(package,only_local)
+                    did_update = update(package, only_local)
+                    execute_post_hooks(package, error: false)
+                rescue ::Exception
+                    execute_post_hooks(package, error: true)
+                    raise
+                end
+
+                message = if did_update == false
+                              Autobuild.color('already up-to-date', :green)
+                          else
+                              Autobuild.color('updated', :yellow)
+                          end
+
+                did_update
+            rescue Interrupt
+                message = Autobuild.color('interrupted', :red)
+                if last_error
+                    raise last_error
+                else raise
+                end
+            rescue ::Exception => original_error
+                message = Autobuild.color('update failed', :red)
+                last_error = original_error
+                # If the package is patched, it might be that the update
+                # failed because we needed to unpatch first. Try it out
+                #
+                # This assumes that importing data with conflict will
+                # make the import fail, but not make the patch
+                # un-appliable. Importers that do not follow this rule
+                # will have to unpatch by themselves.
+                cur_patches = currently_applied_patches(package)
+                unless cur_patches.empty?
+                    package.progress_done
-                    package.message "update failed and some patches are applied, removing all patches and retrying"
+                    package.message "update failed and some patches are applied, "\
+                                    "removing all patches and retrying"
+                    begin
+                        patch(package, [])
+                        return perform_update(package, only_local)
+                    rescue Interrupt
+                        raise
+                    rescue ::Exception
+                        raise original_error
+                    end
+                end
+
+                retry_count = update_retry_count(original_error, retry_count)
+                raise unless retry_count
+
+                package.message "update failed in #{package.importdir}, "\
+                                "retrying (#{retry_count}/#{self.retry_count})"
+                retry
+            ensure
+                package.progress_done "#{message} %s"
+            end
+
+            patch(package)
+            package.updated = true
+            did_update
+        rescue Autobuild::Exception => e
+            fallback(e, package, :import, package)
+        end
+
+        def perform_checkout(package, options = Hash.new)
+            last_error = nil
+            package.progress_start "checking out %s", :done_message => 'checked out %s' do
+                retry_count = 0
+                begin
+                    checkout(package, options)
+                    execute_post_hooks(package)
+                rescue Interrupt
+                    if last_error then raise last_error
+                    else raise
+                    end
+                rescue ::Exception => original_error
+                    last_error = original_error
+                    retry_count = update_retry_count(original_error, retry_count)
+                    raise unless retry_count
+
+                    package.message "checkout of %s failed, "\
+                                    "deleting the source directory #{package.importdir} "\
+                                    "and retrying (#{retry_count}/#{self.retry_count})"
+                    FileUtils.rm_rf package.importdir
+                    retry
+                end
+            end
+
+            patch(package)
+            package.updated = true
+        rescue Interrupt
+            raise
+        rescue ::Exception # rubocop:disable Lint/ShadowedException
+            package.message "checkout of %s failed, "\
+                            "deleting the source directory #{package.importdir}"
+            FileUtils.rm_rf package.importdir
+            raise
+        rescue Autobuild::Exception => e
+            FileUtils.rm_rf package.importdir
+            fallback(e, package, :import, package)
+        end
+
+        # Imports the given package
+        #
+        # The importer will checkout or update code in package.importdir. No update
+        # will be done if {update?} returns false.
+        #
+        # @raises ConfigException if package.importdir exists and is not a directory
+        #
+        # @option options [Boolean] :checkout_only (false) if true, the importer
+        #   will not update an already checked-out package.
+        # @option options [Boolean] :only_local (false) if true, will only perform
+        #   actions that do not require network access. Importers that do not
+        #   support this mode will simply do nothing
+        # @option options [Boolean] :reset (false) if true, the importer's
+        #   configuration is interpreted as a hard state in which it should put the
+        #   working copy. Otherwise, it tries to update the local repository with
+        #   the remote information. For instance, a git importer for which a commit
+        #   ID is given will, in this mode, reset the repository to the requested ID
+        #   (if that does not involve losing commits). Otherwise, it will only
+        #   ensure that the requested commit ID is present in the current HEAD.
+        def import(package, options = Hash.new)
+            # Backward compatibility
+            unless options.kind_of?(Hash)
+                options = options
+                Autoproj.warn "calling #import with a boolean as second argument "\
+                              "is deprecated, switch to the named argument interface instead"
+                Autoproj.warn " e.g. call import(package, only_local: #{options})"
+                Autoproj.warn " #{caller(1..1).first}"
+                options = Hash[only_local: options]
+            end
+
+            options = Kernel.validate_options options,
+                only_local: false,
+                reset: false,
+                checkout_only: false,
+                ignore_errors: false,
+                allow_interactive: true
+            ignore_errors = options.delete(:ignore_errors)
+
+            importdir = package.importdir
+            if File.directory?(importdir)
+                package.isolate_errors(mark_as_failed: false,
+                                       ignore_errors: ignore_errors) do
+                    if !options[:checkout_only] && package.update?
+                        perform_update(package, options)
+                    elsif Autobuild.verbose
                         package.message "%s: not updating"
                     end
                 end
+
+            elsif File.exist?(importdir)
+                raise ConfigException.new(package, 'import'),
+                      "#{importdir} exists but is not a directory"
+            else
+                package.isolate_errors(mark_as_failed: true,
+                                       ignore_errors: ignore_errors) do
+                    perform_checkout(package,
+                                     allow_interactive: options[:allow_interactive])
+                    true
+                end
             end
         end
+
+        # Tries to find a fallback importer because of the given error.
+        def fallback(error, package, *args, &block)
+            Importer.fallback_handlers.each do |handler|
+                fallback_importer = handler.call(package, self)
+                if fallback_importer.kind_of?(Importer)
+                    begin
+                        return fallback_importer.send(*args, &block)
+                    rescue Exception
+                        raise error
+                    end
                 end
             end
+            raise error
         end
-        raise error
-    end
+
+        def patchdir(package)
+            File.join(package.importdir, ".autobuild-patches")
+        end
+
+        # We assume that package.importdir already exists (checkout is supposed to
+        # have been called)
+        def patchlist(package)
+            File.join(patchdir(package), "list")
+        end
+
+        def call_patch(package, reverse, file, patch_level)
+            package.run(:patch, Autobuild.tool('patch'),
+                        "-p#{patch_level}", (reverse ? '-R' : nil), '--forward',
+                        input: file, working_directory: package.importdir)
+        end
+
+        def apply(package, path, patch_level = 0)
+            call_patch(package, false, path, patch_level)
+        end
+
+        def unapply(package, path, patch_level = 0)
+            call_patch(package, true, path, patch_level)
+        end
+
+        def parse_patch_list(package, patches_file)
+            File.readlines(patches_file).map do |line|
+                line = line.rstrip
+                if line =~ /^(.*)\s+(\d+)$/
+                    path = File.expand_path($1, package.srcdir)
+                    level = Integer($2)
+                else
+                    path = File.expand_path(line, package.srcdir)
+                    level = 0
+                end
+                [path, level, File.read(path)]
+            end
+        end
+
+        def currently_applied_patches(package)
+            patches_file = patchlist(package)
+            return parse_patch_list(package, patches_file) if File.exist?(patches_file)
+
+            patches_file = File.join(package.importdir, "patches-autobuild-stamp")
+            if File.exist?(patches_file)
+                cur_patches = parse_patch_list(package, patches_file)
+                save_patch_state(package, cur_patches)
+                FileUtils.rm_f patches_file
+                return currently_applied_patches(package)
+            end
+
+            []
+        end
+
+        def patch(package, patches = self.patches)
+            # Get the list of already applied patches
+            cur_patches = currently_applied_patches(package)
+
+            cur_patches_state = cur_patches.map { |_, level, content| [level, content] }
+            patches_state = patches.map { |_, level, content| [level, content] }
+            return false if cur_patches_state == patches_state
+
+            # Do not be smart, remove all already applied patches
+            # and then apply the new ones
+            begin
+                apply_count = (patches - cur_patches).size
+                unapply_count = (cur_patches - patches).size
+                if apply_count > 0 && unapply_count > 0
+                    package.message "patching %s: applying #{apply_count} and "\
+                                    "unapplying #{unapply_count} patch(es)"
+                elsif apply_count > 0
+                    package.message "patching %s: applying #{apply_count} patch(es)"
+                elsif unapply_count > 0
+                    package.message "patching %s: unapplying #{unapply_count} patch(es)"
+                end
+
+                while (p = cur_patches.last)
+                    p, level, = *p
+                    unapply(package, p, level)
+                    cur_patches.pop
+                end
+
+                patches.to_a.each do |new_patch, new_patch_level, content|
+                    apply(package, new_patch, new_patch_level)
+                    cur_patches << [new_patch, new_patch_level, content]
+                end
+            ensure
+                save_patch_state(package, cur_patches)
+            end
+
+            true
+        end
+
+        def save_patch_state(package, cur_patches)
+            patch_dir = patchdir(package)
+            FileUtils.mkdir_p patch_dir
+            cur_patches = cur_patches.each_with_index.
+                map do |(_path, level, content), idx|
+                    path = File.join(patch_dir, idx.to_s)
+                    File.open(path, 'w') do |patch_io|
+                        patch_io.write content
+                    end
+                    [path, level]
+                end
+            File.open(patchlist(package), 'w') do |f|
+                patch_state = cur_patches.map do |path, level|
+                    path = Pathname.new(path).
+                        relative_path_from(Pathname.new(package.srcdir)).to_s
+                    "#{path} #{level}"
+                end
+                f.write(patch_state.join("\n"))
+            end
+        end
+
-        def supports_relocation?; false end
+        def supports_relocation?
+            false
+        end
     end
 end
-
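Two of the APIs reworked above, the class-level post-import hooks and the fallback handlers, are registered as blocks on Autobuild::Importer. A minimal sketch of both, assuming the autobuild gem is installed; the mirror URL is hypothetical:

    require 'autobuild'

    # Global post-import hook: called with (importer, package) after a
    # successful checkout or update; pass always: true to also run on failures
    Autobuild::Importer.add_post_hook do |importer, package|
        puts "imported #{package.name} from #{importer.repository_id}"
    end

    # Fallback handler: tried in reverse registration order when an import
    # fails; returning another Importer retries the operation with it,
    # returning nil declines
    Autobuild::Importer.fallback do |package, importer|
        if importer.kind_of?(Autobuild::SVN)
            Autobuild.svn("https://svn-mirror.example.org/#{package.name}/trunk")
        end
    end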
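The cache directory and retry settings introduced in this version can be driven either through the environment or programmatically. A sketch with made-up paths and URLs, following the accessors shown in the diff (output assumes no AUTOBUILD_GIT_CACHE_DIR override is set):

    require 'autobuild'

    # Environment equivalents:
    #   export AUTOBUILD_CACHE_DIR=/var/cache/autobuild
    #   export AUTOBUILD_GIT_CACHE_DIR=/var/cache/autobuild-git   # per-type override
    Autobuild::Importer.default_cache_dirs = "/var/cache/autobuild"
    Autobuild::Importer.set_cache_dirs('archives', "/var/cache/autobuild/archives")
    puts Autobuild::Importer.cache_dirs('git') # => ["/var/cache/autobuild/git"]

    # Per-importer retry count and the keyword interface of #import
    importer = Autobuild.svn('https://svn.example.org/repo/trunk')
    importer.retry_count = 2
    # importer.import(package, checkout_only: true, only_local: true)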