autobuild 1.17.0 → 1.18.0
- checksums.yaml +4 -4
- data/.rubocop.yml +107 -0
- data/Gemfile +2 -1
- data/Rakefile +1 -4
- data/autobuild.gemspec +14 -11
- data/bin/autobuild +4 -3
- data/lib/autobuild.rb +4 -5
- data/lib/autobuild/build_logfile.rb +6 -4
- data/lib/autobuild/config.rb +90 -40
- data/lib/autobuild/configurable.rb +30 -18
- data/lib/autobuild/environment.rb +126 -120
- data/lib/autobuild/exceptions.rb +48 -31
- data/lib/autobuild/import/archive.rb +134 -82
- data/lib/autobuild/import/cvs.rb +28 -24
- data/lib/autobuild/import/darcs.rb +13 -16
- data/lib/autobuild/import/git-lfs.rb +37 -30
- data/lib/autobuild/import/git.rb +231 -179
- data/lib/autobuild/import/hg.rb +23 -18
- data/lib/autobuild/import/svn.rb +48 -29
- data/lib/autobuild/importer.rb +530 -499
- data/lib/autobuild/mail_reporter.rb +77 -77
- data/lib/autobuild/package.rb +171 -101
- data/lib/autobuild/packages/autotools.rb +47 -42
- data/lib/autobuild/packages/cmake.rb +71 -65
- data/lib/autobuild/packages/dummy.rb +9 -8
- data/lib/autobuild/packages/genom.rb +1 -1
- data/lib/autobuild/packages/gnumake.rb +19 -13
- data/lib/autobuild/packages/import.rb +2 -6
- data/lib/autobuild/packages/orogen.rb +32 -31
- data/lib/autobuild/packages/pkgconfig.rb +2 -2
- data/lib/autobuild/packages/python.rb +7 -2
- data/lib/autobuild/packages/ruby.rb +22 -17
- data/lib/autobuild/parallel.rb +35 -39
- data/lib/autobuild/pkgconfig.rb +25 -13
- data/lib/autobuild/progress_display.rb +23 -23
- data/lib/autobuild/rake_task_extension.rb +6 -6
- data/lib/autobuild/reporting.rb +38 -26
- data/lib/autobuild/subcommand.rb +72 -65
- data/lib/autobuild/test.rb +8 -7
- data/lib/autobuild/test_utility.rb +10 -9
- data/lib/autobuild/timestamps.rb +28 -23
- data/lib/autobuild/tools.rb +17 -16
- data/lib/autobuild/utility.rb +16 -18
- data/lib/autobuild/version.rb +1 -1
- metadata +39 -38
data/lib/autobuild/exceptions.rb
CHANGED
@@ -1,28 +1,34 @@
 module Autobuild
     ## Base class for all Autobuild exceptions
-    class
-
-        def mail
-
-
-
-
+    class PhaseException < RuntimeError
+        # If the error should be reported by mail
+        def mail?
+            false
+        end
+
+        # If the error is fatal
+        def fatal?
+            true
+        end
+
+        # If the error can be retried
+        def retry?
+            @retry
+        end
         attr_accessor :target, :phase

-        ## Creates a new exception which occured while doing *phase*
+        ## Creates a new exception which occured while doing *phase*
         # in *target*
         def initialize(target = nil, phase = nil, options = Hash.new)
             options = Kernel.validate_options options, retry: true
-            @target
-            @
+            @target = target
+            @phase = phase
+            @retry = options[:retry]
         end

-        alias
+        alias exception_message to_s
         def to_s
-            dir =
-                if target.respond_to?(:srcdir)
-                    "(#{target.srcdir})"
-                end
+            dir = "(#{target.srcdir})" if target.respond_to?(:srcdir)
             target_name =
                 if target.respond_to?(:name)
                     target.name
@@ -39,8 +45,11 @@ def to_s
             end
         end

+    # Backward compatibility
+    Exception = PhaseException
+
     ## There is an error/inconsistency in the configuration
-    class ConfigException
+    class ConfigException < PhaseException
         def initialize(target = nil, phase = nil, options = Hash.new)
             options, other_options = Kernel.filter_options options,
                 retry: false
@@ -48,8 +57,10 @@ def initialize(target = nil, phase = nil, options = Hash.new)
         end
     end
     ## An error occured in a package
-    class PackageException <
-        def mail
+    class PackageException < PhaseException
+        def mail?
+            true
+        end

         def initialize(target = nil, phase = nil, options = Hash.new)
             options, other_options = Kernel.filter_options options,
@@ -63,18 +74,24 @@ def initialize(target = nil, phase = nil, options = Hash.new)
     class ImporterCannotReset < PackageException
     end

-
-    class CommandNotFound <
-
-    class SubcommandFailed <
-        def mail
+    # The subcommand is not found
+    class CommandNotFound < PhaseException; end
+    # An error occured while running a subcommand
+    class SubcommandFailed < PhaseException
+        def mail?
+            true
+        end
+
         attr_writer :retry
         attr_reader :command, :logfile, :status, :output
         def initialize(*args)
             if args.size == 1
                 sc = args[0]
-                target
-
+                target = sc.target
+                command = sc.command
+                logfile = sc.logfile
+                status = sc.status
+                output = sc.output
                 @orig_message = sc.exception_message
             elsif args.size == 4 || args.size == 5
                 target, command, logfile, status, output = *args
@@ -91,15 +108,13 @@ def initialize(*args)

         def to_s
             msg = super
-            if @orig_message
-                msg << "\n #{@orig_message}"
-            end
+            msg << "\n #{@orig_message}" if @orig_message
             msg << "\n see #{logfile} for details"

             # If we do not have a status, it means an error occured in the
             # launching process. More importantly, it means we already have a
             # proper explanation for it. Don't display the logfile at all.
-            if status
+            if status
                 lines = @output
                 logsize = Autobuild.displayed_error_line_count
                 if logsize != Float::INFINITY && lines.size > logsize
@@ -134,12 +149,14 @@ def initialize(original_errors)
             @original_errors = original_errors
         end

-        def mail
+        def mail?
+            true
+        end

         def to_s
             result = ["#{original_errors.size} errors occured"]
             original_errors.each_with_index do |e, i|
-                result << "(#{i}) #{e
+                result << "(#{i}) #{e}"
             end
             result.join("\n")
         end
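
For context, 1.18.0 renames the base exception class to PhaseException (keeping Exception as a backward-compatibility alias) and turns the reporting flags into the predicates mail?, fatal? and retry?. A minimal illustrative sketch of how calling code can use the new hierarchy (not code from the gem):

    require 'autobuild'

    begin
        # raising the way autobuild itself does: exception instance plus message
        raise Autobuild::PackageException.new(nil, 'build'), "build failed"
    rescue Autobuild::PhaseException => e # Autobuild::Exception still matches (alias)
        puts "phase '#{e.phase}' failed: #{e}"
        puts "would be reported by mail" if e.mail?
        puts "fatal error" if e.fatal?
        puts "may be retried" if e.retry?
    end
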
data/lib/autobuild/import/archive.rb
CHANGED
@@ -7,6 +7,7 @@

 module Autobuild
     class ArchiveImporter < Importer
+        # rubocop:disable Naming/ConstantName
         # The tarball is not compressed
         Plain = 0
         # The tarball is compressed with gzip
@@ -15,18 +16,19 @@ class ArchiveImporter < Importer
         Bzip = 2
         # Not a tarball but a zip
         Zip = 3
+        # rubocop:enable Naming/ConstantName

         TAR_OPTION = {
             Plain => '',
             Gzip => 'z',
             Bzip => 'j'
-        }
+        }.freeze

         # Known URI schemes for +url+
-        VALID_URI_SCHEMES = [
+        VALID_URI_SCHEMES = %w[file http https ftp].freeze

         # Known URI schemes for +url+ on windows
-        WINDOWS_VALID_URI_SCHEMES = [
+        WINDOWS_VALID_URI_SCHEMES = %w[file http https].freeze

         class << self
             # The directory in which downloaded files are saved
@@ -35,7 +37,7 @@ class << self
             # {Importer.cache_dirs} and falls back #{prefix}/cache
             def cachedir
                 if @cachedir then @cachedir
-                elsif cache_dirs = Importer.cache_dirs('archives')
+                elsif (cache_dirs = Importer.cache_dirs('archives'))
                     @cachedir = cache_dirs.first
                 else
                     "#{Autobuild.prefix}/cache"
@@ -70,19 +72,20 @@ def cachedir
         # @see filename_to_mode
         def self.find_mode_from_filename(filename)
             case filename
-            when /\.zip
-            when /\.tar
-            when /\.tar\.gz$|\.tgz
-            when /\.bz2
+            when /\.zip$/ then Zip
+            when /\.tar$/ then Plain
+            when /\.tar\.gz$|\.tgz$/ then Gzip
+            when /\.bz2$/ then Bzip
             end
         end

         # Returns the unpack mode from the file name
         def self.filename_to_mode(filename)
-            if mode = find_mode_from_filename(filename)
+            if (mode = find_mode_from_filename(filename))
                 mode
             else
-                raise "cannot infer the archive type from '#{filename}',
+                raise "cannot infer the archive type from '#{filename}', "\
+                    "provide it explicitely with the mode: option"
             end
         end

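
The two helpers above map a file extension to one of the unpack-mode constants, raising when nothing matches. An illustrative sketch of that behaviour, assuming the gem is loaded:

    require 'autobuild'

    Autobuild::ArchiveImporter.find_mode_from_filename('pkg-1.0.tar.gz')
    # => Autobuild::ArchiveImporter::Gzip (an Integer mode constant)
    Autobuild::ArchiveImporter.filename_to_mode('pkg-1.0.zip')
    # => Autobuild::ArchiveImporter::Zip
    Autobuild::ArchiveImporter.filename_to_mode('pkg-1.0.dat')
    # raises: cannot infer the archive type from 'pkg-1.0.dat', ...
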
@@ -96,6 +99,7 @@ def self.filename_to_mode(filename)
         def self.auto_update?
             @auto_update
         end
+
         def self.auto_update=(flag)
             @auto_update = flag
         end
@@ -106,15 +110,11 @@ def update_cached_file?
             @update_cached_file
         end

-        def download_http(package, uri, filename,
-                current_time: nil)
+        def download_http(package, uri, filename, # rubocop:disable Metrics/ParameterLists
+                user: nil, password: nil, current_time: nil)
             request = Net::HTTP::Get.new(uri)
-            if current_time
-
-            end
-            if user
-                request.basic_auth user, password
-            end
+            request['If-Modified-Since'] = current_time.rfc2822 if current_time
+            request.basic_auth(user, password) if user

             Net::HTTP.start(
                 uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
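
The reworked download_http is essentially a conditional GET: it sends If-Modified-Since with the cached file's mtime plus optional basic auth, and skips the download on Net::HTTPNotModified. A self-contained sketch of that Net::HTTP pattern (illustrative names, not gem code):

    require 'net/http'
    require 'time'

    # Returns the body when the server copy is newer than cached_mtime, nil otherwise
    def fetch_if_newer(uri, cached_mtime, user: nil, password: nil)
        request = Net::HTTP::Get.new(uri)
        request['If-Modified-Since'] = cached_mtime.rfc2822 if cached_mtime
        request.basic_auth(user, password) if user

        Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
            response = http.request(request)
            return nil if response.is_a?(Net::HTTPNotModified)
            return response.body if response.is_a?(Net::HTTPSuccess)

            raise "unexpected response #{response.class}"
        end
    end

    # e.g. fetch_if_newer(URI('https://example.com/pkg.tar.gz'), File.mtime('pkg.tar.gz'))
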
@@ -124,10 +124,10 @@ def download_http(package, uri, filename, user: nil, password: nil,
                 when Net::HTTPNotModified
                     return false
                 when Net::HTTPSuccess
-                    if current_time && (last_modified = resp
+                    if current_time && (last_modified = resp['last-modified'])
                         return false if current_time >= Time.rfc2822(last_modified)
                     end
-                    if (length = resp
+                    if (length = resp['Content-Length'])
                         length = Integer(length)
                         expected_size = "/#{Autobuild.human_readable_size(length)}"
                     end
@@ -150,9 +150,9 @@ def download_http(package, uri, filename, user: nil, password: nil,
                             "(#{formatted_size}#{expected_size})"
                     end
                 when Net::HTTPRedirection
-                    if (location = resp
+                    if (location = resp['location']).start_with?('/')
                         redirect_uri = uri.dup
-                        redirect_uri.path = resp
+                        redirect_uri.path = resp['location']
                     else
                         redirect_uri = location
                     end
@@ -161,7 +161,8 @@ def download_http(package, uri, filename, user: nil, password: nil,
                         user: user, password: password, current_time: current_time)
                 else
                     raise PackageException.new(package, 'import'),
-                        "failed download of #{package.name} from #{uri}:
+                        "failed download of #{package.name} from #{uri}: "\
+                        "#{resp.class}"
                 end
             end
         end
@@ -171,17 +172,12 @@ def download_http(package, uri, filename, user: nil, password: nil,
         def extract_tar_gz(io, target)
             Gem::Package::TarReader.new(io).each do |entry|
                 newname = File.join(
-                    target,
-
-                if
-
-
-
-                    dir = newname.slice(0,newname.rindex('/'))
-                    if(!File.directory?(dir))
-                        FileUtils.mkdir_p(dir)
-                    end
-                    open(newname, "wb") do |file|
+                    target, File.basename(entry.full_name))
+                FileUtils.mkdir_p(newname) if entry.directory?
+                if entry.file?
+                    dir = File.dirname(newname)
+                    FileUtils.mkdir_p(dir) unless File.directory?(dir)
+                    File.open(newname, "wb") do |file|
                         file.write(entry.read)
                     end
                 end
@@ -200,21 +196,26 @@ def update_needed?(package)
                 size = File.stat(@url.path).size
                 mtime = File.stat(@url.path).mtime
             else
-
+                # rubocop:disable Security/Open
+                open @url, :content_length_proc => ->(v) { size = v } do |file|
                     mtime = file.last_modified
                 end
+                # rubocop:enable Security/Open
             end

             if mtime && size
                 return size != cached_size || mtime > cached_mtime
             elsif mtime
-                package.warn "%s: archive size is not available for #{@url},
+                package.warn "%s: archive size is not available for #{@url}, "\
+                    "relying on modification time"
                 return mtime > cached_mtime
             elsif size
-                package.warn "%s: archive modification time
+                package.warn "%s: archive modification time "\
+                    "is not available for #{@url}, relying on size"
                 return size != cached_size
             else
-                package.warn "%s: neither the archive size nor its modification time
+                package.warn "%s: neither the archive size nor its modification time "\
+                    "are available for #{@url}, will always update"
                 return true
             end
         end
@@ -225,26 +226,32 @@ def download_from_url(package)
             if %w[http https].include?(@url.scheme)
                 if File.file?(cachefile)
                     return false unless update_cached_file?
+
                     cached_mtime = File.lstat(cachefile).mtime
                 end
+
                 updated = download_http(package, @url, "#{cachefile}.partial",
                     user: @user, password: @password,
                     current_time: cached_mtime)
                 return false unless updated
             elsif Autobuild.bsd?
                 return false unless update_needed?(package)
+
                 package.run(:import, Autobuild.tool('curl'),
-                    '-Lso',"#{cachefile}.partial", @url)
+                    '-Lso', "#{cachefile}.partial", @url)
             else
                 return false unless update_needed?(package)
+
                 additional_options = []
-                if timeout = self.timeout
+                if (timeout = self.timeout)
                     additional_options << "--timeout" << timeout
                 end
-                if retries = self.retries
+                if (retries = self.retries)
                     additional_options << "--tries" << retries
                 end
-                package.run(:import, Autobuild.tool('wget'), '-q', '-P', cachedir,
+                package.run(:import, Autobuild.tool('wget'), '-q', '-P', cachedir,
+                    *additional_options, @url, '-O', "#{cachefile}.partial",
+                    retry: true)
             end
         rescue Exception
             FileUtils.rm_f "#{cachefile}.partial"
@@ -257,12 +264,37 @@ def download_from_url(package)
         # Updates the downloaded file in cache only if it is needed
         #
         # @return [Boolean] true if a new file was downloaded, false otherwise
+        # @raises ConfigException if a expected digest was given in the source.yml file and it doesn't match
         def update_cache(package)
             updated = download_from_url(package)
-            @cachefile_digest =
+            @cachefile_digest = read_cachefile_digest
+
+            if @expected_digest && @expected_digest != @cachefile_digest
+                raise ConfigException, "The archive #{@url.to_s} does not match the digest provided"
+            end
+
             updated
         end

+        def read_cachefile_digest
+            Digest::SHA1.hexdigest File.read(cachefile)
+        end
+
+        # Fingerprint for archive importer, we are using
+        # its digest whether is calculated or expected
+        # @raises ConfigException if no digest is present
+        def vcs_fingerprint(package)
+            if @cachefile_digest
+                @cachefile_digest
+            elsif File.file?(cachefile)
+                read_cachefile_digest
+            elsif @expected_digest
+                @expected_digest
+            else
+                raise ConfigException, "There is no digest for archive #{@url.to_s}, make sure cache directories are configured correctly"
+            end
+        end
+
         # The source URL
         attr_reader :url
         # The local file (either a downloaded file if +url+ is not local, or +url+ itself)
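
update_cache now refuses an archive whose SHA1 differs from the configured expected digest, and vcs_fingerprint reuses the same value. Computing a matching digest for an existing archive is a one-liner (illustrative, the path is a placeholder):

    require 'digest'

    # Same computation as read_cachefile_digest above
    puts Digest::SHA1.hexdigest(File.read('cache/archives/pkg-1.0.tar.gz'))
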
@@ -285,13 +317,16 @@ def cachedir=(dir)
             relocate(@url.to_s)
         end

-        #
-
-
-
+        # @deprecated use {#archive_dir} instead
+        def tardir
+            @options[:tardir]
+        end
+
         # The directory contained in the archive. If not set, we assume that it
         # is the same than the source dir
-        def archive_dir
+        def archive_dir
+            @options[:archive_dir] || tardir
+        end

         # The number of time we should retry downloading if the underlying tool
         # supports it (wget does).
@@ -327,8 +362,8 @@ def has_subdirectory?
             !@options[:no_subdirectory]
         end

-        # Creates a new importer which downloads +url+ in +cachedir+ and
-        # are allowed:
+        # Creates a new importer which downloads +url+ in +cachedir+ and
+        # unpacks it. The following options are allowed:
         # [:cachedir] the cache directory. Defaults to "#{Autobuild.prefix}/cache"
         # [:archive_dir] the directory contained in the archive file. If set,
         #           the importer will rename that directory to make it match
@@ -336,7 +371,8 @@ def has_subdirectory?
         # [:no_subdirectory] the archive does not have the custom archive
         #           subdirectory.
         # [:retries] The number of retries for downloading
-        # [:timeout] The timeout (in seconds) used during downloading, it
+        # [:timeout] The timeout (in seconds) used during downloading, it
+        #           defaults to 10s
         # [:filename] Rename the archive to this filename (in cache) -- will be
         #           also used to infer the mode
         # [:mode] The unpack mode: one of Zip, Bzip, Gzip or Plain, this is
@@ -344,7 +380,7 @@ def has_subdirectory?
         def initialize(url, options = Hash.new)
             sourceopts, options = Kernel.filter_options options,
                 :source_id, :repository_id, :filename, :mode, :update_cached_file,
-                :user, :password
+                :user, :password, :expected_digest
             super(options)

             @filename = nil
@@ -371,42 +407,55 @@ def relocate(url, options = Hash.new)

             @repository_id = options[:repository_id] || parsed_url.to_s
             @source_id = options[:source_id] || parsed_url.to_s
+            @expected_digest = options[:expected_digest]

-            @filename =
+            @filename =
+                options[:filename] ||
+                @filename ||
+                File.basename(url).gsub(/\?.*/, '')
             @update_cached_file = options[:update_cached_file]

-            @mode =
+            @mode =
+                options[:mode] ||
+                ArchiveImporter.find_mode_from_filename(filename) ||
+                @mode
+
             if Autobuild.windows? && (mode != Gzip)
-                raise ConfigException, "only gzipped tar archives
+                raise ConfigException, "only gzipped tar archives "\
+                    "are supported on Windows"
             end
             @user = options[:user]
             @password = options[:password]
             if @user && !%w[http https].include?(@url.scheme)
-                raise ConfigException, "authentication is only supported for
+                raise ConfigException, "authentication is only supported for "\
+                    "http and https URIs"
             end

-
-
-
-
-
+            @cachefile =
+                if @url.scheme == 'file'
+                    @url.path
+                else
+                    File.join(cachedir, filename)
+                end
         end

         def update(package, options = Hash.new) # :nodoc:
             if options[:only_local]
-                package.warn "%s: the archive importer does not support local updates,
+                package.warn "%s: the archive importer does not support local updates, "\
+                    "skipping"
                 return false
             end
             needs_update = update_cache(package)

-
+            unless File.file?(checkout_digest_stamp(package))
                 write_checkout_digest_stamp(package)
             end

             if needs_update || archive_changed?(package)
                 return checkout(package, allow_interactive: options[:allow_interactive])
+            else
+                false
             end
-            false
         end

         def checkout_digest_stamp(package)
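
Putting the new :expected_digest option together with the ones documented above, a hedged usage sketch of the constructor (URL, digest and values are placeholders, not a recommended configuration):

    require 'autobuild'

    importer = Autobuild::ArchiveImporter.new(
        'https://example.com/downloads/pkg-1.0.tar.gz',
        filename: 'pkg-1.0.tar.gz', # also used to infer the unpack mode (Gzip here)
        retries: 3,                 # forwarded to wget as --tries
        timeout: 10,                # forwarded to wget as --timeout
        expected_digest: 'aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa' # SHA1 the cached file must match
    )
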
@@ -439,11 +488,17 @@ def checkout(package, options = Hash.new) # :nodoc:
                 response = 'yes'
             elsif options[:allow_interactive]
                 package.progress_done
-                package.message "The archive #{@url
-
-
+                package.message "The archive #{@url} is different from "\
+                    "the one currently checked out at #{package.srcdir}", :bold
+                package.message "I will have to delete the current folder to go on "\
+                    "with the update"
+                response = TTY::Prompt.new.ask " Continue (yes or no) ? "\
+                    "If no, this update will be ignored, "\
+                    "which can lead to build problems.", convert: :bool
             else
-                raise Autobuild::InteractionRequired, "importing #{package.name}
+                raise Autobuild::InteractionRequired, "importing #{package.name} "\
+                    "would have required user interaction and "\
+                    "allow_interactive is false"
             end

             if !response
@@ -469,7 +524,7 @@ def checkout(package, options = Hash.new) # :nodoc:
             end

             FileUtils.mkdir_p base_dir
-            cmd = [
+            cmd = ['-o', cachefile, '-d', main_dir]
             package.run(:import, Autobuild.tool('unzip'), *cmd)

             archive_dir = (self.archive_dir || File.basename(package.name))
@@ -477,33 +532,30 @@ def checkout(package, options = Hash.new) # :nodoc:
                     FileUtils.rm_rf File.join(package.srcdir)
                     FileUtils.mv File.join(base_dir, archive_dir), package.srcdir
                 elsif !File.directory?(package.srcdir)
-                    raise Autobuild::Exception, "#{cachefile} does not contain
+                    raise Autobuild::Exception, "#{cachefile} does not contain "\
+                        "directory called #{File.basename(package.srcdir)}. "\
+                        "Did you forget to use the archive_dir option ?"
                 end
             else
                 FileUtils.mkdir_p package.srcdir
                 cmd = ["x#{TAR_OPTION[mode]}f", cachefile, '-C', package.srcdir]
-
-                    cmd << '--strip-components=1'
-                end
+                cmd << '--strip-components=1' unless @options[:no_subdirectory]

                 if Autobuild.windows?
                     io = if mode == Plain
-
-
-
-
+                             File.open(cachefile, 'r')
+                         else
+                             Zlib::GzipReader.open(cachefile)
+                         end
                     extract_tar_gz(io, package.srcdir)
                 else
                     package.run(:import, Autobuild.tool('tar'), *cmd)
                 end
             end
             write_checkout_digest_stamp(package)
-
-
+            true
         rescue SubcommandFailed
-            if cachefile != url.path
-                FileUtils.rm_f cachefile
-            end
+            FileUtils.rm_f(cachefile) if cachefile != url.path
             raise
         end
     end