autobuild 1.14.1 → 1.15.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
- SHA1:
- metadata.gz: 25014dd95ce502bebe2ffe0c7e8247042d4c64c1
- data.tar.gz: 795990e0fb678cbc772882121735c50e306a1d21
+ SHA256:
+ metadata.gz: c5db12f4baef9f5192a60e44c30b9db25301eeada8b07706dc29b07f8a3d72f9
+ data.tar.gz: '078bd5a842db6b28dcc83934074291860998f7688bb8186d7ca1c1b40510054f'
  SHA512:
- metadata.gz: afefb1dcef0ae0ad2413715f69ec890283ed51b581dae702c1e55a9f32615527d06029c575e8c94f02f18b054c1652a8d46a39a45174c68e4a66cd98db4d4c47
- data.tar.gz: 7663d891ffc50ad4099daecbfd6af60b2793696e64b9e5bd59aedca72cf417f03bba1cce095023dc12b01b5d9cb951bc604a02ce8bf48cdad1f9b89259720ed9
+ metadata.gz: 86b505fbc092682443ae1cf05403ad72ef2cba0cd5f0c42d9b1e70f352f37221a5304baf2a0b1391ccb9b2d8247580abbbe4ab7c503c1a480c7ded04d01e1314
+ data.tar.gz: a6110dd2e3f714a9213973f4eebbcee346357295e56970b798f413c692c59cfb49ac2b90399890827ffff101d1e564cce3ca715d36d260acbe290096633e18d8
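Note: the per-file checksums move from SHA1 to SHA256 (the SHA512 entries stay). As an illustrative sketch only — the file names assume the metadata.gz and data.tar.gz members have been extracted from the downloaded autobuild-1.15.0.gem — the new values can be re-checked with Ruby's Digest:

    require 'digest'

    # Print the SHA256 of each gem member to compare against the values above
    %w[metadata.gz data.tar.gz].each do |name|
      puts "#{name}: #{Digest::SHA256.file(name).hexdigest}"
    end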
@@ -1,4 +1,4 @@
- sudo: false
+ sudo: true
  language: ruby
  rvm:
  - '2.1'
@@ -10,3 +10,11 @@ rvm:
  matrix:
  allow_failures:
  - rvm: 'jruby-9.1.15.0'
+
+ install:
+ - gem install bundler
+ - bundle install --path vendor/bundle
+
+ script:
+ - bundle exec rake
+ - bundle exec rake test
@@ -13,7 +13,7 @@ def self.macos?
  @macos
  end

- @freebsd = RbConfig::CONFIG["host_os"].include?('freebsd')
+ @freebsd = RbConfig::CONFIG["host_os"].include?('freebsd')
  def self.freebsd?
  @freebsd
  end
@@ -77,7 +77,7 @@ class Environment
  #
  # If inherited_environment[varname] is true, the generated shell script
  # will contain
- #
+ #
  # export VARNAME=new_value:new_value:$VARNAME
  #
  # otherwise
@@ -113,7 +113,7 @@ def initialize

  @system_env = Hash.new
  @original_env = ORIGINAL_ENV.dup
-
+
  @default_pkgconfig_search_suffixes = nil
  @arch_names = nil
  @target_arch = nil
@@ -142,8 +142,8 @@ def initialize_copy(old)
  map_value { |k, v| v.dup if v }
  @environment = @environment.
  map_value { |k, v| v.dup if v }
- @source_before = @source_before.dup
- @source_after = @source_after.dup
+ @source_before = Marshal.load(Marshal.dump(@source_before)) # deep copy
+ @source_after = Marshal.load(Marshal.dump(@source_after)) # deep copy
  @inherited_variables = @inherited_variables.dup

  @system_env = @system_env.
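Note: source_before/source_after now store { file:, shell: } hashes rather than plain strings (see the hunks further down), so initialize_copy switches from dup, which only copies the outer array, to a Marshal round-trip that also duplicates the nested entries. A minimal standalone sketch of the difference:

    entries = [{ file: 'env.sh', shell: 'sh' }]

    shallow = entries.dup                          # copies the array, shares the inner hash
    deep    = Marshal.load(Marshal.dump(entries))  # fully independent copy

    shallow[0][:file] = 'other.sh'
    deep[0][:file]     # => "env.sh"   (unaffected)
    entries[0][:file]  # => "other.sh" (mutated through the shallow copy)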
@@ -157,7 +157,7 @@ def [](name)
  end

  # Resets the value of +name+ to its original value. If it is inherited from
- # the
+ # the
  def reset(name = nil)
  if name
  environment.delete(name)
@@ -174,7 +174,7 @@ def reset(name = nil)
  # value.
  #
  # In a bourne shell, this would be equivalent to doing
- #
+ #
  # unset name
  #
  def clear(name = nil)
@@ -212,7 +212,7 @@ def unset(name)
  # @see env_inherit env_inherit=
  def inherit?(name = nil)
  if @inherit
- if name
+ if name
  @inherited_variables.include?(name)
  else true
  end
@@ -248,7 +248,7 @@ def inherit(*names)
  names.pop
  else true
  end
-
+
  if flag
  @inherited_variables |= names
  names.each do |env_name|
@@ -446,10 +446,12 @@ def push_path(name, *values)
  # @overload source_before(path)
  # @param [String] path a path that should be added to source_before
  #
- def source_before(file = nil)
+ def source_before(file = nil, shell: 'sh')
  if file
- @source_before << file
- else @source_before
+ @source_before << { file: file, shell: shell }
+ source_before(shell: shell) # for backwards compatibility
+ else @source_before.select { |pair| pair[:shell] == shell }
+ .map { |item| item[:file] }
  end
  end

@@ -462,10 +464,12 @@ def source_before(file = nil)
  # @overload source_after(path)
  # @param [String] path a path that should be added to source_after
  #
- def source_after(file = nil)
+ def source_after(file = nil, shell: 'sh')
  if file
- @source_after << file
- else @source_after
+ @source_after << { file: file, shell: shell }
+ source_after(shell: shell) # for backwards compatibility
+ else @source_after.select { |pair| pair[:shell] == shell }
+ .map { |item| item[:file] }
  end
  end

@@ -502,9 +506,9 @@ def exported_environment
  # Autobuild.inherited_environment.
  #
  # It also sources the files added by source_file
- def export_env_sh(io)
+ def export_env_sh(io, shell: 'sh')
  export = exported_environment
- source_before.each do |path|
+ source_before(shell: shell).each do |path|
  io.puts SHELL_SOURCE_SCRIPT % path
  end
  export.unset.each do |name|
@@ -518,7 +522,7 @@ def export_env_sh(io)
  io.puts SHELL_CONDITIONAL_SET_COMMAND % [name, with_inheritance.join(File::PATH_SEPARATOR), without_inheritance.join(File::PATH_SEPARATOR)]
  io.puts SHELL_EXPORT_COMMAND % [name]
  end
- source_after.each do |path|
+ source_after(shell: shell).each do |path|
  io.puts SHELL_SOURCE_SCRIPT % [path]
  end
  end
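Note: taken together, the hunks above make the sourced-file lists shell-aware: entries are recorded as { file:, shell: } pairs, the readers filter on the requested shell (defaulting to 'sh'), and export_env_sh only emits the entries that match the shell it is generating for. A hedged usage sketch (paths are placeholders):

    env = Autobuild::Environment.new
    env.source_before('/opt/rock/env.sh')                    # stored for the default 'sh' shell
    env.source_after('/opt/rock/env.fish', shell: 'fish')

    env.source_after                                          # => [] (only 'sh' entries are returned)
    env.source_after(shell: 'fish')                           # => ["/opt/rock/env.fish"]

    File.open('env.sh', 'w') { |io| env.export_env_sh(io) }   # emits only the 'sh' entries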
@@ -568,7 +572,7 @@ def self.pathvar(path, varname)
  def each_env_search_path(prefix, patterns)
  arch_names = self.arch_names
  arch_size = self.arch_size
-
+
  seen = Set.new
  patterns.each do |base_path|
  paths = []
@@ -833,16 +837,16 @@ def self.env_push_path(name, *values)
  env.push_path(name, *values)
  end
  # @deprecated, use the API on {env} instead
- def self.env_source_file(file)
- env.source_after(file)
+ def self.env_source_file(file, shell: 'sh')
+ env.source_after(file, shell: shell)
  end
  # @deprecated, use the API on {env} instead
- def self.env_source_before(file = nil)
- env.source_before(file)
+ def self.env_source_before(file = nil, shell: 'sh')
+ env.source_before(file, shell: shell)
  end
  # @deprecated, use the API on {env} instead
- def self.env_source_after(file = nil)
- env.source_after(file)
+ def self.env_source_after(file = nil, shell: 'sh')
+ env.source_after(file, shell: shell)
  end
  # @deprecated, use the API on {env} instead
  def self.export_env_sh(io)
@@ -877,4 +881,3 @@ def self.arch_names
  env.arch_names
  end
  end
-
@@ -2,15 +2,8 @@
  require 'digest/sha1'
  require 'open-uri'
  require 'fileutils'
-
- WINDOWS = RbConfig::CONFIG["host_os"] =~%r!(msdos|mswin|djgpp|mingw|[Ww]indows)!
- if WINDOWS
- require 'net/http'
- require 'net/https'
- require 'rubygems/package'
- require 'zlib'
- end
-
+ require 'net/http'
+ require 'net/https'

  module Autobuild
  class ArchiveImporter < Importer
@@ -30,7 +23,10 @@ class ArchiveImporter
  }

  # Known URI schemes for +url+
- VALID_URI_SCHEMES = [ 'file', 'http', 'https', 'ftp' ]
+ VALID_URI_SCHEMES = ['file', 'http', 'https', 'ftp']
+
+ # Known URI schemes for +url+ on windows
+ WINDOWS_VALID_URI_SCHEMES = ['file', 'http', 'https']

  class << self
  # The directory in which downloaded files are saved
@@ -100,34 +96,83 @@ def self.filename_to_mode(filename)
  def self.auto_update?
  @auto_update
  end
+ def self.auto_update=(flag)
+ @auto_update = flag
+ end
  @auto_update = (ENV['AUTOBUILD_ARCHIVE_AUTOUPDATE'] == '1')

- def update_cached_file?; @options[:update_cached_file] end
+ attr_writer :update_cached_file
+ def update_cached_file?
+ @update_cached_file
+ end
+
+ def download_http(package, uri, filename, user: nil, password: nil,
+ current_time: nil)
+ request = Net::HTTP::Get.new(uri)
+ if current_time
+ request['If-Modified-Since'] = current_time.rfc2822
+ end
+ if user
+ request.basic_auth user, password
+ end

-
- def get_url_on_windows(url, filename)
- uri = URI(url)
+ Net::HTTP.start(
+ uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|

- http = Net::HTTP.new(uri.host,uri.port)
- http.use_ssl = true if uri.port == 443
- http.verify_mode = OpenSSL::SSL::VERIFY_NONE #Unsure, critical?, Review this
- resp = http.get(uri.request_uri)
+ http.request(request) do |resp|
+ case resp
+ when Net::HTTPNotModified
+ return false
+ when Net::HTTPSuccess
+ if current_time && (last_modified = resp.header['last-modified'])
+ return false if current_time >= Time.rfc2822(last_modified)
+ end
+ if (length = resp.header['Content-Length'])
+ length = Integer(length)
+ expected_size = "/#{Autobuild.human_readable_size(length)}"
+ end

- if resp.code == "301" or resp.code == "302"
- get_url_on_windows(resp.header['location'],filename)
- else
- if resp.message != 'OK'
- raise "Could not get File from url \"#{url}\", got response #{resp.message} (#{resp.code})"
- end
- open(filename, "wb") do |file|
- file.write(resp.body)
+ File.open(filename, 'wb') do |io|
+ size = 0
+ next_update = Time.now
+ resp.read_body do |chunk|
+ io.write chunk
+ size += chunk.size
+ if size != 0 && (Time.now > next_update)
+ formatted_size = Autobuild.human_readable_size(size)
+ package.progress "downloading %s "\
+ "(#{formatted_size}#{expected_size})"
+ next_update = Time.now + 1
+ end
+ end
+ formatted_size = Autobuild.human_readable_size(size)
+ package.progress "downloaded %s "\
+ "(#{formatted_size}#{expected_size})"
+ end
+ when Net::HTTPRedirection
+ if (location = resp.header['location']).start_with?('/')
+ redirect_uri = uri.dup
+ redirect_uri.path = resp.header['location']
+ else
+ redirect_uri = location
+ end
+
+ return download_http(package, URI(redirect_uri), filename,
+ user: user, password: password, current_time: current_time)
+ else
+ raise PackageException.new(package, 'import'),
+ "failed download of #{package.name} from #{uri}: #{resp.class}"
+ end
  end
  end
+ true
  end
-
- def extract_tar_on_windows(filename,target)
- Gem::Package::TarReader.new(Zlib::GzipReader.open(filename)).each do |entry|
- newname = File.join(target,entry.full_name.slice(entry.full_name.index('/'),entry.full_name.size))
+
+ def extract_tar_gz(io, target)
+ Gem::Package::TarReader.new(io).each do |entry|
+ newname = File.join(
+ target,
+ entry.full_name.slice(entry.full_name.index('/'), entry.full_name.size))
  if(entry.directory?)
  FileUtils.mkdir_p(newname)
  end
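Note: download_http replaces the Windows-only get_url_on_windows helper. It streams the response body to disk with progress reporting, sends If-Modified-Since so an unchanged archive is not downloaded again, supports HTTP basic auth, and follows redirects (re-resolving relative Location headers against the original URI). A reduced, standalone sketch of the conditional-GET part only — not autobuild API; redirect and authentication handling omitted:

    require 'net/http'
    require 'time'

    # Fetch `uri` into `filename` unless the server reports the cached copy
    # (modification time `current_time`) is still current.
    def fetch_if_modified(uri, filename, current_time: nil)
      request = Net::HTTP::Get.new(uri)
      request['If-Modified-Since'] = current_time.rfc2822 if current_time

      Net::HTTP.start(uri.hostname, uri.port, use_ssl: uri.scheme == 'https') do |http|
        http.request(request) do |resp|
          return false if resp.is_a?(Net::HTTPNotModified)
          raise "download failed: #{resp.class}" unless resp.is_a?(Net::HTTPSuccess)

          File.open(filename, 'wb') do |io|
            resp.read_body { |chunk| io.write(chunk) }
          end
        end
      end
      true
    end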
@@ -142,68 +187,80 @@ def extract_tar_on_windows(filename,target)
  end
  end
  end
-
- # Updates the downloaded file in cache only if it is needed
- def update_cache(package)
- do_update = false
-
- if !File.file?(cachefile)
- do_update = true
- elsif self.update_cached_file?
- cached_size = File.lstat(cachefile).size
- cached_mtime = File.lstat(cachefile).mtime
-
- size, mtime = nil
- if @url.scheme == "file"
- size = File.stat(@url.path).size
- mtime = File.stat(@url.path).mtime
- else
- open @url, :content_length_proc => lambda { |v| size = v } do |file|
- mtime = file.last_modified
- end
- end

- if mtime && size
- do_update = (size != cached_size || mtime > cached_mtime)
- elsif mtime
- package.warn "%s: archive size is not available for #{@url}, relying on modification time"
- do_update = (mtime > cached_mtime)
- elsif size
- package.warn "%s: archive modification time is not available for #{@url}, relying on size"
- do_update = (size != cached_size)
- else
- package.warn "%s: neither the archive size nor its modification time available for #{@url}, will always update"
- do_update = true
+ def update_needed?(package)
+ return true unless File.file?(cachefile)
+ return false unless update_cached_file?
+
+ cached_size = File.lstat(cachefile).size
+ cached_mtime = File.lstat(cachefile).mtime
+
+ size, mtime = nil
+ if @url.scheme == "file"
+ size = File.stat(@url.path).size
+ mtime = File.stat(@url.path).mtime
+ else
+ open @url, :content_length_proc => lambda { |v| size = v } do |file|
+ mtime = file.last_modified
  end
  end

- if do_update
- FileUtils.mkdir_p(cachedir)
- begin
- if(WINDOWS)
- get_url_on_windows(@url, "#{cachefile}.partial")
- elsif Autobuild.bsd?
- package.run(:import, Autobuild.tool('curl'), '-Lso',"#{cachefile}.partial", @url)
- else
- additional_options = []
- if timeout = self.timeout
- additional_options << "--timeout" << timeout
- end
- if retries = self.retries
- additional_options << "--tries" << retries
- end
- package.run(:import, Autobuild.tool('wget'), '-q', '-P', cachedir, *additional_options, @url, '-O', "#{cachefile}.partial", retry: true)
+ if mtime && size
+ return size != cached_size || mtime > cached_mtime
+ elsif mtime
+ package.warn "%s: archive size is not available for #{@url}, relying on modification time"
+ return mtime > cached_mtime
+ elsif size
+ package.warn "%s: archive modification time is not available for #{@url}, relying on size"
+ return size != cached_size
+ else
+ package.warn "%s: neither the archive size nor its modification time available for #{@url}, will always update"
+ return true
+ end
+ end
+
+ def download_from_url(package)
+ FileUtils.mkdir_p(cachedir)
+ begin
+ if %w[http https].include?(@url.scheme)
+ if File.file?(cachefile)
+ return false unless update_cached_file?
+ cached_mtime = File.lstat(cachefile).mtime
+ end
+ updated = download_http(package, @url, "#{cachefile}.partial",
+ user: @user, password: @password,
+ current_time: cached_mtime)
+ return false unless updated
+ elsif Autobuild.bsd?
+ return false unless update_needed?(package)
+ package.run(:import, Autobuild.tool('curl'),
+ '-Lso',"#{cachefile}.partial", @url)
+ else
+ return false unless update_needed?(package)
+ additional_options = []
+ if timeout = self.timeout
+ additional_options << "--timeout" << timeout
+ end
+ if retries = self.retries
+ additional_options << "--tries" << retries
  end
- rescue Exception
- FileUtils.rm_f "#{cachefile}.partial"
- raise
+ package.run(:import, Autobuild.tool('wget'), '-q', '-P', cachedir, *additional_options, @url, '-O', "#{cachefile}.partial", retry: true)
  end
- FileUtils.mv "#{cachefile}.partial", cachefile
+ rescue Exception
+ FileUtils.rm_f "#{cachefile}.partial"
+ raise
  end
+ FileUtils.mv "#{cachefile}.partial", cachefile
+ true
+ end

+ # Updates the downloaded file in cache only if it is needed
+ #
+ # @return [Boolean] true if a new file was downloaded, false otherwise
+ def update_cache(package)
+ updated = download_from_url(package)
  @cachefile_digest = Digest::SHA1.hexdigest File.read(cachefile)
-
- do_update
+ updated
  end

  # The source URL
@@ -286,16 +343,15 @@ def has_subdirectory?
  # usually automatically inferred from the filename
  def initialize(url, options = Hash.new)
  sourceopts, options = Kernel.filter_options options,
- :source_id, :repository_id, :filename, :mode
+ :source_id, :repository_id, :filename, :mode, :update_cached_file,
+ :user, :password
  super(options)

  @filename = nil
- if !@options.has_key?(:update_cached_file)
- @options[:update_cached_file] = false
- end
+ @update_cached_file = false
  @cachedir = @options[:cachedir] || ArchiveImporter.cachedir
- @retries = @options[:retries] || ArchiveImporter.retries
- @timeout = @options[:timeout] || ArchiveImporter.timeout
+ @retries = @options[:retries] || ArchiveImporter.retries
+ @timeout = @options[:timeout] || ArchiveImporter.timeout
  relocate(url, sourceopts)
  end

@@ -304,14 +360,31 @@ def relocate(url, options = Hash.new)
  parsed_url = URI.parse(url).normalize
  @url = parsed_url
  if !VALID_URI_SCHEMES.include?(@url.scheme)
- raise ConfigException, "invalid URL #{@url} (local files must be prefixed with file://)"
+ raise ConfigException, "invalid URL #{@url} (local files "\
+ "must be prefixed with file://)"
+ elsif Autobuild.windows?
+ unless WINDOWS_VALID_URI_SCHEMES.include?(@url.scheme)
+ raise ConfigException, "downloading from a #{@url.scheme} URL "\
+ "is not supported on windows"
+ end
  end
+
  @repository_id = options[:repository_id] || parsed_url.to_s
- @source_id = options[:source_id] || parsed_url.to_s
+ @source_id = options[:source_id] || parsed_url.to_s

  @filename = options[:filename] || @filename || File.basename(url).gsub(/\?.*/, '')
+ @update_cached_file = options[:update_cached_file]

  @mode = options[:mode] || ArchiveImporter.find_mode_from_filename(filename) || @mode
+ if Autobuild.windows? && (mode != Gzip)
+ raise ConfigException, "only gzipped tar archives are supported on Windows"
+ end
+ @user = options[:user]
+ @password = options[:password]
+ if @user && !%w[http https].include?(@url.scheme)
+ raise ConfigException, "authentication is only supported for http and https URIs"
+ end
+
  if @url.scheme == 'file'
  @cachefile = @url.path
  else
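Note: relocate now validates the URI scheme per platform and picks up the new :user, :password and :update_cached_file options; credentials are only accepted for http/https URLs. A hedged construction example (URL and credentials are placeholders):

    importer = Autobuild::ArchiveImporter.new(
        'https://example.com/releases/mypkg-1.0.tar.gz',
        user: 'build', password: ENV['MYPKG_DOWNLOAD_PASSWORD'],
        update_cached_file: true)   # re-check the cached archive on update

    # Combining credentials with a non-HTTP scheme raises ConfigException:
    #   Autobuild::ArchiveImporter.new('ftp://example.com/mypkg.tar.gz', user: 'build')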
@@ -332,10 +405,8 @@ def update(package, options = Hash.new) # :nodoc:

  if needs_update || archive_changed?(package)
  checkout(package, allow_interactive: options[:allow_interactive])
+ true
  end
- (needs_update || archive_changed?(package))
- rescue OpenURI::HTTPError
- raise Autobuild::Exception.new(package.name, :import)
  end

  def checkout_digest_stamp(package)
@@ -400,7 +471,7 @@ def checkout(package, options = Hash.new) # :nodoc:
  FileUtils.mkdir_p base_dir
  cmd = [ '-o', cachefile, '-d', main_dir ]
  package.run(:import, Autobuild.tool('unzip'), *cmd)
-
+
  archive_dir = (self.archive_dir || File.basename(package.name))
  if archive_dir != File.basename(package.srcdir)
  FileUtils.rm_rf File.join(package.srcdir)
@@ -414,17 +485,20 @@ def checkout(package, options = Hash.new) # :nodoc:
  if !@options[:no_subdirectory]
  cmd << '--strip-components=1'
  end
-
- if(WINDOWS)
- extract_tar_on_windows(cachefile,package.srcdir)
+
+ if Autobuild.windows?
+ io = if mode == Plain
+ File.open(cachefile, 'r')
+ else
+ Zlib::GzipReader.open(cachefile)
+ end
+ extract_tar_gz(io, package.srcdir)
  else
  package.run(:import, Autobuild.tool('tar'), *cmd)
  end
  end
  write_checkout_digest_stamp(package)

- rescue OpenURI::HTTPError
- raise Autobuild::PackageException.new(package.name, :import)
  rescue SubcommandFailed
  if cachefile != url.path
  FileUtils.rm_f cachefile
@@ -447,4 +521,3 @@ def self.tar(source, options = {})
  ArchiveImporter.new(source, options)
  end
  end
-
@@ -190,10 +190,11 @@ def prepare
  def genstamp; File.join(srcdir, '.orogen', 'orogen-stamp') end

  def add_cmd_to_cmdline(cmd, cmdline)
- if cmd =~ /^([\w-]+)/
+ if cmd =~ /^([\w-]+)$/
  cmd_filter = $1
  else
- raise ArgumentError, "cannot parse the provided command #{cmd}"
+ cmdline << cmd
+ return
  end

  cmdline.delete_if { |str| str =~ /^#{cmd_filter}/ }
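Note: the pattern is now anchored, so only a bare command name (word characters and dashes) goes through the filtering branch; a command that carries arguments no longer has its first word used as the filter — it is appended to the command line unchanged — and inputs matching neither pattern no longer raise ArgumentError. The two branches, illustrated on the regexp alone:

    '--extended-states'  =~ /^([\w-]+)$/   # => 0   — filtered against existing entries
    '--transports corba' =~ /^([\w-]+)$/   # => nil — now appended to cmdline as-is
    '--transports corba' =~ /^([\w-]+)/    # => 0 in 1.14.1, which filtered on "--transports" only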
@@ -33,15 +33,15 @@ def progress_enabled?
  !@silent && @progress_enabled
  end

- def message(message, *args, io: @io)
- return if silent?
+ def message(message, *args, io: @io, force: false)
+ return if silent? && !force

  if args.last.respond_to?(:to_io)
  io = args.pop
  end

  @display_lock.synchronize do
- io.print "#{@cursor.clear_screen_down}#{@color.call(message, *args)}\n"
+ io.print "#{@cursor.column(1)}#{@cursor.clear_screen_down}#{@color.call(message, *args)}\n"
  io.flush if @io != io
  display_progress
  @io.flush
@@ -119,7 +119,7 @@ def progress_done(key, display_last = true, message: nil)

  def display_progress
  return unless progress_enabled?
-
+
  formatted = format_grouped_messages(@progress_messages.map(&:last), indent: " ")
  @io.print @cursor.clear_screen_down
  @io.print formatted.join("\n")
@@ -184,26 +184,27 @@ def group_messages(messages)
  end.compact
  end

- def format_grouped_messages(messages, indent: " ")
- terminal_w = TTY::Screen.width
+ def format_grouped_messages(messages, indent: " ", width: TTY::Screen.width)
  groups = group_messages(messages)
  groups.each_with_object([]) do |(prefix, messages), lines|
  if prefix.empty?
- lines << "#{indent}#{messages.shift}"
- else
- lines << "#{indent}#{prefix.dup.strip} #{messages.shift}"
+ lines.concat(messages.map { |m| "#{indent}#{m.strip}" })
+ next
  end
+
+ lines << "#{indent}#{prefix.dup.strip} #{messages.shift}"
  until messages.empty?
  msg = messages.shift.strip
- if lines.last.size + 2 + msg.size > terminal_w
- lines << "#{indent} #{msg}"
+ margin = messages.empty? ? 1 : 2
+ if lines.last.size + margin + msg.size > width
+ lines << "".dup
+ lines.last << indent << indent << msg
  else
- lines.last << ", #{msg}"
+ lines.last << " " << msg
  end
  end
- lines
+ lines.last << "," unless messages.empty?
  end
  end
  end
  end
  end
-
@@ -210,4 +210,18 @@ def success
  end
  end
  end
+
+ HUMAN_READABLE_SIZES = [
+ [1_000_000_000.0, "G"],
+ [1_000_000.0, "M"],
+ [1_000.0, "k"],
+ [1.0, ""]
+ ]
+
+ def self.human_readable_size(size)
+ HUMAN_READABLE_SIZES.each do |scale, name|
+ scaled_size = (size / scale)
+ return format("%3.1f%s", scaled_size, name) if scaled_size > 1
+ end
+ end
  end
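Note: the new Autobuild.human_readable_size helper backs the download progress messages in the archive importer; it picks the first scale for which the value exceeds 1 and formats it with one decimal. Expected results, per the code above:

    Autobuild.human_readable_size(512)            # => "512.0"
    Autobuild.human_readable_size(2_048)          # => "2.0k"
    Autobuild.human_readable_size(3_500_000)      # => "3.5M"
    Autobuild.human_readable_size(1_200_000_000)  # => "1.2G"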
@@ -45,9 +45,9 @@ class << self
  #
  # See #parallel_build_level for detailed information
  attr_writer :parallel_build_level
-
+
  # set/get a value how much log lines should be displayed on errors
- # this may be an integer or 'ALL' (which will be translated to -1)
+ # this may be an integer or 'ALL' (which will be translated to -1)
  # this is not using an attr_accessor to be able to validate the values
  def displayed_error_line_count=(value)
  @displayed_error_line_count = validate_displayed_error_line_count(value)
@@ -81,7 +81,7 @@ def self.autodetect_processor_count
  if @processor_count
  return @processor_count
  end
-
+
  if File.file?('/proc/cpuinfo')
  cpuinfo = File.readlines('/proc/cpuinfo')
  physical_ids, core_count, processor_ids = [], [], []
@@ -95,7 +95,7 @@ def self.autodetect_processor_count
  core_count << Integer($1)
  end
  end
-
+
  # Try to count the number of physical cores, not the number of
  # logical ones. If the info is not available, fallback to the
  # logical count
@@ -132,16 +132,15 @@ def self.autodetect_processor_count

  @processor_count
  end
-
- def self.validate_displayed_error_line_count(lines)
+
+ def self.validate_displayed_error_line_count(lines)
  if lines == 'ALL'
  return Float::INFINITY
  elsif lines.to_i > 0
  return lines.to_i
  end
- raise ConfigError, 'Autobuild.displayed_error_line_count can only be a positive integer or \'ALL\''
+ raise ConfigException.new, 'Autobuild.displayed_error_line_count can only be a positive integer or \'ALL\''
  end
-
  end

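Note: the validation keeps accepting a positive integer or the literal string 'ALL' (stored as Float::INFINITY) and now raises ConfigException instead of ConfigError. Usage sketch, assuming the writer is exposed on the Autobuild module as in the class << self block above:

    Autobuild.displayed_error_line_count = 50      # show the last 50 log lines on error
    Autobuild.displayed_error_line_count = 'ALL'   # stored as Float::INFINITY
    Autobuild.displayed_error_line_count = 0       # raises ConfigException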
@@ -294,7 +293,7 @@ def self.run(target, phase, *command)
  logfile.sync = true

  if !input_streams.empty?
- pread, pwrite = IO.pipe # to feed subprocess stdin
+ pread, pwrite = IO.pipe # to feed subprocess stdin
  end

  outread, outwrite = IO.pipe
@@ -335,7 +334,7 @@ def self.run(target, phase, *command)
  pwrite.close
  $stdin.reopen(pread)
  end
-
+
  exec(env, *command, close_others: false)
  rescue Errno::ENOENT
  cwrite.write([CONTROL_COMMAND_NOT_FOUND].pack('I'))
@@ -398,7 +397,7 @@ def self.run(target, phase, *command)
  if Autobuild.verbose || transparent_mode?
  STDOUT.puts "#{transparent_prefix}#{line}"
  elsif block_given?
- # Do not yield
+ # Do not yield
  # would mix the progress output with the actual command
  # output. Assume that if the user wants the command output,
  # the autobuild progress output is unnecessary
@@ -447,4 +446,3 @@ def self.run(target, phase, *command)
  end

  end
-
@@ -1,3 +1,3 @@
  module Autobuild
- VERSION = "1.14.1" unless defined? Autobuild::VERSION
+ VERSION = "1.15.0" unless defined? Autobuild::VERSION
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: autobuild
  version: !ruby/object:Gem::Version
- version: 1.14.1
+ version: 1.15.0
  platform: ruby
  authors:
  - Sylvain Joyeux
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2018-05-18 00:00:00.000000000 Z
+ date: 2018-08-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rake
@@ -260,7 +260,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.5.1
+ rubygems_version: 2.7.6
  signing_key:
  specification_version: 4
  summary: Library to handle build systems and import mechanisms