alibuild 1.17.34a1__py3-none-any.whl → 1.17.36__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between these package versions as they appear in their respective public registries.
METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: alibuild
- Version: 1.17.34a1
+ Version: 1.17.36
  Summary: ALICE Build Tool
  Home-page: https://alisw.github.io/alibuild
  Author: Giulio Eulisse
@@ -17,7 +17,7 @@ Requires-Dist: pyyaml
  Requires-Dist: requests
  Requires-Dist: distro
  Requires-Dist: jinja2
- Requires-Dist: boto3<1.36.0
+ Requires-Dist: boto3
  Provides-Extra: test
  Requires-Dist: tox; extra == "test"
  Provides-Extra: docs
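
Note: the dropped `boto3<1.36.0` pin and the `Boto3RemoteSync` hunk in alibuild_helpers/sync.py below are two halves of the same fix. boto3/botocore 1.36 enabled request/response integrity checksums by default, which S3-compatible services such as s3.cern.ch do not necessarily accept; the client construction now opts out at runtime where supported, so any boto3 version works and the version pin is no longer needed.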
RECORD CHANGED
@@ -1,28 +1,28 @@
- alibuild-1.17.34a1.data/scripts/aliBuild,sha256=5g-apCjhc9cvtFvKSP0yr6IJtChNvtTAP-BtPL-91M8,4307
- alibuild-1.17.34a1.data/scripts/aliDeps,sha256=fhfPB2NpuLj8bZZPUjJW2e4VxA8ZkHlp5q76xAOzufI,219
- alibuild-1.17.34a1.data/scripts/aliDoctor,sha256=gQwLcNGAm7_QF7FFCtmtFXEoroKsRonNMA7UP4DS1y4,221
- alibuild-1.17.34a1.data/scripts/alienv,sha256=8QihJOzKue4oeuIM31MsoX8Fw7FNZ4zY7wyK6UYtgTE,12568
- alibuild-1.17.34a1.data/scripts/pb,sha256=LfkIUyX3xXxmaGSdtAbA-dS1Y1eEShEHpmto1tIEga0,211
- alibuild-1.17.34a1.dist-info/licenses/LICENSE.md,sha256=WJ7YI-moTFb-uVrFjnzzhGJrnL9P2iqQe8NuED3hutI,35141
+ alibuild-1.17.36.data/scripts/aliBuild,sha256=5g-apCjhc9cvtFvKSP0yr6IJtChNvtTAP-BtPL-91M8,4307
+ alibuild-1.17.36.data/scripts/aliDeps,sha256=fhfPB2NpuLj8bZZPUjJW2e4VxA8ZkHlp5q76xAOzufI,219
+ alibuild-1.17.36.data/scripts/aliDoctor,sha256=gQwLcNGAm7_QF7FFCtmtFXEoroKsRonNMA7UP4DS1y4,221
+ alibuild-1.17.36.data/scripts/alienv,sha256=8QihJOzKue4oeuIM31MsoX8Fw7FNZ4zY7wyK6UYtgTE,12568
+ alibuild-1.17.36.data/scripts/pb,sha256=LfkIUyX3xXxmaGSdtAbA-dS1Y1eEShEHpmto1tIEga0,211
+ alibuild-1.17.36.dist-info/licenses/LICENSE.md,sha256=WJ7YI-moTFb-uVrFjnzzhGJrnL9P2iqQe8NuED3hutI,35141
  alibuild_helpers/__init__.py,sha256=ZAhyhRxOkiTA1fMzwEKACY8Eo8MdWp1MawVicFlvQWk,736
- alibuild_helpers/_version.py,sha256=VocJRnTsefVv_jIsw3e8gzjVm9rnZdAYPfyfpLQ8nig,716
- alibuild_helpers/analytics.py,sha256=fuiOagDngFnodGo7upMWeY-RjTTncDRz5Kwz8zxUr9o,4579
- alibuild_helpers/args.py,sha256=T0nOOMCpJZdJkEPZENz7nOsaYIfeZ7qHfAR1Vx4-T2Y,31694
- alibuild_helpers/build.py,sha256=VJj0KwRkbPs5gCj-Ey2MWhl2NTLZWql6DGwp8AO3tTA,57055
- alibuild_helpers/build_template.sh,sha256=yx1YEQN5tdfQNmZfsEnn-FxbzBIpvYwYRQkQLKmm8uo,14011
- alibuild_helpers/clean.py,sha256=-LeQUYDwxihzGJi4rCiuALu051T44-0cV8S6-l_lCaQ,3250
- alibuild_helpers/cmd.py,sha256=5tdxtyrHDzmdSVIA0pzxBoici1GZDS0fUStbX85r6ao,5906
- alibuild_helpers/deps.py,sha256=nHLyNqVuTUfW5bqfzCDxAaVVsklv5wHMl4FMMfTQF-8,4909
- alibuild_helpers/doctor.py,sha256=CSvfwmD28NRmvSnmZRTHWaf11pAoSIlMxJ1yW00Xh9c,9685
+ alibuild_helpers/_version.py,sha256=7YOMYiZjmL0E_RF1eyvrcHHewMhpFYPcPSSKjeCvW00,708
+ alibuild_helpers/analytics.py,sha256=9rayt7cSGffVf9gbowdFOw81Tqn6AgEoEOCLcPuf_B4,4593
+ alibuild_helpers/args.py,sha256=CbxzJ8uW-wQ8e2R9WRrAspn2mBCUHuQ1ZreUHLX0T3I,31883
+ alibuild_helpers/build.py,sha256=1AQe6NMp-x7PhEbStchmiYXY_1VC46-M9zGxaMdv9iI,59395
+ alibuild_helpers/build_template.sh,sha256=h2iY6UkhE5KDEnqFrG2OAR3fLyKPIDTGIB1_4nZKe4A,14638
+ alibuild_helpers/clean.py,sha256=-G_N3pEMp8ecTQS7TceFT1p0_j1QABFbZb-4XVMF0po,3233
+ alibuild_helpers/cmd.py,sha256=KYyGktQlMffu7HnIs0aaBwNzQDPXytkjjNI37n3XNU8,5783
+ alibuild_helpers/deps.py,sha256=YPY9QUyvz_Dw3qWP4B4CsmWpyuUvh2MQq74OBzpYQkM,4891
+ alibuild_helpers/doctor.py,sha256=gqSeUmEGncMjSXWTHyCrG7LiC8uQK0c8LByF7FkJ2mU,9684
  alibuild_helpers/git.py,sha256=20JDRZX0wbJdsK__AI_nnS2rQmgElrMMD-OT6TDHCUU,4015
  alibuild_helpers/init.py,sha256=x7OAErHzn34ceNqg-0GuHudYigh18Mk-P3RhN2P5mEI,5088
- alibuild_helpers/log.py,sha256=OEflXNcGNgvVYLZbvVwd2Inyt12tnEw5RgkrsiAT2c0,4700
- alibuild_helpers/scm.py,sha256=pZfEnTgr_ILmtRT3BXeoYVJGq9rjigLXOLAGJMsnDko,1019
- alibuild_helpers/sl.py,sha256=Aw3-Lvq3bQ2s_KTw6PXgqcjSoY-s8_0A55GRPKks4x0,2915
- alibuild_helpers/sync.py,sha256=vfny1ZF_YahzlSSEtYOq1KFvQLj_ce7MZlV2KDk1-xg,32296
+ alibuild_helpers/log.py,sha256=zdSUDDxiKkC3lr_XIp2eDACx9kbjN6wswP-gBGIu0sk,4704
+ alibuild_helpers/scm.py,sha256=hjVASxDVuZg_eUGMyXg5icdt2X9RnuLZLGUOCC_fn7g,1011
+ alibuild_helpers/sl.py,sha256=erAP5uzsszo5Lk5Fhd4_SnWiczhuTxWEU9MiBG1UM94,2905
+ alibuild_helpers/sync.py,sha256=-2BJc1_pxuipxpyCbmTp81s_7mU9vv1A5Sk9QHRHcy8,32709
  alibuild_helpers/templating_plugin.py,sha256=TWHdMQtDfX6Vqp5w9Huyh4ZEgLv5vYxAtPtX68xTOlk,662
- alibuild_helpers/utilities.py,sha256=nb0UC0qn2_rIJ-5GDnx-GoRAbF8tn1ELjZS-prUz9eo,26131
- alibuild_helpers/workarea.py,sha256=dYGZ7OOCg87W-njZMqX7Yu72800KZjx9v5Hg-T43juY,7442
+ alibuild_helpers/utilities.py,sha256=7o1RupCbGOwib3TFaiRZMzcYTrW3wrlBrse9JrZ_NP0,26074
+ alibuild_helpers/workarea.py,sha256=qYYIVyc8x5PrFpYmscr_ytI4znEVth82GVz1-dT_0JQ,7535
  debian/changelog,sha256=N-9FA5VD4VmD_tAwFfgqPqG7KUw1Jktyz14zP6aLxZs,300
  debian/compat,sha256=kX3zMg13jduqXFx3QrxARr-APDbtKwUPMIRO0gZ4NGk,3
  debian/control,sha256=UL8ZCLiCnNH75oVtldYPfrBO8DH7VxvjXPIdTCvXgPc,476
@@ -41,8 +41,8 @@ docs/docs/user.md,sha256=5o150ssZnN1Tow35OWA6Gfw3PjlhJb27ip1740hasV8,20476
  docs/docs/stylesheets/extra.css,sha256=NAFcBZQe8gh2CTpJFwyv93FvblnF5RaEjtsHxtlFD-w,215
  templates/alibuild_to_please.jnj,sha256=48SfIwq55zlb5_5lu6WAHSznXE0EfUNcHmFrmzMUSns,1723
  tests/test_analytics.py,sha256=IlxATGj-MU0sTVqpi2-EKrIhyV9C48K5IZ39kWFz-Os,1942
- tests/test_args.py,sha256=8d8BybESxbIDvEOOSHmmd2rCTgNxI_vF-sgbDojI-Fg,8867
- tests/test_build.py,sha256=5WtCmrvcG7U2i5jwBdPhMJzINsYQ6WaySEFu-TNnZHc,19017
+ tests/test_args.py,sha256=2CHYis9H9eS6XhEr7-dib-TO_U-yP93Dil1bh9Q34WA,8866
+ tests/test_build.py,sha256=G0DQmv0ci_lFDFoJK2n_b-U2IjIMs7k5Tl5ptEhqXwk,19272
  tests/test_clean.py,sha256=Zm3gjtO8Pgl27xUzQIHeGqpe05YMVXZp6Sua59prvcE,7492
  tests/test_cmd.py,sha256=SsWWasMrhbIu9Lqyr_wpuvDjg72ACJ1H_zDlTnybBuE,5049
  tests/test_deps.py,sha256=bLDFuqLRJiCW4U71dzXYM0Niv-skQXSsKGb6LXGVsZ0,2113
@@ -52,10 +52,10 @@ tests/test_hashing.py,sha256=oAggZlZp-rZz5MRj4RBD5UZUxV0uNQOtVw47y-LHwt4,2941
  tests/test_init.py,sha256=y1n16H5JoYAyIihqQglVmPrIsz7VVqyjRz_XdDs4XZM,4281
  tests/test_log.py,sha256=5eA0lfFHyX4KOMkfQSsbNw9wAbh7t-KuOfvaFMaM0qg,1884
  tests/test_packagelist.py,sha256=MlVParqXn7zQXX3OTjHhY7sFvfgAoH-jWBMJlmsB5ls,8755
- tests/test_parseRecipe.py,sha256=Ar1SWWd-NLC6ZAs180RcHC3UPyWc1ItzIP9S57ADNM4,4982
+ tests/test_parseRecipe.py,sha256=dKnKixGWSqKjigOh3Hth7l24q9Dry1npLAYdWdopj94,4966
  tests/test_sync.py,sha256=ispdYLvTIvoG_EnsNZWb-gI-hzt7cgjxns3ghpraepE,14014
- tests/test_utilities.py,sha256=VfAu0oEWkO6YmOG4nZxA4YC3KViLGJZ79EJiapk88Rg,20497
- tests/test_workarea.py,sha256=RIbo9b4hckkf9jmsS2NwfdcRU20QRUF_Kjjc-IlK2GU,4897
+ tests/test_utilities.py,sha256=PXZ-An-CxOwEgs87uSpO7Vi605xxTcPSCNjbdyVojuQ,20457
+ tests/test_workarea.py,sha256=BoEmOyIbo6CBtq8S5eJTbUpGWhegMwQCs4gQKmjNKcQ,6280
  tests/testdist/broken1.sh,sha256=zdbBCVA9ThnK14Lu9Nm6Firw2EcnRF7budMA3xrcYEg,7
  tests/testdist/broken2.sh,sha256=9S1xEQPVCkN4MMb7zQT7S6tJoPgvbSbRx5HG6EiN0JA,4
  tests/testdist/broken3.sh,sha256=1Zs7ajCIndFPYCQdSjg913x5gNBYakUehTUXLpMQSEU,18
@@ -67,7 +67,7 @@ tests/testdist/clobber-initdotsh.sh,sha256=K4L8hiEgNg0hI2WAiEA65rWU5sp-fwOYkubpV
  tests/testdist/defaults-o2.sh,sha256=IJWVMYjQYz876twos1bj-5VnPv7WTdM0X-fd09j4xxM,325
  tests/testdist/delete-etc.sh,sha256=NFMApyWMLQu3HJMsx0x8ZEveCaXbosO6XzJk8KDRJdg,63
  tests/testdist/tracking-env.sh,sha256=3cJszuzJ5dxNYvHO4ydUOWIXwmMS7wfeb-JBCDK21O4,119
- alibuild-1.17.34a1.dist-info/METADATA,sha256=OuTpzuSDMKM7ef1OpQAOCZwkpNkMlO9VD9XjD_dPjQ0,2718
- alibuild-1.17.34a1.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
- alibuild-1.17.34a1.dist-info/top_level.txt,sha256=WjKmc89Vn0WlbEp9a9VmhwqRkeKxjUX_6NT3T8K3Hv0,45
- alibuild-1.17.34a1.dist-info/RECORD,,
+ alibuild-1.17.36.dist-info/METADATA,sha256=sAhP4xWSWPvLzesPTCKKHt03VSo4uRoqFJjWZBIcapQ,2709
+ alibuild-1.17.36.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+ alibuild-1.17.36.dist-info/top_level.txt,sha256=WjKmc89Vn0WlbEp9a9VmhwqRkeKxjUX_6NT3T8K3Hv0,45
+ alibuild-1.17.36.dist-info/RECORD,,
WHEEL CHANGED
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (80.9.0)
+ Generator: setuptools (80.10.2)
  Root-Is-Purelib: true
  Tag: py3-none-any

alibuild_helpers/_version.py CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
  commit_id: COMMIT_ID
  __commit_id__: COMMIT_ID

- __version__ = version = '1.17.34a1'
- __version_tuple__ = version_tuple = (1, 17, 34, 'a1')
+ __version__ = version = '1.17.36'
+ __version_tuple__ = version_tuple = (1, 17, 36)

  __commit_id__ = commit_id = None
alibuild_helpers/analytics.py CHANGED
@@ -72,7 +72,7 @@ def report(eventType, **metadata):
  ostype = "Macintosh" if architecture.startswith("osx") else "Linux"
  osversion, osprocessor = architecture.split("_", 1)
  args = ["curl", "--max-time", "5",
- "--user-agent", "aliBuild/%s (%s; %s %s) Python/%s" % (
+ "--user-agent", "aliBuild/{} ({}; {} {}) Python/{}".format(
  os.environ["ALIBUILD_VERSION"],
  ostype,
  osprocessor,
@@ -89,7 +89,7 @@ def report(eventType, **metadata):
  "https://www.google-analytics.com/collect"]
  try:
  subprocess.Popen(args)
- except:
+ except Exception:
  pass

  def report_event(category, action, label = "", value = None):
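
This is one instance of a cleanup applied throughout this release (also in build.py, sync.py, utilities.py and workarea.py): bare `except:` becomes `except Exception:`. A quick illustration of why the narrower form is preferable:

    # SystemExit and KeyboardInterrupt derive from BaseException, not Exception,
    # so with `except Exception:` a Ctrl-C still interrupts the program instead
    # of being silently swallowed along with real errors.
    import subprocess
    try:
        subprocess.Popen(["curl", "--version"])
    except Exception:
        pass  # e.g. FileNotFoundError if curl is absent; interrupts propagate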
alibuild_helpers/args.py CHANGED
@@ -206,6 +206,8 @@ def doParseArgs():
  help=("Assume we're not building %(metavar)s and all its (unique) dependencies. "
  "You can specify this option multiple times or separate multiple arguments "
  "with commas."))
+ deps_parser.add_argument("-e", dest="environment", action="append", default=[],
+ help="KEY=VALUE binding to add to the environment. May be specified multiple times.")

  deps_graph = deps_parser.add_argument_group(title="Customise graph output")
  deps_graph.add_argument("--neat", dest="neat", action="store_true",
@@ -482,7 +484,7 @@ def finaliseArgs(args, parser):
  else:
  args.develPrefix = basename(dirname(abspath(args.configDir)))
  if getattr(args, "docker", False):
- args.develPrefix = "%s-%s" % (args.develPrefix, args.architecture) if "develPrefix" in args else args.architecture
+ args.develPrefix = f"{args.develPrefix}-{args.architecture}" if "develPrefix" in args else args.architecture

  if args.action == "init":
  args.configDir = args.configDir % {"prefix": args.develPrefix + "/"}
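
The new `-e` option on the `deps` sub-parser is a standard argparse append option; a minimal standalone sketch (the dict-splitting helper at the end is illustrative, not code from this diff):

    import argparse

    # Repeated "-e KEY=VALUE" flags accumulate into a list via action="append".
    parser = argparse.ArgumentParser(prog="aliDeps")
    parser.add_argument("-e", dest="environment", action="append", default=[],
                        help="KEY=VALUE binding to add to the environment.")

    args = parser.parse_args(["-e", "CC=clang", "-e", "CXX=clang++"])
    assert args.environment == ["CC=clang", "CXX=clang++"]

    # Splitting the bindings into a dict (illustrative only):
    env = dict(binding.split("=", 1) for binding in args.environment)
    assert env == {"CC": "clang", "CXX": "clang++"}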
alibuild_helpers/build.py CHANGED
@@ -1,5 +1,6 @@
  from os.path import abspath, exists, basename, dirname, join, realpath
  from os import makedirs, unlink, readlink, rmdir
+ from pathlib import Path
  from alibuild_helpers import __version__
  from alibuild_helpers.analytics import report_event
  from alibuild_helpers.log import debug, info, banner, warning
@@ -19,7 +20,6 @@ from alibuild_helpers.sync import remote_from_url
  from alibuild_helpers.workarea import logged_scm, updateReferenceRepoSpec, checkout_sources
  from alibuild_helpers.log import ProgressPrint, log_current_package
  from glob import glob
- from textwrap import dedent
  from collections import OrderedDict
  from shlex import quote
  import tempfile
@@ -31,6 +31,7 @@ import socket
  import os
  import re
  import shutil
+ import sys
  import time


@@ -43,7 +44,7 @@ def writeAll(fn, txt) -> None:
  def readHashFile(fn):
  try:
  return open(fn).read().strip("\n")
- except IOError:
+ except OSError:
  return "0"


@@ -55,11 +56,9 @@ def update_git_repos(args, specs, buildOrder):
  """

  def update_repo(package, git_prompt):
- specs[package]["scm"] = Git()
- if specs[package]["is_devel_pkg"]:
- specs[package]["source"] = os.path.join(os.getcwd(), specs[package]["package"])
- if exists(os.path.join(specs[package]["source"], ".sl")) or exists(os.path.join(specs[package]["source"], ".git/sl")):
- specs[package]["scm"] = Sapling()
+ # Note: spec["scm"] should already be initialized before this is called
+ # This function just updates the repository and fetches refs
+ assert "scm" in specs[package], f"specs[{package!r}] has no scm key"
  updateReferenceRepoSpec(args.referenceSources, package, specs[package],
  fetch=args.fetchRepos, allowGitPrompt=git_prompt)

@@ -195,7 +194,7 @@ def storeHashes(package, specs, considerRelocation):
  # spec["env"] is of type OrderedDict[str, str].
  # spec["*_path"] are of type OrderedDict[str, list[str]].
  assert isinstance(spec[key], OrderedDict), \
- "spec[%r] was of type %r" % (key, type(spec[key]))
+ f"spec[{key!r}] was of type {type(spec[key])!r}"

  # Python 3.12 changed the string representation of OrderedDicts from
  # OrderedDict([(key, value)]) to OrderedDict({key: value}), so to remain
@@ -204,7 +203,7 @@ def storeHashes(package, specs, considerRelocation):
  h_all(", ".join(
  # XXX: We still rely on repr("str") being "'str'",
  # and on repr(["a", "b"]) being "['a', 'b']".
- "(%r, %r)" % (key, value)
+ f"({key!r}, {value!r})"
  for key, value in spec[key].items()
  ))
  h_all("])")
@@ -272,7 +271,7 @@ def hash_local_changes(spec):
  h = Hasher()
  if "track_env" in spec:
  assert isinstance(spec["track_env"], OrderedDict), \
- "spec[%r] was of type %r" % ("track_env", type(spec["track_env"]))
+ "spec[{!r}] was of type {!r}".format("track_env", type(spec["track_env"]))

  # Python 3.12 changed the string representation of OrderedDicts from
  # OrderedDict([(key, value)]) to OrderedDict({key: value}), so to remain
@@ -281,7 +280,7 @@ def hash_local_changes(spec):
  h(", ".join(
  # XXX: We still rely on repr("str") being "'str'",
  # and on repr(["a", "b"]) being "['a', 'b']".
- "(%r, %r)" % (key, value) for key, value in spec["track_env"].items()))
+ f"({key!r}, {value!r})" for key, value in spec["track_env"].items()))
  h("])")
  def hash_output(msg, args):
  lines = msg % args
@@ -365,6 +364,7 @@ def generate_initdotsh(package, specs, architecture, post_build=False):
  commit_hash=quote(spec["commit_hash"]),
  ) for line in (
  'export {bigpackage}_ROOT="$WORK_DIR/$ALIBUILD_ARCH_PREFIX"/{package}/{version}-{revision}',
+ 'export RECC_PREFIX_MAP="${bigpackage}_ROOT=/recc/{bigpackage}_ROOT:$RECC_PREFIX_MAP"',
  "export {bigpackage}_VERSION={version}",
  "export {bigpackage}_REVISION={revision}",
  "export {bigpackage}_HASH={hash}",
@@ -380,12 +380,12 @@ def generate_initdotsh(package, specs, architecture, post_build=False):
  # First, output a sensible error message if types are wrong.
  for key in ("env", "append_path", "prepend_path"):
  dieOnError(not isinstance(spec.get(key, {}), dict),
- "Tag `%s' in %s should be a dict." % (key, package))
+ f"Tag `{key}' in {package} should be a dict.")

  # Set "env" variables.
  # We only put the values in double-quotes, so that they can refer to other
  # shell variables or do command substitution (e.g. $(brew --prefix ...)).
- lines.extend('export {}="{}"'.format(key, value)
+ lines.extend(f'export {key}="{value}"'
  for key, value in spec.get("env", {}).items()
  if key != "DYLD_LIBRARY_PATH")

@@ -402,7 +402,7 @@ def generate_initdotsh(package, specs, architecture, post_build=False):
  # By default we add the .../bin directory to PATH and .../lib to LD_LIBRARY_PATH.
  # Prepend to these paths, so that our packages win against system ones.
  for key, value in (("PATH", "bin"), ("LD_LIBRARY_PATH", "lib")):
- prepend_path.setdefault(key, []).insert(0, "${}_ROOT/{}".format(bigpackage, value))
+ prepend_path.setdefault(key, []).insert(0, f"${bigpackage}_ROOT/{value}")
  lines.extend('export {key}="{value}${{{key}+:${key}}}"'
  .format(key=key, value=":".join(value))
  for key, value in prepend_path.items()
@@ -482,12 +482,17 @@ def doBuild(args, parser):

  makedirs(join(workDir, "SPECS"), exist_ok=True)

- # If the alidist workdir contains a .sl directory, we use Sapling as SCM.
- # Otherwise, we default to git (without checking for the actual presence of
- # .git). We mustn't check for a .git directory, because some tests use a
- # subdirectory of the alibuild source tree as the "alidist" checkout, and
- # that won't have a .git directory.
- scm = exists("%s/.sl" % args.configDir) and Sapling() or Git()
+ # If the alidist workdir contains a .sl directory (or .git/sl for git repos
+ # with Sapling enabled), we use Sapling as SCM. Otherwise, we default to git
+ # (without checking for the actual presence of .git). We mustn't check for a
+ # .git directory, because some tests use a subdirectory of the alibuild source
+ # tree as the "alidist" checkout, and that won't have a .git directory.
+ config_path = Path(args.configDir)
+ has_sapling = (config_path / ".sl").exists() or (config_path / ".git" / "sl").exists()
+ if has_sapling and shutil.which("sl"):
+ scm = Sapling()
+ else:
+ scm = Git()
  try:
  checkedOutCommitName = scm.checkedOutCommitName(directory=args.configDir)
  except SCMError:
@@ -563,13 +568,13 @@ def doBuild(args, parser):
  del develCandidates, develCandidatesUpper, develPkgsUpper

  if buildOrder:
- if args.onlyDeps:
+ if args.onlyDeps:
  builtPackages = buildOrder[:-1]
  else:
  builtPackages = buildOrder
  if len(builtPackages) > 1:
  banner("Packages will be built in the following order:\n - %s",
- "\n - ".join(x+" (development package)" if x in develPkgs else "%s@%s" % (x, specs[x]["tag"])
+ "\n - ".join(x+" (development package)" if x in develPkgs else "{}@{}".format(x, specs[x]["tag"])
  for x in builtPackages if x != "defaults-release"))
  else:
  banner("No dependencies of package %s to build.", buildOrder[-1])
@@ -587,11 +592,18 @@ def doBuild(args, parser):

  for pkg, spec in specs.items():
  spec["is_devel_pkg"] = pkg in develPkgs
- spec["scm"] = Git()
  if spec["is_devel_pkg"]:
- spec["source"] = os.path.join(os.getcwd(), pkg)
- if "source" in spec and exists(os.path.join(spec["source"], ".sl")):
- spec["scm"] = Sapling()
+ spec["source"] = str(Path.cwd() / pkg)
+
+ # Only initialize Sapling if it's in PATH and the repo uses it
+ use_sapling = False
+ if "source" in spec:
+ source_path = Path(spec["source"])
+ has_sapling = ( (source_path / ".sl").exists() or (source_path / ".git" / "sl").exists() )
+ if has_sapling and shutil.which("sl"):
+ use_sapling = True
+ spec["scm"] = Sapling() if use_sapling else Git()
+
  reference_repo = join(os.path.abspath(args.referenceSources), pkg.lower())
  if exists(reference_repo):
  spec["reference"] = reference_repo
@@ -817,7 +829,7 @@ def doBuild(args, parser):
  develPrefix = possibleDevelPrefix

  if possibleDevelPrefix:
- spec["build_family"] = "%s-%s" % (possibleDevelPrefix, args.defaults)
+ spec["build_family"] = f"{possibleDevelPrefix}-{args.defaults}"
  else:
  spec["build_family"] = args.defaults
  if spec["package"] == mainPackage:
@@ -937,7 +949,7 @@ def doBuild(args, parser):

  # Now that we have all the information about the package we want to build, let's
  # check if it wasn't built / unpacked already.
- hashPath= "%s/%s/%s/%s-%s" % (workDir,
+ hashPath= "{}/{}/{}/{}-{}".format(workDir,
  args.architecture,
  spec["package"],
  spec["version"],
@@ -980,12 +992,12 @@ def doBuild(args, parser):
  unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest"))
  if "develPrefix" in args:
  unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest-" + args.develPrefix))
- except:
+ except Exception:
  pass
  try:
  rmdir(join(buildWorkDir, "BUILD"))
  rmdir(join(workDir, "INSTALLROOT"))
- except:
+ except Exception:
  pass
  continue

@@ -1008,15 +1020,10 @@ def doBuild(args, parser):

  # The actual build script.
  debug("spec = %r", spec)
-
- cmd_raw = ""
- try:
- fp = open(dirname(realpath(__file__))+'/build_template.sh', 'r')
- cmd_raw = fp.read()
- fp.close()
- except:
- from pkg_resources import resource_string
- cmd_raw = resource_string("alibuild_helpers", 'build_template.sh')
+
+ fp = open(dirname(realpath(__file__))+'/build_template.sh')
+ cmd_raw = fp.read()
+ fp.close()

  if args.docker:
  cachedTarball = re.sub("^" + workDir, "/sw", spec["cachedTarball"])
@@ -1030,7 +1037,7 @@
  spec["version"] + "-" + spec["revision"])

  makedirs(scriptDir, exist_ok=True)
- writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])
+ writeAll("{}/{}.sh".format(scriptDir, spec["package"]), spec["recipe"])
  writeAll("%s/build.sh" % scriptDir, cmd_raw % {
  "provenance": create_provenance_info(spec["package"], specs, args),
  "initdotsh_deps": generate_initdotsh(p, specs, args.architecture, post_build=False),
@@ -1093,7 +1100,7 @@ def doBuild(args, parser):
  scriptDir=quote(scriptDir),
  extraArgs=" ".join(map(quote, args.docker_extra_args)),
  additionalEnv=" ".join(
- "-e {}={}".format(var, quote(value)) for var, value in buildEnvironment),
+ f"-e {var}={quote(value)}" for var, value in buildEnvironment),
  # Used e.g. by O2DPG-sim-tests to find the O2DPG repository.
  develVolumes=" ".join(
  '-v "$PWD/$(readlink {pkg} || echo {pkg})":/{pkg}:rw'.format(pkg=quote(spec["package"]))
@@ -1105,12 +1112,14 @@ def doBuild(args, parser):
  )
  else:
  os.environ.update(buildEnvironment)
- build_command = "%s -e -x %s/build.sh 2>&1" % (BASH, quote(scriptDir))
+ build_command = f"{BASH} -e -x {quote(scriptDir)}/build.sh 2>&1"

  debug("Build command: %s", build_command)
+ progress_msg = "Unpacking %s@%s" if cachedTarball else "Compiling %s@%s"
+ if not cachedTarball and not args.debug:
+ progress_msg += " (use --debug for full output)"
  progress = ProgressPrint(
- ("Unpacking %s@%s" if cachedTarball else
- "Compiling %s@%s (use --debug for full output)") %
+ progress_msg %
  (spec["package"],
  args.develPrefix if "develPrefix" in args and spec["is_devel_pkg"] else spec["version"])
  )
@@ -1127,51 +1136,100 @@ def doBuild(args, parser):
  if spec["is_devel_pkg"]:
  updatablePkgs.append(spec["package"])

- buildErrMsg = dedent("""\
- Error while executing {sd}/build.sh on `{h}'.
- Log can be found in {w}/BUILD/{p}-latest{devSuffix}/log
- Please upload it to CERNBox/Dropbox if you intend to request support.
- Build directory is {w}/BUILD/{p}-latest{devSuffix}/{p}.
- """).format(
- h=socket.gethostname(),
- sd=scriptDir,
- w=buildWorkDir,
- p=spec["package"],
- devSuffix="-" + args.develPrefix
- if "develPrefix" in args and spec["is_devel_pkg"]
- else "",
- )
- if updatablePkgs:
- buildErrMsg += dedent("""
- Note that you have packages in development mode.
- Devel sources are not updated automatically, you must do it by hand.\n
- This problem might be due to one or more outdated devel sources.
- To update all development packages required for this build it is usually sufficient to do:
- """)
- buildErrMsg += "".join("\n ( cd %s && git pull --rebase )" % dp for dp in updatablePkgs)
+ # Determine paths
+ devSuffix = "-" + args.develPrefix if "develPrefix" in args and spec["is_devel_pkg"] else ""
+ log_path = f"{buildWorkDir}/BUILD/{spec['package']}-latest{devSuffix}/log"
+ build_dir = f"{buildWorkDir}/BUILD/{spec['package']}-latest{devSuffix}/{spec['package']}"
+
+ # Use relative paths if we're inside the work directory
+ try:
+ from os.path import relpath
+ log_path = relpath(log_path, os.getcwd())
+ build_dir = relpath(build_dir, os.getcwd())
+ except (ValueError, OSError):
+ pass # Keep absolute paths if relpath fails
+
+ # Color codes for error message (if TTY)
+ bold = "\033[1m" if sys.stderr.isatty() else ""
+ red = "\033[31m" if sys.stderr.isatty() else ""
+ reset = "\033[0m" if sys.stderr.isatty() else ""
+
+ # Build the error message
+ devel_note = " (development package)" if spec["is_devel_pkg"] else ""
+ buildErrMsg = f"{red}{bold}BUILD FAILED:{reset} {spec['package']}@{spec['version']}{devel_note}\n"
+ buildErrMsg += "=" * 70 + "\n\n"
+
+ buildErrMsg += f"{bold}Log File:{reset}\n"
+ buildErrMsg += f" {log_path}\n\n"
+
+ buildErrMsg += f"{bold}Build Directory:{reset}\n"
+ buildErrMsg += f" {build_dir}\n"

  # Gather build info for the error message
  try:
+ detected_arch = detectArch()
+
+ # Only show safe arguments (no tokens/secrets) in CLI-usable format
  safe_args = {
  "pkgname", "defaults", "architecture", "forceUnknownArch",
  "develPrefix", "jobs", "noSystem", "noDevel", "forceTracked", "plugin",
  "disable", "annotate", "onlyDeps", "docker"
- }
- args_str = " ".join(f"--{k}={v}" for k, v in vars(args).items() if v and k in safe_args)
- detected_arch = detectArch()
- buildErrMsg += dedent(f"""
- Build info:
- OS: {detected_arch}
- Using aliBuild from alibuild@{__version__ or "unknown"} recipes in alidist@{os.environ["ALIBUILD_ALIDIST_HASH"][:10]}
- Build arguments: {args_str}
- """)
+ }
+
+ cli_args = []
+ for k, v in vars(args).items():
+ if not v or k not in safe_args:
+ continue
+
+ # Format based on type for CLI usage
+ if isinstance(v, bool):
+ if v: # Only show if True
+ cli_args.append(f"--{k}")
+ elif isinstance(v, list):
+ if v: # Only show non-empty lists
+ # For lists, use multiple --flag value or --flag=val1,val2
+ for item in v:
+ cli_args.append(f"--{k}={quote(str(item))}")
+ else:
+ # Quote if needed
+ cli_args.append(f"--{k}={quote(str(v))}")
+
+ args_str = " ".join(cli_args)
+
+ buildErrMsg += f"\n{bold}Environment:{reset}\n"
+ buildErrMsg += f" OS: {detected_arch}\n"
+ buildErrMsg += f" aliBuild: {__version__ or 'unknown'} (alidist@{os.environ['ALIBUILD_ALIDIST_HASH'][:10]})\n"

  if detected_arch.startswith("osx"):
- buildErrMsg += f'XCode version: {getstatusoutput("xcodebuild -version")[1]}'
+ xcode_info = getstatusoutput("xcodebuild -version")[1]
+ # Combine XCode version lines into one
+ xcode_lines = xcode_info.strip().split('\n')
+ if len(xcode_lines) >= 2:
+ xcode_str = f"{xcode_lines[0]} ({xcode_lines[1]})"
+ else:
+ xcode_str = xcode_lines[0] if xcode_lines else "Unknown"
+ buildErrMsg += f" XCode: {xcode_str}\n"
+
+ buildErrMsg += f" Arguments: {args_str}\n"

  except Exception as exc:
  warning("Failed to gather build info", exc_info=exc)

+ # Add note about development packages if applicable
+ if updatablePkgs:
+ buildErrMsg += f"\n{bold}Development Packages:{reset}\n"
+ buildErrMsg += " Development sources are not updated automatically.\n"
+ buildErrMsg += " This may be due to outdated sources. To update:\n"
+ buildErrMsg += "".join(f"\n ( cd {dp} && git pull --rebase )" for dp in updatablePkgs)
+ buildErrMsg += "\n"
+
+ # Add Next Steps section
+ buildErrMsg += f"\n{bold}Next Steps:{reset}\n"
+ buildErrMsg += f" • View error log: cat {log_path}\n"
+ if not args.debug:
+ buildErrMsg += f" • Rebuild with debug: aliBuild build {spec['package']} --debug\n"
+ buildErrMsg += f" • Please upload the full log to CERNBox/Dropbox if you intend to request support.\n"
+

  dieOnError(err, buildErrMsg.strip())

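The main behavioural change in build.py is the stricter Sapling detection: previously a `.sl` directory alone selected Sapling, even when the `sl` client was not installed. A standalone sketch of the new logic (`detect_scm` is a made-up name; `doBuild` inlines this):

    import shutil
    from pathlib import Path

    # A checkout is treated as Sapling only if it has .sl (or .git/sl, for
    # git repos driven through Sapling) AND the "sl" client is on PATH;
    # otherwise aliBuild falls back to Git.
    def detect_scm(checkout: str) -> str:
        path = Path(checkout)
        has_sapling = (path / ".sl").exists() or (path / ".git" / "sl").exists()
        return "Sapling" if has_sapling and shutil.which("sl") else "Git"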
alibuild_helpers/build_template.sh CHANGED
@@ -71,6 +71,16 @@ export BUILDROOT="$ALIBUILD_BUILD_WORK_DIR/BUILD/$PKGHASH"
  export SOURCEDIR="$WORK_DIR/SOURCES/$PKGNAME/$PKGVERSION/$COMMIT_HASH"
  export BUILDDIR="$BUILDROOT/$PKGNAME"

+ # All caching for RECC should happen relative to $WORK_DIR
+ export RECC_PROJECT_ROOT=$WORK_DIR
+ export RECC_WORKING_DIR_PREFIX=$WORK_DIR
+ # Moreover we allow caching stuff across different builds of the same
+ # package, but not across packages.
+ export RECC_PREFIX_MAP=$BUILDDIR=/recc/BUILDDIR-$PKGNAME:$INSTALLROOT=/recc/INSTALLROOT-$PKGNAME:$SOURCEDIR=/recc/SOURCEDIR-$PKGNAME
+ #export RECC_PREFIX_MAP=$RECC_PREFIX_MAP:$(readlink $BUILDDIR)=/recc/BUILDDIR-$PKGNAME:$(readlink $INSTALLROOT)=/recc/INSTALLROOT-$PKGNAME:$(readlink $SOURCEDIR)=/recc/SOURCEDIR-$PKGNAME
+ # No point in mixing packages
+ export RECC_ACTION_SALT="$PKGNAME"
+
  rm -fr "$WORK_DIR/INSTALLROOT/$PKGHASH"
  # We remove the build directory only if we are not in incremental mode.
  if [[ "$INCREMENTAL_BUILD_HASH" == 0 ]] && ! rm -rf "$BUILDROOT"; then
alibuild_helpers/clean.py CHANGED
@@ -43,13 +43,13 @@ def decideClean(workDir, architecture, aggressiveCleanup):
  # we do not need the actual tarballs after they have been built.
  toDelete = ["%s/TMP" % workDir, "%s/INSTALLROOT" % workDir]
  if aggressiveCleanup:
- toDelete += ["%s/TARS/%s/store" % (workDir, architecture),
+ toDelete += [f"{workDir}/TARS/{architecture}/store",
  "%s/SOURCES" % (workDir)]
  allBuildStuff = glob.glob("%s/BUILD/*" % workDir)
  toDelete += [x for x in allBuildStuff
  if not path.islink(x) and basename(x) not in symlinksBuild]
- installGlob ="%s/%s/*/" % (workDir, architecture)
- installedPackages = set([dirname(x) for x in glob.glob(installGlob)])
+ installGlob =f"{workDir}/{architecture}/*/"
+ installedPackages = {dirname(x) for x in glob.glob(installGlob)}
  symlinksInstall = []
  for x in installedPackages:
  symlinksInstall += [path.realpath(y) for y in glob.glob(x + "/latest*")]
alibuild_helpers/cmd.py CHANGED
@@ -6,7 +6,7 @@ from textwrap import dedent
  from subprocess import TimeoutExpired
  from shlex import quote

- from alibuild_helpers.log import debug, warning, dieOnError
+ from alibuild_helpers.log import debug, error, dieOnError

  def decode_with_fallback(data):
  """Try to decode DATA as utf-8; if that doesn't work, fall back to latin-1.
@@ -29,7 +29,7 @@ def getoutput(command, timeout=None):
  try:
  stdout, stderr = proc.communicate(timeout=timeout)
  except TimeoutExpired:
- warning("Process %r timed out; terminated", command)
+ error("Process %r timed out; terminated", command)
  proc.terminate()
  stdout, stderr = proc.communicate()
  dieOnError(proc.returncode, "Command %s failed with code %d: %s" %
@@ -43,7 +43,7 @@ def getstatusoutput(command, timeout=None, cwd=None):
  try:
  merged_output, _ = proc.communicate(timeout=timeout)
  except TimeoutExpired:
- warning("Process %r timed out; terminated", command)
+ error("Process %r timed out; terminated", command)
  proc.terminate()
  merged_output, _ = proc.communicate()
  merged_output = decode_with_fallback(merged_output)
@@ -88,14 +88,8 @@ class DockerRunner:
  def __enter__(self):
  if self._docker_image:
  # "sleep inf" pauses forever, until we kill it.
- envOpts = []
- volumes = []
- for env in self._extra_env.items():
- envOpts.append("-e")
- envOpts.append(f"{env[0]}={env[1]}")
- for v in self._extra_volumes:
- volumes.append("-v")
- volumes.append(v)
+ envOpts = [opt for k, v in self._extra_env.items() for opt in ("-e", f"{k}={v}")]
+ volumes = [opt for v in self._extra_volumes for opt in ("-v", v)]
  cmd = ["docker", "run", "--detach"] + envOpts + volumes + ["--rm", "--entrypoint="]
  cmd += self._docker_run_args
  cmd += [self._docker_image, "sleep", "inf"]
@@ -105,13 +99,13 @@ class DockerRunner:
  if self._container is None:
  command_prefix=""
  if self._extra_env:
- command_prefix="env " + " ".join("{}={}".format(k, quote(v)) for (k,v) in self._extra_env.items()) + " "
- return getstatusoutput("{}{} -c {}".format(command_prefix, BASH, quote(cmd))
+ command_prefix="env " + " ".join(f"{k}={quote(v)}" for (k,v) in self._extra_env.items()) + " "
+ return getstatusoutput(f"{command_prefix}{BASH} -c {quote(cmd)}"
  , cwd=cwd)
  envOpts = []
  for env in self._extra_env.items():
  envOpts.append("-e")
- envOpts.append("{}={}".format(env[0], env[1]))
+ envOpts.append(f"{env[0]}={env[1]}")
  exec_cmd = ["docker", "container", "exec"] + envOpts + [self._container, "bash", "-c", cmd]
  return getstatusoutput(exec_cmd, cwd=cwd)

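The rewritten option building in `DockerRunner.__enter__` flattens flag/value pairs with a nested comprehension; a standalone illustration with hypothetical values:

    # Each mapping entry becomes a ("-e", "KEY=VALUE") pair, flattened in
    # insertion order; volumes get the same treatment with "-v".
    extra_env = {"CC": "gcc", "MAKEFLAGS": "-j8"}
    extra_volumes = ["/sw:/sw", "/tmp:/tmp"]

    env_opts = [opt for k, v in extra_env.items() for opt in ("-e", f"{k}={v}")]
    volumes = [opt for v in extra_volumes for opt in ("-v", v)]

    assert env_opts == ["-e", "CC=gcc", "-e", "MAKEFLAGS=-j8"]
    assert volumes == ["-v", "/sw:/sw", "-v", "/tmp:/tmp"]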
alibuild_helpers/deps.py CHANGED
@@ -83,18 +83,18 @@ def doDeps(args, parser):
  assert color, "This should not happen (happened for %s)" % k

  # Node definition
- dot += '"%s" [shape=box, style="rounded,filled", fontname="helvetica", fillcolor=%s]\n' % (k,color)
+ dot += f'"{k}" [shape=box, style="rounded,filled", fontname="helvetica", fillcolor={color}]\n'

  # Connections (different whether it's a build dependency or a runtime one)
  for dep in spec["build_requires"]:
- dot += '"%s" -> "%s" [color=grey70]\n' % (k, dep)
+ dot += f'"{k}" -> "{dep}" [color=grey70]\n'
  for dep in spec["runtime_requires"]:
- dot += '"%s" -> "%s" [color=dodgerblue3]\n' % (k, dep)
+ dot += f'"{k}" -> "{dep}" [color=dodgerblue3]\n'

  dot += "}\n"

  if args.outdot:
- fp = open(args.outdot, "wt")
+ fp = open(args.outdot, "w")
  else:
  fp = NamedTemporaryFile(delete=False, mode="wt")
  fp.write(dot)
alibuild_helpers/doctor.py CHANGED
@@ -33,7 +33,7 @@ def checkPreferSystem(spec, cmd, homebrew_replacement, getstatusoutput_docker):
  warning("Package %s cannot be picked up from the system and will be built by aliBuild.\n"
  "This is due to the fact the following script fails:\n\n%s\n\n"
  "with the following output:\n\n%s\n",
- spec["package"], cmd, "\n".join("%s: %s" % (spec["package"], x) for x in out.split("\n")))
+ spec["package"], cmd, "\n".join("{}: {}".format(spec["package"], x) for x in out.split("\n")))
  return (err, "")

  def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
@@ -53,7 +53,7 @@ def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
  "This is due to the fact that the following script fails:\n\n%s\n"
  "with the following output:\n\n%s\n%s\n",
  spec["package"], cmd,
- "\n".join("%s: %s" % (spec["package"], x) for x in out.split("\n")),
+ "\n".join("{}: {}".format(spec["package"], x) for x in out.split("\n")),
  spec.get("system_requirement_missing"))
  return (err, "")

@@ -125,7 +125,7 @@ def doDoctor(args, parser):
  packages = []
  exitcode = 0
  for p in args.packages:
- path = "%s/%s.sh" % (args.configDir, p.lower())
+ path = f"{args.configDir}/{p.lower()}.sh"
  if not exists(path):
  error("Cannot find recipe %s for package %s.", path, p)
  exitcode = 1
@@ -166,7 +166,7 @@ def doDoctor(args, parser):
  taps = taps,
  log = info)

- alwaysBuilt = set(x for x in specs) - fromSystem - own - failed
+ alwaysBuilt = {x for x in specs} - fromSystem - own - failed
  if alwaysBuilt:
  banner("The following packages will be built by aliBuild because\n"
  " usage of a system version of it is not allowed or supported, by policy:\n\n- %s",
alibuild_helpers/log.py CHANGED
@@ -105,7 +105,7 @@ class ProgressPrint:
  return
  self.erase()
  if msg:
- sys.stderr.write(": %s%s\033[m" % ("\033[31m" if error else "\033[32m", msg))
+ sys.stderr.write(": {}{}\033[m".format("\033[31m" if error else "\033[32m", msg))
  sys.stderr.write("\n")
  sys.stderr.flush()

alibuild_helpers/scm.py CHANGED
@@ -2,7 +2,7 @@ class SCMError(Exception):
  """Signal that an SCM-related error occurred."""


- class SCM(object):
+ class SCM:
  def checkedOutCommitName(self, directory):
  raise NotImplementedError
  def branchOrRef(self, directory):
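
`class SCM(object):` → `class SCM:` (and likewise for FileReader, GitReader, Recoder and BufferReader further down) is purely cosmetic in Python 3, where every class is new-style:

    class A: pass
    class B(object): pass

    # Both implicitly inherit from object; there is no behavioural difference.
    assert A.__bases__ == B.__bases__ == (object,)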
alibuild_helpers/sl.py CHANGED
@@ -28,7 +28,7 @@ class Sapling(SCM):
  directory=quote(directory),
  ), timeout=SL_COMMAND_TIMEOUT_SEC)
  if err > 1:
- raise SCMError("Error {} from sl bookmark -r . : {}".format(err, output))
+ raise SCMError(f"Error {err} from sl bookmark -r . : {output}")
  # We use "none" to indicate there are no bookmarks. This means
  # that a devel package will act as a single branch, regardless of where we are.
  if not output.strip():
alibuild_helpers/sync.py CHANGED
@@ -114,7 +114,7 @@ class HttpRemoteSync:
  s3Request = re.match("https://s3.cern.ch/swift/v1[/]+([^/]*)/(.*)$", url)
  if s3Request:
  [bucket, prefix] = s3Request.groups()
- url = "https://s3.cern.ch/swift/v1/%s/?prefix=%s" % (bucket, prefix.lstrip("/"))
+ url = "https://s3.cern.ch/swift/v1/{}/?prefix={}".format(bucket, prefix.lstrip("/"))
  resp = get(url, verify=not self.insecure, timeout=self.httpTimeoutSec)
  if resp.status_code == 404:
  # No need to retry any further
@@ -136,7 +136,7 @@ class HttpRemoteSync:
  if dest:
  try:
  os.unlink(dest+".tmp")
- except:
+ except Exception:
  pass
  return None

@@ -165,7 +165,7 @@ class HttpRemoteSync:
  # Find the first tarball that matches any possible hash and fetch it.
  for pkg_hash in spec["remote_hashes"]:
  store_path = resolve_store_path(self.architecture, pkg_hash)
- tarballs = self.getRetry("%s/%s/" % (self.remoteStore, store_path),
+ tarballs = self.getRetry(f"{self.remoteStore}/{store_path}/",
  session=session)
  if tarballs:
  use_tarball = tarballs[0]["name"]
@@ -203,7 +203,7 @@ class HttpRemoteSync:
  with requests.Session() as session:
  # Fetch manifest file with initial symlinks. This file is updated
  # regularly; we use it to avoid many small network requests.
- manifest = self.getRetry("%s/%s.manifest" % (self.remoteStore, links_path),
+ manifest = self.getRetry(f"{self.remoteStore}/{links_path}.manifest",
  returnResult=True, session=session)
  symlinks = {
  linkname.decode("utf-8"): target.decode("utf-8")
@@ -214,7 +214,7 @@ class HttpRemoteSync:
  # Now add any remaining symlinks that aren't in the manifest yet. There
  # should always be relatively few of these, as the separate network
  # requests are a bit expensive.
- for link in self.getRetry("%s/%s/" % (self.remoteStore, links_path),
+ for link in self.getRetry(f"{self.remoteStore}/{links_path}/",
  session=session):
  linkname = link["name"]
  if linkname in symlinks:
@@ -502,12 +502,23 @@ class Boto3RemoteSync:
  # have to install it in the first place.
  try:
  import boto3
+ from botocore.config import Config
  except ImportError:
  error("boto3 must be installed to use %s", Boto3RemoteSync)
  sys.exit(1)

  try:
- self.s3 = boto3.client("s3", endpoint_url="https://s3.cern.ch",
+ try:
+ config = Config(
+ request_checksum_calculation='WHEN_REQUIRED',
+ response_checksum_validation='WHEN_REQUIRED',
+ )
+ except TypeError:
+ # Older boto3 versions don't support these parameters (<1.36.0)
+ config = None
+ self.s3 = boto3.client("s3",
+ **({"config": config} if config else {}),
+ endpoint_url="https://s3.cern.ch",
  aws_access_key_id=os.environ["AWS_ACCESS_KEY_ID"],
  aws_secret_access_key=os.environ["AWS_SECRET_ACCESS_KEY"])
  except KeyError:
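
The nested try/except above is the runtime replacement for the old `boto3<1.36.0` pin in METADATA. A minimal sketch of the same probe, assuming botocore is installed; `Config` raises TypeError for keyword arguments it does not know, which is exactly what pre-1.36 versions do with the checksum options:

    from botocore.config import Config

    try:
        # botocore >= 1.36: only checksum requests/responses when required,
        # keeping non-AWS endpoints such as s3.cern.ch working.
        config = Config(request_checksum_calculation="WHEN_REQUIRED",
                        response_checksum_validation="WHEN_REQUIRED")
    except TypeError:
        config = None  # older botocore: defaults never send the new checksums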
alibuild_helpers/utilities.py CHANGED
@@ -261,7 +261,7 @@ def detectArch():
  with open("/etc/os-release") as osr:
  osReleaseLines = osr.readlines()
  hasOsRelease = True
- except (IOError,OSError):
+ except OSError:
  osReleaseLines = []
  hasOsRelease = False
  try:
@@ -270,7 +270,7 @@ def detectArch():
  return "osx_x86-64"
  else:
  return "osx_arm64"
- except:
+ except Exception:
  pass
  try:
  import distro
@@ -280,7 +280,7 @@ def detectArch():
  if not platformProcessor or " " in platformProcessor:
  platformProcessor = platform.machine()
  return doDetectArch(hasOsRelease, osReleaseLines, platformTuple, platformSystem, platformProcessor)
- except:
+ except Exception:
  return doDetectArch(hasOsRelease, osReleaseLines, ["unknown", "", ""], "", "")

  def filterByArchitectureDefaults(arch, defaults, requires):
@@ -314,7 +314,7 @@ def readDefaults(configDir, defaults, error, architecture):
  if err:
  error(err)
  sys.exit(1)
- archDefaults = "%s/defaults-%s.sh" % (configDir, architecture)
+ archDefaults = f"{configDir}/defaults-{architecture}.sh"
  archMeta = {}
  archBody = ""
  if exists(archDefaults):
@@ -336,20 +336,20 @@ def getRecipeReader(url:str , dist=None):
  return FileReader(url)

  # Read a recipe from a file
- class FileReader(object):
+ class FileReader:
  def __init__(self, url) -> None:
  self.url = url
  def __call__(self):
  return open(self.url).read()

  # Read a recipe from a git repository using git show.
- class GitReader(object):
+ class GitReader:
  def __init__(self, url, configDir) -> None:
  self.url, self.configDir = url, configDir
  def __call__(self):
  m = re.search(r'^dist:(.*)@([^@]+)$', self.url)
  fn, gh = m.groups()
- err, d = git(("show", "{gh}:{fn}.sh".format(gh=gh, fn=fn.lower())),
+ err, d = git(("show", f"{gh}:{fn.lower()}.sh"),
  directory=self.configDir)
  if err:
  raise RuntimeError("Cannot read recipe {fn} from reference {gh}.\n"
@@ -377,7 +377,7 @@ def yamlDump(s):
  k = dumper.represent_data(k)
  v = dumper.represent_data(v)
  rep.append((k, v))
- return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', rep)
+ return yaml.nodes.MappingNode('tag:yaml.org,2002:map', rep)
  YamlOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
  return yaml.dump(s, Dumper=YamlOrderedDumper)

@@ -391,14 +391,14 @@ def parseRecipe(reader):
  validateSpec(spec)
  except RuntimeError as e:
  err = str(e)
- except IOError as e:
+ except OSError as e:
  err = str(e)
  except SpecError as e:
- err = "Malformed header for %s\n%s" % (reader.url, str(e))
+ err = f"Malformed header for {reader.url}\n{str(e)}"
  except yaml.scanner.ScannerError as e:
- err = "Unable to parse %s\n%s" % (reader.url, str(e))
+ err = f"Unable to parse {reader.url}\n{str(e)}"
  except yaml.parser.ParserError as e:
- err = "Unable to parse %s\n%s" % (reader.url, str(e))
+ err = f"Unable to parse {reader.url}\n{str(e)}"
  except ValueError:
  err = "Unable to parse %s. Header missing." % reader.url
  return err, spec, recipe
@@ -476,7 +476,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,

  filename, pkgdir = resolveFilename(taps, pkg_filename, configDir)

- dieOnError(not filename, "Package %s not found in %s" % (p, configDir))
+ dieOnError(not filename, f"Package {p} not found in {configDir}")
  assert(filename is not None)

  err, spec, recipe = parseRecipe(getRecipeReader(filename, configDir))
@@ -486,7 +486,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,
  assert(spec is not None)
  assert(recipe is not None)
  dieOnError(spec["package"].lower() != pkg_filename,
- "%s.sh has different package field: %s" % (p, spec["package"]))
+ "{}.sh has different package field: {}".format(p, spec["package"]))
  spec["pkgdir"] = pkgdir

  if p == "defaults-release":
@@ -502,7 +502,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,
  recipe = ""

  dieOnError(spec["package"] != p,
- "%s should be spelt %s." % (p, spec["package"]))
+ "{} should be spelt {}.".format(p, spec["package"]))

  # If an override fully matches a package, we apply it. This means
  # you can have multiple overrides being applied for a given package.
@@ -522,7 +522,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,
  try:
  systemREMatches = re.match(systemRE, architecture)
  except TypeError:
- dieOnError(True, "Malformed entry prefer_system: %s in %s" % (systemRE, spec["package"]))
+ dieOnError(True, "Malformed entry prefer_system: {} in {}".format(systemRE, spec["package"]))

  noSystemList = []
  if noSystem == "*":
@@ -536,7 +536,7 @@ def getPackageList(packages, specs, configDir, preferSystem, noSystem,
  key = spec["package"] + env
  if key not in trackingEnvCache:
  status, out = performPreferCheck(spec, trackingCode)
- dieOnError(status, "Error while executing track_env for {}: {} => {}".format(key, trackingCode, out))
+ dieOnError(status, f"Error while executing track_env for {key}: {trackingCode} => {out}")
  trackingEnvCache[key] = out
  spec["track_env"][env] = trackingEnvCache[key]

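Several hunks in this release (here, in build.py and in the tests) simplify `IOError` handling; since Python 3.3 `IOError` is a plain alias of `OSError`, so `except (IOError, OSError):` was redundant:

    # IOError and OSError name the same class in Python 3.
    assert IOError is OSError
    try:
        open("/nonexistent/os-release")
    except OSError as exc:  # also catches everything that used to be IOError
        print(type(exc).__name__)  # FileNotFoundError, an OSError subclass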
alibuild_helpers/workarea.py CHANGED
@@ -7,7 +7,7 @@ import tempfile
  from collections import OrderedDict

  from alibuild_helpers.log import dieOnError, debug, error
- from alibuild_helpers.utilities import call_ignoring_oserrors, symlink, short_commit_hash
+ from alibuild_helpers.utilities import call_ignoring_oserrors, symlink, short_commit_hash, asList

  FETCH_LOG_NAME = "fetch-log.txt"

@@ -113,7 +113,8 @@ def updateReferenceRepo(referenceSources, p, spec,
  cmd = scm.cloneReferenceCmd(spec["source"], referenceRepo, usePartialClone)
  logged_scm(scm, p, referenceSources, cmd, ".", allowGitPrompt)
  elif fetch:
- cmd = scm.fetchCmd(spec["source"], "+refs/tags/*:refs/tags/*", "+refs/heads/*:refs/heads/*")
+ ref_match_rule = asList(spec.get("ref_match_rule", ["+refs/tags/*:refs/tags/*", "+refs/heads/*:refs/heads/*"]))
+ cmd = scm.fetchCmd(spec["source"], *ref_match_rule)
  logged_scm(scm, p, referenceSources, cmd, referenceRepo, allowGitPrompt)

  return referenceRepo # reference is read-write
@@ -123,7 +124,7 @@ def is_writeable(dirpath):
  try:
  with tempfile.NamedTemporaryFile(dir=dirpath):
  return True
- except:
+ except Exception:
  return False

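The new `ref_match_rule` spec key lets a recipe restrict which refs are mirrored into the reference repository (the default remains all tags and heads). `asList` is imported from utilities so the key may be given as a single string or a list; a sketch of its presumed behaviour (its body is not part of this diff):

    def asList(x):
        # Presumed behaviour of alibuild_helpers.utilities.asList.
        return x if isinstance(x, list) else [x]

    assert asList("+refs/heads/master:refs/heads/master") == ["+refs/heads/master:refs/heads/master"]
    assert asList(["+refs/tags/*:refs/tags/*"]) == ["+refs/tags/*:refs/tags/*"]

The test added in tests/test_workarea.py below exercises exactly this path.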
tests/test_args.py CHANGED
@@ -16,7 +16,7 @@ BUILD_MISSING_PKG_ERROR = "the following arguments are required: PACKAGE"
  ANALYTICS_MISSING_STATE_ERROR = "the following arguments are required: state"

  # A few errors we should handle, together with the expected result
- ARCHITECTURE_ERROR = u"Unknown / unsupported architecture: foo.\n\n.*"
+ ARCHITECTURE_ERROR = "Unknown / unsupported architecture: foo.\n\n.*"
  PARSER_ERRORS = {
  "build --force-unknown-architecture": BUILD_MISSING_PKG_ERROR,
  "build --force-unknown-architecture zlib --foo": 'unrecognized arguments: --foo',
tests/test_build.py CHANGED
@@ -167,7 +167,7 @@ def dummy_open(x, mode="r", encoding=None, errors=None):
  result = None
  TIMES_ASKED[x] = TIMES_ASKED.get(x, 0) + 1
  if not result:
- raise IOError
+ raise OSError
  return result
  return DEFAULT

@@ -191,17 +191,22 @@ def dummy_readlink(x):


  def dummy_exists(x):
- if x.endswith("alibuild_helpers/.git"):
+ # Convert Path objects to strings for comparison
+ path_str = str(x) if hasattr(x, '__fspath__') else x
+ if path_str.endswith("alibuild_helpers/.git"):
+ return False
+ # Return False for any sapling-related paths
+ if ".sl" in path_str or path_str.endswith("/sl"):
  return False
  return {
  "/alidist": True,
  "/alidist/.git": True,
- "/alidist/.sl": False,
  "/sw": True,
  "/sw/SPECS": False,
  "/sw/MIRROR/root": True,
+ "/sw/MIRROR/root/.git": True,
  "/sw/MIRROR/zlib": False,
- }.get(x, DEFAULT)
+ }.get(path_str, DEFAULT)


  # A few errors we should handle, together with the expected result
tests/test_parseRecipe.py CHANGED
@@ -50,13 +50,13 @@ found unexpected end of stream

  ^"""

- class Recoder(object):
+ class Recoder:
  def __init__(self) -> None:
  self.buffer = ""
  def __call__(self, s, *a) -> None:
  self.buffer += s % a

- class BufferReader(object):
+ class BufferReader:
  def __init__(self, filename, recipe) -> None:
  self.url = filename
  self.buffer = recipe
tests/test_utilities.py CHANGED
@@ -1,5 +1,3 @@
- # vim: set fileencoding=utf-8 :
-
  import unittest

  # Assuming you are using the mock library to ... mock things
@@ -253,8 +251,8 @@ class TestUtilities(unittest.TestCase):
  h1 = Hasher()
  h2 = Hasher()
  h3 = Hasher()
- h1(u'\ua000')
- h2(u'\ua001')
+ h1('\ua000')
+ h2('\ua001')
  h3(b'foo')
  self.assertEqual(h1.hexdigest(), "2af8e41129115eb231a0af76ec5465d3a9184fc4")
  self.assertEqual(h2.hexdigest(), "1619bcdbeff6828138ad9b6e43cc17e856457603")
@@ -466,7 +464,7 @@ class TestTopologicalSort(unittest.TestCase):
  "B": {"package": "B", "requires": []},
  "C": {"package": "C", "requires": []}
  }))
- self.assertEqual(set(["A", "B", "C"]), set(result))
+ self.assertEqual({"A", "B", "C"}, set(result))
  self.assertEqual(3, len(result))

  if __name__ == '__main__':
tests/test_workarea.py CHANGED
@@ -61,6 +61,29 @@ class WorkareaTestCase(unittest.TestCase):
  ], directory="%s/sw/MIRROR/aliroot" % getcwd(), check=False, prompt=True)
  self.assertEqual(spec.get("reference"), "%s/sw/MIRROR/aliroot" % getcwd())

+ @patch("os.path.exists")
+ @patch("os.makedirs")
+ @patch("codecs.open")
+ @patch("alibuild_helpers.git.git")
+ @patch("alibuild_helpers.workarea.is_writeable", new=MagicMock(return_value=True))
+ def test_reference_sources_updated_custom_refspec(self, mock_git, mock_open, mock_makedirs, mock_exists):
+ """Check mirrors are updated with custom refspec when provided."""
+ mock_exists.return_value = True
+ mock_git.return_value = 0, "sentinel output"
+ mock_open.return_value = MagicMock(
+ __enter__=lambda *args, **kw: MagicMock(
+ write=lambda output: self.assertEqual(output, "sentinel output")))
+ spec = MOCK_SPEC.copy()
+ spec["ref_match_rule"] = ["+refs/heads/master:refs/heads/master"]
+ updateReferenceRepoSpec(referenceSources="sw/MIRROR", p="AliRoot",
+ spec=spec, fetch=True)
+ mock_exists.assert_called_with("%s/sw/MIRROR/aliroot" % getcwd())
+ mock_makedirs.assert_called_with("%s/sw/MIRROR" % getcwd(), exist_ok=True)
+ mock_git.assert_called_once_with([
+ "fetch", "-f", "--prune", "--filter=blob:none", spec["source"], "+refs/heads/master:refs/heads/master",
+ ], directory="%s/sw/MIRROR/aliroot" % getcwd(), check=False, prompt=True)
+ self.assertEqual(spec.get("reference"), "%s/sw/MIRROR/aliroot" % getcwd())
+
  @patch("os.path.exists")
  @patch("os.makedirs")
  @patch("alibuild_helpers.git")