alibuild 1.17.19__py3-none-any.whl → 1.17.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (35)
  1. {alibuild-1.17.19.dist-info → alibuild-1.17.33.dist-info}/METADATA +22 -3
  2. {alibuild-1.17.19.dist-info → alibuild-1.17.33.dist-info}/RECORD +34 -35
  3. alibuild_helpers/_version.py +16 -3
  4. alibuild_helpers/args.py +2 -0
  5. alibuild_helpers/build.py +5 -2
  6. alibuild_helpers/build_template.sh +14 -3
  7. alibuild_helpers/cmd.py +1 -1
  8. alibuild_helpers/deps.py +5 -1
  9. alibuild_helpers/doctor.py +15 -9
  10. alibuild_helpers/git.py +1 -1
  11. alibuild_helpers/init.py +2 -1
  12. alibuild_helpers/log.py +8 -1
  13. alibuild_helpers/sl.py +16 -3
  14. alibuild_helpers/sync.py +45 -12
  15. alibuild_helpers/utilities.py +2 -2
  16. docs/docs/reference.md +11 -1
  17. docs/docs/troubleshooting.md +5 -4
  18. docs/docs/user.md +80 -40
  19. tests/test_build.py +43 -32
  20. tests/test_cmd.py +22 -0
  21. tests/test_deps.py +2 -1
  22. tests/test_doctor.py +2 -1
  23. tests/test_init.py +4 -2
  24. tests/test_sync.py +3 -3
  25. tests/test_utilities.py +90 -0
  26. tests/test_workarea.py +1 -1
  27. docs/README.md +0 -1
  28. {alibuild-1.17.19.data → alibuild-1.17.33.data}/scripts/aliBuild +0 -0
  29. {alibuild-1.17.19.data → alibuild-1.17.33.data}/scripts/aliDeps +0 -0
  30. {alibuild-1.17.19.data → alibuild-1.17.33.data}/scripts/aliDoctor +0 -0
  31. {alibuild-1.17.19.data → alibuild-1.17.33.data}/scripts/alienv +0 -0
  32. {alibuild-1.17.19.data → alibuild-1.17.33.data}/scripts/pb +0 -0
  33. {alibuild-1.17.19.dist-info → alibuild-1.17.33.dist-info}/WHEEL +0 -0
  34. {alibuild-1.17.19.dist-info → alibuild-1.17.33.dist-info}/licenses/LICENSE.md +0 -0
  35. {alibuild-1.17.19.dist-info → alibuild-1.17.33.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,9 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: alibuild
3
- Version: 1.17.19
3
+ Version: 1.17.33
4
4
  Summary: ALICE Build Tool
5
+ Home-page: https://alisw.github.io/alibuild
6
+ Author: Giulio Eulisse
5
7
  Author-email: Giulio Eulisse <giulio.eulisse@cern.ch>, Timo Wilken <timo.wilken@cern.ch>, Sergio Garcia <sergio.garcia@cern.ch>
6
8
  Project-URL: homepage, https://alisw.github.io/alibuild
7
9
  Keywords: HEP,ALICE
@@ -22,6 +24,8 @@ Provides-Extra: docs
22
24
  Requires-Dist: mkdocs; extra == "docs"
23
25
  Requires-Dist: mkdocs-material; extra == "docs"
24
26
  Requires-Dist: mkdocs-redirects; extra == "docs"
27
+ Dynamic: author
28
+ Dynamic: home-page
25
29
  Dynamic: license-file
26
30
 
27
31
  .. image:: https://badge.fury.io/py/alibuild.svg
@@ -52,14 +56,22 @@ Pre-requisites
52
56
  If you are using aliBuild directly from git clone, you should make sure
53
57
  you have the dependencies installed. The easiest way to do this is to run::
54
58
 
59
+ # Optional, make a venv so the dependencies are not installed globally
60
+ python -m venv .venv
61
+ source .venv/bin/activate
55
62
  pip install -e .
56
63
 
57
64
 
58
- For developers
59
- ==============
65
+ Contributing
66
+ ============
67
+
60
68
 
61
69
  If you want to contribute to aliBuild, you can run the tests with::
62
70
 
71
+ # Optional, make a venv so the dependencies are not installed globally
72
+ python -m venv .venv
73
+ source .venv/bin/activate
74
+
63
75
  pip install -e .[test] # Only needed once
64
76
  tox
65
77
 
@@ -73,6 +85,13 @@ You can also run only the unit tests (it's a lot faster than the full suite) wit
73
85
 
74
86
  To run the documentation locally, you can use::
75
87
 
88
+ # Optional, make a venv so the dependencies are not installed globally
89
+ python -m venv .venv
90
+ source .venv/bin/activate
91
+
92
+ # Install dependencies for the docs, check pyproject.toml for more info
76
93
  pip install -e .[docs]
94
+
95
+ # Run the docs
77
96
  cd docs
78
97
  mkdocs serve
@@ -1,27 +1,27 @@
1
- alibuild-1.17.19.data/scripts/aliBuild,sha256=5g-apCjhc9cvtFvKSP0yr6IJtChNvtTAP-BtPL-91M8,4307
2
- alibuild-1.17.19.data/scripts/aliDeps,sha256=fhfPB2NpuLj8bZZPUjJW2e4VxA8ZkHlp5q76xAOzufI,219
3
- alibuild-1.17.19.data/scripts/aliDoctor,sha256=gQwLcNGAm7_QF7FFCtmtFXEoroKsRonNMA7UP4DS1y4,221
4
- alibuild-1.17.19.data/scripts/alienv,sha256=8QihJOzKue4oeuIM31MsoX8Fw7FNZ4zY7wyK6UYtgTE,12568
5
- alibuild-1.17.19.data/scripts/pb,sha256=LfkIUyX3xXxmaGSdtAbA-dS1Y1eEShEHpmto1tIEga0,211
6
- alibuild-1.17.19.dist-info/licenses/LICENSE.md,sha256=WJ7YI-moTFb-uVrFjnzzhGJrnL9P2iqQe8NuED3hutI,35141
1
+ alibuild-1.17.33.data/scripts/aliBuild,sha256=5g-apCjhc9cvtFvKSP0yr6IJtChNvtTAP-BtPL-91M8,4307
2
+ alibuild-1.17.33.data/scripts/aliDeps,sha256=fhfPB2NpuLj8bZZPUjJW2e4VxA8ZkHlp5q76xAOzufI,219
3
+ alibuild-1.17.33.data/scripts/aliDoctor,sha256=gQwLcNGAm7_QF7FFCtmtFXEoroKsRonNMA7UP4DS1y4,221
4
+ alibuild-1.17.33.data/scripts/alienv,sha256=8QihJOzKue4oeuIM31MsoX8Fw7FNZ4zY7wyK6UYtgTE,12568
5
+ alibuild-1.17.33.data/scripts/pb,sha256=LfkIUyX3xXxmaGSdtAbA-dS1Y1eEShEHpmto1tIEga0,211
6
+ alibuild-1.17.33.dist-info/licenses/LICENSE.md,sha256=WJ7YI-moTFb-uVrFjnzzhGJrnL9P2iqQe8NuED3hutI,35141
7
7
  alibuild_helpers/__init__.py,sha256=ZAhyhRxOkiTA1fMzwEKACY8Eo8MdWp1MawVicFlvQWk,736
8
- alibuild_helpers/_version.py,sha256=DlzvboYmrZFVgEwyvqyRPPIN-bpatfcSZy5avk6c45Q,515
8
+ alibuild_helpers/_version.py,sha256=aocBG_OE5hC31Qts18qEBqT2J_scMmIDImpzF42ytV8,708
9
9
  alibuild_helpers/analytics.py,sha256=fuiOagDngFnodGo7upMWeY-RjTTncDRz5Kwz8zxUr9o,4579
10
- alibuild_helpers/args.py,sha256=LrZ85hH6dhOt-UCK41BVpjDfcOnD-4PWC1oCFJfzRh8,31490
11
- alibuild_helpers/build.py,sha256=1Ue4iWOhPFWFkj6ARVqR3mY8XwJJYmBRkMXOTfaf1mw,56893
12
- alibuild_helpers/build_template.sh,sha256=irbJFYEFn5KKy5wMK0WaUQOPfTzA6MJnbWxRj3cpIXQ,13508
10
+ alibuild_helpers/args.py,sha256=T0nOOMCpJZdJkEPZENz7nOsaYIfeZ7qHfAR1Vx4-T2Y,31694
11
+ alibuild_helpers/build.py,sha256=VJj0KwRkbPs5gCj-Ey2MWhl2NTLZWql6DGwp8AO3tTA,57055
12
+ alibuild_helpers/build_template.sh,sha256=yx1YEQN5tdfQNmZfsEnn-FxbzBIpvYwYRQkQLKmm8uo,14011
13
13
  alibuild_helpers/clean.py,sha256=-LeQUYDwxihzGJi4rCiuALu051T44-0cV8S6-l_lCaQ,3250
14
- alibuild_helpers/cmd.py,sha256=cK33xiBI2ENAWSueW_yqj3Y9aOhTOdKkzsLdNUx9KdU,5899
15
- alibuild_helpers/deps.py,sha256=L1NrXSmuZE2fpVvUn_t19SrFLPu6rvJd5GG3MPiiJ0w,4805
16
- alibuild_helpers/doctor.py,sha256=s3IHGilf5iRSs3GOQgvZdws7SoXtLYp_ynCnCnTSbPI,9327
17
- alibuild_helpers/git.py,sha256=_GjLIxprHnCfUdQ908xfza1rwhqag1_df7_gcC_8sgw,4004
18
- alibuild_helpers/init.py,sha256=GmVLXqMS-aXqPnCk5dgAONMQFe7MqjlEtXtcQ1a-0qY,5009
19
- alibuild_helpers/log.py,sha256=C6nneUPrPrqxIqzqsHMWkqZ_Tks2e4RIMn5sZaQwSE0,4505
14
+ alibuild_helpers/cmd.py,sha256=5tdxtyrHDzmdSVIA0pzxBoici1GZDS0fUStbX85r6ao,5906
15
+ alibuild_helpers/deps.py,sha256=nHLyNqVuTUfW5bqfzCDxAaVVsklv5wHMl4FMMfTQF-8,4909
16
+ alibuild_helpers/doctor.py,sha256=CSvfwmD28NRmvSnmZRTHWaf11pAoSIlMxJ1yW00Xh9c,9685
17
+ alibuild_helpers/git.py,sha256=20JDRZX0wbJdsK__AI_nnS2rQmgElrMMD-OT6TDHCUU,4015
18
+ alibuild_helpers/init.py,sha256=x7OAErHzn34ceNqg-0GuHudYigh18Mk-P3RhN2P5mEI,5088
19
+ alibuild_helpers/log.py,sha256=OEflXNcGNgvVYLZbvVwd2Inyt12tnEw5RgkrsiAT2c0,4700
20
20
  alibuild_helpers/scm.py,sha256=pZfEnTgr_ILmtRT3BXeoYVJGq9rjigLXOLAGJMsnDko,1019
21
- alibuild_helpers/sl.py,sha256=EdHs5Se6xpR9rhmhSJd_6f3Rfkxc8jl-97ZT1lG2T28,2356
22
- alibuild_helpers/sync.py,sha256=g93S_eXmD1Zb9HptX3lICo5hWYH0cl0YQ-yr7wyliGQ,31060
21
+ alibuild_helpers/sl.py,sha256=Aw3-Lvq3bQ2s_KTw6PXgqcjSoY-s8_0A55GRPKks4x0,2915
22
+ alibuild_helpers/sync.py,sha256=vfny1ZF_YahzlSSEtYOq1KFvQLj_ce7MZlV2KDk1-xg,32296
23
23
  alibuild_helpers/templating_plugin.py,sha256=TWHdMQtDfX6Vqp5w9Huyh4ZEgLv5vYxAtPtX68xTOlk,662
24
- alibuild_helpers/utilities.py,sha256=NFNoaed0i7VIC6r2s7So6UhMJG5kUbVWolML9AyD26s,26129
24
+ alibuild_helpers/utilities.py,sha256=nb0UC0qn2_rIJ-5GDnx-GoRAbF8tn1ELjZS-prUz9eo,26131
25
25
  alibuild_helpers/workarea.py,sha256=dYGZ7OOCg87W-njZMqX7Yu72800KZjx9v5Hg-T43juY,7442
26
26
  debian/changelog,sha256=N-9FA5VD4VmD_tAwFfgqPqG7KUw1Jktyz14zP6aLxZs,300
27
27
  debian/compat,sha256=kX3zMg13jduqXFx3QrxARr-APDbtKwUPMIRO0gZ4NGk,3
@@ -29,34 +29,33 @@ debian/control,sha256=UL8ZCLiCnNH75oVtldYPfrBO8DH7VxvjXPIdTCvXgPc,476
29
29
  debian/copyright,sha256=wOLKnTq7-L1OZZoXu-FOojSdqt2nb9aEwVqVZGAxanE,470
30
30
  debian/files,sha256=u9Y63SK26643_rgHm6lvcr4OtZKlCM-FcW4wwiJ4FMY,55
31
31
  debian/rules,sha256=V4agtbpfdaM9h7oNrgNzEShthUASwMfhgTmFvZKLNMI,131
32
- docs/README.md,sha256=-2EVG9RlHchf-TYJIxwZf8zSHc49ujCdTzZmWrxj4aQ,64
33
32
  docs/SUPPORT,sha256=j8Ikz-9B8OkoZwtd7WU5fc_qRQ_MWy2qjWcgsXY3XyQ,150
34
33
  docs/mkdocs.yml,sha256=FQWSVZC75WnuCs6icnSRauBgIJ4hrVut5Rt9X4zxy-w,931
35
34
  docs/docs/alice_logo.png,sha256=w8Jpk9mHdGF5sN0pO0i6PuVyI-ntDNsvbx7DChVIZQQ,65824
36
35
  docs/docs/deps.png,sha256=VY_WJOzWOm_JqAwuzFBd6C7NhrzTDDireNh739NwAow,261802
37
36
  docs/docs/index.md,sha256=pU3PRsZvV5M_W1-GEZ169ECAD4C5eQc0l2V_kN_j7yU,3571
38
37
  docs/docs/quick.md,sha256=PTKHAmt6vrEc3QbaRAS7rZICZ-LcvdqLxgaNcXo63Sw,2967
39
- docs/docs/reference.md,sha256=g516_jCRnFOFy1kgw4d7Bin7OH1UBT4V34JVeDf_kBA,18033
40
- docs/docs/troubleshooting.md,sha256=SvNT68iCVXFu3RMKjLQZrMwxBxaOSJ-QeSJe8Q3N4Oc,15440
41
- docs/docs/user.md,sha256=3Ys1cwNuDTBQl7zZZisChZ4mt9KrLduH1bLUHXK5Jws,18704
38
+ docs/docs/reference.md,sha256=sBH8diOwyOFkgDsAhWexvtpuAwZaNzJaE-dplEnOXdU,18049
39
+ docs/docs/troubleshooting.md,sha256=fcAoLyi46yxMztqbYMY-5Z4_mwLZOJW8Q3M3jEb-Zdc,15458
40
+ docs/docs/user.md,sha256=5o150ssZnN1Tow35OWA6Gfw3PjlhJb27ip1740hasV8,20476
42
41
  docs/docs/stylesheets/extra.css,sha256=NAFcBZQe8gh2CTpJFwyv93FvblnF5RaEjtsHxtlFD-w,215
43
42
  templates/alibuild_to_please.jnj,sha256=48SfIwq55zlb5_5lu6WAHSznXE0EfUNcHmFrmzMUSns,1723
44
43
  tests/test_analytics.py,sha256=IlxATGj-MU0sTVqpi2-EKrIhyV9C48K5IZ39kWFz-Os,1942
45
44
  tests/test_args.py,sha256=8d8BybESxbIDvEOOSHmmd2rCTgNxI_vF-sgbDojI-Fg,8867
46
- tests/test_build.py,sha256=m8PXSRpejJYTeabTOgvu6C77_jqGWq11Ko4uO74ybJw,18503
45
+ tests/test_build.py,sha256=5WtCmrvcG7U2i5jwBdPhMJzINsYQ6WaySEFu-TNnZHc,19017
47
46
  tests/test_clean.py,sha256=Zm3gjtO8Pgl27xUzQIHeGqpe05YMVXZp6Sua59prvcE,7492
48
- tests/test_cmd.py,sha256=iMpfLKqvi-qaSP6EM7PoJnGSZ9Qf4sj5669o2kcD8_I,3820
49
- tests/test_deps.py,sha256=Ae7YPZ6jrWTeXeLaIj0C7J1Lt5fwdsRzMsiHv6GF1fE,2072
50
- tests/test_doctor.py,sha256=LnNijwCtTN2ltX-FMSYRuLEgbCMfG0VpkqSJe3Wh1X4,4233
47
+ tests/test_cmd.py,sha256=SsWWasMrhbIu9Lqyr_wpuvDjg72ACJ1H_zDlTnybBuE,5049
48
+ tests/test_deps.py,sha256=bLDFuqLRJiCW4U71dzXYM0Niv-skQXSsKGb6LXGVsZ0,2113
49
+ tests/test_doctor.py,sha256=Z7X1d6XqCqmoYAEkRllXf-6HWvfnzGOFd_ghh4gmWYQ,4270
51
50
  tests/test_git.py,sha256=fh3sBkW7FwFQQZ3x6Rp3C4wBqM7I9CcX75e6u2oaSKg,1963
52
51
  tests/test_hashing.py,sha256=oAggZlZp-rZz5MRj4RBD5UZUxV0uNQOtVw47y-LHwt4,2941
53
- tests/test_init.py,sha256=Mpj_IB9UNYQgekjLS4c0yBHQe5M3J7-YOHlwMHxSu_8,4227
52
+ tests/test_init.py,sha256=y1n16H5JoYAyIihqQglVmPrIsz7VVqyjRz_XdDs4XZM,4281
54
53
  tests/test_log.py,sha256=5eA0lfFHyX4KOMkfQSsbNw9wAbh7t-KuOfvaFMaM0qg,1884
55
54
  tests/test_packagelist.py,sha256=MlVParqXn7zQXX3OTjHhY7sFvfgAoH-jWBMJlmsB5ls,8755
56
55
  tests/test_parseRecipe.py,sha256=Ar1SWWd-NLC6ZAs180RcHC3UPyWc1ItzIP9S57ADNM4,4982
57
- tests/test_sync.py,sha256=2CyZcC4gKCFo85pfJYn4r196eet0TG5bolL8EofGqFQ,14014
58
- tests/test_utilities.py,sha256=mg0AlBT-hr74jVJwDAGzBscBvoYULyXTA0mgD7tSors,17688
59
- tests/test_workarea.py,sha256=iFA1vv6Kdyd4BPk_nrNk11YUZOrAyYYYA-L0f5-As3o,4886
56
+ tests/test_sync.py,sha256=ispdYLvTIvoG_EnsNZWb-gI-hzt7cgjxns3ghpraepE,14014
57
+ tests/test_utilities.py,sha256=VfAu0oEWkO6YmOG4nZxA4YC3KViLGJZ79EJiapk88Rg,20497
58
+ tests/test_workarea.py,sha256=RIbo9b4hckkf9jmsS2NwfdcRU20QRUF_Kjjc-IlK2GU,4897
60
59
  tests/testdist/broken1.sh,sha256=zdbBCVA9ThnK14Lu9Nm6Firw2EcnRF7budMA3xrcYEg,7
61
60
  tests/testdist/broken2.sh,sha256=9S1xEQPVCkN4MMb7zQT7S6tJoPgvbSbRx5HG6EiN0JA,4
62
61
  tests/testdist/broken3.sh,sha256=1Zs7ajCIndFPYCQdSjg913x5gNBYakUehTUXLpMQSEU,18
@@ -68,7 +67,7 @@ tests/testdist/clobber-initdotsh.sh,sha256=K4L8hiEgNg0hI2WAiEA65rWU5sp-fwOYkubpV
68
67
  tests/testdist/defaults-o2.sh,sha256=IJWVMYjQYz876twos1bj-5VnPv7WTdM0X-fd09j4xxM,325
69
68
  tests/testdist/delete-etc.sh,sha256=NFMApyWMLQu3HJMsx0x8ZEveCaXbosO6XzJk8KDRJdg,63
70
69
  tests/testdist/tracking-env.sh,sha256=3cJszuzJ5dxNYvHO4ydUOWIXwmMS7wfeb-JBCDK21O4,119
71
- alibuild-1.17.19.dist-info/METADATA,sha256=ogbnT-bNVLWBSCytmXY7KLfI70wdBM3DlxoqT7WlM2o,2129
72
- alibuild-1.17.19.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
73
- alibuild-1.17.19.dist-info/top_level.txt,sha256=WjKmc89Vn0WlbEp9a9VmhwqRkeKxjUX_6NT3T8K3Hv0,45
74
- alibuild-1.17.19.dist-info/RECORD,,
70
+ alibuild-1.17.33.dist-info/METADATA,sha256=0riqP6LrOo5K2bEefvmo6XRIxzbL2K9G63MmFHvbPZI,2716
71
+ alibuild-1.17.33.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
72
+ alibuild-1.17.33.dist-info/top_level.txt,sha256=WjKmc89Vn0WlbEp9a9VmhwqRkeKxjUX_6NT3T8K3Hv0,45
73
+ alibuild-1.17.33.dist-info/RECORD,,
@@ -1,7 +1,14 @@
1
1
  # file generated by setuptools-scm
2
2
  # don't change, don't track in version control
3
3
 
4
- __all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
4
+ __all__ = [
5
+ "__version__",
6
+ "__version_tuple__",
7
+ "version",
8
+ "version_tuple",
9
+ "__commit_id__",
10
+ "commit_id",
11
+ ]
5
12
 
6
13
  TYPE_CHECKING = False
7
14
  if TYPE_CHECKING:
@@ -9,13 +16,19 @@ if TYPE_CHECKING:
9
16
  from typing import Union
10
17
 
11
18
  VERSION_TUPLE = Tuple[Union[int, str], ...]
19
+ COMMIT_ID = Union[str, None]
12
20
  else:
13
21
  VERSION_TUPLE = object
22
+ COMMIT_ID = object
14
23
 
15
24
  version: str
16
25
  __version__: str
17
26
  __version_tuple__: VERSION_TUPLE
18
27
  version_tuple: VERSION_TUPLE
28
+ commit_id: COMMIT_ID
29
+ __commit_id__: COMMIT_ID
19
30
 
20
- __version__ = version = '1.17.19'
21
- __version_tuple__ = version_tuple = (1, 17, 19)
31
+ __version__ = version = '1.17.33'
32
+ __version_tuple__ = version_tuple = (1, 17, 33)
33
+
34
+ __commit_id__ = commit_id = None
alibuild_helpers/args.py CHANGED
@@ -254,6 +254,8 @@ def doParseArgs():
254
254
  help=("Assume we're not building %(metavar)s and all its (unique) dependencies. "
255
255
  "You can specify this option multiple times or separate multiple arguments "
256
256
  "with commas."))
257
+ doctor_parser.add_argument("-e", dest="environment", action="append", default=[],
258
+ help="KEY=VALUE binding to add to the build environment. May be specified multiple times.")
257
259
 
258
260
  doctor_system = doctor_parser.add_mutually_exclusive_group()
259
261
  doctor_system.add_argument("--always-prefer-system", dest="preferSystem", action="store_true",
alibuild_helpers/build.py CHANGED
@@ -58,7 +58,7 @@ def update_git_repos(args, specs, buildOrder):
58
58
  specs[package]["scm"] = Git()
59
59
  if specs[package]["is_devel_pkg"]:
60
60
  specs[package]["source"] = os.path.join(os.getcwd(), specs[package]["package"])
61
- if exists(os.path.join(specs[package]["source"], ".sl")):
61
+ if exists(os.path.join(specs[package]["source"], ".sl")) or exists(os.path.join(specs[package]["source"], ".git/sl")):
62
62
  specs[package]["scm"] = Sapling()
63
63
  updateReferenceRepoSpec(args.referenceSources, package, specs[package],
64
64
  fetch=args.fetchRepos, allowGitPrompt=git_prompt)
@@ -501,7 +501,10 @@ def doBuild(args, parser):
501
501
 
502
502
  install_wrapper_script("git", workDir)
503
503
 
504
- with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
504
+ extra_env = {"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}
505
+ extra_env.update(dict([e.partition('=')[::2] for e in args.environment]))
506
+
507
+ with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env=extra_env, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
505
508
  def performPreferCheckWithTempDir(pkg, cmd):
506
509
  with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{pkg['package']}_") as temp_dir:
507
510
  return getstatusoutput_docker(cmd, cwd=temp_dir)
@@ -1,7 +1,18 @@
1
1
  #!/bin/bash
2
-
2
+ ALIBUILD_START_TIMESTAMP=$(date +%%s)
3
3
  # Automatically generated build script
4
4
  unset DYLD_LIBRARY_PATH
5
+ echo "aliBuild: start building $PKGNAME-$PKGVERSION-$PKGREVISION at $ALIBUILD_START_TIMESTAMP"
6
+
7
+ cleanup() {
8
+ local exit_code=$?
9
+ ALIBUILD_END_TIMESTAMP=$(date +%%s)
10
+ ALIBUILD_DELTA_TIME=$(($ALIBUILD_END_TIMESTAMP - $ALIBUILD_START_TIMESTAMP))
11
+ echo "aliBuild: done building $PKGNAME-$PKGVERSION-$PKGREVISION at $ALIBUILD_START_TIMESTAMP (${ALIBUILD_DELTA_TIME} s)"
12
+ exit $exit_code
13
+ }
14
+
15
+ trap cleanup EXIT
5
16
 
6
17
  # Cleanup variables which should not be exposed to user code
7
18
  unset AWS_ACCESS_KEY_ID
@@ -103,9 +114,9 @@ unset DYLD_LIBRARY_PATH
103
114
  EOF
104
115
 
105
116
  cd "$BUILDROOT"
106
- ln -snf $PKGHASH "${BUILDROOT}-latest"
117
+ ln -snf "$PKGHASH" "$ALIBUILD_BUILD_WORK_DIR/BUILD/$PKGNAME-latest"
107
118
  if [[ $DEVEL_PREFIX ]]; then
108
- ln -snf $PKGHASH "${BUILDROOT}-latest-$DEVEL_PREFIX"
119
+ ln -snf "$PKGHASH" "$ALIBUILD_BUILD_WORK_DIR/BUILD/$PKGNAME-latest-$DEVEL_PREFIX"
109
120
  fi
110
121
 
111
122
  cd "$BUILDDIR"
alibuild_helpers/cmd.py CHANGED
@@ -105,7 +105,7 @@ class DockerRunner:
105
105
  if self._container is None:
106
106
  command_prefix=""
107
107
  if self._extra_env:
108
- command_prefix="env " + " ".join("{}={}".format(k, v) for (k,v) in self._extra_env.items()) + " "
108
+ command_prefix="env " + " ".join("{}={}".format(k, quote(v)) for (k,v) in self._extra_env.items()) + " "
109
109
  return getstatusoutput("{}{} -c {}".format(command_prefix, BASH, quote(cmd))
110
110
  , cwd=cwd)
111
111
  envOpts = []
alibuild_helpers/deps.py CHANGED
@@ -16,7 +16,11 @@ def doDeps(args, parser):
16
16
  specs = {}
17
17
  defaultsReader = lambda: readDefaults(args.configDir, args.defaults, parser.error, args.architecture)
18
18
  (err, overrides, taps) = parseDefaults(args.disable, defaultsReader, debug)
19
- with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else path.abspath(args.configDir)}, extra_volumes=[f"{path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
19
+
20
+ extra_env = {"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else path.abspath(args.configDir)}
21
+ extra_env.update(dict([e.partition('=')[::2] for e in args.environment]))
22
+
23
+ with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env=extra_env, extra_volumes=[f"{path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
20
24
  def performCheck(pkg, cmd):
21
25
  return getstatusoutput_docker(cmd)
22
26
 
@@ -22,7 +22,8 @@ def checkPreferSystem(spec, cmd, homebrew_replacement, getstatusoutput_docker):
22
22
  debug("Package %s can only be managed via alibuild.", spec["package"])
23
23
  return (1, "")
24
24
  cmd = homebrew_replacement + cmd
25
- err, out = getstatusoutput_docker(cmd)
25
+ with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{spec['package']}_") as temp_dir:
26
+ err, out = getstatusoutput_docker(cmd, cwd=temp_dir)
26
27
  if not err:
27
28
  success("Package %s will be picked up from the system.", spec["package"])
28
29
  for x in out.split("\n"):
@@ -40,7 +41,8 @@ def checkRequirements(spec, cmd, homebrew_replacement, getstatusoutput_docker):
40
41
  debug("Package %s is not a system requirement.", spec["package"])
41
42
  return (0, "")
42
43
  cmd = homebrew_replacement + cmd
43
- err, out = getstatusoutput_docker(cmd)
44
+ with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{spec['package']}_") as temp_dir:
45
+ err, out = getstatusoutput_docker(cmd, cwd=temp_dir)
44
46
  if not err:
45
47
  success("Required package %s will be picked up from the system.", spec["package"])
46
48
  debug("%s", cmd)
@@ -83,7 +85,11 @@ def doDoctor(args, parser):
83
85
  # Decide if we can use homebrew. If not, we replace it with "true" so
84
86
  # that we do not get spurious messages on linux
85
87
  homebrew_replacement = ""
86
- with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
88
+
89
+ extra_env = {"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}
90
+ extra_env.update(dict([e.partition('=')[::2] for e in args.environment]))
91
+
92
+ with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env=extra_env, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
87
93
  err, output = getstatusoutput_docker("type c++")
88
94
  if err:
89
95
  warning("Unable to find system compiler.\n"
@@ -140,10 +146,10 @@ def doDoctor(args, parser):
140
146
  error("%s", msg)
141
147
  return (ok,msg,valid)
142
148
 
143
- with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
144
- def performPreferCheckWithTempDir(pkg, cmd):
145
- with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{pkg['package']}_") as temp_dir:
146
- return getstatusoutput_docker(cmd, cwd=temp_dir)
149
+ extra_env = {"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}
150
+ extra_env.update(dict([e.partition('=')[::2] for e in args.environment]))
151
+
152
+ with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env=extra_env, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
147
153
  fromSystem, own, failed, validDefaults = \
148
154
  getPackageList(packages = packages,
149
155
  specs = specs,
@@ -153,8 +159,8 @@ def doDoctor(args, parser):
153
159
  architecture = args.architecture,
154
160
  disable = args.disable,
155
161
  defaults = args.defaults,
156
- performPreferCheck = performPreferCheckWithTempDir,
157
- performRequirementCheck = performPreferCheckWithTempDir,
162
+ performPreferCheck = lambda pkg, cmd: checkPreferSystem(pkg, cmd, homebrew_replacement, getstatusoutput_docker),
163
+ performRequirementCheck = lambda pkg, cmd: checkRequirements(pkg, cmd, homebrew_replacement, getstatusoutput_docker),
158
164
  performValidateDefaults = performValidateDefaults,
159
165
  overrides = overrides,
160
166
  taps = taps,
alibuild_helpers/git.py CHANGED
@@ -68,7 +68,7 @@ class Git(SCM):
68
68
  return ["checkout", "-f", tag]
69
69
 
70
70
  def fetchCmd(self, remote, *refs):
71
- return ["fetch", "-f"] + clone_speedup_options() + [remote, *refs]
71
+ return ["fetch", "-f", "--prune"] + clone_speedup_options() + [remote, *refs]
72
72
 
73
73
  def setWriteUrlCmd(self, url):
74
74
  return ["remote", "set-url", "--push", "origin", url]
alibuild_helpers/init.py CHANGED
@@ -3,6 +3,7 @@ from alibuild_helpers.utilities import getPackageList, parseDefaults, readDefaul
3
3
  from alibuild_helpers.log import debug, error, warning, banner, info
4
4
  from alibuild_helpers.log import dieOnError
5
5
  from alibuild_helpers.workarea import updateReferenceRepoSpec
6
+ from alibuild_helpers.cmd import getstatusoutput
6
7
 
7
8
  from os.path import join
8
9
  import os.path as path
@@ -54,7 +55,7 @@ def doInit(args):
54
55
  architecture="",
55
56
  disable=[],
56
57
  defaults=args.defaults,
57
- performPreferCheck=lambda *x, **y: (1, ""),
58
+ performPreferCheck=lambda pkg, cmd: getstatusoutput(["bash", "-c", cmd]),
58
59
  performRequirementCheck=lambda *x, **y: (0, ""),
59
60
  performValidateDefaults=lambda spec : validateDefaults(spec, args.defaults),
60
61
  overrides=overrides,
alibuild_helpers/log.py CHANGED
@@ -53,14 +53,21 @@ def log_current_package(package, main_package, specs, devel_prefix) -> None:
53
53
 
54
54
 
55
55
  class ProgressPrint:
56
- def __init__(self, begin_msg="") -> None:
56
+ def __init__(self, begin_msg="", min_interval=0.) -> None:
57
57
  self.count = -1
58
58
  self.lasttime = 0
59
59
  self.STAGES = ".", "..", "...", "....", ".....", "....", "...", ".."
60
60
  self.begin_msg = begin_msg
61
61
  self.percent = -1
62
+ self.min_interval = min_interval
63
+ self.last_update = 0
62
64
 
63
65
  def __call__(self, txt, *args) -> None:
66
+ now = time.time()
67
+ if (now - self.last_update) < self.min_interval:
68
+ return
69
+ self.last_update = now
70
+
64
71
  if logger.level <= logging.DEBUG or not sys.stdout.isatty():
65
72
  debug(txt, *args)
66
73
  return
alibuild_helpers/sl.py CHANGED
@@ -20,9 +20,22 @@ class Sapling(SCM):
20
20
  return sapling(("whereami", ), directory)
21
21
 
22
22
  def branchOrRef(self, directory):
23
- # Format is <hash>[+] <branch>
24
- identity = sapling(("identify", ), directory)
25
- return identity.split(" ")[-1]
23
+ # Format is * branch ref or nothing
24
+ err, output = getstatusoutput("""\
25
+ set -e +x
26
+ sl -R {directory} bookmark -r . 2>/dev/null | grep -- "*"
27
+ """.format(
28
+ directory=quote(directory),
29
+ ), timeout=SL_COMMAND_TIMEOUT_SEC)
30
+ if err > 1:
31
+ raise SCMError("Error {} from sl bookmark -r . : {}".format(err, output))
32
+ # We use "none" to indicate there are no bookmarks. This means
33
+ # that a devel package will act as a single branch, regardless of where we are.
34
+ if not output.strip():
35
+ return "none"
36
+ # If a bookmark is there, we use it to determine that we should rebuild
37
+ # when we move to it
38
+ return output.split(" ")[2]
26
39
 
27
40
  def exec(self, *args, **kwargs):
28
41
  return sapling(*args, **kwargs)
alibuild_helpers/sync.py CHANGED
@@ -7,7 +7,9 @@ import re
7
7
  import sys
8
8
  import time
9
9
  import requests
10
+ from concurrent.futures import ThreadPoolExecutor, as_completed
10
11
  from requests.exceptions import RequestException
12
+ from urllib.parse import quote
11
13
 
12
14
  from alibuild_helpers.cmd import execute
13
15
  from alibuild_helpers.log import debug, info, error, dieOnError, ProgressPrint
@@ -59,6 +61,7 @@ class HttpRemoteSync:
59
61
 
60
62
  def getRetry(self, url, dest=None, returnResult=False, log=True, session=None, progress=debug):
61
63
  get = session.get if session is not None else requests.get
64
+ url = quote(url, safe=":/")
62
65
  for i in range(0, self.httpConnRetries):
63
66
  if i > 0:
64
67
  pauseSec = self.httpBackoff * (2 ** (i - 1))
@@ -178,7 +181,7 @@ class HttpRemoteSync:
178
181
  destPath = os.path.join(self.workdir, store_path, use_tarball)
179
182
  if not os.path.isfile(destPath): # do not download twice
180
183
  progress = ProgressPrint("Downloading tarball for %s@%s" %
181
- (spec["package"], spec["version"]))
184
+ (spec["package"], spec["version"]), min_interval=5.0)
182
185
  progress("[0%%] Starting download of %s", use_tarball) # initialise progress bar
183
186
  self.getRetry("/".join((self.remoteStore, store_path, use_tarball)),
184
187
  destPath, session=session, progress=progress)
@@ -357,7 +360,7 @@ class CVMFSRemoteSync:
357
360
  # Create the dummy tarball, if it does not exists
358
361
  test -f "{workDir}/{architecture}/store/${{pkg_hash:0:2}}/$pkg_hash/$tarball" && continue
359
362
  mkdir -p "{workDir}/INSTALLROOT/$pkg_hash/{architecture}/{package}"
360
- find "{remote_store}/{cvmfs_architecture}/Packages/{package}/$full_version" ! -name etc -maxdepth 1 -mindepth 1 -exec ln -sf {} "{workDir}/INSTALLROOT/$pkg_hash/{architecture}/{package}/" \;
363
+ find "{remote_store}/{cvmfs_architecture}/Packages/{package}/$full_version" ! -name etc -maxdepth 1 -mindepth 1 -exec ln -sf {} "{workDir}/INSTALLROOT/$pkg_hash/{architecture}/{package}/" \\;
361
364
  cp -fr "{remote_store}/{cvmfs_architecture}/Packages/{package}/$full_version/etc" "{workDir}/INSTALLROOT/$pkg_hash/{architecture}/{package}/etc"
362
365
  mkdir -p "{workDir}/TARS/{architecture}/store/${{pkg_hash:0:2}}/$pkg_hash"
363
366
  tar -C "{workDir}/INSTALLROOT/$pkg_hash" -czf "{workDir}/TARS/{architecture}/store/${{pkg_hash:0:2}}/$pkg_hash/$tarball" .
@@ -551,7 +554,7 @@ class Boto3RemoteSync:
551
554
  for tarball in self._s3_listdir(store_path):
552
555
  debug("Fetching tarball %s", tarball)
553
556
  progress = ProgressPrint("Downloading tarball for %s@%s" %
554
- (spec["package"], spec["version"]))
557
+ (spec["package"], spec["version"]), min_interval=5.0)
555
558
  progress("[0%%] Starting download of %s", tarball) # initialise progress bar
556
559
  # Create containing directory locally. (exist_ok= is python3-specific.)
557
560
  os.makedirs(os.path.join(self.workdir, store_path), exist_ok=True)
@@ -679,15 +682,45 @@ class Boto3RemoteSync:
679
682
 
680
683
  # Second, upload dist symlinks. These should be in place before the main
681
684
  # tarball, to avoid races in the publisher.
682
- for link_dir, symlinks in dist_symlinks.items():
683
- for link_key, hash_path in symlinks:
684
- self.s3.put_object(Bucket=self.writeStore,
685
- Key=link_key,
686
- Body=os.fsencode(hash_path),
687
- ACL="public-read",
688
- WebsiteRedirectLocation=hash_path)
689
- debug("Uploaded %d dist symlinks to S3 from %s",
690
- len(symlinks), link_dir)
685
+ start_time = time.time()
686
+ total_symlinks = 0
687
+
688
+ # Limit concurrency to avoid overwhelming S3 with too many simultaneous requests
689
+ max_workers = min(32, (len(dist_symlinks) * 10) or 1)
690
+
691
+ def _upload_single_symlink(link_key, hash_path):
692
+ self.s3.put_object(Bucket=self.writeStore,
693
+ Key=link_key,
694
+ Body=os.fsencode(hash_path),
695
+ ACL="public-read",
696
+ WebsiteRedirectLocation=hash_path)
697
+ return link_key
698
+
699
+ with ThreadPoolExecutor(max_workers=max_workers) as executor:
700
+ future_to_info = {}
701
+ for link_dir, symlinks in dist_symlinks.items():
702
+ for link_key, hash_path in symlinks:
703
+ future = executor.submit(_upload_single_symlink, link_key, hash_path)
704
+ future_to_info[future] = (link_dir, link_key)
705
+ total_symlinks += 1
706
+
707
+ dir_counts = {link_dir: 0 for link_dir in dist_symlinks.keys()}
708
+ for future in as_completed(future_to_info):
709
+ link_dir, link_key = future_to_info[future]
710
+ try:
711
+ future.result()
712
+ dir_counts[link_dir] += 1
713
+ except Exception as e:
714
+ error("Failed to upload symlink %s: %s", link_key, e)
715
+ raise
716
+
717
+ for link_dir, count in dir_counts.items():
718
+ if count > 0:
719
+ debug("Uploaded %d dist symlinks to S3 from %s", count, link_dir)
720
+
721
+ end_time = time.time()
722
+ debug("Uploaded %d dist symlinks in %.2f seconds",
723
+ total_symlinks, end_time - start_time)
691
724
 
692
725
  self.s3.upload_file(Bucket=self.writeStore, Key=tar_path,
693
726
  Filename=os.path.join(self.workdir, tar_path))
@@ -216,7 +216,7 @@ def doDetectArch(hasOsRelease, osReleaseLines, platformTuple, platformSystem, pl
216
216
  distribution = distribution.lower()
217
217
  # If platform.dist does not return something sensible,
218
218
  # let's try with /etc/os-release
219
- if distribution not in ["ubuntu", "red hat enterprise linux", "redhat", "centos", "almalinux", "rockylinux"] and hasOsRelease:
219
+ if distribution not in ["ubuntu", "red hat enterprise linux", "redhat", "centos", "almalinux", "rocky linux"] and hasOsRelease:
220
220
  for x in osReleaseLines:
221
221
  key, is_prop, val = x.partition("=")
222
222
  if not is_prop:
@@ -236,7 +236,7 @@ def doDetectArch(hasOsRelease, osReleaseLines, platformTuple, platformSystem, pl
236
236
  if version in debian_ubuntu:
237
237
  distribution = "ubuntu"
238
238
  version = debian_ubuntu[version]
239
- elif distribution in ["redhat", "red hat enterprise linux", "centos", "almalinux", "rockylinux"]:
239
+ elif distribution in ["redhat", "red hat enterprise linux", "centos", "almalinux", "rocky linux"]:
240
240
  distribution = "slc"
241
241
 
242
242
  processor = platformProcessor
docs/docs/reference.md CHANGED
@@ -79,13 +79,16 @@ The following entries are optional in the header:
79
79
  - `source`: URL of a Git repository from which the source is downloaded.
80
80
  It's good practice to make sure that they are already patched, so that you
81
81
  can easily point to the actual sources used by the software.
82
+
82
83
  - `write_repo`: in case the repository URL to be used for developing is
83
84
  different from the `source`, set this key. It is used by `aliBuild init`,
84
85
  which will initialise your local repository with the `upstream` remote
85
86
  pointing at this URL instead of the one in `source`.
87
+
86
88
  - `tag`: git reference in the above mentioned repository which points to the
87
89
  software to be built. This can be a tag name, a branch name or a commit
88
90
  hash.
91
+
89
92
  - `env`: dictionary whose key-value pairs are environment variables to be set
90
93
  after the recipe is built. The values are interpreted as the contents of a
91
94
  double-quoted shell string, so you can reference other environment variables
@@ -101,6 +104,7 @@ The following entries are optional in the header:
101
104
  intended to be used to point to build products of the current recipe. If you
102
105
  need to set an environment variable for use in the recipe, use
103
106
  `export VARIABLE=value` in the recipe body.
107
+
104
108
  - `prepend_path`: dictionary whose key-value pairs are an environment variable
105
109
  name and a path to be prepended to it, as it happens in `LD_LIBRARY_PATH`.
106
110
  This happens only after the package declaring the `prepend_path` in question
@@ -116,9 +120,11 @@ The following entries are optional in the header:
116
120
 
117
121
  will result in prepending `$FOO_ROOT/binexec/foobar` to `$PATH`, and both
118
122
  `$FOO_ROOT/sub/lib` and `lib64` to `LD_LIBRARY_PATH`.
123
+
119
124
  - `append_path`: same as `prepend_path` but paths are appended rather than
120
125
  prepended. Like `append_path` and `env`, this **does not** affect the
121
126
  environment of the current recipe.
127
+
122
128
  - `requires`: a list of run-time dependencies for the package, *e.g.*:
123
129
 
124
130
  ```yaml
@@ -145,17 +151,20 @@ The following entries are optional in the header:
145
151
 
146
152
  will make sure that `IgProf` is only built on platforms whose name does not
147
153
  begin with `osx`.
154
+
148
155
  - `build_requires`: a list of build-time dependencies for the package. Like
149
156
  `requires`, these packages will be built before the current package is
150
157
  built.
151
158
 
152
159
  Packages in this list are marked specially in the dependency graph
153
- produced by `aliDeps`. Other tools treat these packages differently from
160
+ produced by `aliBuild deps`. Other tools treat these packages differently from
154
161
  `requires`: for instance, RPMs produced for a package won't depend on its
155
162
  `build_requires`, and `alibuild-generate-module` won't pull in build
156
163
  requirements' modulefiles.
164
+
157
165
  - `force_rebuild`: set it to `true` to force re-running the build recipe every
158
166
  time you invoke alibuild on it.
167
+
159
168
  - `prefer_system_check`: a script which is used to determine whether
160
169
  or not the system equivalent of the package can be used. See also
161
170
  `prefer_system`. If the `--no-system` option is specified, this key is not
@@ -170,6 +179,7 @@ The following entries are optional in the header:
170
179
  does not match, the check is skipped and the recipe is run. Using the switch
171
180
  `--always-prefer-system` runs the check always (even when the regular
172
181
  expression for the architecture does not match).
182
+
173
183
  - `relocate_paths`: a list of toplevel paths scanned recursively to perform
174
184
  relocation of executables and dynamic libraries **on macOS only**. If not
175
185
  specified, defaults to `bin`, `lib` and `lib64`.