alibuild 1.17.18__tar.gz → 1.17.19__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibuild-1.17.18 → alibuild-1.17.19}/.github/workflows/pr-check.yml +2 -3
- {alibuild-1.17.18 → alibuild-1.17.19}/.github/workflows/release.yml +17 -19
- alibuild-1.17.19/PACKAGING.md +11 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/PKG-INFO +36 -19
- {alibuild-1.17.18 → alibuild-1.17.19}/README.rst +27 -2
- {alibuild-1.17.18 → alibuild-1.17.19}/alfaBuild +0 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/aliBuild +0 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild.egg-info/PKG-INFO +36 -19
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild.egg-info/SOURCES.txt +2 -4
- alibuild-1.17.19/alibuild.egg-info/requires.txt +13 -0
- alibuild-1.17.19/alibuild.egg-info/top_level.txt +6 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/_version.py +2 -2
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/analytics.py +1 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/build.py +53 -16
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/build_template.sh +8 -4
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/cmd.py +26 -8
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/deps.py +8 -6
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/doctor.py +8 -4
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/git.py +1 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/sl.py +1 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/sync.py +3 -2
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/utilities.py +33 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alienv +2 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/quick.md +1 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/troubleshooting.md +3 -3
- {alibuild-1.17.18 → alibuild-1.17.19}/pyproject.toml +22 -15
- {alibuild-1.17.18 → alibuild-1.17.19}/templates/alibuild_to_please.jnj +1 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_args.py +0 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_build.py +0 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_clean.py +0 -1
- alibuild-1.17.19/tests/test_cmd.py +73 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_doctor.py +0 -1
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_packagelist.py +25 -4
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_utilities.py +36 -1
- alibuild-1.17.19/tests/testdist/tracking-env.sh +6 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tox.ini +8 -2
- alibuild-1.17.18/MANIFEST.in +0 -1
- alibuild-1.17.18/PACKAGING.md +0 -18
- alibuild-1.17.18/alibuild.egg-info/requires.txt +0 -5
- alibuild-1.17.18/alibuild.egg-info/top_level.txt +0 -1
- alibuild-1.17.18/requirements.txt +0 -6
- alibuild-1.17.18/setup.py +0 -98
- alibuild-1.17.18/tests/test_cmd.py +0 -44
- {alibuild-1.17.18 → alibuild-1.17.19}/.flake8 +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/.github/workflows/documentation.yml +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/.gitignore +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/.pylintrc +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/ANALYTICS.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/DESIGN.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/LICENSE.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/aliDeps +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/aliDoctor +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild.egg-info/dependency_links.txt +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/__init__.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/args.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/clean.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/init.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/log.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/scm.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/templating_plugin.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/workarea.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/codecov.yml +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/changelog +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/compat +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/control +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/copyright +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/files +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/debian/rules +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/README.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/SUPPORT +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/alice_logo.png +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/deps.png +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/index.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/reference.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/stylesheets/extra.css +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/docs/user.md +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/docs/mkdocs.yml +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/pb +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/setup.cfg +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_analytics.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_deps.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_git.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_hashing.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_init.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_log.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_parseRecipe.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_sync.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/test_workarea.py +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken1.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken2.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken3.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken4.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken5.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken6.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/broken7.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/clobber-initdotsh.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/defaults-o2.sh +0 -0
- {alibuild-1.17.18 → alibuild-1.17.19}/tests/testdist/delete-etc.sh +0 -0

{alibuild-1.17.18 → alibuild-1.17.19}/.github/workflows/pr-check.yml

@@ -18,14 +18,13 @@ permissions: {}
 jobs:
   unittest:
     name: python${{ matrix.python-version }}
-    runs-on: ubuntu-
+    runs-on: ubuntu-22.04
 
     strategy:
       fail-fast: false # continue executing other checks if one fails
       matrix:
         python-version:
-          - '3.
-          - '3.8.10' # ubuntu2004 container
+          - '3.8.18' # ubuntu2004 container
           - '3.9.16' # slc9 container
           - '3.10.6' # ubuntu2204 container
           - '3.11'

{alibuild-1.17.18 → alibuild-1.17.19}/.github/workflows/release.yml

@@ -9,30 +9,28 @@ permissions:
   contents: read # required for github.ref to be set
 
 jobs:
-  pypi:
-    name:
+  pypi-publish:
+    name: Upload release to PyPI
     runs-on: ubuntu-latest
-
+    environment:
+      name: release
+      url: https://pypi.org/p/alibuild
+    permissions:
+      id-token: write # IMPORTANT: this permission is mandatory for trusted publishing
     steps:
-
-
-
-
-
-
-
-
-
-        run: python setup.py sdist
-      - name: Publish distribution to PyPI
-        uses: pypa/gh-action-pypi-publish@v1.5.0
-        with:
-          user: __token__
-          password: ${{ secrets.pypi_password }}
+      - uses: actions/checkout@v4
+        with:
+          fetch-depth: 0
+      - name: Install build dependencies
+        run: python -m pip install --upgrade setuptools build pip
+      - name: Build the Python distribution
+        run: python -m build
+      - name: Publish package distributions to PyPI
+        uses: pypa/gh-action-pypi-publish@release/v1
 
   brew:
     name: Update Homebrew formula
-    needs: pypi
+    needs: pypi-publish
     runs-on: macos-latest
     if: startsWith(github.ref, 'refs/tags/') && !github.event.release.prerelease
     steps:

alibuild-1.17.19/PACKAGING.md (new file)

@@ -0,0 +1,11 @@
+# Publishing on PyPi
+
+alibuild is available from PyPi. Package page at:
+
+<https://pypi.python.org/pypi/alibuild/>
+
+In order to publish a new version:
+
+- Test, test, test.
+- Create a new release in GitHub.
+- The github action should automatically create a new release and upload the package to PyPi (it's a good idea to verify that the release was created and the package uploaded).

{alibuild-1.17.18 → alibuild-1.17.19}/PKG-INFO

@@ -1,24 +1,14 @@
 Metadata-Version: 2.4
 Name: alibuild
-Version: 1.17.18
+Version: 1.17.19
 Summary: ALICE Build Tool
-
-Author: Giulio Eulisse
-Author-email: Giulio Eulisse <giulio.eulisse@cern.ch>, Timo Wilken <timo.wilken@cern.ch>
-License: GPL
+Author-email: Giulio Eulisse <giulio.eulisse@cern.ch>, Timo Wilken <timo.wilken@cern.ch>, Sergio Garcia <sergio.garcia@cern.ch>
 Project-URL: homepage, https://alisw.github.io/alibuild
 Keywords: HEP,ALICE
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Build Tools
-
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.6
 Description-Content-Type: text/x-rst
 License-File: LICENSE.md
 Requires-Dist: pyyaml

@@ -26,10 +16,12 @@ Requires-Dist: requests
 Requires-Dist: distro
 Requires-Dist: jinja2
 Requires-Dist: boto3<1.36.0
-
-
-
-
+Provides-Extra: test
+Requires-Dist: tox; extra == "test"
+Provides-Extra: docs
+Requires-Dist: mkdocs; extra == "docs"
+Requires-Dist: mkdocs-material; extra == "docs"
+Requires-Dist: mkdocs-redirects; extra == "docs"
 Dynamic: license-file
 
 .. image:: https://badge.fury.io/py/alibuild.svg

@@ -45,7 +37,7 @@ for the externals and ALICE software are stored in
 Instant gratification with::
 
   pip install alibuild
-
+  aliBuild init
   aliBuild build AliRoot
   alienv enter AliRoot/latest
   aliroot -b

@@ -58,4 +50,29 @@ Pre-requisites
 ==============
 
 If you are using aliBuild directly from git clone, you should make sure
-you have
+you have the dependencies installed. The easiest way to do this is to run::
+
+  pip install -e .
+
+
+For developers
+==============
+
+If you want to contribute to aliBuild, you can run the tests with::
+
+  pip install -e .[test] # Only needed once
+  tox
+
+The test suite only runs fully on a Linux system, but there is a reduced suite for macOS, runnable with::
+
+  tox -e darwin
+
+You can also run only the unit tests (it's a lot faster than the full suite) with::
+
+  pytest
+
+To run the documentation locally, you can use::
+
+  pip install -e .[docs]
+  cd docs
+  mkdocs serve

{alibuild-1.17.18 → alibuild-1.17.19}/README.rst

@@ -11,7 +11,7 @@ for the externals and ALICE software are stored in
 Instant gratification with::
 
   pip install alibuild
-
+  aliBuild init
   aliBuild build AliRoot
   alienv enter AliRoot/latest
   aliroot -b

@@ -24,4 +24,29 @@ Pre-requisites
 ==============
 
 If you are using aliBuild directly from git clone, you should make sure
-you have
+you have the dependencies installed. The easiest way to do this is to run::
+
+  pip install -e .
+
+
+For developers
+==============
+
+If you want to contribute to aliBuild, you can run the tests with::
+
+  pip install -e .[test] # Only needed once
+  tox
+
+The test suite only runs fully on a Linux system, but there is a reduced suite for macOS, runnable with::
+
+  tox -e darwin
+
+You can also run only the unit tests (it's a lot faster than the full suite) with::
+
+  pytest
+
+To run the documentation locally, you can use::
+
+  pip install -e .[docs]
+  cd docs
+  mkdocs serve

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild.egg-info/PKG-INFO

@@ -1,24 +1,14 @@
 Metadata-Version: 2.4
 Name: alibuild
-Version: 1.17.18
+Version: 1.17.19
 Summary: ALICE Build Tool
-
-Author: Giulio Eulisse
-Author-email: Giulio Eulisse <giulio.eulisse@cern.ch>, Timo Wilken <timo.wilken@cern.ch>
-License: GPL
+Author-email: Giulio Eulisse <giulio.eulisse@cern.ch>, Timo Wilken <timo.wilken@cern.ch>, Sergio Garcia <sergio.garcia@cern.ch>
 Project-URL: homepage, https://alisw.github.io/alibuild
 Keywords: HEP,ALICE
 Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Build Tools
-
-Classifier: Programming Language :: Python :: 3.6
-Classifier: Programming Language :: Python :: 3.8
-Classifier: Programming Language :: Python :: 3.9
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.6
 Description-Content-Type: text/x-rst
 License-File: LICENSE.md
 Requires-Dist: pyyaml

@@ -26,10 +16,12 @@ Requires-Dist: requests
 Requires-Dist: distro
 Requires-Dist: jinja2
 Requires-Dist: boto3<1.36.0
-
-
-
-
+Provides-Extra: test
+Requires-Dist: tox; extra == "test"
+Provides-Extra: docs
+Requires-Dist: mkdocs; extra == "docs"
+Requires-Dist: mkdocs-material; extra == "docs"
+Requires-Dist: mkdocs-redirects; extra == "docs"
 Dynamic: license-file
 
 .. image:: https://badge.fury.io/py/alibuild.svg

@@ -45,7 +37,7 @@ for the externals and ALICE software are stored in
 Instant gratification with::
 
   pip install alibuild
-
+  aliBuild init
   aliBuild build AliRoot
   alienv enter AliRoot/latest
   aliroot -b

@@ -58,4 +50,29 @@ Pre-requisites
 ==============
 
 If you are using aliBuild directly from git clone, you should make sure
-you have
+you have the dependencies installed. The easiest way to do this is to run::
+
+  pip install -e .
+
+
+For developers
+==============
+
+If you want to contribute to aliBuild, you can run the tests with::
+
+  pip install -e .[test] # Only needed once
+  tox
+
+The test suite only runs fully on a Linux system, but there is a reduced suite for macOS, runnable with::
+
+  tox -e darwin
+
+You can also run only the unit tests (it's a lot faster than the full suite) with::
+
+  pytest
+
+To run the documentation locally, you can use::
+
+  pip install -e .[docs]
+  cd docs
+  mkdocs serve

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild.egg-info/SOURCES.txt

@@ -4,7 +4,6 @@
 ANALYTICS.md
 DESIGN.md
 LICENSE.md
-MANIFEST.in
 PACKAGING.md
 README.rst
 alfaBuild

@@ -15,8 +14,6 @@ alienv
 codecov.yml
 pb
 pyproject.toml
-requirements.txt
-setup.py
 tox.ini
 .github/workflows/documentation.yml
 .github/workflows/pr-check.yml

@@ -88,4 +85,5 @@ tests/testdist/broken6.sh
 tests/testdist/broken7.sh
 tests/testdist/clobber-initdotsh.sh
 tests/testdist/defaults-o2.sh
-tests/testdist/delete-etc.sh
+tests/testdist/delete-etc.sh
+tests/testdist/tracking-env.sh

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/analytics.py

@@ -26,7 +26,7 @@ def askForAnalytics():
         " https://github.com/alisw/alibuild/blob/master/ANALYTICS.md\n")
   a = input("Is that ok for you [YES/no]? ")
   if a.strip() and a.strip().lower().startswith("n"):
-    debug("User
+    debug("User requested disabling analytics.")
     return disable_analytics()
   return generate_analytics_id()
 

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/build.py

@@ -22,6 +22,7 @@ from glob import glob
 from textwrap import dedent
 from collections import OrderedDict
 from shlex import quote
+import tempfile
 
 import concurrent.futures
 import importlib

@@ -183,7 +184,11 @@ def storeHashes(package, specs, considerRelocation):
     for _, _, hasher in h_alternatives:
       hasher(data)
 
-
+  modifies_full_hash_dicts = ["env", "append_path", "prepend_path"]
+  if not spec["is_devel_pkg"] and "track_env" in spec:
+    modifies_full_hash_dicts.append("track_env")
+
+  for key in modifies_full_hash_dicts:
     if key not in spec:
       h_all("none")
     else:
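
A rough standalone sketch of what the hunk above does — fold a fixed list of spec keys into the full hash, and include track_env only for non-development packages — using hashlib in place of alibuild's own Hasher (names and the simplified per-key hashing are illustrative only):

    import hashlib

    def full_hash_sketch(spec: dict) -> str:
        """Fold selected spec keys into one SHA-1; absent keys hash as the literal 'none'."""
        h = hashlib.sha1()
        keys = ["env", "append_path", "prepend_path"]
        # track_env only influences the hash of non-development packages.
        if not spec.get("is_devel_pkg") and "track_env" in spec:
            keys.append("track_env")
        for key in keys:
            h.update(str(spec.get(key, "none")).encode())
        return h.hexdigest()

    base = {"is_devel_pkg": False, "env": {"CC": "gcc"}}
    print(full_hash_sketch(base))
    print(full_hash_sketch({**base, "track_env": {"O2_ROOT": "/opt/o2"}}))  # differs: track_env now matters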

@@ -265,6 +270,19 @@ def hash_local_changes(spec):
   class UntrackedChangesError(Exception):
     """Signal that we cannot detect code changes due to untracked files."""
   h = Hasher()
+  if "track_env" in spec:
+    assert isinstance(spec["track_env"], OrderedDict), \
+      "spec[%r] was of type %r" % ("track_env", type(spec["track_env"]))
+
+    # Python 3.12 changed the string representation of OrderedDicts from
+    # OrderedDict([(key, value)]) to OrderedDict({key: value}), so to remain
+    # compatible, we need to emulate the previous string representation.
+    h("OrderedDict([")
+    h(", ".join(
+      # XXX: We still rely on repr("str") being "'str'",
+      # and on repr(["a", "b"]) being "['a', 'b']".
+      "(%r, %r)" % (key, value) for key, value in spec["track_env"].items()))
+    h("])")
   def hash_output(msg, args):
     lines = msg % args
     # `git status --porcelain` indicates untracked files using "??".
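
The comment in the hunk above is the whole point of the change: Python 3.12 altered repr() for OrderedDict, which would silently change every hash that folds in track_env. A self-contained illustration of the emulated, version-stable form (helper name is made up, not alibuild code):

    from collections import OrderedDict

    def legacy_ordereddict_repr(d):
        """Render an OrderedDict the way repr() did up to Python 3.11."""
        return "OrderedDict([" + ", ".join(
            "(%r, %r)" % (key, value) for key, value in d.items()) + "])"

    env = OrderedDict([("CC", "gcc"), ("CXXFLAGS", "-O2")])
    # Python <= 3.11: repr(env) == "OrderedDict([('CC', 'gcc'), ('CXXFLAGS', '-O2')])"
    # Python >= 3.12: repr(env) == "OrderedDict({'CC': 'gcc', 'CXXFLAGS': '-O2'})"
    # Hashing repr(env) directly would therefore change after an interpreter upgrade;
    # the emulated form below is identical on every version.
    print(legacy_ordereddict_repr(env))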

@@ -391,7 +409,7 @@ def generate_initdotsh(package, specs, architecture, post_build=False):
                  if key != "DYLD_LIBRARY_PATH")
 
   # Return string without a trailing newline, since we expect call sites to
-  # append that (and the obvious way to inesrt it into the build
+  # append that (and the obvious way to inesrt it into the build template is by
   # putting the "%(initdotsh_*)s" on its own line, which has the same effect).
   return "\n".join(lines)
 

@@ -483,7 +501,11 @@ def doBuild(args, parser):
 
   install_wrapper_script("git", workDir)
 
-  with DockerRunner(args.dockerImage, args.docker_extra_args) as getstatusoutput_docker:
+  with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
+    def performPreferCheckWithTempDir(pkg, cmd):
+      with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{pkg['package']}_") as temp_dir:
+        return getstatusoutput_docker(cmd, cwd=temp_dir)
+
     systemPackages, ownPackages, failed, validDefaults = \
       getPackageList(packages = packages,
                      specs = specs,
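
The nested helper above runs each prefer-system/requirement probe from a freshly created temporary directory, so a probe can neither depend on nor pollute the caller's working directory. The same pattern in isolation, with plain subprocess and made-up names (the probe script is only an example):

    import subprocess
    import tempfile

    def run_probe_in_scratch_dir(package_name, probe_script):
        """Run a shell probe from a throwaway directory that is removed afterwards."""
        with tempfile.TemporaryDirectory(prefix=f"prefer_check_{package_name}_") as scratch:
            result = subprocess.run(["bash", "-c", probe_script], cwd=scratch,
                                    capture_output=True, text=True)
            return result.returncode, result.stdout

    # Any file the probe drops (conftest.log here) lands in the scratch dir and vanishes with it.
    rc, out = run_probe_in_scratch_dir("zlib", "touch conftest.log && command -v cc")
    print("probe exit code:", rc)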

@@ -494,8 +516,8 @@ def doBuild(args, parser):
                      disable = args.disable,
                      force_rebuild = args.force_rebuild,
                      defaults = args.defaults,
-                     performPreferCheck =
-                     performRequirementCheck =
+                     performPreferCheck = performPreferCheckWithTempDir,
+                     performRequirementCheck = performPreferCheckWithTempDir,
                      performValidateDefaults = lambda spec: validateDefaults(spec, args.defaults),
                      overrides = overrides,
                      taps = taps,

@@ -872,8 +894,18 @@ def doBuild(args, parser):
       spec["hash"] = spec["local_revision_hash"]
     else:
       spec["hash"] = spec["remote_revision_hash"]
+
+    # We do not use the override for devel packages, because we
+    # want to avoid having to rebuild things when the /tmp gets cleaned.
+    if spec["is_devel_pkg"]:
+      buildWorkDir = args.workDir
+    else:
+      buildWorkDir = os.environ.get("ALIBUILD_BUILD_WORK_DIR", args.workDir)
+
+    buildRoot = join(buildWorkDir, "BUILD", spec["hash"])
+
     spec["old_devel_hash"] = readHashFile(join(
-
+      buildRoot, spec["package"], ".build_succeeded"))
 
     # Recreate symlinks to this development package builds.
     if spec["is_devel_pkg"]:
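
The selection logic added above, reduced to a standalone function (variable names are illustrative): devel packages always build under the main work directory, while everything else may be redirected, e.g. to a fast scratch disk, via ALIBUILD_BUILD_WORK_DIR:

    import os
    from os.path import join

    def pick_build_root(work_dir, pkg_hash, is_devel_pkg):
        """Return the BUILD root for one package, honouring ALIBUILD_BUILD_WORK_DIR."""
        if is_devel_pkg:
            build_work_dir = work_dir          # devel builds must survive /tmp cleanups
        else:
            build_work_dir = os.environ.get("ALIBUILD_BUILD_WORK_DIR", work_dir)
        return join(build_work_dir, "BUILD", pkg_hash)

    os.environ["ALIBUILD_BUILD_WORK_DIR"] = "/tmp/alibuild-scratch"
    print(pick_build_root("/alice/sw", "deadbeef", is_devel_pkg=False))  # /tmp/alibuild-scratch/BUILD/deadbeef
    print(pick_build_root("/alice/sw", "deadbeef", is_devel_pkg=True))   # /alice/sw/BUILD/deadbeef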

@@ -881,9 +913,9 @@ def doBuild(args, parser):
       # Ignore errors here, because the path we're linking to might not exist
       # (if this is the first run through the loop). On the second run
       # through, the path should have been created by the build process.
-      call_ignoring_oserrors(symlink, spec["hash"], join(
+      call_ignoring_oserrors(symlink, spec["hash"], join(buildWorkDir, "BUILD", spec["package"] + "-latest"))
       if develPrefix:
-        call_ignoring_oserrors(symlink, spec["hash"], join(
+        call_ignoring_oserrors(symlink, spec["hash"], join(buildWorkDir, "BUILD", spec["package"] + "-latest-" + develPrefix))
       # Last package built gets a "latest" mark.
       call_ignoring_oserrors(symlink, "{version}-{revision}".format(**spec),
                              join(workDir, args.architecture, spec["package"], "latest"))
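
The symlink maintenance above is best-effort: failures are deliberately swallowed because the target may not exist on the first pass. A plausible shape for such a helper, with illustrative paths (alibuild's actual call_ignoring_oserrors may differ in detail):

    import os

    def call_ignoring_oserrors(function, *args):
        """Call function(*args), ignoring OSError so missing paths are not fatal."""
        try:
            return function(*args)
        except OSError:
            return None

    os.makedirs("/tmp/sw/BUILD", exist_ok=True)
    # Repoint the zlib-latest convenience symlink at a hashed build directory.
    call_ignoring_oserrors(os.remove, "/tmp/sw/BUILD/zlib-latest")     # may not exist yet
    call_ignoring_oserrors(os.symlink, "deadbeef", "/tmp/sw/BUILD/zlib-latest")
    print(os.readlink("/tmp/sw/BUILD/zlib-latest"))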

@@ -933,7 +965,7 @@ def doBuild(args, parser):
     # assuming the package is not a development one. We also can
     # delete the SOURCES in case we have aggressive-cleanup enabled.
     if not spec["is_devel_pkg"] and args.autoCleanup:
-      cleanupDirs = [
+      cleanupDirs = [buildRoot,
                      join(workDir, "INSTALLROOT", spec["hash"])]
       if args.aggressiveCleanup:
         cleanupDirs.append(join(workDir, "SOURCES", spec["package"]))

@@ -942,13 +974,13 @@ def doBuild(args, parser):
       for d in cleanupDirs:
         shutil.rmtree(d.encode("utf8"), True)
       try:
-        unlink(join(
+        unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest"))
         if "develPrefix" in args:
-          unlink(join(
+          unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest-" + args.develPrefix))
       except:
         pass
       try:
-        rmdir(join(
+        rmdir(join(buildWorkDir, "BUILD"))
         rmdir(join(workDir, "INSTALLROOT"))
       except:
         pass

@@ -1035,21 +1067,26 @@ def doBuild(args, parser):
       ("FULL_RUNTIME_REQUIRES", " ".join(spec["full_runtime_requires"])),
       ("FULL_BUILD_REQUIRES", " ".join(spec["full_build_requires"])),
       ("FULL_REQUIRES", " ".join(spec["full_requires"])),
+      ("ALIBUILD_PREFER_SYSTEM_KEY", spec.get("key", "")),
     ]
     # Add the extra environment as passed from the command line.
     buildEnvironment += [e.partition('=')[::2] for e in args.environment]
 
+    # Add the computed track_env environment
+    buildEnvironment += [(key, value) for key, value in spec.get("track_env", {}).items()]
+
     # In case the --docker options is passed, we setup a docker container which
     # will perform the actual build. Otherwise build as usual using bash.
     if args.docker:
       build_command = (
         "docker run --rm --entrypoint= --user $(id -u):$(id -g) "
-        "-v {workdir}:/sw -v {scriptDir}/build.sh:/build.sh:ro "
+        "-v {workdir}:/sw -v{configDir}:/alidist:ro -v {scriptDir}/build.sh:/build.sh:ro "
         "{mirrorVolume} {develVolumes} {additionalEnv} {additionalVolumes} "
-        "-e WORK_DIR_OVERRIDE=/sw {extraArgs} {image} bash -ex /build.sh"
+        "-e WORK_DIR_OVERRIDE=/sw -e ALIBUILD_CONFIG_DIR_OVERRIDE=/alidist {extraArgs} {image} bash -ex /build.sh"
       ).format(
         image=quote(args.dockerImage),
         workdir=quote(abspath(args.workDir)),
+        configDir=quote(abspath(args.configDir)),
         scriptDir=quote(scriptDir),
         extraArgs=" ".join(map(quote, args.docker_extra_args)),
         additionalEnv=" ".join(
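
With --docker, the change above additionally mounts the recipe directory read-only at /alidist and exports ALIBUILD_CONFIG_DIR_OVERRIDE into the container. A hedged sketch of assembling such a command line with shlex.quote (image name and paths are invented for the example):

    from shlex import quote

    def docker_build_command(image, work_dir, config_dir, script_dir):
        """Build a docker run command that mounts the work dir, the alidist recipes
        (read-only) and the generated build script, then runs it under bash -ex."""
        return ("docker run --rm --entrypoint= "
                "-v {workdir}:/sw -v {configdir}:/alidist:ro -v {scriptdir}/build.sh:/build.sh:ro "
                "-e WORK_DIR_OVERRIDE=/sw -e ALIBUILD_CONFIG_DIR_OVERRIDE=/alidist "
                "{image} bash -ex /build.sh").format(
                    workdir=quote(work_dir), configdir=quote(config_dir),
                    scriptdir=quote(script_dir), image=quote(image))

    print(docker_build_command("alisw/slc9-builder:latest", "/alice/sw",
                               "/alice/alidist", "/alice/sw/SPECS/zlib"))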

@@ -1095,7 +1132,7 @@ def doBuild(args, parser):
     """).format(
       h=socket.gethostname(),
       sd=scriptDir,
-      w=
+      w=buildWorkDir,
       p=spec["package"],
       devSuffix="-" + args.develPrefix
                 if "develPrefix" in args and spec["is_devel_pkg"]

@@ -1163,7 +1200,7 @@ def doBuild(args, parser):
   for spec in specs.values():
     if spec["is_devel_pkg"]:
       banner("Build directory for devel package %s:\n%s/BUILD/%s-latest%s/%s",
-             spec["package"], abspath(
+             spec["package"], abspath(buildWorkDir), spec["package"],
             ("-" + args.develPrefix) if "develPrefix" in args else "",
             spec["package"])
   if untrackedFilesDirectories:

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/build_template.sh

@@ -11,6 +11,7 @@ set -e
 set +h
 function hash() { true; }
 export WORK_DIR="${WORK_DIR_OVERRIDE:-%(workDir)s}"
+export ALIBUILD_CONFIG_DIR="${ALIBUILD_CONFIG_DIR_OVERRIDE:-%(configDir)s}"
 
 # Insert our own wrapper scripts into $PATH, patched to use the system OpenSSL,
 # instead of the one we build ourselves.

@@ -43,16 +44,19 @@ export PKG_BUILDNUM="$PKGREVISION"
 export PKGPATH=${ARCHITECTURE}/${PKGNAME}/${PKGVERSION}-${PKGREVISION}
 mkdir -p "$WORK_DIR/BUILD" "$WORK_DIR/SOURCES" "$WORK_DIR/TARS" \
          "$WORK_DIR/SPECS" "$WORK_DIR/INSTALLROOT"
-export BUILDROOT="$WORK_DIR/BUILD/$PKGHASH"
-
 # If we are in development mode, then install directly in $WORK_DIR/$PKGPATH,
 # so that we can do "make install" directly into BUILD/$PKGPATH and have
 # changes being propagated.
+# Moreover, devel packages should always go in the official WORK_DIR
 if [ -n "$DEVEL_HASH" ]; then
+  export ALIBUILD_BUILD_WORK_DIR="${WORK_DIR}"
   export INSTALLROOT="$WORK_DIR/$PKGPATH"
 else
   export INSTALLROOT="$WORK_DIR/INSTALLROOT/$PKGHASH/$PKGPATH"
+  export ALIBUILD_BUILD_WORK_DIR="${ALIBUILD_BUILD_WORK_DIR:-$WORK_DIR}"
 fi
+
+export BUILDROOT="$ALIBUILD_BUILD_WORK_DIR/BUILD/$PKGHASH"
 export SOURCEDIR="$WORK_DIR/SOURCES/$PKGNAME/$PKGVERSION/$COMMIT_HASH"
 export BUILDDIR="$BUILDROOT/$PKGNAME"
 

@@ -99,9 +103,9 @@ unset DYLD_LIBRARY_PATH
 EOF
 
 cd "$BUILDROOT"
-ln -snf $PKGHASH $
+ln -snf $PKGHASH "${BUILDROOT}-latest"
 if [[ $DEVEL_PREFIX ]]; then
-  ln -snf $PKGHASH $
+  ln -snf $PKGHASH "${BUILDROOT}-latest-$DEVEL_PREFIX"
 fi
 
 cd "$BUILDDIR"

{alibuild-1.17.18 → alibuild-1.17.19}/alibuild_helpers/cmd.py

@@ -37,9 +37,9 @@ def getoutput(command, timeout=None):
     return decode_with_fallback(stdout)
 
 
-def getstatusoutput(command, timeout=None):
+def getstatusoutput(command, timeout=None, cwd=None):
     """Run command and return its return code and output (stdout and stderr)."""
-    proc = Popen(command, shell=isinstance(command, str), stdout=PIPE, stderr=STDOUT)
+    proc = Popen(command, shell=isinstance(command, str), stdout=PIPE, stderr=STDOUT, cwd=cwd)
     try:
         merged_output, _ = proc.communicate(timeout=timeout)
     except TimeoutExpired:
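
The new cwd argument is simply forwarded to subprocess.Popen, which starts the child process in that directory. A minimal standalone version of the same idea (not the alibuild helper itself):

    from subprocess import PIPE, STDOUT, Popen

    def getstatusoutput_sketch(command, timeout=None, cwd=None):
        """Run a command, optionally from another working directory, and return
        its exit status together with the merged stdout/stderr output."""
        proc = Popen(command, shell=isinstance(command, str),
                     stdout=PIPE, stderr=STDOUT, cwd=cwd)
        merged_output, _ = proc.communicate(timeout=timeout)
        return proc.returncode, merged_output.decode(errors="replace")

    status, output = getstatusoutput_sketch("pwd", cwd="/tmp")
    print(status, output.strip())  # 0 and the resolved path of /tmp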

@@ -78,24 +78,42 @@ class DockerRunner:
     instead.
     """
 
-    def __init__(self, docker_image, docker_run_args=()) -> None:
+    def __init__(self, docker_image, docker_run_args=(), extra_env={}, extra_volumes=[]) -> None:
         self._docker_image = docker_image
         self._docker_run_args = docker_run_args
         self._container = None
+        self._extra_env = extra_env
+        self._extra_volumes = extra_volumes
 
     def __enter__(self):
         if self._docker_image:
             # "sleep inf" pauses forever, until we kill it.
-
+            envOpts = []
+            volumes = []
+            for env in self._extra_env.items():
+                envOpts.append("-e")
+                envOpts.append(f"{env[0]}={env[1]}")
+            for v in self._extra_volumes:
+                volumes.append("-v")
+                volumes.append(v)
+            cmd = ["docker", "run", "--detach"] + envOpts + volumes + ["--rm", "--entrypoint="]
             cmd += self._docker_run_args
             cmd += [self._docker_image, "sleep", "inf"]
             self._container = getoutput(cmd).strip()
 
-        def getstatusoutput_docker(cmd):
+        def getstatusoutput_docker(cmd, cwd=None):
             if self._container is None:
-
-
-
+                command_prefix=""
+                if self._extra_env:
+                    command_prefix="env " + " ".join("{}={}".format(k, v) for (k,v) in self._extra_env.items()) + " "
+                return getstatusoutput("{}{} -c {}".format(command_prefix, BASH, quote(cmd))
+                                       , cwd=cwd)
+            envOpts = []
+            for env in self._extra_env.items():
+                envOpts.append("-e")
+                envOpts.append("{}={}".format(env[0], env[1]))
+            exec_cmd = ["docker", "container", "exec"] + envOpts + [self._container, "bash", "-c", cmd]
+            return getstatusoutput(exec_cmd, cwd=cwd)
 
         return getstatusoutput_docker
 
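
The two new keyword arguments are expanded into repeated -e and -v options, both for the long-lived docker run container and for each docker container exec call. A self-contained sketch of that expansion (function name and values are illustrative):

    from shlex import join as shlex_join  # Python >= 3.8

    def docker_extra_args(extra_env, extra_volumes):
        """Turn {'K': 'V'} and ['host:cont:ro'] into ['-e', 'K=V', '-v', 'host:cont:ro']."""
        args = []
        for key, value in extra_env.items():
            args += ["-e", f"{key}={value}"]
        for volume in extra_volumes:
            args += ["-v", volume]
        return args

    extra = docker_extra_args({"ALIBUILD_CONFIG_DIR": "/alidist"},
                              ["/alice/alidist:/alidist:ro"])
    cmd = (["docker", "run", "--detach"] + extra
           + ["--rm", "--entrypoint=", "alisw/slc9-builder:latest", "sleep", "inf"])
    print(shlex_join(cmd))  # printed only; nothing is actually started here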