alibuild 1.15.2__tar.gz → 1.16.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {alibuild-1.15.2/alibuild.egg-info → alibuild-1.16.0}/PKG-INFO +1 -6
- {alibuild-1.15.2 → alibuild-1.16.0}/alfaBuild +2 -2
- {alibuild-1.15.2 → alibuild-1.16.0}/aliBuild +2 -2
- {alibuild-1.15.2 → alibuild-1.16.0/alibuild.egg-info}/PKG-INFO +1 -6
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild.egg-info/SOURCES.txt +4 -2
- alibuild-1.16.0/alibuild.egg-info/requires.txt +5 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/__init__.py +2 -2
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/_version.py +2 -2
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/args.py +44 -4
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/build.py +171 -106
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/build_template.sh +46 -30
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/clean.py +37 -19
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/deps.py +5 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/utilities.py +32 -7
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/user.markdown +2 -1
- {alibuild-1.15.2 → alibuild-1.16.0}/pyproject.toml +0 -1
- {alibuild-1.15.2 → alibuild-1.16.0}/setup.py +0 -9
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_args.py +1 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_build.py +77 -29
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_clean.py +62 -33
- alibuild-1.15.2/tests/test_system_replacement.py → alibuild-1.16.0/tests/test_packagelist.py +31 -3
- alibuild-1.16.0/tests/testdist/clobber-initdotsh.sh +4 -0
- alibuild-1.16.0/tests/testdist/delete-etc.sh +4 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tox.ini +6 -0
- alibuild-1.15.2/alibuild.egg-info/requires.txt +0 -13
- {alibuild-1.15.2 → alibuild-1.16.0}/.flake8 +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/.github/workflows/pr-check.yml +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/.github/workflows/release.yml +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/.gitignore +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/.pylintrc +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/ANALYTICS.md +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/DESIGN.md +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/Jenkinsfile +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/LICENSE.md +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/MANIFEST.in +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/PACKAGING.md +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/README.rst +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/aliDeps +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/aliDoctor +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild.egg-info/dependency_links.txt +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild.egg-info/top_level.txt +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/analytics.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/cmd.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/doctor.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/git.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/init.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/log.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/scm.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/sl.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/sync.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/templating_plugin.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/workarea.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/alienv +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/codecov.yml +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/changelog +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/compat +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/control +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/copyright +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/files +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/debian/rules +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/README.md +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/SUPPORT +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/_config.yml +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/_includes/section_toc.html +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/_layouts/main.html +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/_layouts/redirect.html +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/css/bootstrap-theme.min.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/css/bootstrap.min.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/css/pure-min.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/deps.png +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/index.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/js/highlight.pack.js +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/main.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/o2-dataflow-tutorial.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/o2-tutorial.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/quick.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/reference.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/side-menu.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/syntax.css +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/troubleshooting.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/tutorial.markdown +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/docs/ui.js +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/pb +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/requirements.txt +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/script/custom_htmlproofer.rb +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/setup.cfg +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_analytics.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_cmd.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_coverage.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_deps.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_doctor.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_git.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_hashing.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_init.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_log.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_parseRecipe.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_sync.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_utilities.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/test_workarea.py +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken1.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken2.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken3.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken4.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken5.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken6.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/broken7.sh +0 -0
- {alibuild-1.15.2 → alibuild-1.16.0}/tests/testdist/defaults-o2.sh +0 -0
{alibuild-1.15.2/alibuild.egg-info → alibuild-1.16.0}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibuild
-Version: 1.15.2
+Version: 1.16.0
 Summary: ALICE Build Tool
 Home-page: https://alisw.github.io/alibuild
 Author: Giulio Eulisse
@@ -12,7 +12,6 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
@@ -26,10 +25,6 @@ Requires-Dist: requests
 Requires-Dist: distro
 Requires-Dist: jinja2
 Requires-Dist: boto3
-Requires-Dist: futures; python_version == "2.7"
-Requires-Dist: futures; python_version == "2.6"
-Requires-Dist: argparse; python_version == "2.6"
-Requires-Dist: ordereddict; python_version == "2.6"
 
 .. image:: https://badge.fury.io/py/alibuild.svg
 .. image:: https://github.com/alisw/alibuild/actions/workflows/pr-check.yml/badge.svg?branch=master&event=push
```
{alibuild-1.15.2 → alibuild-1.16.0}/alfaBuild

```diff
@@ -60,7 +60,7 @@ def doMain(args, parser):
 
   if args.action == "version":
     print("aliBuild version: {version} ({arch})".format(
-      version=__version__, arch=args.architecture or "unknown"))
+      version=__version__ or "unknown", arch=args.architecture or "unknown"))
     sys.exit(0)
 
   if args.action == "doctor":
@@ -91,7 +91,7 @@ if __name__ == "__main__":
   logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
 
   os.environ["ALIBUILD_ANALYTICS_ID"] = "UA-77346950-1"
-  os.environ["ALIBUILD_VERSION"] = __version__
+  os.environ["ALIBUILD_VERSION"] = __version__ or ""
   if args.action == "analytics":
     if args.state == "off":
       disable_analytics()
```
{alibuild-1.15.2 → alibuild-1.16.0}/aliBuild

```diff
@@ -60,7 +60,7 @@ def doMain(args, parser):
 
   if args.action == "version":
     print("aliBuild version: {version} ({arch})".format(
-      version=__version__, arch=args.architecture or "unknown"))
+      version=__version__ or "unknown", arch=args.architecture or "unknown"))
     sys.exit(0)
 
   if args.action == "doctor":
@@ -91,7 +91,7 @@ if __name__ == "__main__":
   logger.setLevel(logging.DEBUG if args.debug else logging.INFO)
 
   os.environ["ALIBUILD_ANALYTICS_ID"] = "UA-77346950-1"
-  os.environ["ALIBUILD_VERSION"] = __version__
+  os.environ["ALIBUILD_VERSION"] = __version__ or ""
   if args.action == "analytics":
     if args.state == "off":
       disable_analytics()
```
{alibuild-1.15.2 → alibuild-1.16.0/alibuild.egg-info}/PKG-INFO

```diff
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: alibuild
-Version: 1.15.2
+Version: 1.16.0
 Summary: ALICE Build Tool
 Home-page: https://alisw.github.io/alibuild
 Author: Giulio Eulisse
@@ -12,7 +12,6 @@ Classifier: Development Status :: 5 - Production/Stable
 Classifier: Intended Audience :: Developers
 Classifier: Topic :: Software Development :: Build Tools
 Classifier: License :: OSI Approved :: GNU General Public License v3 (GPLv3)
-Classifier: Programming Language :: Python :: 2.7
 Classifier: Programming Language :: Python :: 3.6
 Classifier: Programming Language :: Python :: 3.8
 Classifier: Programming Language :: Python :: 3.9
@@ -26,10 +25,6 @@ Requires-Dist: requests
 Requires-Dist: distro
 Requires-Dist: jinja2
 Requires-Dist: boto3
-Requires-Dist: futures; python_version == "2.7"
-Requires-Dist: futures; python_version == "2.6"
-Requires-Dist: argparse; python_version == "2.6"
-Requires-Dist: ordereddict; python_version == "2.6"
 
 .. image:: https://badge.fury.io/py/alibuild.svg
 .. image:: https://github.com/alisw/alibuild/actions/workflows/pr-check.yml/badge.svg?branch=master&event=push
```
{alibuild-1.15.2 → alibuild-1.16.0}/alibuild.egg-info/SOURCES.txt

```diff
@@ -87,9 +87,9 @@ tests/test_git.py
 tests/test_hashing.py
 tests/test_init.py
 tests/test_log.py
+tests/test_packagelist.py
 tests/test_parseRecipe.py
 tests/test_sync.py
-tests/test_system_replacement.py
 tests/test_utilities.py
 tests/test_workarea.py
 tests/testdist/broken1.sh
@@ -99,4 +99,6 @@ tests/testdist/broken4.sh
 tests/testdist/broken5.sh
 tests/testdist/broken6.sh
 tests/testdist/broken7.sh
-tests/testdist/defaults-o2.sh
+tests/testdist/clobber-initdotsh.sh
+tests/testdist/defaults-o2.sh
+tests/testdist/delete-etc.sh
```
{alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/__init__.py

```diff
@@ -9,13 +9,13 @@ except ImportError:
   try:
     from setuptools_scm import get_version
   except ImportError:
-    __version__ =
+    __version__ = None
   else:
     import os.path
     source_root = os.path.join(os.path.dirname(__file__), os.path.pardir)
     try:
       __version__ = get_version(root=source_root)
     except LookupError:
-      __version__ =
+      __version__ = None
     finally:
       del get_version, source_root
```
{alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/args.py

```diff
@@ -105,6 +105,12 @@ def doParseArgs():
                                   "same effect as adding 'force_rebuild: true' to its recipe "
                                   "in CONFIGDIR. You can specify this option multiple times or "
                                   "separate multiple arguments with commas."))
+  build_parser.add_argument("--annotate", default=[], action="append", metavar="PACKAGE=COMMENT",
+                            help=("Store COMMENT in the build metadata for PACKAGE. This option "
+                                  "can be given multiple times, if you want to store comments "
+                                  "in multiple packages. The comment will only be stored if "
+                                  "PACKAGE is compiled or downloaded during this run; if it "
+                                  "already exists, this does not happen."))
 
   build_docker = build_parser.add_argument_group(title="Build inside a container", description="""\
 Builds can be done inside a Docker container, to make it easier to get a
@@ -272,6 +278,29 @@ def doParseArgs():
                                  "Passed through verbatim -- separate multiple arguments "
                                  "with spaces, and make sure quoting is correct! Implies --docker."))
 
+  doctor_remote = doctor_parser.add_argument_group(title="Re-use prebuilt tarballs", description="""\
+Reusing prebuilt tarballs saves compilation time, as common packages need not
+be rebuilt from scratch. rsync://, https://, b3:// and s3:// remote stores
+are recognised. Some of these require credentials: s3:// remotes require an
+~/.s3cfg; b3:// remotes require AWS_ACCESS_KEY_ID and AWS_SECRET_ACCESS_KEY
+environment variables. A useful remote store is
+'https://s3.cern.ch/swift/v1/alibuild-repo'. It requires no credentials and
+provides tarballs for the most common supported architectures.
+""")
+  doctor_remote.add_argument("--no-remote-store", action="store_true",
+                             help="Disable the use of the remote store, even if it is enabled by default.")
+  doctor_remote.add_argument("--remote-store", dest="remoteStore", metavar="STORE", default="", help="""\
+Where to find prebuilt tarballs to reuse. See above for available remote stores.
+End with ::rw if you want to upload (in that case, ::rw is stripped and --write-store
+is set to the same value). Implies --no-system. May be set to a default store on some
+architectures; use --no-remote-store to disable it in that case.
+""")
+  doctor_remote.add_argument("--write-store", dest="writeStore", metavar="STORE", default="",
+                             help=("Where to upload newly built packages. Same syntax as --remote-store, "
+                                   "except ::rw is not recognised. Implies --no-system."))
+  doctor_remote.add_argument("--insecure", dest="insecure", action="store_true",
+                             help="Don't validate TLS certificates when connecting to an https:// remote store.")
+
   doctor_dirs = doctor_parser.add_argument_group(title="Customise aliBuild directories")
   doctor_dirs.add_argument("-C", "--chdir", metavar="DIR", dest="chdir", default=DEFAULT_CHDIR,
                            help=("Change to the specified directory before doing anything. "
```
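The help text above documents the `::rw` suffix convention for `--remote-store`. Below is a minimal standalone sketch of that convention; the helper name and the rsync URL are illustrative only (they are not part of aliBuild), while the CERN S3 URL is the one quoted in the help text.

```python
# Minimal sketch of the "::rw" convention described in the --remote-store help
# text above. The function name and the rsync:// example are illustrative;
# aliBuild's real handling happens in its own argument post-processing.
def split_remote_store(remote_store):
    """Return (read_store, write_store) for a --remote-store value."""
    if remote_store.endswith("::rw"):
        store = remote_store[:-len("::rw")]
        # Uploading requested: read from and write to the same store.
        return store, store
    # Read-only: no write store is configured.
    return remote_store, ""

print(split_remote_store("https://s3.cern.ch/swift/v1/alibuild-repo"))
print(split_remote_store("rsync://example.org/alibuild-tarballs/::rw"))
```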
```diff
@@ -355,11 +384,12 @@ On Linux, x86-64:
 On Linux, POWER8 / PPC64 (little endian):
   RHEL7 / CC7 compatible: slc7_ppc64
 
-On Mac,
-
-
+On Mac, 1-2 latest supported OSX versions:
+  Intel: osx_x86-64
+  Apple Silicon: osx_arm64
 """
 
+# When updating this variable, also update docs/user.markdown!
 S3_SUPPORTED_ARCHS = "slc7_x86-64", "slc8_x86-64", "ubuntu2004_x86-64", "ubuntu2204_x86-64", "slc9_x86-64"
 
 def finaliseArgs(args, parser):
@@ -410,7 +440,17 @@ def finaliseArgs(args, parser):
   if args.docker and not args.dockerImage:
     args.dockerImage = "registry.cern.ch/alisw/%s-builder" % args.architecture.split("_")[0]
 
-  if
+  if "annotate" in args:
+    for comment_assignment in args.annotate:
+      if "=" not in comment_assignment:
+        parser.error("--annotate takes arguments of the form PACKAGE=COMMENT")
+    args.annotate = {
+      package: comment
+      for package, _, comment
+      in (assignment.partition("=") for assignment in args.annotate)
+    }
+
+  if args.action in ("build", "doctor"):
     args.configDir = args.configDir
 
     # On selected platforms, caching is active by default
```
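For reference, this is how the PACKAGE=COMMENT assignments collected by `--annotate` end up as a dictionary, following the `str.partition()` comprehension added to `finaliseArgs` above. The snippet is a standalone sketch; the package names and comments are invented.

```python
# Standalone sketch of the --annotate parsing added to finaliseArgs above.
# The package names and comments below are invented examples.
annotate = ["O2=nightly test build", "ROOT=patched for gcc 13"]

for comment_assignment in annotate:
    if "=" not in comment_assignment:
        raise SystemExit("--annotate takes arguments of the form PACKAGE=COMMENT")

# str.partition("=") splits on the first "=", so comments may themselves contain "=".
annotate = {package: comment
            for package, _, comment
            in (assignment.partition("=") for assignment in annotate)}

print(annotate)  # {'O2': 'nightly test build', 'ROOT': 'patched for gcc 13'}
```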
{alibuild-1.15.2 → alibuild-1.16.0}/alibuild_helpers/build.py

```diff
@@ -8,7 +8,7 @@ from alibuild_helpers.cmd import execute, getstatusoutput, DockerRunner, BASH, i
 from alibuild_helpers.utilities import prunePaths
 from alibuild_helpers.utilities import resolve_store_path
 from alibuild_helpers.utilities import format, parseDefaults, readDefaults
-from alibuild_helpers.utilities import getPackageList
+from alibuild_helpers.utilities import getPackageList, asList
 from alibuild_helpers.utilities import validateDefaults
 from alibuild_helpers.utilities import Hasher
 from alibuild_helpers.utilities import yamlDump
@@ -33,6 +33,7 @@ except ImportError:
 
 import concurrent.futures
 import importlib
+import json
 import socket
 import os
 import re
```
```diff
@@ -339,6 +340,135 @@ def better_tarball(spec, old, new):
   return old if hashes.index(old_hash) < hashes.index(new_hash) else new
 
 
+def generate_initdotsh(package, specs, architecture, post_build=False):
+  """Return the contents of the given package's etc/profile/init.sh as a string.
+
+  If post_build is true, also generate variables pointing to the package
+  itself; else, only generate variables pointing at it dependencies.
+  """
+  spec = specs[package]
+  # Allow users to override ALIBUILD_ARCH_PREFIX if they manually source
+  # init.sh. This is useful for development off CVMFS, since we have a
+  # slightly different directory hierarchy there.
+  lines = [': "${ALIBUILD_ARCH_PREFIX:=%s}"' % architecture]
+
+  # Generate the part which sources the environment for all the dependencies.
+  # We guarantee that a dependency is always sourced before the parts
+  # depending on it, but we do not guarantee anything for the order in which
+  # unrelated components are activated.
+  # These variables are also required during the build itself, so always
+  # generate them.
+  lines.extend((
+    '[ -n "${{{bigpackage}_REVISION}}" ] || '
+    '. "$WORK_DIR/$ALIBUILD_ARCH_PREFIX"/{package}/{version}-{revision}/etc/profile.d/init.sh'
+  ).format(
+    bigpackage=dep.upper().replace("-", "_"),
+    package=quote(specs[dep]["package"]),
+    version=quote(specs[dep]["version"]),
+    revision=quote(specs[dep]["revision"]),
+  ) for dep in spec.get("requires", ()))
+
+  if post_build:
+    bigpackage = package.upper().replace("-", "_")
+
+    # Set standard variables related to the package itself. These should only
+    # be set once the build has actually completed.
+    lines.extend(line.format(
+      bigpackage=bigpackage,
+      package=quote(spec["package"]),
+      version=quote(spec["version"]),
+      revision=quote(spec["revision"]),
+      hash=quote(spec["hash"]),
+      commit_hash=quote(spec["commit_hash"]),
+    ) for line in (
+      'export {bigpackage}_ROOT="$WORK_DIR/$ALIBUILD_ARCH_PREFIX"/{package}/{version}-{revision}',
+      "export {bigpackage}_VERSION={version}",
+      "export {bigpackage}_REVISION={revision}",
+      "export {bigpackage}_HASH={hash}",
+      "export {bigpackage}_COMMIT={commit_hash}",
+    ))
+
+    # Generate the part which sets the environment variables related to the
+    # package itself. This can be variables set via the "env" keyword in the
+    # metadata or paths which get concatenated via the "{append,prepend}_path"
+    # keys. These should only be set once the build has actually completed,
+    # since the paths referred to will only exist then.
+
+    # First, output a sensible error message if types are wrong.
+    for key in ("env", "append_path", "prepend_path"):
+      dieOnError(not isinstance(spec.get(key, {}), dict),
+                 "Tag `%s' in %s should be a dict." % (key, package))
+
+    # Set "env" variables.
+    # We only put the values in double-quotes, so that they can refer to other
+    # shell variables or do command substitution (e.g. $(brew --prefix ...)).
+    lines.extend('export {}="{}"'.format(key, value)
+                 for key, value in spec.get("env", {}).items()
+                 if key != "DYLD_LIBRARY_PATH")
+
+    # Append paths to variables, if requested using append_path.
+    # Again, only put values in double quotes so that they can refer to other variables.
+    lines.extend('export {key}="${key}:{value}"'
+                 .format(key=key, value=":".join(asList(value)))
+                 for key, value in spec.get("append_path", {}).items()
+                 if key != "DYLD_LIBRARY_PATH")
+
+    # First convert all values to list, so that we can use .setdefault().insert() below.
+    prepend_path = {key: asList(value)
+                    for key, value in spec.get("prepend_path", {}).items()}
+    # By default we add the .../bin directory to PATH and .../lib to LD_LIBRARY_PATH.
+    # Prepend to these paths, so that our packages win against system ones.
+    for key, value in (("PATH", "bin"), ("LD_LIBRARY_PATH", "lib")):
+      prepend_path.setdefault(key, []).insert(0, "${}_ROOT/{}".format(bigpackage, value))
+    lines.extend('export {key}="{value}${{{key}+:${key}}}"'
+                 .format(key=key, value=":".join(value))
+                 for key, value in prepend_path.items()
+                 if key != "DYLD_LIBRARY_PATH")
+
+  # Return string without a trailing newline, since we expect call sites to
+  # append that (and the obvious way to inesrt it into the build tempate is by
+  # putting the "%(initdotsh_*)s" on its own line, which has the same effect).
+  return "\n".join(lines)
+
+
+def create_provenance_info(package, specs, args):
+  """Return a metadata record for storage in the package's install directory."""
+
+  def spec_info(spec):
+    return {
+      "name": spec["package"],
+      "tag": spec.get("tag"),
+      "source": spec.get("source"),
+      "version": spec["version"],
+      "revision": spec["revision"],
+      "hash": spec["hash"],
+    }
+
+  def dependency_list(key):
+    return [spec_info(specs[dep]) for dep in specs[package].get(key, ())]
+
+  return json.dumps({
+    "comment": args.annotate.get(package),
+    "alibuild_version": __version__,
+    "alidist": {
+      "commit": os.environ["ALIBUILD_ALIDIST_HASH"],
+    },
+    "architecture": args.architecture,
+    "defaults": args.defaults,
+    "package": spec_info(specs[package]),
+    "dependencies": {
+      "direct": {
+        "build": dependency_list("build_requires"),
+        "runtime": dependency_list("runtime_requires"),
+      },
+      "recursive": {  # includes direct deps and deps' deps
+        "build": dependency_list("full_build_requires"),
+        "runtime": dependency_list("full_runtime_requires"),
+      },
+    },
+  })
+
+
 def doBuild(args, parser):
   if args.remoteStore.startswith("http"):
     syncHelper = HttpRemoteSync(args.remoteStore, args.architecture, args.workDir, args.insecure)
```
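create_provenance_info() serialises a JSON record whose keys are fixed by the code above. The sketch below shows the shape of such a record; every concrete value (package name, tag, hashes, defaults, architecture) is invented for illustration, only the keys follow the code.

```python
import json

# Illustrative shape of the record produced by create_provenance_info() above.
# All concrete values here are invented; only the keys follow the code.
example_record = {
    "comment": "nightly test build",          # --annotate comment, or None
    "alibuild_version": "1.16.0",
    "alidist": {"commit": "0123456789abcdef0123456789abcdef01234567"},
    "architecture": "slc9_x86-64",
    "defaults": "o2",
    "package": {"name": "zlib", "tag": "v1.2.13", "source": None,
                "version": "v1.2.13", "revision": "1", "hash": "f" * 40},
    "dependencies": {
        "direct": {"build": [], "runtime": []},
        "recursive": {"build": [], "runtime": []},  # direct deps plus their deps
    },
}
print(json.dumps(example_record, indent=2))
```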
```diff
@@ -399,7 +529,7 @@ def doBuild(args, parser):
   debug("Building for architecture %s", args.architecture)
   debug("Number of parallel builds: %d", args.jobs)
   debug("Using aliBuild from alibuild@%s recipes in alidist@%s",
-        __version__, os.environ["ALIBUILD_ALIDIST_HASH"])
+        __version__ or "unknown", os.environ["ALIBUILD_ALIDIST_HASH"])
 
   install_wrapper_script("git", workDir)
 
@@ -694,13 +824,26 @@ def doBuild(args, parser):
     # Decide how it should be called, based on the hash and what is already
     # available.
     debug("Checking for packages already built.")
-
-
-
-
-
-
-
+
+    # Make sure this regex broadly matches the regex below that parses the
+    # symlink's target. Overly-broadly matching the version, for example, can
+    # lead to false positives that trigger a warning below.
+    links_regex = re.compile(r"{package}-{version}-(?:local)?[0-9]+\.{arch}\.tar\.gz".format(
+      package=re.escape(spec["package"]),
+      version=re.escape(spec["version"]),
+      arch=re.escape(args.architecture),
+    ))
+    symlink_dir = join(workDir, "TARS", args.architecture, spec["package"])
+    try:
+      packages = [join(symlink_dir, symlink)
+                  for symlink in os.listdir(symlink_dir)
+                  if links_regex.fullmatch(symlink)]
+    except OSError as exc:
+      # If symlink_dir does not exist or cannot be accessed, return an empty
+      # list of packages.
+      packages = []
+    del links_regex, symlink_dir
+
     # In case there is no installed software, revision is 1
     # If there is already an installed package:
     # - Remove it if we do not know its hash
```
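The links_regex added above only accepts tarball symlinks whose name matches the package, version and architecture exactly, with either a plain or a "local" revision number. A quick standalone check, using invented names:

```python
import re

# The same pattern as links_regex above, filled in with invented values.
package, version, arch = "zlib", "v1.2.13", "slc9_x86-64"
links_regex = re.compile(r"{package}-{version}-(?:local)?[0-9]+\.{arch}\.tar\.gz".format(
    package=re.escape(package),
    version=re.escape(version),
    arch=re.escape(arch),
))

for name in ("zlib-v1.2.13-1.slc9_x86-64.tar.gz",        # remote revision 1: matches
             "zlib-v1.2.13-local5.slc9_x86-64.tar.gz",    # local revision 5: matches
             "zlib-v1.2.14-1.slc9_x86-64.tar.gz"):        # different version: no match
    print(name, bool(links_regex.fullmatch(name)))
```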
```diff
@@ -927,81 +1070,6 @@ def doBuild(args, parser):
     debug("Found tarball in %s" % spec["cachedTarball"]
           if spec["cachedTarball"] else "No cache tarballs found")
 
-    # Generate the part which sources the environment for all the dependencies.
-    # Notice that we guarantee that a dependency is always sourced before the
-    # parts depending on it, but we do not guaranteed anything for the order in
-    # which unrelated components are activated.
-    dependencies = "ALIBUILD_ARCH_PREFIX=\"${ALIBUILD_ARCH_PREFIX:-%s}\"\n" % args.architecture
-    dependenciesInit = "echo ALIBUILD_ARCH_PREFIX=\"\${ALIBUILD_ARCH_PREFIX:-%s}\" >> $INSTALLROOT/etc/profile.d/init.sh\n" % args.architecture
-    for dep in spec.get("requires", []):
-      depSpec = specs[dep]
-      depInfo = {
-        "architecture": args.architecture,
-        "package": dep,
-        "version": depSpec["version"],
-        "revision": depSpec["revision"],
-        "bigpackage": dep.upper().replace("-", "_")
-      }
-      dependencies += format("[ -z ${%(bigpackage)s_REVISION+x} ] && source \"$WORK_DIR/$ALIBUILD_ARCH_PREFIX/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh\"\n",
-                             **depInfo)
-      dependenciesInit += format('echo [ -z \${%(bigpackage)s_REVISION+x} ] \&\& source \${WORK_DIR}/\${ALIBUILD_ARCH_PREFIX}/%(package)s/%(version)s-%(revision)s/etc/profile.d/init.sh >> \"$INSTALLROOT/etc/profile.d/init.sh\"\n',
-                                 **depInfo)
-    dependenciesDict = {}
-    for dep in spec.get("full_requires", []):
-      depSpec = specs[dep]
-      depInfo = {
-        "architecture": args.architecture,
-        "package": dep,
-        "version": depSpec["version"],
-        "revision": depSpec["revision"],
-        "hash": depSpec["hash"]
-      }
-      dependenciesDict[dep] = depInfo
-    dependenciesJSON = str(dependenciesDict)
-
-    # Generate the part which creates the environment for the package.
-    # This can be either variable set via the "env" keyword in the metadata
-    # or paths which get appended via the "append_path" one.
-    # By default we append LD_LIBRARY_PATH, PATH
-    environment = ""
-    dieOnError(not isinstance(spec.get("env", {}), dict),
-               "Tag `env' in %s should be a dict." % p)
-    for key,value in spec.get("env", {}).items():
-      if key == "DYLD_LIBRARY_PATH":
-        continue
-      environment += format("echo 'export %(key)s=\"%(value)s\"' >> $INSTALLROOT/etc/profile.d/init.sh\n",
-                            key=key,
-                            value=value)
-    basePath = "%s_ROOT" % p.upper().replace("-", "_")
-
-    pathDict = spec.get("append_path", {})
-    dieOnError(not isinstance(pathDict, dict),
-               "Tag `append_path' in %s should be a dict." % p)
-    for pathName,pathVal in pathDict.items():
-      pathVal = isinstance(pathVal, list) and pathVal or [ pathVal ]
-      if pathName == "DYLD_LIBRARY_PATH":
-        continue
-      environment += format("\ncat << \EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=$%(key)s:%(value)s\nEOF",
-                            key=pathName,
-                            value=":".join(pathVal))
-
-    # Same thing, but prepending the results so that they win against system ones.
-    defaultPrependPaths = { "LD_LIBRARY_PATH": "$%s/lib" % basePath,
-                            "PATH": "$%s/bin" % basePath }
-    pathDict = spec.get("prepend_path", {})
-    dieOnError(not isinstance(pathDict, dict),
-               "Tag `prepend_path' in %s should be a dict." % p)
-    for pathName,pathVal in pathDict.items():
-      pathDict[pathName] = isinstance(pathVal, list) and pathVal or [ pathVal ]
-    for pathName,pathVal in defaultPrependPaths.items():
-      pathDict[pathName] = [ pathVal ] + pathDict.get(pathName, [])
-    for pathName,pathVal in pathDict.items():
-      if pathName == "DYLD_LIBRARY_PATH":
-        continue
-      environment += format("\ncat << \EOF >> \"$INSTALLROOT/etc/profile.d/init.sh\"\nexport %(key)s=%(value)s${%(key)s+:$%(key)s}\nEOF",
-                            key=pathName,
-                            value=":".join(pathVal))
-
     # The actual build script.
     referenceStatement = ""
     if "reference" in spec:
@@ -1034,33 +1102,29 @@ def doBuild(args, parser):
     else:
       cachedTarball = spec["cachedTarball"]
 
-
-    cmd = format(cmd_raw,
-                 dependencies=dependencies,
-                 dependenciesInit=dependenciesInit,
-                 dependenciesJSON=dependenciesJSON,
-                 develPrefix=develPrefix,
-                 environment=environment,
-                 workDir=workDir,
-                 configDir=abspath(args.configDir),
-                 incremental_recipe=spec.get("incremental_recipe", ":"),
-                 sourceDir=source and (dirname(source) + "/") or "",
-                 sourceName=source and basename(source) or "",
-                 referenceStatement=referenceStatement,
-                 gitOptionsStatement="" if args.docker else
-                 "export GIT_CLONE_SPEEDUP=" + quote(" ".join(clone_speedup_options())),
-                 requires=" ".join(spec["requires"]),
-                 build_requires=" ".join(spec["build_requires"]),
-                 runtime_requires=" ".join(spec["runtime_requires"])
-                 )
-
     scriptDir = join(workDir, "SPECS", args.architecture, spec["package"],
                      spec["version"] + "-" + spec["revision"])
 
     err, out = getstatusoutput("mkdir -p %s" % scriptDir)
     dieOnError(err, "Failed to create script dir %s: %s" % (scriptDir, out))
-    writeAll("%s/build.sh" % scriptDir, cmd)
     writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])
+    writeAll("%s/build.sh" % scriptDir, cmd_raw % {
+      "provenance": create_provenance_info(spec["package"], specs, args),
+      "initdotsh_deps": generate_initdotsh(p, specs, args.architecture, post_build=False),
+      "initdotsh_full": generate_initdotsh(p, specs, args.architecture, post_build=True),
+      "develPrefix": develPrefix,
+      "workDir": workDir,
+      "configDir": abspath(args.configDir),
+      "incremental_recipe": spec.get("incremental_recipe", ":"),
+      "sourceDir": (dirname(source) + "/") if source else "",
+      "sourceName": basename(source) if source else "",
+      "referenceStatement": referenceStatement,
+      "gitOptionsStatement": "" if args.docker else
+      "export GIT_CLONE_SPEEDUP=" + quote(" ".join(clone_speedup_options())),
+      "requires": " ".join(spec["requires"]),
+      "build_requires": " ".join(spec["build_requires"]),
+      "runtime_requires": " ".join(spec["runtime_requires"]),
+    })
 
     banner("Building %s@%s", spec["package"],
            args.develPrefix if "develPrefix" in args and spec["package"] in develPkgs
@@ -1103,7 +1167,7 @@ def doBuild(args, parser):
     # will perform the actual build. Otherwise build as usual using bash.
     if args.docker:
       build_command = (
-        "docker run --rm --
+        "docker run --rm --entrypoint= --user $(id -u):$(id -g) "
         "-v {workdir}:/sw -v {scriptDir}/build.sh:/build.sh:ro "
         "-e GIT_REFERENCE_OVERRIDE=/mirror -e WORK_DIR_OVERRIDE=/sw "
         "{mirrorVolume} {develVolumes} {additionalEnv} {additionalVolumes} "
@@ -1116,6 +1180,7 @@ def doBuild(args, parser):
         overrideSource="-e SOURCE0_DIR_OVERRIDE=/" if source.startswith("/") else "",
         additionalEnv=" ".join(
          "-e {}={}".format(var, quote(value)) for var, value in buildEnvironment),
+        # Used e.g. by O2DPG-sim-tests to find the O2DPG repository.
        develVolumes=" ".join(
          '-v "$PWD/$(readlink {pkg} || echo {pkg})":/{pkg}:rw'.format(pkg=quote(pkg))
          for pkg in develPkgs),
```