alibuild 1.17.19__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibuild-1.17.19.data/scripts/aliBuild +137 -0
- alibuild-1.17.19.data/scripts/aliDeps +7 -0
- alibuild-1.17.19.data/scripts/aliDoctor +7 -0
- alibuild-1.17.19.data/scripts/alienv +344 -0
- alibuild-1.17.19.data/scripts/pb +7 -0
- alibuild-1.17.19.dist-info/METADATA +78 -0
- alibuild-1.17.19.dist-info/RECORD +74 -0
- alibuild-1.17.19.dist-info/WHEEL +5 -0
- alibuild-1.17.19.dist-info/licenses/LICENSE.md +674 -0
- alibuild-1.17.19.dist-info/top_level.txt +5 -0
- alibuild_helpers/__init__.py +21 -0
- alibuild_helpers/_version.py +21 -0
- alibuild_helpers/analytics.py +120 -0
- alibuild_helpers/args.py +493 -0
- alibuild_helpers/build.py +1209 -0
- alibuild_helpers/build_template.sh +314 -0
- alibuild_helpers/clean.py +83 -0
- alibuild_helpers/cmd.py +154 -0
- alibuild_helpers/deps.py +116 -0
- alibuild_helpers/doctor.py +195 -0
- alibuild_helpers/git.py +104 -0
- alibuild_helpers/init.py +103 -0
- alibuild_helpers/log.py +132 -0
- alibuild_helpers/scm.py +31 -0
- alibuild_helpers/sl.py +62 -0
- alibuild_helpers/sync.py +693 -0
- alibuild_helpers/templating_plugin.py +18 -0
- alibuild_helpers/utilities.py +662 -0
- alibuild_helpers/workarea.py +179 -0
- debian/changelog +11 -0
- debian/compat +1 -0
- debian/control +14 -0
- debian/copyright +10 -0
- debian/files +1 -0
- debian/rules +7 -0
- docs/README.md +1 -0
- docs/SUPPORT +3 -0
- docs/docs/alice_logo.png +0 -0
- docs/docs/deps.png +0 -0
- docs/docs/index.md +75 -0
- docs/docs/quick.md +89 -0
- docs/docs/reference.md +430 -0
- docs/docs/stylesheets/extra.css +9 -0
- docs/docs/troubleshooting.md +346 -0
- docs/docs/user.md +413 -0
- docs/mkdocs.yml +37 -0
- templates/alibuild_to_please.jnj +63 -0
- tests/test_analytics.py +42 -0
- tests/test_args.py +119 -0
- tests/test_build.py +426 -0
- tests/test_clean.py +154 -0
- tests/test_cmd.py +73 -0
- tests/test_deps.py +79 -0
- tests/test_doctor.py +128 -0
- tests/test_git.py +48 -0
- tests/test_hashing.py +67 -0
- tests/test_init.py +103 -0
- tests/test_log.py +50 -0
- tests/test_packagelist.py +235 -0
- tests/test_parseRecipe.py +132 -0
- tests/test_sync.py +332 -0
- tests/test_utilities.py +383 -0
- tests/test_workarea.py +101 -0
- tests/testdist/broken1.sh +1 -0
- tests/testdist/broken2.sh +1 -0
- tests/testdist/broken3.sh +3 -0
- tests/testdist/broken4.sh +2 -0
- tests/testdist/broken5.sh +2 -0
- tests/testdist/broken6.sh +2 -0
- tests/testdist/broken7.sh +5 -0
- tests/testdist/clobber-initdotsh.sh +4 -0
- tests/testdist/defaults-o2.sh +10 -0
- tests/testdist/delete-etc.sh +4 -0
- tests/testdist/tracking-env.sh +6 -0
docs/docs/user.md
ADDED
|
@@ -0,0 +1,413 @@
|
|
|
1
|
+
---
|
|
2
|
+
subtitle: User command line reference manual
|
|
3
|
+
layout: main
|
|
4
|
+
---
|
|
5
|
+
|
|
6
|
+
## SYNOPSIS
|
|
7
|
+
|
|
8
|
+
For a quick start introduction, please look [here](quick.md).
|
|
9
|
+
|
|
10
|
+
```
|
|
11
|
+
aliBuild build [-h] [--defaults DEFAULT]
|
|
12
|
+
[-a ARCH] [--force-unknown-architecture]
|
|
13
|
+
[-z [DEVELPREFIX]] [-e ENVIRONMENT] [-j JOBS] [-u]
|
|
14
|
+
[--no-local PKGLIST] [--disable PACKAGE]
|
|
15
|
+
[--always-prefer-system | --no-system]
|
|
16
|
+
[--docker] [--docker-image IMAGE] [--docker-extra-args ARGLIST] [-v VOLUMES]
|
|
17
|
+
[--no-remote-store] [--remote-store STORE] [--write-store STORE] [--insecure]
|
|
18
|
+
[-C DIR] [-w WORKDIR] [-c CONFIGDIR] [--reference-sources MIRRORDIR]
|
|
19
|
+
[--aggressive-cleanup] [--no-auto-cleanup]
|
|
20
|
+
PACKAGE [PACKAGE ...]
|
|
21
|
+
```
|
|
22
|
+
|
|
23
|
+
- `PACKAGE`: One of the packages in `CONFIGDIR`. May be specified multiple
|
|
24
|
+
times.
|
|
25
|
+
- `-h`, `--help`: show this help message and exit
|
|
26
|
+
- `--defaults DEFAULT`: Use defaults from `CONFIGDIR/defaults-DEFAULT.sh`.
|
|
27
|
+
- `-a ARCH`, `--architecture ARCH`: Build as if on the specified architecture.
|
|
28
|
+
When used with `--docker`, build inside a Docker image for the specified
|
|
29
|
+
architecture. Default is the current system architecture.
|
|
30
|
+
- `--force-unknown-architecture`: Build on this system, even if it doesn't have
|
|
31
|
+
a supported architecture.
|
|
32
|
+
- `-z [DEVELPREFIX]`, `--devel-prefix [DEVELPREFIX]`: Version name to use for
|
|
33
|
+
development packages. Defaults to branch name.
|
|
34
|
+
- `-e ENVIRONMENT`: KEY=VALUE binding to add to the build environment. May be
|
|
35
|
+
specified multiple times.
|
|
36
|
+
- `-j JOBS`, `--jobs JOBS`: The number of parallel compilation processes to run.
|
|
37
|
+
- `-u`, `--fetch-repos`: Fetch updates to repositories in `MIRRORDIR`. Required
|
|
38
|
+
but nonexistent repositories are always cloned, even if this option is not
|
|
39
|
+
given.
|
|
40
|
+
- `--no-local PKGLIST`: Do not pick up the following packages from a local
|
|
41
|
+
checkout. `PKGLIST` is a comma-separated list.
|
|
42
|
+
- `--disable PACKAGE`: Do not build `PACKAGE` and all its (unique) dependencies.
|
|
43
|
+
- `--always-prefer-system`: Always use system packages when compatible.
|
|
44
|
+
- `--no-system`: Never use system packages, even if compatible.
|
|
45
|
+
|
|
46
|
+
### Building inside a container
|
|
47
|
+
|
|
48
|
+
Builds can be done inside a Docker container, to make it easier to get a common,
|
|
49
|
+
usable environment. The Docker daemon must be installed and running on your
|
|
50
|
+
system. By default, images from `alisw/<platform>-builder:latest` will be used,
|
|
51
|
+
e.g. `alisw/slc8-builder:latest`. They will be fetched if unavailable.
|
|
52
|
+
|
|
53
|
+
- `--docker`: Build inside a Docker container.
|
|
54
|
+
- `--docker-image IMAGE`: The Docker image to build inside of. Implies
|
|
55
|
+
`--docker`. By default, an image is chosen based on the architecture.
|
|
56
|
+
- `--docker-extra-args ARGLIST`: Command-line arguments to pass to `docker run`.
|
|
57
|
+
Passed through verbatim -- separate multiple arguments with spaces, and make
|
|
58
|
+
sure quoting is correct! Implies `--docker`.
|
|
59
|
+
- `-v VOLUMES`: Additional volume to be mounted inside the Docker container, if
|
|
60
|
+
one is used. May be specified multiple times. Passed verbatim to `docker run`.
|
|
61
|
+
|
|
62
|
+
### Re-using prebuilt tarballs
|
|
63
|
+
|
|
64
|
+
Reusing prebuilt tarballs saves compilation time, as common packages need not be
|
|
65
|
+
rebuilt from scratch. `rsync://`, `https://`, `b3://` and `s3://` remote stores
|
|
66
|
+
are recognised. Some of these require credentials: `s3://` remotes require an
|
|
67
|
+
`~/.s3cfg`; `b3://` remotes require `AWS_ACCESS_KEY_ID` and
|
|
68
|
+
`AWS_SECRET_ACCESS_KEY` environment variables. A useful remote store is
|
|
69
|
+
`https://s3.cern.ch/swift/v1/alibuild-repo`. It requires no credentials and
|
|
70
|
+
provides tarballs for the most common supported architectures.
|
|
71
|
+
|
|
72
|
+
- `--no-remote-store`: Disable the use of the remote store, even if it is
|
|
73
|
+
enabled by default.
|
|
74
|
+
- `--remote-store STORE`: Where to find prebuilt tarballs to reuse. See above
|
|
75
|
+
for available remote stores. End with `::rw` if you want to upload (in that
|
|
76
|
+
case, `::rw` is stripped and `--write-store` is set to the same value).
|
|
77
|
+
Implies `--no-system`. May be set to a default store on some architectures;
|
|
78
|
+
use `--no-remote-store` to disable it in that case.
|
|
79
|
+
- `--write-store STORE`: Where to upload newly built packages. Same syntax as
|
|
80
|
+
`--remote-store`, except `::rw` is not recognised. Implies `--no-system`.
|
|
81
|
+
- `--insecure`: Don't validate TLS certificates when connecting to an `https://`
|
|
82
|
+
remote store.
|
|
83
|
+
|
|
84
|
+
### Customise aliBuild directories
|
|
85
|
+
|
|
86
|
+
- `-C DIR`, `--chdir DIR`: Change to the specified directory before building.
|
|
87
|
+
Alternatively, set `ALIBUILD_CHDIR`. Default `.`.
|
|
88
|
+
- `-w WORKDIR`, `--work-dir WORKDIR` The toplevel directory under which builds
|
|
89
|
+
should be done and build results should be installed. Default `sw`.
|
|
90
|
+
- `-c CONFIGDIR`, `--config-dir CONFIGDIR`: The directory containing build
|
|
91
|
+
recipes. Default `alidist`.
|
|
92
|
+
- `--reference-sources MIRRORDIR`: The directory where reference git
|
|
93
|
+
repositories will be cloned. `%(workDir)s` will be substituted by `WORKDIR`.
|
|
94
|
+
Default `%(workDir)s/MIRROR`.
|
|
95
|
+
|
|
96
|
+
### Cleaning up after building
|
|
97
|
+
|
|
98
|
+
- `--aggressive-cleanup`: Delete as much build data as possible when cleaning
|
|
99
|
+
up.
|
|
100
|
+
- `--no-auto-cleanup`: Do not clean up build directories automatically after a
|
|
101
|
+
build.
|
|
102
|
+
|
|
103
|
+
## Using precompiled packages
|
|
104
|
+
|
|
105
|
+
By running aliBuild with no special option on CentOS/Alma 7, 8 or 9, or on
|
|
106
|
+
Ubuntu 20.04, 22.04 or 24.04, it will automatically try to
|
|
107
|
+
use as many precompiled packages as possible by downloading them from a default
|
|
108
|
+
central server. By using precompiled packages you lose the ability to pick some
|
|
109
|
+
of them from your system. If you do not want to use precompiled packages and you
|
|
110
|
+
want to pick as many packages as possible from your system, you should manually
|
|
111
|
+
specify the `--always-prefer-system` option.
|
|
112
|
+
|
|
113
|
+
It is possible to benefit from precompiled builds on every platform, provided
|
|
114
|
+
that the server caching the builds is maintained by yourself. Since every build
|
|
115
|
+
is stored as a tarball with a unique hash, it is sufficient to provide for a
|
|
116
|
+
server or shared space where cached builds will be stored and made available to
|
|
117
|
+
others.
|
|
118
|
+
|
|
119
|
+
In order to specify the cache store, use the option `--remote-store <uri>`,
|
|
120
|
+
where `<uri>` can be:
|
|
121
|
+
|
|
122
|
+
* a local path, for instance `/opt/alibuild_cache`,
|
|
123
|
+
* a remote SSH accessible path, `ssh://<host>:<path>`,
|
|
124
|
+
* an unencrypted rsync path, `rsync://<host>/path`,
|
|
125
|
+
* a CERN S3 bucket, `b3://<bucket>`,
|
|
126
|
+
* a HTTP(s) server, `http://<host>/<path>`.
|
|
127
|
+
|
|
128
|
+
The first four options can also be writable (if you have proper permissions):
|
|
129
|
+
if you specify `::rw` at the end of the URL, your builds will be cached there.
|
|
130
|
+
This is normally what sysadmins do to precache builds: other users can simply
|
|
131
|
+
use the same URL in read-only mode (no `::rw` specified) to fetch the builds.
|
|
132
|
+
|
|
133
|
+
You need to make sure you have proper filesystem/SSH/rsync permissions of
|
|
134
|
+
course.
|
|
135
|
+
|
|
136
|
+
It is also possible to specify a write store different from the read one by
|
|
137
|
+
using the `--write-store` option.
|
|
138
|
+
|
|
139
|
+
aliBuild can reuse precompiled packages if they were built with a different tag,
|
|
140
|
+
if that tag points to the same actual commit that you're building now. (This is
|
|
141
|
+
used for the nightly tags, as they are built from a branch named
|
|
142
|
+
`rc/nightly-YYYYMMDD`, while alidist is updated to have a tag like
|
|
143
|
+
`nightly-YYYYMMDD` instead, pointing to the same commit.) However, this reuse
|
|
144
|
+
only works if the precompiled package has the same version as specified in your
|
|
145
|
+
copy of alidist.
|
|
146
|
+
|
|
147
|
+
This approach assumes that tags don't move (i.e. don't change which commit they
|
|
148
|
+
are tagging) in the repositories being built. If you administer a cache store,
|
|
149
|
+
make sure to delete cached tarballs built using that tag if a tag is moved!
|
|
150
|
+
|
|
151
|
+
## Developing packages locally
|
|
152
|
+
|
|
153
|
+
One of the use cases we want to cover is the ability to develop external
|
|
154
|
+
packages without having to go through a commit - push - pull cycle.
|
|
155
|
+
|
|
156
|
+
In order to do so, you can simply checkout the package you want to
|
|
157
|
+
develop at the same level as alibuild and alidist.
|
|
158
|
+
|
|
159
|
+
For example, if you want to build O2 while having the ability to modify
|
|
160
|
+
ROOT, you can do the following:
|
|
161
|
+
|
|
162
|
+
git clone https://github.com/alisw/alidist
|
|
163
|
+
git clone https://github.com/root-mirror/root ROOT
|
|
164
|
+
<modify files in ROOT/>
|
|
165
|
+
aliBuild ... build O2
|
|
166
|
+
|
|
167
|
+
The above will make sure the build will pick up your changes in the local
|
|
168
|
+
directory.
|
|
169
|
+
|
|
170
|
+
As a cherry on the cake, in case your recipe does not require any environment,
|
|
171
|
+
you can even do:
|
|
172
|
+
|
|
173
|
+
cd sw/BUILD/ROOT/latest
|
|
174
|
+
make install
|
|
175
|
+
|
|
176
|
+
and it will correctly install everything in `sw/<arch>/ROOT/latest`.
|
|
177
|
+
This of course means that for each development package you might end up
|
|
178
|
+
with one or more build directories which might increase the used disk
|
|
179
|
+
space.
|
|
180
|
+
|
|
181
|
+
It's also important to notice that if you use your own checkout of a
|
|
182
|
+
package, you will not be able to write to any store and the generated
|
|
183
|
+
tgz will be empty.
|
|
184
|
+
|
|
185
|
+
If you wish to temporarily compile with the package as specified by
|
|
186
|
+
alidist, you can use the `--no-local <PACKAGE>` option.
|
|
187
|
+
|
|
188
|
+
### Incremental builds
|
|
189
|
+
|
|
190
|
+
When developing locally using the development mode, if the external
|
|
191
|
+
is well behaved and supports incremental building, it is possible to
|
|
192
|
+
specify an `incremental_recipe` in the YAML preamble. Such a recipe will
|
|
193
|
+
be used after the second time the build happens (to ensure that the non
|
|
194
|
+
incremental parts of the build are done) and will be executed directly
|
|
195
|
+
in $BUILDDIR, recompiling only what changed. Notice that if this is the
|
|
196
|
+
case the incremental recipe will always be executed.
|
|
197
|
+
|
|
198
|
+
### Forcing a different architecture
|
|
199
|
+
|
|
200
|
+
While alibuild does its best to find out which OS / distribution you are
|
|
201
|
+
using, sometimes it might fail to do so, for example in the case you
|
|
202
|
+
start using a new *buntu flavour or a bleeding edge version of Centos.
|
|
203
|
+
In order to force the correct architecture for the build you can use
|
|
204
|
+
the `--architecture` (`-a`) flag with one of the supported options:
|
|
205
|
+
|
|
206
|
+
- `slc5_x86-64`: Scientific Linux 5 and compatibles, on Intel / AMD x86-64.
|
|
207
|
+
- `slc6_x86-64`: Scientific Linux 6 and compatibles, on Intel / AMD x86-64.
|
|
208
|
+
- `slc7_x86-64`: CERN Centos 7 and compatibles, on Intel / AMD x86-64.
|
|
209
|
+
- `ubuntu1404_x86-64`: Ubuntu 1404 and compatibles, on Intel / AMD x86-64.
|
|
210
|
+
- `osx_x86-64`: OSX, on Intel / AMD x86-64.
|
|
211
|
+
- `slc7_ppc64`: RHEL7 on POWER8 (LE only for now).
|
|
212
|
+
|
|
213
|
+
### Running in Docker
|
|
214
|
+
|
|
215
|
+
Very often one needs to run on a platform which is different from
|
|
216
|
+
the one being used for development. The common use case is that
|
|
217
|
+
development happens on a Mac while production runs on some older Linux
|
|
218
|
+
distribution like SLC5 or SLC6. In order to improve the experience
|
|
219
|
+
of cross platform development aliBuild now offers the ability to run
|
|
220
|
+
in [Docker](https://docker.io) via the `--docker` option. When it is
|
|
221
|
+
specified the first part of the architecture will be used to construct
|
|
222
|
+
the name of the docker container to be used for the build and the build
|
|
223
|
+
itself will be performed inside that container. For example if you
|
|
224
|
+
specify:
|
|
225
|
+
|
|
226
|
+
```bash
|
|
227
|
+
alibuild --docker -a slc7_x86-64 build ROOT
|
|
228
|
+
```
|
|
229
|
+
|
|
230
|
+
the build itself will happen inside the alisw/slc7-builder Docker
|
|
231
|
+
container. Environment variables can be passed to docker by specifying
|
|
232
|
+
them with the `-e` option. Extra volumes can be specified with the -v
|
|
233
|
+
option using the same syntax used by Docker.
|
|
234
|
+
|
|
235
|
+
## Defaults
|
|
236
|
+
|
|
237
|
+
By default `aliBuild` is tuned to build the production version of ALICE
|
|
238
|
+
Offline software, as deployed on the Grid, so some of the choices in
|
|
239
|
+
terms of version of the packages and compilation flags are tweaked for
|
|
240
|
+
that. For example, ROOT5 is used because that's what has been
|
|
241
|
+
validated for datataking and the choice will not change until the end of
|
|
242
|
+
RUN2 of LHC. In order to change that and use, for example, a more recent
|
|
243
|
+
version of ROOT you can use the `--defaults root6` option which will
|
|
244
|
+
enable ROOT6 based builds. For a more complete description of how defaults
|
|
245
|
+
works please look at [the reference manual](reference.md#defaults).
|
|
246
|
+
|
|
247
|
+
## Disabling packages
|
|
248
|
+
|
|
249
|
+
You can optionally disable certain packages by specifying them as a comma
|
|
250
|
+
separated list with the `--disable` option.
|
|
251
|
+
|
|
252
|
+
Moreover, starting from aliBuild 1.4.0, it will also be
|
|
253
|
+
possible to disable packages by adding them to the `disable`
|
|
254
|
+
keyword of your defaults file (see previous paragraph). See the
|
|
255
|
+
[defaults-o2.sh](https://github.com/alisw/alidist/blob/master/defaults-o2.sh)
|
|
256
|
+
file for an example of how to disable `AliEn-Runtime` and `AliRoot`
|
|
257
|
+
when passing `--defaults o2`.
|
|
258
|
+
|
|
259
|
+
## Controlling which system packages are picked up
|
|
260
|
+
|
|
261
|
+
When compiling, there is a number of packages which can be picked up
|
|
262
|
+
from the system, and only if they are not found, do not have their
|
|
263
|
+
devel part installed, or they are not considered good enough they are
|
|
264
|
+
recompiled from scratch. A typical example is things like autotools,
|
|
265
|
+
zlib or cmake which should be available on a standard developer machine
|
|
266
|
+
and we rebuild them as last resort. In certain cases, to ensure full
|
|
267
|
+
compatibility on what is done in production it might be desirable to
|
|
268
|
+
always pick up our own version of the tools. This can be done by passing
|
|
269
|
+
the `--no-system` option to alibuild. On the other hand, there might
|
|
270
|
+
be cases in which you want to pick up not only basic tools, but also
|
|
271
|
+
advanced ones like ROOT, Geant4, or Pythia from the system, either to
|
|
272
|
+
save time or because you have a pre-existing setup which you do not want
|
|
273
|
+
to touch. In this case you can use `--always-prefer-system` option which
|
|
274
|
+
will try very hard to reuse as many system packages as possible (always
|
|
275
|
+
checking they are actually compatible with the one used in the recipe).
|
|
276
|
+
|
|
277
|
+
## Cleaning up the build area (new in 1.1.0)
|
|
278
|
+
|
|
279
|
+
Whenever you build using a different recipe or set of sources, alibuild
|
|
280
|
+
makes sure that all the dependent packages which might be affected
|
|
281
|
+
by the change are rebuilt, and it does so in a different directory.
|
|
282
|
+
This can lead to the proliferation of many build / installation
|
|
283
|
+
directories, in particular while developing a recipe for a new package
|
|
284
|
+
(e.g. a new generator).
|
|
285
|
+
|
|
286
|
+
In order to remove all past builds and only keep the latest one for each
|
|
287
|
+
alidist area you might have used and for each branch (but not commit)
|
|
288
|
+
ever built for a given development package you can use the
|
|
289
|
+
|
|
290
|
+
aliBuild clean
|
|
291
|
+
|
|
292
|
+
subcommand which will do its best to clean up your build and
|
|
293
|
+
installation area.
|
|
294
|
+
|
|
295
|
+
## Upgrading aliBuild
|
|
296
|
+
|
|
297
|
+
aliBuild is installed via `pip`. In order to upgrade it on most laptops (in
|
|
298
|
+
particular Macs) do:
|
|
299
|
+
|
|
300
|
+
pip install --upgrade alibuild
|
|
301
|
+
|
|
302
|
+
or in case you need to be root (_e.g._ on Ubuntu and most Linux distributions
|
|
303
|
+
for convenience):
|
|
304
|
+
|
|
305
|
+
sudo pip install --upgrade alibuild
|
|
306
|
+
|
|
307
|
+
In general updating aliBuild is safe and it should never trigger a rebuild or
|
|
308
|
+
break compilation of older versions of alidist (i.e. we do try to guarantee
|
|
309
|
+
backward compatibility). In no case an update of aliBuild will result in the
|
|
310
|
+
update of `alidist`, which users will have to do separately.
|
|
311
|
+
In case some yet-to-appear bug in aliBuild forces us to rebuild a
|
|
312
|
+
previously built area, this will be widely publicized and users will get a warning
|
|
313
|
+
when running the command.
|
|
314
|
+
|
|
315
|
+
You can also upgrade / install a specific version of alibuild by specifying it on the
|
|
316
|
+
command line. E.g.:
|
|
317
|
+
|
|
318
|
+
pip install alibuild==1.5.5rc1
|
|
319
|
+
|
|
320
|
+
this is in particular required when you want to try out release candidates (rc) builds which
|
|
321
|
+
are masked out by default.
|
|
322
|
+
|
|
323
|
+
## Rebuilding packages from branches instead of tags
|
|
324
|
+
|
|
325
|
+
Generally, recipes specify a Git _tag_ name in the `tag:` field. In some cases,
|
|
326
|
+
_branch names_ might be used instead (such as `tag: master` or `tag: dev`). In
|
|
327
|
+
such a rare case, aliBuild needs to know what is the last branch commit to
|
|
328
|
+
determine whether a rebuild is necessary.
|
|
329
|
+
|
|
330
|
+
Such check by default uses cached information instead of doing very slow queries
|
|
331
|
+
to remote servers. This means that aliBuild is fast in determining which
|
|
332
|
+
packages to build. However, packages using branch names might not get rebuilt as
|
|
333
|
+
expected when new changes are pushed to those branches.
|
|
334
|
+
|
|
335
|
+
In this case, you can ask aliBuild to update cached branches information by
|
|
336
|
+
adding the `-u` or `--fetch-repos` option. Note that by default this is not
|
|
337
|
+
needed, it's only for very special use cases (such as centralized builds and
|
|
338
|
+
server-side pull request checks).
|
|
339
|
+
|
|
340
|
+
## Generating a dependency graph
|
|
341
|
+
|
|
342
|
+
It is possible to generate a PDF with a dependency graph using the `aliDeps`
|
|
343
|
+
tool. Assuming you run it from a directory containing `alidist`, and you have
|
|
344
|
+
Graphviz installed on your system, you can simply run:
|
|
345
|
+
|
|
346
|
+
aliDeps O2 --outgraph graph.pdf
|
|
347
|
+
|
|
348
|
+
The example above generates a dependency graph for the package `O2`, saving
|
|
349
|
+
the results to a PDF file named `graph.pdf`. This is what the graph looks like:
|
|
350
|
+
|
|
351
|
+

|
|
352
|
+
|
|
353
|
+
Packages in green are runtime dependencies, purple are build dependencies, while
|
|
354
|
+
red packages are runtime dependencies in some cases, and build dependencies in
|
|
355
|
+
others (this can indicate an error in the recipes).
|
|
356
|
+
|
|
357
|
+
Connections are color-coded as well: blue connections indicate a runtime
|
|
358
|
+
dependency whereas a grey connection indicates a build dependency.
|
|
359
|
+
|
|
360
|
+
By default, `aliDeps` runs the usual system checks to exclude packages that can
|
|
361
|
+
be taken from the system. If you want to display the full list of dependencies,
|
|
362
|
+
you may want to use:
|
|
363
|
+
|
|
364
|
+
aliDeps O2 --no-system --outgraph graph.pdf
|
|
365
|
+
|
|
366
|
+
Please run `aliDeps --help` for further information.
|
|
367
|
+
|
|
368
|
+
## Using the packages you have built
|
|
369
|
+
|
|
370
|
+
### Loading the package environment
|
|
371
|
+
|
|
372
|
+
Environment for packages built using aliBuild is managed by
|
|
373
|
+
[Environment Modules](http://modules.sourceforge.net) and the wrapper script
|
|
374
|
+
`alienv`. To list the available packages you can do:
|
|
375
|
+
|
|
376
|
+
alienv q
|
|
377
|
+
|
|
378
|
+
while:
|
|
379
|
+
|
|
380
|
+
alienv enter VO_ALICE@PackageA::VersionA[,VO_ALICE@PackageB::VersionB...]
|
|
381
|
+
|
|
382
|
+
will enter a shell with the appropriate environment set. Note that loading a
|
|
383
|
+
toplevel package recursively sets the environment for all its dependencies.
|
|
384
|
+
|
|
385
|
+
You can also execute a command with the proper environment without altering the
|
|
386
|
+
current one. For instance:
|
|
387
|
+
|
|
388
|
+
alienv setenv VO_ALICE@AliRoot::latest -c aliroot -b
|
|
389
|
+
|
|
390
|
+
To see other commands consult the online manual:
|
|
391
|
+
|
|
392
|
+
alienv help
|
|
393
|
+
|
|
394
|
+
Environment Modules is required: the package is usually called
|
|
395
|
+
`environment-modules` on Linux, or simply `modules` if using Homebrew on OSX.
|
|
396
|
+
|
|
397
|
+
Note that `alienv` works exactly like the one found on CVMFS, but for local
|
|
398
|
+
packages built with `aliBuild`.
|
|
399
|
+
|
|
400
|
+
### Environment for packages lacking a module definition
|
|
401
|
+
|
|
402
|
+
Some packages do not have a modulefile: this usually occurs for those which are
|
|
403
|
+
not distributed on the Grid. If you think this is wrong feel free to submit a
|
|
404
|
+
[pull request](https://github.com/alisw/alidist/pulls) or
|
|
405
|
+
[open an issue](https://github.com/alisw/alidist/issues) to the relevant
|
|
406
|
+
packages.
|
|
407
|
+
|
|
408
|
+
It is still possible to load the environment by sourcing the `init.sh` file
|
|
409
|
+
produced for each package under the `etc/profile.d` subdirectory. For instance:
|
|
410
|
+
|
|
411
|
+
WORK_DIR=$PWD/sw source sw/slc7_x86-64/AliRoot/v5-08-02-1/etc/profile.d/init.sh
|
|
412
|
+
|
|
413
|
+
Dependencies are automatically loaded.
|
docs/mkdocs.yml
ADDED
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
site_name: "aliBuild: ALICE software builder"
|
|
2
|
+
repo_name: View source code
|
|
3
|
+
repo_url: https://github.com/alisw/alibuild
|
|
4
|
+
|
|
5
|
+
theme:
|
|
6
|
+
name: material
|
|
7
|
+
logo: alice_logo.png
|
|
8
|
+
|
|
9
|
+
extra_css: [stylesheets/extra.css]
|
|
10
|
+
|
|
11
|
+
use_directory_urls: false
|
|
12
|
+
|
|
13
|
+
nav:
|
|
14
|
+
- Home: index.md
|
|
15
|
+
- Quickstart: quick.md
|
|
16
|
+
- User manual: user.md
|
|
17
|
+
- Reference: reference.md
|
|
18
|
+
- Troubleshooting: troubleshooting.md
|
|
19
|
+
- ALICE/O2 tutorial: "https://alice-doc.github.io/alice-analysis-tutorial/building/"
|
|
20
|
+
|
|
21
|
+
markdown_extensions:
|
|
22
|
+
- pymdownx.highlight:
|
|
23
|
+
anchor_linenums: true
|
|
24
|
+
line_spans: __span
|
|
25
|
+
pygments_lang_class: true
|
|
26
|
+
- pymdownx.inlinehilite
|
|
27
|
+
- pymdownx.snippets
|
|
28
|
+
- pymdownx.superfences
|
|
29
|
+
- toc:
|
|
30
|
+
permalink: "#"
|
|
31
|
+
|
|
32
|
+
plugins:
|
|
33
|
+
- search
|
|
34
|
+
- redirects:
|
|
35
|
+
redirect_maps:
|
|
36
|
+
'o2-tutorial.html': 'https://alice-doc.github.io/alice-analysis-tutorial/building/'
|
|
37
|
+
'tutorial.html': 'https://alice-doc.github.io/alice-analysis-tutorial/building/'
|
|
@@ -0,0 +1,63 @@
|
|
|
1
|
+
# A alibuild template to convert the alidist recipes to
# the please build system. While not fully functional, it serves
# as explanatory example on how one can use templates to
# extract information from alidist, without having to instrument
# alibuild itself.
#
# Use with:
#
# cat alibuild_to_please.jnj | python3 alibuild/aliBuild build lz4 --defaults o2 --debug --no-system --plugin templating --no-system --no-local alibuild-recipe-tools > opendata/BUILD.plz
#
{% for name, spec in specs.items() %}

{# Packages with an upstream source get a download rule for the tagged tarball. #}
{% if spec.source is defined %}
# {{ spec.source is defined }}
new_http_archive(
  name = "download_{{name}}",
  urls = ["{{spec.source}}/archive/refs/tags/{{spec.tag}}.tar.gz"],
)
{% endif %}

genrule(
  name = "build_{{name}}",
  output_dirs = ["_out"],
  outs = [
  {% if spec.tools is defined %}
  {% for tool in spec.tools%}
    "_out/{{tool}}",
  {% endfor %}
  {% endif %}
  {# Fixed: this loop iterates spec.libs, so it must be guarded by
     `spec.libs is defined` (was `spec.tools is defined`, a copy-paste
     slip from the block above, which both skipped libs for tool-less
     recipes and broke rendering when libs was undefined). #}
  {% if spec.libs is defined %}
  {% for lib in spec.libs%}
    "_out/{{lib}}",
  {% endfor %}
  {% endif %}
  ],
  {% if spec.source is defined %}
  srcs = [":download_{{name}}"],
  {% endif %}
  deps = [
  {% for build_require_name in spec.build_requires %}
    ":build_{{build_require_name}}",{% endfor %}
  ],
  cmd = """
PKGNAME="{{name}}"
PKGVERSION="{{spec.version}}"
PKGREVISION="{{spec.revision}}"
PKGHASH="{{spec.package_hash}}"
{% if spec.source is defined %}
SOURCEDIR="$PWD/$SRCS/*"
{% endif %}
INSTALLROOT="$PWD/_out"
BUILDDIR="$PWD/_build"
JOBS="10"
mkdir -p $BUILDDIR
pushd $BUILDDIR
set -e
{{ spec.recipe | replace('\$', '\\\\\\\\$')}}
mkdir -p $INSTALLROOT
touch $INSTALLROOT/.please-did-build
popd
""",
)
{% endfor %}
|
tests/test_analytics.py
ADDED
|
@@ -0,0 +1,42 @@
|
|
|
1
|
+
import unittest
|
|
2
|
+
from alibuild_helpers.analytics import decideAnalytics
|
|
3
|
+
|
|
4
|
+
def noAnalytics():
    """Question-callback stub: always answers 'no' to enabling analytics."""
    answer = False
    return answer
|
|
6
|
+
|
|
7
|
+
def yesAnalytics():
    """Question-callback stub: always answers 'yes' to enabling analytics."""
    answer = True
    return answer
|
|
9
|
+
|
|
10
|
+
def notInvoked() -> None:
    """Question callback that must never be called.

    Fails the test unconditionally if decideAnalytics consults the user
    when it should not. Uses an explicit raise rather than ``assert False``,
    which would be silently stripped when Python runs with ``-O``.
    """
    raise AssertionError("questionCallback should not have been invoked")
|
|
12
|
+
|
|
13
|
+
class TestAnalytics(unittest.TestCase):
    """Exercise decideAnalytics over combinations of opt-out file,
    stored uuid, tty availability and the user's interactive answer."""

    def test_analytics(self) -> None:
        # Non-interactive session, no stored uuid: analytics is off and
        # the user must not be prompted.
        self.assertEqual(
            decideAnalytics(hasDisableFile=False, hasUuid=False,
                            isTty=False, questionCallback=notInvoked),
            False)
        # Interactive session without a stored uuid: the answer decides.
        self.assertEqual(
            decideAnalytics(hasDisableFile=False, hasUuid=False,
                            isTty=True, questionCallback=noAnalytics),
            False)
        self.assertEqual(
            decideAnalytics(hasDisableFile=False, hasUuid=False,
                            isTty=True, questionCallback=yesAnalytics),
            True)
        # A stored uuid enables analytics without prompting again.
        self.assertEqual(
            decideAnalytics(hasDisableFile=False, hasUuid=True,
                            isTty=False, questionCallback=notInvoked),
            True)
        self.assertEqual(
            decideAnalytics(hasDisableFile=False, hasUuid=True,
                            isTty=True, questionCallback=yesAnalytics),
            True)
        # The opt-out file wins even over an affirmative answer.
        self.assertEqual(
            decideAnalytics(hasDisableFile=True, hasUuid=False,
                            isTty=True, questionCallback=yesAnalytics),
            False)
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
# Allow running this test module directly: `python test_analytics.py`.
if __name__ == '__main__':
    unittest.main()
|
tests/test_args.py
ADDED
|
@@ -0,0 +1,119 @@
|
|
|
1
|
+
# Assuming you are using the mock library to ... mock things
|
|
2
|
+
from unittest import mock
|
|
3
|
+
from unittest.mock import patch
|
|
4
|
+
|
|
5
|
+
import alibuild_helpers.args
|
|
6
|
+
from alibuild_helpers.args import doParseArgs, matchValidArch
|
|
7
|
+
import sys
|
|
8
|
+
import os
|
|
9
|
+
import os.path
|
|
10
|
+
import re
|
|
11
|
+
|
|
12
|
+
import unittest
|
|
13
|
+
import shlex
|
|
14
|
+
|
|
15
|
+
# Exact argparse error strings emitted for missing positional arguments.
BUILD_MISSING_PKG_ERROR = "the following arguments are required: PACKAGE"
ANALYTICS_MISSING_STATE_ERROR = "the following arguments are required: state"

# A few errors we should handle, together with the expected result
ARCHITECTURE_ERROR = u"Unknown / unsupported architecture: foo.\n\n.*"
# Maps a bogus command line (the part after "aliBuild") to a regex that the
# resulting parser error message must match.
PARSER_ERRORS = {
    "build --force-unknown-architecture": BUILD_MISSING_PKG_ERROR,
    "build --force-unknown-architecture zlib --foo": 'unrecognized arguments: --foo',
    "init --docker-image": 'unrecognized arguments: --docker-image',
    "builda --force-unknown-architecture zlib" : "argument action: invalid choice: 'builda'.*",
    "build --force-unknown-architecture zlib --no-system --always-prefer-system" : 'argument --always-prefer-system: not allowed with argument --no-system',
    "build zlib --architecture foo": ARCHITECTURE_ERROR,
    # NOTE: trailing space in the key below is intentional/preserved from the
    # original command line.
    "build --force-unknown-architecture zlib --remote-store rsync://test1.local/::rw --write-store rsync://test2.local/::rw ": 'cannot specify ::rw and --write-store at the same time',
    "build zlib -a osx_x86-64 --docker-image foo": 'cannot use `-a osx_x86-64` and --docker',
    "build zlib -a slc7_x86-64 --annotate foobar": "--annotate takes arguments of the form PACKAGE=COMMENT",
    "analytics": ANALYTICS_MISSING_STATE_ERROR
}

# A few valid archs
VALID_ARCHS = ["osx_x86-64", "slc7_x86-64", "slc8_x86-64"]
INVALID_ARCHS = ["osx_ppc64", "sl8_x86-64"]
|
|
36
|
+
|
|
37
|
+
class FakeExit(Exception):
    """Stand-in raised instead of the interpreter exit that argparse triggers on error."""
|
|
39
|
+
|
|
40
|
+
# Valid command lines and the parse results they must produce.
# Each entry is (env, command_line, effects):
#   env          -- optional (ALIBUILD_WORK_DIR, ALIBUILD_CHDIR) pair the test
#                   installs as parser defaults; () means use ("sw", ".").
#   command_line -- what the user typed after "aliBuild".
#   effects      -- (attribute, value) pairs that must appear in the parsed args.
CORRECT_BEHAVIOR = [
    ((), "build --force-unknown-architecture zlib" , [("action", "build"), ("workDir", "sw"), ("referenceSources", "sw/MIRROR")]),
    ((), "init" , [("action", "init"), ("workDir", "sw"), ("referenceSources", "sw/MIRROR")]),
    ((), "version" , [("action", "version")]),
    ((), "clean" , [("action", "clean"), ("workDir", "sw")]),
    ((), "build --force-unknown-architecture -j 10 zlib" , [("action", "build"), ("jobs", 10), ("pkgname", ["zlib"])]),
    # --disable may be repeated and accepts comma-separated lists.
    ((), "build --force-unknown-architecture -j 10 zlib --disable gcc --disable foo" , [("disable", ["gcc", "foo"])]),
    ((), "build --force-unknown-architecture -j 10 zlib --disable gcc --disable foo,bar" , [("disable", ["gcc", "foo", "bar"])]),
    # --dist accepts either a version or a repo@version spec.
    ((), "init zlib --dist master" , [("dist", {"repo": "alisw/alidist", "ver": "master"})]),
    ((), "init zlib --dist ktf/alidist@dev" , [("dist", {"repo": "ktf/alidist", "ver": "dev"})]),
    # Remote store handling, including the ::rw read-write suffix.
    ((), "build --force-unknown-architecture zlib --remote-store rsync://test.local/" , [("noSystem", "*"), ("remoteStore", "rsync://test.local/")]),
    ((), "build --force-unknown-architecture zlib --remote-store rsync://test.local/::rw", [("noSystem", "*"), ("remoteStore", "rsync://test.local/"), ("writeStore", "rsync://test.local/")]),
    ((), "build --force-unknown-architecture zlib --no-remote-store --remote-store rsync://test.local/", [("noSystem", None), ("remoteStore", "")]),
    ((), "build zlib --architecture slc7_x86-64" , [("noSystem", "*"), ("preferSystem", False), ("remoteStore", "https://s3.cern.ch/swift/v1/alibuild-repo")]),
    ((), "build zlib --architecture ubuntu1804_x86-64" , [("noSystem", None), ("preferSystem", False), ("remoteStore", "")]),
    # Docker-related defaults and overrides.
    ((), "build zlib -a slc7_x86-64" , [("docker", False), ("dockerImage", None), ("docker_extra_args", ["--network=host"])]),
    ((), "build zlib -a slc7_x86-64 --docker-image registry.cern.ch/alisw/some-builder" , [("docker", True), ("dockerImage", "registry.cern.ch/alisw/some-builder")]),
    ((), "build zlib -a slc7_x86-64 --docker" , [("docker", True), ("dockerImage", "registry.cern.ch/alisw/slc7-builder")]),
    ((), "build zlib -a slc7_x86-64 --docker-extra-args=--foo" , [("docker", True), ("dockerImage", "registry.cern.ch/alisw/slc7-builder"), ("docker_extra_args", ["--foo", "--network=host"])]),
    ((), "build zlib --devel-prefix -a slc7_x86-64 --docker" , [("docker", True), ("dockerImage", "registry.cern.ch/alisw/slc7-builder"), ("develPrefix", "%s-slc7_x86-64" % os.path.basename(os.getcwd()))]),
    ((), "build zlib --devel-prefix -a slc7_x86-64 --docker-image someimage" , [("docker", True), ("dockerImage", "someimage"), ("develPrefix", "%s-slc7_x86-64" % os.path.basename(os.getcwd()))]),
    # --debug is accepted both before and after the action.
    ((), "--debug build --force-unknown-architecture --defaults o2 O2" , [("debug", True), ("action", "build"), ("defaults", "o2"), ("pkgname", ["O2"])]),
    ((), "build --force-unknown-architecture --debug --defaults o2 O2" , [("debug", True), ("action", "build"), ("force_rebuild", []), ("defaults", "o2"), ("pkgname", ["O2"])]),
    ((), "build --force-unknown-architecture --force-rebuild O2 --force-rebuild O2Physics --defaults o2 O2Physics", [("action", "build"), ("force_rebuild", ["O2", "O2Physics"]), ("defaults", "o2"), ("pkgname", ["O2Physics"])]),
    ((), "build --force-unknown-architecture --force-rebuild O2,O2Physics --defaults o2 O2Physics", [("action", "build"), ("force_rebuild", ["O2", "O2Physics"]), ("defaults", "o2"), ("pkgname", ["O2Physics"])]),
    ((), "init -z test zlib" , [("configDir", "test/alidist")]),
    ((), "build --force-unknown-architecture -z test zlib" , [("configDir", "alidist")]),
    ((), "analytics off" , [("state", "off")]),
    ((), "analytics on" , [("state", "on")]),

    # With ALIBUILD_WORK_DIR and ALIBUILD_CHDIR set
    (("sw2", ".") , "build --force-unknown-architecture zlib" , [("action", "build"), ("workDir", "sw2"), ("referenceSources", "sw2/MIRROR"), ("chdir", ".")]),
    (("sw3", "mydir"), "init" , [("action", "init"), ("workDir", "sw3"), ("referenceSources", "sw3/MIRROR"), ("chdir", "mydir")]),
    (("sw", ".") , "clean --chdir mydir2 --work-dir sw4" , [("action", "clean"), ("workDir", "sw4"), ("chdir", "mydir2")]),
    (() , "doctor zlib -C mydir -w sw2" , [("action", "doctor"), ("workDir", "sw2"), ("chdir", "mydir")]),
    (() , "deps zlib --outgraph graph.pdf" , [("action", "deps"), ("outgraph", "graph.pdf")]),
]
|
|
77
|
+
|
|
78
|
+
# Canned (status, output) replies for the mocked
# alibuild_helpers.args.commands.getstatusoutput, keyed by the exact command.
GETSTATUSOUTPUT_MOCKS = {
    "which docker": (0, "/usr/local/bin/docker")
}
|
|
81
|
+
|
|
82
|
+
class ArgsTestCase(unittest.TestCase):
    """Exercise aliBuild's command-line parser (doParseArgs / matchValidArch)."""

    @mock.patch("alibuild_helpers.utilities.getoutput", new=lambda cmd: "x86_64")  # for uname -m
    @mock.patch('alibuild_helpers.args.commands')
    def test_actionParsing(self, mock_commands):
        """Valid command lines must yield the expected parsed attributes."""
        mock_commands.getstatusoutput.side_effect = lambda x: GETSTATUSOUTPUT_MOCKS[x]
        for (env, cmd, effects) in CORRECT_BEHAVIOR:
            # NOTE(review): this overwrites module-level defaults and never
            # restores them; harmless as long as every iteration sets them
            # explicitly, as done here.
            (alibuild_helpers.args.DEFAULT_WORK_DIR,
             alibuild_helpers.args.DEFAULT_CHDIR) = env or ("sw", ".")
            with patch.object(sys, "argv", ["alibuild"] + shlex.split(cmd)):
                args, parser = doParseArgs()
                args = vars(args)
                for k, v in effects:
                    self.assertEqual(args[k], v)

    @mock.patch("alibuild_helpers.utilities.getoutput", new=lambda cmd: "x86_64")  # for uname -m
    @mock.patch('alibuild_helpers.args.argparse.ArgumentParser.error')
    def test_failingParsing(self, mock_error):
        """Invalid command lines must call ArgumentParser.error with a message
        matching the expected pattern."""
        mock_error.side_effect = FakeExit("raised")
        for (cmd, pattern) in PARSER_ERRORS.items():
            # reset_mock() clears recorded calls consistently, instead of
            # clobbering the .mock_calls attribute by hand.
            mock_error.reset_mock()
            with patch.object(sys, "argv", ["alibuild"] + shlex.split(cmd)):
                self.assertRaises(FakeExit, doParseArgs)
            for mock_call in mock_error.mock_calls:
                # First positional argument of parser.error() is the message.
                message = mock_call.args[0]
                self.assertTrue(
                    re.match(pattern, message),
                    f"Expected '{message}' matching '{pattern}' but it's not the case."
                )

    def test_validArchitectures(self) -> None:
        """matchValidArch accepts known architectures and rejects unknown ones."""
        for arch in VALID_ARCHS:
            self.assertTrue(matchValidArch(arch))
        for arch in INVALID_ARCHS:
            self.assertFalse(matchValidArch(arch))
|
|
117
|
+
|
|
118
|
+
# Allow running this test module directly, outside of a test runner.
if __name__ == '__main__':
    unittest.main()
|