alibuild 1.17.19 (py3-none-any.whl)
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- alibuild-1.17.19.data/scripts/aliBuild +137 -0
- alibuild-1.17.19.data/scripts/aliDeps +7 -0
- alibuild-1.17.19.data/scripts/aliDoctor +7 -0
- alibuild-1.17.19.data/scripts/alienv +344 -0
- alibuild-1.17.19.data/scripts/pb +7 -0
- alibuild-1.17.19.dist-info/METADATA +78 -0
- alibuild-1.17.19.dist-info/RECORD +74 -0
- alibuild-1.17.19.dist-info/WHEEL +5 -0
- alibuild-1.17.19.dist-info/licenses/LICENSE.md +674 -0
- alibuild-1.17.19.dist-info/top_level.txt +5 -0
- alibuild_helpers/__init__.py +21 -0
- alibuild_helpers/_version.py +21 -0
- alibuild_helpers/analytics.py +120 -0
- alibuild_helpers/args.py +493 -0
- alibuild_helpers/build.py +1209 -0
- alibuild_helpers/build_template.sh +314 -0
- alibuild_helpers/clean.py +83 -0
- alibuild_helpers/cmd.py +154 -0
- alibuild_helpers/deps.py +116 -0
- alibuild_helpers/doctor.py +195 -0
- alibuild_helpers/git.py +104 -0
- alibuild_helpers/init.py +103 -0
- alibuild_helpers/log.py +132 -0
- alibuild_helpers/scm.py +31 -0
- alibuild_helpers/sl.py +62 -0
- alibuild_helpers/sync.py +693 -0
- alibuild_helpers/templating_plugin.py +18 -0
- alibuild_helpers/utilities.py +662 -0
- alibuild_helpers/workarea.py +179 -0
- debian/changelog +11 -0
- debian/compat +1 -0
- debian/control +14 -0
- debian/copyright +10 -0
- debian/files +1 -0
- debian/rules +7 -0
- docs/README.md +1 -0
- docs/SUPPORT +3 -0
- docs/docs/alice_logo.png +0 -0
- docs/docs/deps.png +0 -0
- docs/docs/index.md +75 -0
- docs/docs/quick.md +89 -0
- docs/docs/reference.md +430 -0
- docs/docs/stylesheets/extra.css +9 -0
- docs/docs/troubleshooting.md +346 -0
- docs/docs/user.md +413 -0
- docs/mkdocs.yml +37 -0
- templates/alibuild_to_please.jnj +63 -0
- tests/test_analytics.py +42 -0
- tests/test_args.py +119 -0
- tests/test_build.py +426 -0
- tests/test_clean.py +154 -0
- tests/test_cmd.py +73 -0
- tests/test_deps.py +79 -0
- tests/test_doctor.py +128 -0
- tests/test_git.py +48 -0
- tests/test_hashing.py +67 -0
- tests/test_init.py +103 -0
- tests/test_log.py +50 -0
- tests/test_packagelist.py +235 -0
- tests/test_parseRecipe.py +132 -0
- tests/test_sync.py +332 -0
- tests/test_utilities.py +383 -0
- tests/test_workarea.py +101 -0
- tests/testdist/broken1.sh +1 -0
- tests/testdist/broken2.sh +1 -0
- tests/testdist/broken3.sh +3 -0
- tests/testdist/broken4.sh +2 -0
- tests/testdist/broken5.sh +2 -0
- tests/testdist/broken6.sh +2 -0
- tests/testdist/broken7.sh +5 -0
- tests/testdist/clobber-initdotsh.sh +4 -0
- tests/testdist/defaults-o2.sh +10 -0
- tests/testdist/delete-etc.sh +4 -0
- tests/testdist/tracking-env.sh +6 -0
alibuild_helpers/build.py
@@ -0,0 +1,1209 @@
from os.path import abspath, exists, basename, dirname, join, realpath
from os import makedirs, unlink, readlink, rmdir
from alibuild_helpers import __version__
from alibuild_helpers.analytics import report_event
from alibuild_helpers.log import debug, info, banner, warning
from alibuild_helpers.log import dieOnError
from alibuild_helpers.cmd import execute, DockerRunner, BASH, install_wrapper_script, getstatusoutput
from alibuild_helpers.utilities import prunePaths, symlink, call_ignoring_oserrors, topological_sort, detectArch
from alibuild_helpers.utilities import resolve_store_path
from alibuild_helpers.utilities import parseDefaults, readDefaults
from alibuild_helpers.utilities import getPackageList, asList
from alibuild_helpers.utilities import validateDefaults
from alibuild_helpers.utilities import Hasher
from alibuild_helpers.utilities import resolve_tag, resolve_version, short_commit_hash
from alibuild_helpers.git import Git, git
from alibuild_helpers.sl import Sapling
from alibuild_helpers.scm import SCMError
from alibuild_helpers.sync import remote_from_url
from alibuild_helpers.workarea import logged_scm, updateReferenceRepoSpec, checkout_sources
from alibuild_helpers.log import ProgressPrint, log_current_package
from glob import glob
from textwrap import dedent
from collections import OrderedDict
from shlex import quote
import tempfile

import concurrent.futures
import importlib
import json
import socket
import os
import re
import shutil
import time


def writeAll(fn, txt) -> None:
  f = open(fn, "w")
  f.write(txt)
  f.close()


def readHashFile(fn):
  try:
    return open(fn).read().strip("\n")
  except IOError:
    return "0"


def update_git_repos(args, specs, buildOrder):
  """Update and/or fetch required git repositories in parallel.

  If any repository fails to be fetched, then it is retried, while allowing the
  user to input their credentials if required.
  """

  def update_repo(package, git_prompt):
    specs[package]["scm"] = Git()
    if specs[package]["is_devel_pkg"]:
      specs[package]["source"] = os.path.join(os.getcwd(), specs[package]["package"])
      if exists(os.path.join(specs[package]["source"], ".sl")):
        specs[package]["scm"] = Sapling()
    updateReferenceRepoSpec(args.referenceSources, package, specs[package],
                            fetch=args.fetchRepos, allowGitPrompt=git_prompt)

    # Retrieve git heads
    output = logged_scm(specs[package]["scm"], package, args.referenceSources,
                        specs[package]["scm"].listRefsCmd(specs[package].get("reference", specs[package]["source"])),
                        ".", prompt=git_prompt, logOutput=False)
    specs[package]["scm_refs"] = specs[package]["scm"].parseRefs(output)

  progress = ProgressPrint("Updating repositories")
  requires_auth = set()
  with concurrent.futures.ThreadPoolExecutor(max_workers=4) as executor:
    future_to_download = {
      executor.submit(update_repo, package, git_prompt=False): package
      for package in buildOrder if "source" in specs[package]
    }
    for i, future in enumerate(concurrent.futures.as_completed(future_to_download)):
      futurePackage = future_to_download[future]
      progress("[%d/%d] Updating repository for %s",
               i, len(future_to_download), futurePackage)
      try:
        future.result()
      except SCMError:
        # The SCM failed. Let's assume this is because the user needs
        # to supply a password.
        debug("%r requires auth; will prompt later", futurePackage)
        requires_auth.add(futurePackage)
      except Exception as exc:
        progress.end("error", error=True)
        dieOnError(True, "Error on fetching %r: %s. Aborting." %
                   (futurePackage, exc))
      else:
        debug("%r package updated: %d refs found", futurePackage,
              len(specs[futurePackage]["scm_refs"]))
  progress.end("done")

  # Now execute git commands for private packages one-by-one, so the user can
  # type their username and password without multiple prompts interfering.
  for package in requires_auth:
    banner("If prompted now, enter your username and password for %s below\n"
           "If you are prompted too often, see: "
           "https://alisw.github.io/alibuild/troubleshooting.html"
           "#alibuild-keeps-asking-for-my-password",
           specs[package]["source"])
    update_repo(package, git_prompt=True)
    debug("%r package updated: %d refs found", package,
          len(specs[package]["scm_refs"]))


# Creates a directory in the store which contains symlinks to the package
# and its direct / indirect dependencies
def createDistLinks(spec, specs, args, syncHelper, repoType, requiresType):
  # At the point we call this function, spec has a single, definitive hash.
  target_dir = "{work_dir}/TARS/{arch}/{repo}/{package}/{package}-{version}-{revision}" \
    .format(work_dir=args.workDir, arch=args.architecture, repo=repoType, **spec)
  shutil.rmtree(target_dir.encode("utf-8"), ignore_errors=True)
  makedirs(target_dir, exist_ok=True)
  for pkg in [spec["package"]] + list(spec[requiresType]):
    dep_tarball = "../../../../../TARS/{arch}/store/{short_hash}/{hash}/{package}-{version}-{revision}.{arch}.tar.gz" \
      .format(arch=args.architecture, short_hash=specs[pkg]["hash"][:2], **specs[pkg])
    symlink(dep_tarball, target_dir)

def storeHashes(package, specs, considerRelocation):
  """Calculate various hashes for package, and store them in specs[package].

  Assumes that all dependencies of the package already have a definitive hash.
  """
  spec = specs[package]

  if "remote_revision_hash" in spec and "local_revision_hash" in spec:
    # We've already calculated these hashes before, so no need to do it again.
    # This also works around a bug, where after the first hash calculation,
    # some attributes of spec are changed (e.g. append_path and prepend_path
    # entries are turned from strings into lists), which changes the hash on
    # subsequent calculations.
    return

  # For now, all the hashers share data -- they'll be split below.
  h_all = Hasher()

  if spec.get("force_rebuild", False):
    h_all(str(time.time()))

  for key in ("recipe", "version", "package"):
    h_all(spec.get(key, "none"))

  # commit_hash could be a commit hash (if we're not building a tag, but
  # instead e.g. a branch or particular commit specified by its hash), or it
  # could be a tag name (if we're building a tag). We want to calculate the
  # hash for both cases, so that if we build some commit, we want to be able to
  # reuse tarballs from other builds of the same commit, even if it was
  # referred to differently in the other build.
  debug("Base git ref is %s", spec["commit_hash"])
  h_default = h_all.copy()
  h_default(spec["commit_hash"])
  try:
    # If spec["commit_hash"] is a tag, get the actual git commit hash.
    real_commit_hash = spec["scm_refs"]["refs/tags/" + spec["commit_hash"]]
  except KeyError:
    # If it's not a tag, assume it's an actual commit hash.
    real_commit_hash = spec["commit_hash"]
  # Get any other git tags that refer to the same commit. We do not consider
  # branches, as their heads move, and that will cause problems.
  debug("Real commit hash is %s, storing alternative", real_commit_hash)
  h_real_commit = h_all.copy()
  h_real_commit(real_commit_hash)
  h_alternatives = [(spec.get("tag", "0"), spec["commit_hash"], h_default),
                    (spec.get("tag", "0"), real_commit_hash, h_real_commit)]
  for ref, git_hash in spec.get("scm_refs", {}).items():
    if ref.startswith("refs/tags/") and git_hash == real_commit_hash:
      tag_name = ref[len("refs/tags/"):]
      debug("Tag %s also points to %s, storing alternative",
            tag_name, real_commit_hash)
      hasher = h_all.copy()
      hasher(tag_name)
      h_alternatives.append((tag_name, git_hash, hasher))

  # Now that we've split the hasher with the real commit hash off from the ones
  # with a tag name, h_all has to add the data to all of them separately.
  def h_all(data):  # pylint: disable=function-redefined
    for _, _, hasher in h_alternatives:
      hasher(data)

  modifies_full_hash_dicts = ["env", "append_path", "prepend_path"]
  if not spec["is_devel_pkg"] and "track_env" in spec:
    modifies_full_hash_dicts.append("track_env")

  for key in modifies_full_hash_dicts:
    if key not in spec:
      h_all("none")
    else:
      # spec["env"] is of type OrderedDict[str, str].
      # spec["*_path"] are of type OrderedDict[str, list[str]].
      assert isinstance(spec[key], OrderedDict), \
        "spec[%r] was of type %r" % (key, type(spec[key]))

      # Python 3.12 changed the string representation of OrderedDicts from
      # OrderedDict([(key, value)]) to OrderedDict({key: value}), so to remain
      # compatible, we need to emulate the previous string representation.
      h_all("OrderedDict([")
      h_all(", ".join(
        # XXX: We still rely on repr("str") being "'str'",
        # and on repr(["a", "b"]) being "['a', 'b']".
        "(%r, %r)" % (key, value)
        for key, value in spec[key].items()
      ))
      h_all("])")

  for tag, commit_hash, hasher in h_alternatives:
    # If the commit hash is a real hash, and not a tag, we can safely assume
    # that's unique, and therefore we can avoid putting the repository or the
    # name of the branch in the hash.
    if commit_hash == tag:
      hasher(spec.get("source", "none"))
      if "source" in spec:
        hasher(tag)

  dh = Hasher()
  for dep in spec.get("requires", []):
    # At this point, our dependencies have a single hash, local or remote, in
    # specs[dep]["hash"].
    hash_and_devel_hash = specs[dep]["hash"] + specs[dep].get("devel_hash", "")
    # If this package is a dev package, and it depends on another dev pkg, then
    # this package's hash shouldn't change if the other dev package was
    # changed, so that we can just rebuild this one incrementally.
    h_all(specs[dep]["hash"] if spec["is_devel_pkg"] else hash_and_devel_hash)
    # The deps_hash should always change, however, so we actually rebuild the
    # dependent package (even if incrementally).
    dh(hash_and_devel_hash)

  if spec["is_devel_pkg"] and "incremental_recipe" in spec:
    h_all(spec["incremental_recipe"])
    ih = Hasher()
    ih(spec["incremental_recipe"])
    spec["incremental_hash"] = ih.hexdigest()
  elif spec["is_devel_pkg"]:
    h_all(spec["devel_hash"])

  if considerRelocation and "relocate_paths" in spec:
    h_all("relocate:"+" ".join(sorted(spec["relocate_paths"])))

  spec["deps_hash"] = dh.hexdigest()
  spec["remote_revision_hash"] = h_default.hexdigest()
  # Store hypothetical hashes of this spec if we were building it using other
  # tags that refer to the same commit that we're actually building. These are
  # later used when fetching from the remote store. The "primary" hash should
  # be the first in the list, so it's checked first by the remote stores.
  spec["remote_hashes"] = [spec["remote_revision_hash"]] + \
    list({h.hexdigest() for _, _, h in h_alternatives} - {spec["remote_revision_hash"]})
  # The local hash must differ from the remote hash to avoid conflicts where
  # the remote has a package with the same hash as an existing local revision.
  h_all("local")
  spec["local_revision_hash"] = h_default.hexdigest()
  spec["local_hashes"] = [spec["local_revision_hash"]] + \
    list({h.hexdigest() for _, _, h, in h_alternatives} - {spec["local_revision_hash"]})

def hash_local_changes(spec):
  """Produce a hash of all local changes in the given git repo.

  If there are untracked files, this function returns a unique hash to force a
  rebuild, and logs a warning, as we cannot detect changes to those files.
  """
  directory = spec["source"]
  scm = spec["scm"]
  untrackedFilesDirectories = []
  class UntrackedChangesError(Exception):
    """Signal that we cannot detect code changes due to untracked files."""
  h = Hasher()
  if "track_env" in spec:
    assert isinstance(spec["track_env"], OrderedDict), \
      "spec[%r] was of type %r" % ("track_env", type(spec["track_env"]))

    # Python 3.12 changed the string representation of OrderedDicts from
    # OrderedDict([(key, value)]) to OrderedDict({key: value}), so to remain
    # compatible, we need to emulate the previous string representation.
    h("OrderedDict([")
    h(", ".join(
      # XXX: We still rely on repr("str") being "'str'",
      # and on repr(["a", "b"]) being "['a', 'b']".
      "(%r, %r)" % (key, value) for key, value in spec["track_env"].items()))
    h("])")
  def hash_output(msg, args):
    lines = msg % args
    # `git status --porcelain` indicates untracked files using "??".
    # Lines from `git diff` never start with "??".
    if any(scm.checkUntracked(line) for line in lines.split("\n")):
      raise UntrackedChangesError()
    h(lines)
  cmd = scm.diffCmd(directory)
  try:
    err = execute(cmd, hash_output)
    debug("Command %s returned %d", cmd, err)
    dieOnError(err, "Unable to detect source code changes.")
  except UntrackedChangesError:
    untrackedFilesDirectories = [directory]
    warning("You have untracked changes in %s, so aliBuild cannot detect "
            "whether it needs to rebuild the package. Therefore, the package "
            "is being rebuilt unconditionally. Please use 'git add' and/or "
            "'git commit' to track your changes in git.", directory)
    # If there are untracked changes, always rebuild (hopefully incrementally)
    # and let CMake figure out what needs to be rebuilt. Force a rebuild by
    # changing the hash to something basically random.
    h(str(time.time()))
  return (h.hexdigest(), untrackedFilesDirectories)

def better_tarball(spec, old, new):
  """Return which tarball we should prefer to reuse."""
  if not old: return new
  if not new: return old
  old_rev, old_hash, _ = old
  new_rev, new_hash, _ = new
  old_is_local, new_is_local = old_rev.startswith("local"), new_rev.startswith("local")
  # If one is local and one is remote, return the remote one.
  if old_is_local and not new_is_local: return new
  if new_is_local and not old_is_local: return old
  # Finally, return the one that appears in the list of hashes earlier.
  hashes = spec["local_hashes" if old_is_local else "remote_hashes"]
  return old if hashes.index(old_hash) < hashes.index(new_hash) else new

def generate_initdotsh(package, specs, architecture, post_build=False):
  """Return the contents of the given package's etc/profile/init.sh as a string.

  If post_build is true, also generate variables pointing to the package
  itself; else, only generate variables pointing at its dependencies.
  """
  spec = specs[package]
  # Allow users to override ALIBUILD_ARCH_PREFIX if they manually source
  # init.sh. This is useful for development off CVMFS, since we have a
  # slightly different directory hierarchy there.
  lines = [': "${ALIBUILD_ARCH_PREFIX:=%s}"' % architecture]

  # Generate the part which sources the environment for all the dependencies.
  # We guarantee that a dependency is always sourced before the parts
  # depending on it, but we do not guarantee anything for the order in which
  # unrelated components are activated.
  # These variables are also required during the build itself, so always
  # generate them.
  lines.extend((
    '[ -n "${{{bigpackage}_REVISION}}" ] || '
    '. "$WORK_DIR/$ALIBUILD_ARCH_PREFIX"/{package}/{version}-{revision}/etc/profile.d/init.sh'
  ).format(
    bigpackage=dep.upper().replace("-", "_"),
    package=quote(specs[dep]["package"]),
    version=quote(specs[dep]["version"]),
    revision=quote(specs[dep]["revision"]),
  ) for dep in spec.get("requires", ()))

  if post_build:
    bigpackage = package.upper().replace("-", "_")

    # Set standard variables related to the package itself. These should only
    # be set once the build has actually completed.
    lines.extend(line.format(
      bigpackage=bigpackage,
      package=quote(spec["package"]),
      version=quote(spec["version"]),
      revision=quote(spec["revision"]),
      hash=quote(spec["hash"]),
      commit_hash=quote(spec["commit_hash"]),
    ) for line in (
      'export {bigpackage}_ROOT="$WORK_DIR/$ALIBUILD_ARCH_PREFIX"/{package}/{version}-{revision}',
      "export {bigpackage}_VERSION={version}",
      "export {bigpackage}_REVISION={revision}",
      "export {bigpackage}_HASH={hash}",
      "export {bigpackage}_COMMIT={commit_hash}",
    ))

    # Generate the part which sets the environment variables related to the
    # package itself. This can be variables set via the "env" keyword in the
    # metadata or paths which get concatenated via the "{append,prepend}_path"
    # keys. These should only be set once the build has actually completed,
    # since the paths referred to will only exist then.

    # First, output a sensible error message if types are wrong.
    for key in ("env", "append_path", "prepend_path"):
      dieOnError(not isinstance(spec.get(key, {}), dict),
                 "Tag `%s' in %s should be a dict." % (key, package))

    # Set "env" variables.
    # We only put the values in double-quotes, so that they can refer to other
    # shell variables or do command substitution (e.g. $(brew --prefix ...)).
    lines.extend('export {}="{}"'.format(key, value)
                 for key, value in spec.get("env", {}).items()
                 if key != "DYLD_LIBRARY_PATH")

    # Append paths to variables, if requested using append_path.
    # Again, only put values in double quotes so that they can refer to other variables.
    lines.extend('export {key}="${key}:{value}"'
                 .format(key=key, value=":".join(asList(value)))
                 for key, value in spec.get("append_path", {}).items()
                 if key != "DYLD_LIBRARY_PATH")

    # First convert all values to list, so that we can use .setdefault().insert() below.
    prepend_path = {key: asList(value)
                    for key, value in spec.get("prepend_path", {}).items()}
    # By default we add the .../bin directory to PATH and .../lib to LD_LIBRARY_PATH.
    # Prepend to these paths, so that our packages win against system ones.
    for key, value in (("PATH", "bin"), ("LD_LIBRARY_PATH", "lib")):
      prepend_path.setdefault(key, []).insert(0, "${}_ROOT/{}".format(bigpackage, value))
    lines.extend('export {key}="{value}${{{key}+:${key}}}"'
                 .format(key=key, value=":".join(value))
                 for key, value in prepend_path.items()
                 if key != "DYLD_LIBRARY_PATH")

  # Return string without a trailing newline, since we expect call sites to
  # append that (and the obvious way to insert it into the build template is by
  # putting the "%(initdotsh_*)s" on its own line, which has the same effect).
  return "\n".join(lines)

def create_provenance_info(package, specs, args):
  """Return a metadata record for storage in the package's install directory."""

  def spec_info(spec):
    return {
      "name": spec["package"],
      "tag": spec.get("tag"),
      "source": spec.get("source"),
      "version": spec["version"],
      "revision": spec["revision"],
      "hash": spec["hash"],
    }

  def dependency_list(key):
    return [spec_info(specs[dep]) for dep in specs[package].get(key, ())]

  return json.dumps({
    "comment": args.annotate.get(package),
    "alibuild_version": __version__,
    "alidist": {
      "commit": os.environ["ALIBUILD_ALIDIST_HASH"],
    },
    "architecture": args.architecture,
    "defaults": args.defaults,
    "package": spec_info(specs[package]),
    "dependencies": {
      "direct": {
        "build": dependency_list("build_requires"),
        "runtime": dependency_list("runtime_requires"),
      },
      "recursive": {  # includes direct deps and deps' deps
        "build": dependency_list("full_build_requires"),
        "runtime": dependency_list("full_runtime_requires"),
      },
    },
  })

def doBuild(args, parser):
  syncHelper = remote_from_url(args.remoteStore, args.writeStore, args.architecture,
                               args.workDir, getattr(args, "insecure", False))

  packages = args.pkgname
  specs = {}
  buildOrder = []
  workDir = abspath(args.workDir)
  prunePaths(workDir)

  dieOnError(not exists(args.configDir),
             'Cannot find alidist recipes under directory "%s".\n'
             'Maybe you need to "cd" to the right directory or '
             'you forgot to run "aliBuild init"?' % args.configDir)

  _, value = git(("symbolic-ref", "-q", "HEAD"), directory=args.configDir, check=False)
  branch_basename = re.sub("refs/heads/", "", value)
  branch_stream = re.sub("-patches$", "", branch_basename)
  # In case the basename and the stream are the same,
  # the stream becomes empty.
  if branch_stream == branch_basename:
    branch_stream = ""

  defaultsReader = lambda : readDefaults(args.configDir, args.defaults, parser.error, args.architecture)
  (err, overrides, taps) = parseDefaults(args.disable,
                                         defaultsReader, debug)
  dieOnError(err, err)

  makedirs(join(workDir, "SPECS"), exist_ok=True)

  # If the alidist workdir contains a .sl directory, we use Sapling as SCM.
  # Otherwise, we default to git (without checking for the actual presence of
  # .git). We mustn't check for a .git directory, because some tests use a
  # subdirectory of the alibuild source tree as the "alidist" checkout, and
  # that won't have a .git directory.
  scm = exists("%s/.sl" % args.configDir) and Sapling() or Git()
  try:
    checkedOutCommitName = scm.checkedOutCommitName(directory=args.configDir)
  except SCMError:
    dieOnError(True, "Cannot find SCM directory in %s." % args.configDir)
  os.environ["ALIBUILD_ALIDIST_HASH"] = checkedOutCommitName

  debug("Building for architecture %s", args.architecture)
  debug("Number of parallel builds: %d", args.jobs)
  debug("Using aliBuild from alibuild@%s recipes in alidist@%s",
        __version__ or "unknown", os.environ["ALIBUILD_ALIDIST_HASH"])

  install_wrapper_script("git", workDir)

  with DockerRunner(args.dockerImage, args.docker_extra_args, extra_env={"ALIBUILD_CONFIG_DIR": "/alidist" if args.docker else os.path.abspath(args.configDir)}, extra_volumes=[f"{os.path.abspath(args.configDir)}:/alidist:ro"] if args.docker else []) as getstatusoutput_docker:
    def performPreferCheckWithTempDir(pkg, cmd):
      with tempfile.TemporaryDirectory(prefix=f"alibuild_prefer_check_{pkg['package']}_") as temp_dir:
        return getstatusoutput_docker(cmd, cwd=temp_dir)

    systemPackages, ownPackages, failed, validDefaults = \
      getPackageList(packages = packages,
                     specs = specs,
                     configDir = args.configDir,
                     preferSystem = args.preferSystem,
                     noSystem = args.noSystem,
                     architecture = args.architecture,
                     disable = args.disable,
                     force_rebuild = args.force_rebuild,
                     defaults = args.defaults,
                     performPreferCheck = performPreferCheckWithTempDir,
                     performRequirementCheck = performPreferCheckWithTempDir,
                     performValidateDefaults = lambda spec: validateDefaults(spec, args.defaults),
                     overrides = overrides,
                     taps = taps,
                     log = debug)

    dieOnError(validDefaults and args.defaults not in validDefaults,
               "Specified default `%s' is not compatible with the packages you want to build.\n"
               "Valid defaults:\n\n- %s" % (args.defaults, "\n- ".join(sorted(validDefaults or []))))
    dieOnError(failed,
               "The following packages are system requirements and could not be found:\n\n- %s\n\n"
               "Please run:\n\n\taliDoctor --defaults %s %s\n\nto get a full diagnosis." %
               ("\n- ".join(sorted(failed)), args.defaults, " ".join(args.pkgname)))

    for x in specs.values():
      x["requires"] = [r for r in x["requires"] if r not in args.disable]
      x["build_requires"] = [r for r in x["build_requires"] if r not in args.disable]
      x["runtime_requires"] = [r for r in x["runtime_requires"] if r not in args.disable]

    if systemPackages:
      banner("aliBuild can take the following packages from the system and will not build them:\n %s",
             ", ".join(systemPackages))
    if ownPackages:
      banner("The following packages cannot be taken from the system and will be built:\n %s",
             ", ".join(ownPackages))

    buildOrder = list(topological_sort(specs))

    # Check if any of the packages can be picked up from a local checkout
    if args.forceTracked:
      develPkgs = set()
    else:
      develCandidates = {basename(d) for d in glob("*") if os.path.isdir(d)} - frozenset(args.noDevel)
      develCandidatesUpper = {d.upper() for d in develCandidates}
      develPkgs = frozenset(buildOrder) & develCandidates
      develPkgsUpper = {p for p in buildOrder if p.upper() in develCandidatesUpper}
      dieOnError(develPkgs != develPkgsUpper,
                 "The following development packages have the wrong spelling: %s.\n"
                 "Please check your local checkout and adapt to the correct one indicated." %
                 ", ".join(develPkgsUpper - develPkgs))
      del develCandidates, develCandidatesUpper, develPkgsUpper

    if buildOrder:
      if args.onlyDeps:
        builtPackages = buildOrder[:-1]
      else:
        builtPackages = buildOrder
      if len(builtPackages) > 1:
        banner("Packages will be built in the following order:\n - %s",
               "\n - ".join(x+" (development package)" if x in develPkgs else "%s@%s" % (x, specs[x]["tag"])
                            for x in builtPackages if x != "defaults-release"))
      else:
        banner("No dependencies of package %s to build.", buildOrder[-1])


    if develPkgs:
      banner("You have packages in development mode (%s).\n"
             "This means their source code can be freely modified under:\n\n"
             " %s/<package_name>\n\n"
             "aliBuild does not automatically update such packages to avoid work loss.\n"
             "In most cases this is achieved by doing in the package source directory:\n\n"
             " git pull --rebase\n",
             ", ".join(develPkgs),
             os.getcwd())

    for pkg, spec in specs.items():
      spec["is_devel_pkg"] = pkg in develPkgs
      spec["scm"] = Git()
      if spec["is_devel_pkg"]:
        spec["source"] = os.path.join(os.getcwd(), pkg)
      if "source" in spec and exists(os.path.join(spec["source"], ".sl")):
        spec["scm"] = Sapling()
      reference_repo = join(os.path.abspath(args.referenceSources), pkg.lower())
      if exists(reference_repo):
        spec["reference"] = reference_repo
    del develPkgs

    # Clone/update repos
    update_git_repos(args, specs, buildOrder)
    # This is the list of packages which have untracked files in their
    # source directory, and which are rebuilt every time. We will warn
    # about them at the end of the build.
    untrackedFilesDirectories = []

    # Resolve the tag to the actual commit ref
    for p in buildOrder:
      spec = specs[p]
      spec["commit_hash"] = "0"
      develPackageBranch = ""
      # This is a development package (i.e. a local directory named like
      # spec["package"]), but there is no "source" key in its alidist recipe,
      # so there shouldn't be any code for it! Presumably, a user has
      # mistakenly named a local directory after one of our packages.
      dieOnError("source" not in spec and spec["is_devel_pkg"],
                 "Found a directory called {package} here, but we're not "
                 "expecting any code for the package {package}. If this is a "
                 "mistake, please rename the {package} directory or use the "
                 "'--no-local {package}' option. If aliBuild should pick up "
                 "source code from this directory, add a 'source:' key to "
                 "alidist/{recipe}.sh instead."
                 .format(package=p, recipe=p.lower()))
      if "source" in spec:
        # Tag may contain date params like %(year)s, %(month)s, %(day)s, %(hour).
        spec["tag"] = resolve_tag(spec)
        # First, we try to resolve the "tag" as a branch name, and use its tip as
        # the commit_hash. If it's not a branch, it must be a tag or a raw commit
        # hash, so we use it directly. Finally if the package is a development
        # one, we use the name of the branch as commit_hash.
        assert "scm_refs" in spec
        try:
          spec["commit_hash"] = spec["scm_refs"]["refs/heads/" + spec["tag"]]
        except KeyError:
          spec["commit_hash"] = spec["tag"]
        # We are in development mode, we need to rebuild if the commit hash is
        # different or if there are extra changes on top.
        if spec["is_devel_pkg"]:
          # Devel package: we get the commit hash from the checked source, not from remote.
          out = spec["scm"].checkedOutCommitName(directory=spec["source"])
          spec["commit_hash"] = out.strip()
          local_hash, untracked = hash_local_changes(spec)
          untrackedFilesDirectories.extend(untracked)
          spec["devel_hash"] = spec["commit_hash"] + local_hash
          out = spec["scm"].branchOrRef(directory=spec["source"])
          develPackageBranch = out.replace("/", "-")
          spec["tag"] = args.develPrefix if "develPrefix" in args else develPackageBranch
          spec["commit_hash"] = "0"

      # Version may contain date params like tag, plus %(commit_hash)s,
      # %(short_hash)s and %(tag)s.
      spec["version"] = resolve_version(spec, args.defaults, branch_basename, branch_stream)

      if spec["is_devel_pkg"] and "develPrefix" in args and args.develPrefix != "ali-master":
        spec["version"] = args.develPrefix

    # Decide what is the main package we are building and at what commit.
    #
    # We emit an event for the main package, when encountered, so that we can use
    # it to index builds of the same hash on different architectures. We also
    # make sure to add the main package and its hash to the debug log, so that we
    # can always extract it from it.
    # If one of the special packages is in the list of packages to be built,
    # we use it as main package, rather than the last one.
    if not buildOrder:
      banner("Nothing to be done.")
      return
    mainPackage = buildOrder[-1]
    mainHash = specs[mainPackage]["commit_hash"]

    debug("Main package is %s@%s", mainPackage, mainHash)
    log_current_package(None, mainPackage, specs, getattr(args, "develPrefix", None))

    # Now that we have the main package set, we can print out useful information
    # which we will be able to associate with this build. Also let's make sure each package
    # we need to build can be built with the current default.
    for p in buildOrder:
      spec = specs[p]
      if "source" in spec:
        debug("Commit hash for %s@%s is %s", spec["source"], spec["tag"], spec["commit_hash"])

    # We recursively calculate the full set of requires "full_requires"
    # including build_requires and the subset of them which are needed at
    # runtime "full_runtime_requires". Do this in build order, so that we can
    # rely on each spec's dependencies already having their full_*_requires
    # properties populated.
    for p in buildOrder:
      spec = specs[p]
      for key in ("requires", "runtime_requires", "build_requires"):
        full_key = "full_" + key
        spec[full_key] = set()
        for dep in spec.get(key, ()):
          spec[full_key].add(dep)
          # Runtime deps of build deps should count as build deps.
          spec[full_key] |= specs[dep]["full_requires" if key == "build_requires" else full_key]
      # Propagate build deps of runtime deps, so that they are not added into
      # the generated modulefile by alibuild-generate-module.
      for dep in spec["runtime_requires"]:
        spec["full_build_requires"] |= specs[dep]["full_build_requires"]
      # If something requires or runtime_requires a package, then it's not a
      # pure build_requires only anymore, so we drop it from the list.
      spec["full_build_requires"] -= spec["full_runtime_requires"]

    # Use the selected plugin to build, instead of the default behaviour, if a
    # plugin was selected.
    if args.plugin != "legacy":
      return importlib.import_module("alibuild_helpers.%s_plugin" % args.plugin) \
        .build_plugin(specs, args, buildOrder)

    debug("We will build packages in the following order: %s", " ".join(buildOrder))
    if args.dryRun:
      info("--dry-run / -n specified. Not building.")
      return

    # We now iterate on all the packages, making sure we build correctly every
    # single one of them. This is done this way so that the second time we run we
    # can check if the build was consistent and if it is, we bail out.
    report_event("install", "{p} disabled={dis} devel={dev} system={sys} own={own} deps={deps}".format(
      p=args.pkgname,
      dis=",".join(sorted(args.disable)),
      dev=",".join(sorted(spec["package"] for spec in specs.values() if spec["is_devel_pkg"])),
      sys=",".join(sorted(systemPackages)),
      own=",".join(sorted(ownPackages)),
      deps=",".join(buildOrder[:-1]),
    ), args.architecture)

    # If we are building only the dependencies, the last package in
    # the build order can be considered done.
    if args.onlyDeps and len(buildOrder) > 1:
      mainPackage = buildOrder.pop()
      warning("Not rebuilding %s because --only-deps option provided.", mainPackage)

    while buildOrder:
      p = buildOrder[0]
      spec = specs[p]
      log_current_package(p, mainPackage, specs, getattr(args, "develPrefix", None))

      # Calculate the hashes. We do this in build order so that we can guarantee
      # that the hashes of the dependencies are calculated first. Do this inside
      # the main build loop to make sure that our dependencies have been assigned
      # a single, definitive hash.
      debug("Calculating hash.")
      debug("spec = %r", spec)
      debug("develPkgs = %r", sorted(spec["package"] for spec in specs.values() if spec["is_devel_pkg"]))
      storeHashes(p, specs, considerRelocation=args.architecture.startswith("osx"))
      debug("Hashes for recipe %s are %s (remote); %s (local)", p,
            ", ".join(spec["remote_hashes"]), ", ".join(spec["local_hashes"]))

      if spec["is_devel_pkg"] and getattr(syncHelper, "writeStore", None):
        warning("Disabling remote write store from now since %s is a development package.", spec["package"])
        syncHelper.writeStore = ""

      # Since we can execute this multiple times for a given package, in order to
      # ensure consistency, we need to reset things and make them pristine.
      spec.pop("revision", None)

      debug("Updating from tarballs")
      # If we arrived here it really means we have a tarball which was created
      # using the same recipe. We will use it as a cache for the build. This means
      # that while we will still perform the build process, rather than
      # executing the build itself we will:
      #
      # - Unpack it in a temporary place.
      # - Invoke the relocation specifying the correct work_dir and the
      #   correct path which should have been used.
      # - Move the version directory to its final destination, including the
      #   correct revision.
      # - Repack it and put it in the store with the
      #
      # this will result in a new package which has the same binary contents of
      # the old one but where the relocation will work for the new directory. Here
      # we simply store the fact that we can reuse the contents of cachedTarball.
      syncHelper.fetch_symlinks(spec)

      # Decide how it should be called, based on the hash and what is already
      # available.
      debug("Checking for packages already built.")

      # Make sure this regex broadly matches the regex below that parses the
      # symlink's target. Overly-broadly matching the version, for example, can
      # lead to false positives that trigger a warning below.
      links_regex = re.compile(r"{package}-{version}-(?:local)?[0-9]+\.{arch}\.tar\.gz".format(
        package=re.escape(spec["package"]),
        version=re.escape(spec["version"]),
        arch=re.escape(args.architecture),
      ))
      symlink_dir = join(workDir, "TARS", args.architecture, spec["package"])
      try:
        packages = [join(symlink_dir, symlink_path)
                    for symlink_path in os.listdir(symlink_dir)
                    if links_regex.fullmatch(symlink_path)]
      except OSError:
        # If symlink_dir does not exist or cannot be accessed, return an empty
        # list of packages.
        packages = []
      del links_regex, symlink_dir

      # In case there is no installed software, revision is 1
      # If there is already an installed package:
      # - Remove it if we do not know its hash
      # - Use the latest number in the version, to decide its revision
      debug("Packages already built using this version\n%s", "\n".join(packages))

      # Calculate the build_family for the package
      #
      # If the package is a devel package, we need to associate it a devel
      # prefix, either via the -z option or using its checked out branch. This
      # affects its build hash.
      #
      # Moreover we need to define a global "buildFamily" which is used
      # to tag all the packages incurred in the build, this way we can have
      # a latest-<buildFamily> link for all of them and we will not incur in the
      # flip-flopping described in https://github.com/alisw/alibuild/issues/325.
      develPrefix = ""
      possibleDevelPrefix = getattr(args, "develPrefix", develPackageBranch)
      if spec["is_devel_pkg"]:
        develPrefix = possibleDevelPrefix

      if possibleDevelPrefix:
        spec["build_family"] = "%s-%s" % (possibleDevelPrefix, args.defaults)
      else:
        spec["build_family"] = args.defaults
      if spec["package"] == mainPackage:
        mainBuildFamily = spec["build_family"]

      candidate = None
      busyRevisions = set()
      # We can tell that the remote store is read-only if it has an empty or
      # no writeStore property. See below for explanation of why we need this.
      revisionPrefix = "" if getattr(syncHelper, "writeStore", "") else "local"
      for symlink_path in packages:
        realPath = readlink(symlink_path)
        matcher = "../../{arch}/store/[0-9a-f]{{2}}/([0-9a-f]+)/{package}-{version}-((?:local)?[0-9]+).{arch}.tar.gz$" \
          .format(arch=args.architecture, **spec)
        match = re.match(matcher, realPath)
        if not match:
          warning("Symlink %s -> %s couldn't be parsed", symlink_path, realPath)
          continue
        rev_hash, revision = match.groups()

        if not (("local" in revision and rev_hash in spec["local_hashes"]) or
                ("local" not in revision and rev_hash in spec["remote_hashes"])):
          # This tarball's hash doesn't match what we need. Remember that its
          # revision number is taken, in case we assign our own later.
          if revision.startswith(revisionPrefix) and revision[len(revisionPrefix):].isdigit():
            # Strip revisionPrefix; the rest is an integer. Convert it to an int
            # so we can get a sensible max() existing revision below.
            busyRevisions.add(int(revision[len(revisionPrefix):]))
          continue

        # Don't re-use local revisions when we have a read-write store, so that
        # packages we'll upload later don't depend on local revisions.
        if getattr(syncHelper, "writeStore", False) and "local" in revision:
          debug("Skipping revision %s because we want to upload later", revision)
          continue

        # If we have a hash match, we use the old revision for the package
        # and we do not need to build it. Because we prefer reusing remote
        # revisions, only store a local revision if there is no other candidate
        # for reuse yet.
        candidate = better_tarball(spec, candidate, (revision, rev_hash, symlink_path))

      try:
        revision, rev_hash, symlink_path = candidate
      except TypeError:  # raised if candidate is still None
        # If we can't reuse an existing revision, assign the next free revision
        # to this package. If we're not uploading it, name it localN to avoid
        # interference with the remote store -- in case this package is built
        # somewhere else, the next revision N might be assigned there, and would
        # conflict with our revision N.
        # The code finding busyRevisions above already ensures that revision
        # numbers start with revisionPrefix, and has left us plain ints.
        spec["revision"] = revisionPrefix + str(
          min(set(range(1, max(busyRevisions) + 2)) - busyRevisions)
          if busyRevisions else 1)
      else:
        spec["revision"] = revision
        # Remember what hash we're actually using.
        spec["local_revision_hash" if revision.startswith("local")
             else "remote_revision_hash"] = rev_hash
        if spec["is_devel_pkg"] and "incremental_recipe" in spec:
          spec["obsolete_tarball"] = symlink_path
        else:
          debug("Package %s with hash %s is already found in %s. Not building.",
                p, rev_hash, symlink_path)
          # Ignore errors here, because the path we're linking to might not
          # exist (if this is the first run through the loop). On the second run
          # through, the path should have been created by the build process.
          call_ignoring_oserrors(symlink, "{version}-{revision}".format(**spec),
                                 "{wd}/{arch}/{package}/latest-{build_family}".format(wd=workDir, arch=args.architecture, **spec))
          call_ignoring_oserrors(symlink, "{version}-{revision}".format(**spec),
                                 "{wd}/{arch}/{package}/latest".format(wd=workDir, arch=args.architecture, **spec))

      # Now we know whether we're using a local or remote package, so we can set
      # the proper hash and tarball directory.
      if spec["revision"].startswith("local"):
        spec["hash"] = spec["local_revision_hash"]
      else:
        spec["hash"] = spec["remote_revision_hash"]

      # We do not use the override for devel packages, because we
      # want to avoid having to rebuild things when the /tmp gets cleaned.
      if spec["is_devel_pkg"]:
        buildWorkDir = args.workDir
      else:
        buildWorkDir = os.environ.get("ALIBUILD_BUILD_WORK_DIR", args.workDir)

      buildRoot = join(buildWorkDir, "BUILD", spec["hash"])

      spec["old_devel_hash"] = readHashFile(join(
        buildRoot, spec["package"], ".build_succeeded"))

      # Recreate symlinks to this development package builds.
      if spec["is_devel_pkg"]:
        debug("Creating symlinks to builds of devel package %s", spec["package"])
        # Ignore errors here, because the path we're linking to might not exist
        # (if this is the first run through the loop). On the second run
        # through, the path should have been created by the build process.
        call_ignoring_oserrors(symlink, spec["hash"], join(buildWorkDir, "BUILD", spec["package"] + "-latest"))
        if develPrefix:
          call_ignoring_oserrors(symlink, spec["hash"], join(buildWorkDir, "BUILD", spec["package"] + "-latest-" + develPrefix))
        # Last package built gets a "latest" mark.
        call_ignoring_oserrors(symlink, "{version}-{revision}".format(**spec),
                               join(workDir, args.architecture, spec["package"], "latest"))
        # Latest package built for a given devel prefix gets a "latest-<family>" mark.
        if spec["build_family"]:
          call_ignoring_oserrors(symlink, "{version}-{revision}".format(**spec),
                                 join(workDir, args.architecture, spec["package"], "latest-" + spec["build_family"]))

      # Check if this development package needs to be rebuilt.
      if spec["is_devel_pkg"]:
        debug("Checking if devel package %s needs rebuild", spec["package"])
        if spec["devel_hash"]+spec["deps_hash"] == spec["old_devel_hash"]:
          info("Development package %s does not need rebuild", spec["package"])
          buildOrder.pop(0)
          continue

      # Now that we have all the information about the package we want to build, let's
      # check if it wasn't built / unpacked already.
hashPath= "%s/%s/%s/%s-%s" % (workDir,
|
|
938
|
+
args.architecture,
|
|
939
|
+
spec["package"],
|
|
940
|
+
spec["version"],
|
|
941
|
+
spec["revision"])
|
|
942
|
+
hashFile = hashPath + "/.build-hash"
|
|
943
|
+
# If the folder is a symlink, we consider it to be to CVMFS and
|
|
944
|
+
# take the hash for good.
|
|
945
|
+
if os.path.islink(hashPath):
|
|
946
|
+
fileHash = spec["hash"]
|
|
947
|
+
else:
|
|
948
|
+
fileHash = readHashFile(hashFile)
|
|
949
|
+
# Development packages have their own rebuild-detection logic above.
|
|
950
|
+
# spec["hash"] is only useful here for regular packages.
|
|
951
|
+
if fileHash == spec["hash"] and not spec["is_devel_pkg"]:
|
|
952
|
+
# If we get here, we know we are in sync with whatever remote store. We
|
|
953
|
+
# can therefore create a directory which contains all the packages which
|
|
954
|
+
# were used to compile this one.
|
|
955
|
+
debug("Package %s was correctly compiled. Moving to next one.", spec["package"])
|
|
956
|
+
# If using incremental builds, next time we execute the script we need to remove
|
|
957
|
+
# the placeholders which avoid rebuilds.
|
|
958
|
+
if spec["is_devel_pkg"] and "incremental_recipe" in spec:
|
|
959
|
+
unlink(hashFile)
|
|
960
|
+
if "obsolete_tarball" in spec:
|
|
961
|
+
unlink(realpath(spec["obsolete_tarball"]))
|
|
962
|
+
unlink(spec["obsolete_tarball"])
|
|
963
|
+
buildOrder.pop(0)
|
|
964
|
+
# We can now delete the INSTALLROOT and BUILD directories,
|
|
965
|
+
# assuming the package is not a development one. We also can
|
|
966
|
+
# delete the SOURCES in case we have aggressive-cleanup enabled.
|
|
967
|
+
if not spec["is_devel_pkg"] and args.autoCleanup:
|
|
968
|
+
cleanupDirs = [buildRoot,
|
|
969
|
+
join(workDir, "INSTALLROOT", spec["hash"])]
|
|
970
|
+
if args.aggressiveCleanup:
|
|
971
|
+
cleanupDirs.append(join(workDir, "SOURCES", spec["package"]))
|
|
972
|
+
debug("Cleaning up:\n%s", "\n".join(cleanupDirs))
|
|
973
|
+
|
|
974
|
+
for d in cleanupDirs:
|
|
975
|
+
shutil.rmtree(d.encode("utf8"), True)
|
|
976
|
+
try:
|
|
977
|
+
unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest"))
|
|
978
|
+
if "develPrefix" in args:
|
|
979
|
+
unlink(join(buildWorkDir, "BUILD", spec["package"] + "-latest-" + args.develPrefix))
|
|
980
|
+
except:
|
|
981
|
+
pass
|
|
982
|
+
try:
|
|
983
|
+
rmdir(join(buildWorkDir, "BUILD"))
|
|
984
|
+
rmdir(join(workDir, "INSTALLROOT"))
|
|
985
|
+
except:
|
|
986
|
+
pass
|
|
987
|
+
continue
|
|
988
|
+
|
|
989
|
+
if fileHash != "0":
|
|
990
|
+
debug("Mismatch between local area (%s) and the one which I should build (%s). Redoing.",
|
|
991
|
+
fileHash, spec["hash"])
|
|
992
|
+
# shutil.rmtree under Python 2 fails when hashFile is unicode and the
|
|
993
|
+
# directory contains files with non-ASCII names, e.g. Golang/Boost.
|
|
994
|
+
shutil.rmtree(dirname(hashFile).encode("utf-8"), True)
|
|
995
|
+
|
|
996
|
+
tar_hash_dir = os.path.join(workDir, resolve_store_path(args.architecture, spec["hash"]))
|
|
997
|
+
debug("Looking for cached tarball in %s", tar_hash_dir)
|
|
998
|
+
spec["cachedTarball"] = ""
|
|
999
|
+
if not spec["is_devel_pkg"]:
|
|
1000
|
+
syncHelper.fetch_tarball(spec)
|
|
1001
|
+
tarballs = glob(os.path.join(tar_hash_dir, "*gz"))
|
|
1002
|
+
spec["cachedTarball"] = tarballs[0] if len(tarballs) else ""
|
|
1003
|
+
debug("Found tarball in %s" % spec["cachedTarball"]
|
|
1004
|
+
if spec["cachedTarball"] else "No cache tarballs found")
|
|
1005
|
+
|
|
1006
|
+
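For non-development packages the loop then asks the sync helper to fetch a prebuilt tarball and reuses the first *gz file found in the per-hash store directory returned by resolve_store_path(). A minimal sketch of that lookup follows; the TARS/<architecture>/store/<first two hash characters>/<hash> layout is an assumption for illustration, and resolve_store_path() remains the authoritative mapping.

    import glob
    import os

    def find_cached_tarball(work_dir, architecture, pkg_hash):
        """Sketch: return a previously built tarball for this hash, if any."""
        # Assumed layout, for illustration only; the real path comes from resolve_store_path().
        store_path = os.path.join("TARS", architecture, "store", pkg_hash[:2], pkg_hash)
        tarballs = glob.glob(os.path.join(work_dir, store_path, "*gz"))
        return tarballs[0] if tarballs else ""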
1006 +    # The actual build script.
1007 +    debug("spec = %r", spec)
1008 +
1009 +    cmd_raw = ""
1010 +    try:
1011 +      fp = open(dirname(realpath(__file__)) + '/build_template.sh', 'r')
1012 +      cmd_raw = fp.read()
1013 +      fp.close()
1014 +    except:
1015 +      from pkg_resources import resource_string
1016 +      cmd_raw = resource_string("alibuild_helpers", 'build_template.sh')
1017 +
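build_template.sh is loaded from the file sitting next to this module and, failing that, through pkg_resources so that installs without a plain filesystem layout still work; note that resource_string() returns bytes on Python 3, so a decode would be needed before the %-formatting further down. As a hedged sketch, the equivalent lookup with the modern importlib.resources API (not what this module ships) would be:

    from importlib.resources import files

    def load_build_template():
        """Read build_template.sh bundled inside the alibuild_helpers package."""
        return files("alibuild_helpers").joinpath("build_template.sh").read_text()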
1018 +    if args.docker:
1019 +      cachedTarball = re.sub("^" + workDir, "/sw", spec["cachedTarball"])
1020 +    else:
1021 +      cachedTarball = spec["cachedTarball"]
1022 +
1023 +    if not cachedTarball:
1024 +      checkout_sources(spec, workDir, args.referenceSources, args.docker)
1025 +
1026 +    scriptDir = join(workDir, "SPECS", args.architecture, spec["package"],
1027 +                     spec["version"] + "-" + spec["revision"])
1028 +
1029 +    makedirs(scriptDir, exist_ok=True)
1030 +    writeAll("%s/%s.sh" % (scriptDir, spec["package"]), spec["recipe"])
1031 +    writeAll("%s/build.sh" % scriptDir, cmd_raw % {
1032 +      "provenance": create_provenance_info(spec["package"], specs, args),
1033 +      "initdotsh_deps": generate_initdotsh(p, specs, args.architecture, post_build=False),
1034 +      "initdotsh_full": generate_initdotsh(p, specs, args.architecture, post_build=True),
1035 +      "develPrefix": develPrefix,
1036 +      "workDir": workDir,
1037 +      "configDir": abspath(args.configDir),
1038 +      "incremental_recipe": spec.get("incremental_recipe", ":"),
1039 +      "requires": " ".join(spec["requires"]),
1040 +      "build_requires": " ".join(spec["build_requires"]),
1041 +      "runtime_requires": " ".join(spec["runtime_requires"]),
1042 +    })
1043 +
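The per-package build.sh is produced by old-style %-formatting of the template against the dictionary above, so every %(name)s placeholder in build_template.sh must have a matching key, and literal percent signs in the template have to be escaped as %%. A tiny self-contained example of the mechanism (the template text here is made up, not the real build_template.sh):

    # Hypothetical two-line template; the real build_template.sh is far larger.
    template = "#!/bin/bash -ex\nexport CONFIG_DIR=%(configDir)s   # literal %% must be doubled\n"
    rendered = template % {"configDir": "/alidist"}
    print(rendered)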
1044 +    # Define the environment so that it can be passed up to the
1045 +    # actual build script
1046 +    buildEnvironment = [
1047 +      ("ARCHITECTURE", args.architecture),
1048 +      ("BUILD_REQUIRES", " ".join(spec["build_requires"])),
1049 +      ("CACHED_TARBALL", cachedTarball),
1050 +      ("CAN_DELETE", args.aggressiveCleanup and "1" or ""),
1051 +      ("COMMIT_HASH", short_commit_hash(spec)),
1052 +      ("DEPS_HASH", spec.get("deps_hash", "")),
1053 +      ("DEVEL_HASH", spec.get("devel_hash", "")),
1054 +      ("DEVEL_PREFIX", develPrefix),
1055 +      ("BUILD_FAMILY", spec["build_family"]),
1056 +      ("GIT_COMMITTER_NAME", "unknown"),
1057 +      ("GIT_COMMITTER_EMAIL", "unknown"),
1058 +      ("INCREMENTAL_BUILD_HASH", spec.get("incremental_hash", "0")),
1059 +      ("JOBS", str(args.jobs)),
1060 +      ("PKGHASH", spec["hash"]),
1061 +      ("PKGNAME", spec["package"]),
1062 +      ("PKGREVISION", spec["revision"]),
1063 +      ("PKGVERSION", spec["version"]),
1064 +      ("RELOCATE_PATHS", " ".join(spec.get("relocate_paths", []))),
1065 +      ("REQUIRES", " ".join(spec["requires"])),
1066 +      ("RUNTIME_REQUIRES", " ".join(spec["runtime_requires"])),
1067 +      ("FULL_RUNTIME_REQUIRES", " ".join(spec["full_runtime_requires"])),
1068 +      ("FULL_BUILD_REQUIRES", " ".join(spec["full_build_requires"])),
1069 +      ("FULL_REQUIRES", " ".join(spec["full_requires"])),
1070 +      ("ALIBUILD_PREFER_SYSTEM_KEY", spec.get("key", "")),
1071 +    ]
1072 +    # Add the extra environment as passed from the command line.
1073 +    buildEnvironment += [e.partition('=')[::2] for e in args.environment]
1074 +
1075 +    # Add the computed track_env environment
1076 +    buildEnvironment += [(key, value) for key, value in spec.get("track_env", {}).items()]
1077 +
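The extra KEY=VALUE entries from the command line (args.environment) are split with str.partition('='), which cuts only at the first equals sign; the [::2] slice keeps elements 0 and 2 of the (head, separator, tail) triple, i.e. the key and the value, so values that themselves contain '=' survive intact. A quick illustration with made-up values:

    extra = ["CMAKE_ARGS=-DFOO=1", "VERBOSE=1"]          # illustrative input
    pairs = [e.partition("=")[::2] for e in extra]
    print(pairs)                                         # [('CMAKE_ARGS', '-DFOO=1'), ('VERBOSE', '1')]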
1078 +    # In case the --docker option is passed, we set up a docker container which
1079 +    # will perform the actual build. Otherwise build as usual using bash.
1080 +    if args.docker:
1081 +      build_command = (
1082 +        "docker run --rm --entrypoint= --user $(id -u):$(id -g) "
1083 +        "-v {workdir}:/sw -v{configDir}:/alidist:ro -v {scriptDir}/build.sh:/build.sh:ro "
1084 +        "{mirrorVolume} {develVolumes} {additionalEnv} {additionalVolumes} "
1085 +        "-e WORK_DIR_OVERRIDE=/sw -e ALIBUILD_CONFIG_DIR_OVERRIDE=/alidist {extraArgs} {image} bash -ex /build.sh"
1086 +      ).format(
1087 +        image=quote(args.dockerImage),
1088 +        workdir=quote(abspath(args.workDir)),
1089 +        configDir=quote(abspath(args.configDir)),
1090 +        scriptDir=quote(scriptDir),
1091 +        extraArgs=" ".join(map(quote, args.docker_extra_args)),
1092 +        additionalEnv=" ".join(
1093 +          "-e {}={}".format(var, quote(value)) for var, value in buildEnvironment),
1094 +        # Used e.g. by O2DPG-sim-tests to find the O2DPG repository.
1095 +        develVolumes=" ".join(
1096 +          '-v "$PWD/$(readlink {pkg} || echo {pkg})":/{pkg}:rw'.format(pkg=quote(spec["package"]))
1097 +          for spec in specs.values() if spec["is_devel_pkg"]),
1098 +        additionalVolumes=" ".join(
1099 +          "-v %s" % quote(volume) for volume in args.volumes),
1100 +        mirrorVolume=("-v %s:/mirror" % quote(dirname(spec["reference"]))
1101 +                      if "reference" in spec else ""),
1102 +      )
1103 +    else:
1104 +      os.environ.update(buildEnvironment)
1105 +      build_command = "%s -e -x %s/build.sh 2>&1" % (BASH, quote(scriptDir))
1106 +
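When --docker is used, the work directory, the recipes, the generated build.sh and the whole build environment are wired into a single docker run invocation, and every interpolated value goes through quote() first so that paths with spaces or shell metacharacters cannot break the command line. A small illustration of why that quoting matters (the image name and path below are invented):

    from shlex import quote

    work_dir = "/home/user/ali build"    # hypothetical path containing a space
    print("docker run --rm -v {}:/sw some/builder-image bash -ex /build.sh".format(quote(work_dir)))
    # -> docker run --rm -v '/home/user/ali build':/sw some/builder-image bash -ex /build.sh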
debug("Build command: %s", build_command)
|
|
1108
|
+
progress = ProgressPrint(
|
|
1109
|
+
("Unpacking %s@%s" if cachedTarball else
|
|
1110
|
+
"Compiling %s@%s (use --debug for full output)") %
|
|
1111
|
+
(spec["package"],
|
|
1112
|
+
args.develPrefix if "develPrefix" in args and spec["is_devel_pkg"] else spec["version"])
|
|
1113
|
+
)
|
|
1114
|
+
err = execute(build_command, printer=progress)
|
|
1115
|
+
progress.end("failed" if err else "done", err)
|
|
1116
|
+
report_event("BuildError" if err else "BuildSuccess", spec["package"], " ".join((
|
|
1117
|
+
args.architecture,
|
|
1118
|
+
spec["version"],
|
|
1119
|
+
spec["commit_hash"],
|
|
1120
|
+
os.environ["ALIBUILD_ALIDIST_HASH"][:10],
|
|
1121
|
+
)))
|
|
1122
|
+
|
|
1123
|
+
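execute() streams the build output into the ProgressPrint object, which is what lets aliBuild show a compact status line ("Compiling X@Y") instead of the full log unless --debug is given. The sketch below only illustrates the printer-callback idea with subprocess; it is not the execute()/ProgressPrint implementation from alibuild_helpers.

    import subprocess

    def run_streaming(command, printer):
        """Run a shell command, hand each output line to `printer`, and return its exit code."""
        proc = subprocess.Popen(command, shell=True, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT, text=True)
        for line in proc.stdout:
            printer(line.rstrip("\n"))
        return proc.wait()

    err = run_streaming("echo compiling && echo done", print)   # err == 0 on success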
1123 +    updatablePkgs = [dep for dep in spec["requires"] if specs[dep]["is_devel_pkg"]]
1124 +    if spec["is_devel_pkg"]:
1125 +      updatablePkgs.append(spec["package"])
1126 +
1127 +    buildErrMsg = dedent("""\
1128 +    Error while executing {sd}/build.sh on `{h}'.
1129 +    Log can be found in {w}/BUILD/{p}-latest{devSuffix}/log
1130 +    Please upload it to CERNBox/Dropbox if you intend to request support.
1131 +    Build directory is {w}/BUILD/{p}-latest{devSuffix}/{p}.
1132 +    """).format(
1133 +      h=socket.gethostname(),
1134 +      sd=scriptDir,
1135 +      w=buildWorkDir,
1136 +      p=spec["package"],
1137 +      devSuffix="-" + args.develPrefix
1138 +                if "develPrefix" in args and spec["is_devel_pkg"]
1139 +                else "",
1140 +    )
1141 +    if updatablePkgs:
1142 +      buildErrMsg += dedent("""
1143 +      Note that you have packages in development mode.
1144 +      Devel sources are not updated automatically, you must do it by hand.\n
1145 +      This problem might be due to one or more outdated devel sources.
1146 +      To update all development packages required for this build it is usually sufficient to do:
1147 +      """)
1148 +      buildErrMsg += "".join("\n ( cd %s && git pull --rebase )" % dp for dp in updatablePkgs)
1149 +
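The error message relies on two textwrap.dedent details: the backslash right after the opening triple quote suppresses the leading newline, and dedent() strips the indentation shared by all lines, so the final text starts flush-left with "Error while executing ...". A minimal standalone example (hostname and package name are invented):

    from textwrap import dedent

    msg = dedent("""\
        Error while executing build.sh on `somehost'.
        Log can be found in BUILD/example-latest/log
        """)
    print(msg)   # no leading blank line, no leading indentation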
1150 +    # Gather build info for the error message
1151 +    try:
1152 +      safe_args = {
1153 +        "pkgname", "defaults", "architecture", "forceUnknownArch",
1154 +        "develPrefix", "jobs", "noSystem", "noDevel", "forceTracked", "plugin",
1155 +        "disable", "annotate", "onlyDeps", "docker"
1156 +      }
1157 +      args_str = " ".join(f"--{k}={v}" for k, v in vars(args).items() if v and k in safe_args)
1158 +      detected_arch = detectArch()
1159 +      buildErrMsg += dedent(f"""
1160 +      Build info:
1161 +      OS: {detected_arch}
1162 +      Using aliBuild from alibuild@{__version__ or "unknown"} recipes in alidist@{os.environ["ALIBUILD_ALIDIST_HASH"][:10]}
1163 +      Build arguments: {args_str}
1164 +      """)
1165 +
1166 +      if detected_arch.startswith("osx"):
1167 +        buildErrMsg += f'XCode version: {getstatusoutput("xcodebuild -version")[1]}'
1168 +
1169 +    except Exception as exc:
1170 +      warning("Failed to gather build info", exc_info=exc)
1171 +
1172 +
1173 +    dieOnError(err, buildErrMsg.strip())
1174 +
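Only the whitelisted argparse attributes in safe_args are echoed back into the report, each rendered as --key=value with falsy values dropped, so the failure message shows a reproducible-looking invocation without leaking local paths. For instance, with a hypothetical namespace and a trimmed-down whitelist:

    from argparse import Namespace

    safe_args = {"pkgname", "defaults", "jobs", "docker"}          # illustrative subset
    args = Namespace(pkgname="O2", defaults="o2", jobs=8, docker=False, workDir="/sw")
    args_str = " ".join(f"--{k}={v}" for k, v in vars(args).items() if v and k in safe_args)
    print(args_str)   # --pkgname=O2 --defaults=o2 --jobs=8  (docker is falsy, workDir not whitelisted)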
1175 +    # We need to create 2 sets of links, once with the full requires,
1176 +    # once with only direct dependencies, since that's required to
1177 +    # register packages in Alien.
1178 +    createDistLinks(spec, specs, args, syncHelper, "dist", "full_requires")
1179 +    createDistLinks(spec, specs, args, syncHelper, "dist-direct", "requires")
1180 +    createDistLinks(spec, specs, args, syncHelper, "dist-runtime", "full_runtime_requires")
1181 +
1182 +    # Make sure not to upload local-only packages! These might have been
1183 +    # produced in a previous run with a read-only remote store.
1184 +    if not spec["revision"].startswith("local"):
1185 +      syncHelper.upload_symlinks_and_tarball(spec)
1186 +
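createDistLinks() is invoked three times to publish three views of the same build: one over the full dependency closure, one over direct dependencies only (the form needed to register packages in Alien, per the comment above), and one over the runtime closure. Purely as an illustration of the idea, the directory layout and helper below are invented and are not alibuild's actual implementation, which presumably also hands the result to the sync helper given the syncHelper argument:

    import os

    def make_dist_links(dist_dir, spec, specs, dep_key):
        """Symlink the tarball of the package and of every entry in spec[dep_key] into dist_dir."""
        os.makedirs(dist_dir, exist_ok=True)
        for dep in [spec["package"]] + list(spec[dep_key]):
            tarball = specs[dep].get("cachedTarball", "")
            link = os.path.join(dist_dir, os.path.basename(tarball)) if tarball else ""
            if tarball and not os.path.islink(link):
                os.symlink(tarball, link)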
1187 +  if not args.onlyDeps:
1188 +    banner("Build of %s successfully completed on `%s'.\n"
1189 +           "Your software installation is at:"
1190 +           "\n\n %s\n\n"
1191 +           "You can use this package by loading the environment:"
1192 +           "\n\n alienv enter %s/latest-%s",
1193 +           mainPackage, socket.gethostname(),
1194 +           abspath(join(args.workDir, args.architecture)),
1195 +           mainPackage, mainBuildFamily)
1196 +  else:
1197 +    banner("Successfully built dependencies for package %s on `%s'.\n",
1198 +           mainPackage, socket.gethostname()
1199 +           )
1200 +  for spec in specs.values():
1201 +    if spec["is_devel_pkg"]:
1202 +      banner("Build directory for devel package %s:\n%s/BUILD/%s-latest%s/%s",
1203 +             spec["package"], abspath(buildWorkDir), spec["package"],
1204 +             ("-" + args.develPrefix) if "develPrefix" in args else "",
1205 +             spec["package"])
1206 +  if untrackedFilesDirectories:
1207 +    banner("Untracked files in the following directories resulted in a rebuild of "
1208 +           "the associated package and its dependencies:\n%s\n\nPlease commit or remove them to avoid useless rebuilds.", "\n".join(untrackedFilesDirectories))
1209 +  debug("Everything done")