alibuild 1.17.19__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (74)
  1. alibuild-1.17.19.data/scripts/aliBuild +137 -0
  2. alibuild-1.17.19.data/scripts/aliDeps +7 -0
  3. alibuild-1.17.19.data/scripts/aliDoctor +7 -0
  4. alibuild-1.17.19.data/scripts/alienv +344 -0
  5. alibuild-1.17.19.data/scripts/pb +7 -0
  6. alibuild-1.17.19.dist-info/METADATA +78 -0
  7. alibuild-1.17.19.dist-info/RECORD +74 -0
  8. alibuild-1.17.19.dist-info/WHEEL +5 -0
  9. alibuild-1.17.19.dist-info/licenses/LICENSE.md +674 -0
  10. alibuild-1.17.19.dist-info/top_level.txt +5 -0
  11. alibuild_helpers/__init__.py +21 -0
  12. alibuild_helpers/_version.py +21 -0
  13. alibuild_helpers/analytics.py +120 -0
  14. alibuild_helpers/args.py +493 -0
  15. alibuild_helpers/build.py +1209 -0
  16. alibuild_helpers/build_template.sh +314 -0
  17. alibuild_helpers/clean.py +83 -0
  18. alibuild_helpers/cmd.py +154 -0
  19. alibuild_helpers/deps.py +116 -0
  20. alibuild_helpers/doctor.py +195 -0
  21. alibuild_helpers/git.py +104 -0
  22. alibuild_helpers/init.py +103 -0
  23. alibuild_helpers/log.py +132 -0
  24. alibuild_helpers/scm.py +31 -0
  25. alibuild_helpers/sl.py +62 -0
  26. alibuild_helpers/sync.py +693 -0
  27. alibuild_helpers/templating_plugin.py +18 -0
  28. alibuild_helpers/utilities.py +662 -0
  29. alibuild_helpers/workarea.py +179 -0
  30. debian/changelog +11 -0
  31. debian/compat +1 -0
  32. debian/control +14 -0
  33. debian/copyright +10 -0
  34. debian/files +1 -0
  35. debian/rules +7 -0
  36. docs/README.md +1 -0
  37. docs/SUPPORT +3 -0
  38. docs/docs/alice_logo.png +0 -0
  39. docs/docs/deps.png +0 -0
  40. docs/docs/index.md +75 -0
  41. docs/docs/quick.md +89 -0
  42. docs/docs/reference.md +430 -0
  43. docs/docs/stylesheets/extra.css +9 -0
  44. docs/docs/troubleshooting.md +346 -0
  45. docs/docs/user.md +413 -0
  46. docs/mkdocs.yml +37 -0
  47. templates/alibuild_to_please.jnj +63 -0
  48. tests/test_analytics.py +42 -0
  49. tests/test_args.py +119 -0
  50. tests/test_build.py +426 -0
  51. tests/test_clean.py +154 -0
  52. tests/test_cmd.py +73 -0
  53. tests/test_deps.py +79 -0
  54. tests/test_doctor.py +128 -0
  55. tests/test_git.py +48 -0
  56. tests/test_hashing.py +67 -0
  57. tests/test_init.py +103 -0
  58. tests/test_log.py +50 -0
  59. tests/test_packagelist.py +235 -0
  60. tests/test_parseRecipe.py +132 -0
  61. tests/test_sync.py +332 -0
  62. tests/test_utilities.py +383 -0
  63. tests/test_workarea.py +101 -0
  64. tests/testdist/broken1.sh +1 -0
  65. tests/testdist/broken2.sh +1 -0
  66. tests/testdist/broken3.sh +3 -0
  67. tests/testdist/broken4.sh +2 -0
  68. tests/testdist/broken5.sh +2 -0
  69. tests/testdist/broken6.sh +2 -0
  70. tests/testdist/broken7.sh +5 -0
  71. tests/testdist/clobber-initdotsh.sh +4 -0
  72. tests/testdist/defaults-o2.sh +10 -0
  73. tests/testdist/delete-etc.sh +4 -0
  74. tests/testdist/tracking-env.sh +6 -0
@@ -0,0 +1,18 @@
+ """A text templating plugin for aliBuild.
+
+ This plugin allows reusing specs like those that would be used during the
+ build, for instance to get the "real" version numbers for various packages.
+
+ We use Jinja2 as the templating language, read the user-provided template from
+ stdin and print the rendered output to stdout.
+ """
+
+ import sys
+ from jinja2.sandbox import SandboxedEnvironment
+
+
+ def build_plugin(specs, args, build_order) -> None:
+     """Read a user-provided template from stdin and render it."""
+     print(SandboxedEnvironment(autoescape=False)
+           .from_string(sys.stdin.read())
+           .render(specs=specs, args=args, build_order=build_order))
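For context, a minimal sketch of what build_plugin does with its inputs; the spec contents below are invented for illustration:

import io
import sys

from alibuild_helpers.templating_plugin import build_plugin

# Hypothetical specs, shaped like what aliBuild passes to the plugin.
specs = {"ROOT": {"package": "ROOT", "version": "v6-32-08"}}
sys.stdin = io.StringIO("ROOT is at {{ specs.ROOT.version }}")
build_plugin(specs, args=None, build_order=["ROOT"])
# Prints: ROOT is at v6-32-08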
@@ -0,0 +1,662 @@
+ #!/usr/bin/env python3
+ import yaml
+ from os.path import exists
+ import hashlib
+ from glob import glob
+ from os.path import basename, join, isdir, islink
+ import sys
+ import os
+ import re
+ import platform
+
+ from datetime import datetime
+ from collections import OrderedDict
+ from shlex import quote
+ from typing import Optional
+
+ from alibuild_helpers.cmd import getoutput
+ from alibuild_helpers.git import git
+ from alibuild_helpers.log import warning, dieOnError
+
+
+ class SpecError(Exception):
+     pass
+
+
+ def call_ignoring_oserrors(function, *args, **kwargs):
+     try:
+         return function(*args, **kwargs)
+     except OSError:
+         return None
+
+
+ def symlink(link_target, link_name):
+     """Match the behaviour of `ln -nsf LINK_TARGET LINK_NAME`, without having to fork.
+
+     Create a new symlink named LINK_NAME pointing to LINK_TARGET. If LINK_NAME
+     is a directory, create a symlink named basename(LINK_TARGET) inside it.
+     """
+     # If link_name is a symlink pointing to a directory, isdir() will return True.
+     if isdir(link_name) and not islink(link_name):
+         link_name = join(link_name, basename(link_target))
+     call_ignoring_oserrors(os.unlink, link_name)
+     os.symlink(link_target, link_name)
+
+
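A quick sketch of the `ln -nsf`-like semantics above (paths invented for illustration):

import os
import tempfile

from alibuild_helpers.utilities import symlink

d = tempfile.mkdtemp()
os.mkdir(os.path.join(d, "links"))
# "links" is a real directory, so the link is created inside it, named
# after the target's basename: links/v1 -> <d>/v1.
symlink(os.path.join(d, "v1"), os.path.join(d, "links"))
print(os.readlink(os.path.join(d, "links", "v1")))
# Calling again with the same link name silently replaces it, like ln -nsf.
symlink(os.path.join(d, "v2"), os.path.join(d, "links", "v1"))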
+ asList = lambda x : x if type(x) == list else [x]
+
+
+ def topological_sort(specs):
+     """Topologically sort specs so that dependencies come before the packages that depend on them.
+
+     This function returns a generator, yielding package names in order.
+
+     The algorithm used here was adapted from:
+     http://www.stoimen.com/blog/2012/10/01/computer-algorithms-topological-sort-of-a-graph/
+     """
+     edges = [(spec["package"], dep) for spec in specs.values() for dep in spec["requires"]]
+     leaves = [spec["package"] for spec in specs.values() if not spec["requires"]]
+     while leaves:
+         current_package = leaves.pop(0)
+         yield current_package
+         # Find every package that depends on the current one.
+         new_leaves = {pkg for pkg, dep in edges if dep == current_package}
+         # Stop blocking packages that depend on the current one...
+         edges = [(pkg, dep) for pkg, dep in edges if dep != current_package]
+         # ...but keep blocking those that still depend on other stuff!
+         leaves.extend(new_leaves - {pkg for pkg, _ in edges})
+     # If we have any edges left, we have a cycle
+     if edges:
+         # Find a cycle by following dependencies
+         cycle = []
+         start = edges[0][0]  # Start with any remaining package
+         current = start
+         max_iter = 10000  # Prevent infinite loops
+         while max_iter > 0:
+             max_iter -= 1
+             cycle.append(current)
+             # Find what current depends on
+             for pkg, dep in edges:
+                 if pkg == current:
+                     current = dep
+                     break
+             if current in cycle:  # We found a cycle
+                 cycle = cycle[cycle.index(current):]  # Trim to just the cycle
+                 dieOnError(True, "Dependency cycle detected: " + " -> ".join(cycle + [cycle[0]]))
+             if current == start:  # We've gone full circle
+                 raise RuntimeError("Internal error: cycle detection failed")
+         assert False, "Unreachable error: cycle detection failed"
+
+
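For illustration, a tiny dependency graph (specs invented) and the order it yields:

from alibuild_helpers.utilities import topological_sort

specs = {
    "zlib": {"package": "zlib", "requires": []},
    "ROOT": {"package": "ROOT", "requires": ["zlib"]},
    "O2":   {"package": "O2",   "requires": ["ROOT", "zlib"]},
}
print(list(topological_sort(specs)))  # ['zlib', 'ROOT', 'O2']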
+ def resolve_store_path(architecture, spec_hash):
+     """Return the path where a tarball with the given hash is to be stored.
+
+     The returned path is relative to the working directory (normally sw/) or the
+     root of the remote store.
+     """
+     return "/".join(("TARS", architecture, "store", spec_hash[:2], spec_hash))
+
+
+ def resolve_links_path(architecture, package):
+     """Return the path where symlinks for the given package are to be stored.
+
+     The returned path is relative to the working directory (normally sw/) or the
+     root of the remote store.
+     """
+     return "/".join(("TARS", architecture, package))
+
+
+ def short_commit_hash(spec):
+     """Shorten the spec's commit hash to make it more human-readable.
+
+     This is complicated by the fact that the commit_hash property is not
+     necessarily a commit hash, but might be a tag name. If it is a tag name,
+     return it as-is, else assume it is actually a commit hash and shorten it.
+     """
+     if spec["tag"] == spec["commit_hash"]:
+         return spec["commit_hash"]
+     return spec["commit_hash"][:10]
+
+
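As a sketch, the layout these two helpers produce (architecture and hash invented):

from alibuild_helpers.utilities import resolve_store_path, resolve_links_path

resolve_store_path("slc9_x86-64", "3aeb50c1d2")  # -> 'TARS/slc9_x86-64/store/3a/3aeb50c1d2'
resolve_links_path("slc9_x86-64", "ROOT")        # -> 'TARS/slc9_x86-64/ROOT'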
+ # Date fields to substitute: they are zero-padded
+ now = datetime.now()
+ nowKwds = { "year": str(now.year),
+             "month": str(now.month).zfill(2),
+             "day": str(now.day).zfill(2),
+             "hour": str(now.hour).zfill(2) }
+
+ def resolve_version(spec, defaults, branch_basename, branch_stream):
+     """Expand the version replacing the following keywords:
+
+     - %(commit_hash)s
+     - %(short_hash)s
+     - %(tag)s
+     - %(branch_basename)s
+     - %(branch_stream)s
+     - %(tag_basename)s
+     - %(defaults_upper)s
+     - %(year)s
+     - %(month)s
+     - %(day)s
+     - %(hour)s
+
+     with the calculated content.
+     """
+     defaults_upper = defaults != "release" and "_" + defaults.upper().replace("-", "_") or ""
+     commit_hash = spec.get("commit_hash", "hash_unknown")
+     tag = str(spec.get("tag", "tag_unknown"))
+     return spec["version"] % {
+         "commit_hash": commit_hash,
+         "short_hash": commit_hash[0:10],
+         "tag": tag,
+         "branch_basename": branch_basename,
+         "branch_stream": branch_stream or tag,
+         "tag_basename": basename(tag),
+         "defaults_upper": defaults_upper,
+         **nowKwds,
+     }
+
+ def resolve_tag(spec):
+     """Expand the tag, replacing the following keywords:
+     - %(year)s
+     - %(month)s
+     - %(day)s
+     - %(hour)s
+     """
+     return spec["tag"] % nowKwds
+
+
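A brief sketch of the keyword expansion (spec values invented):

from alibuild_helpers.utilities import resolve_version

spec = {"version": "%(tag)s_%(short_hash)s%(defaults_upper)s",
        "tag": "v1.2.3",
        "commit_hash": "0123456789abcdef"}
resolve_version(spec, defaults="o2", branch_basename="master", branch_stream="")
# -> 'v1.2.3_0123456789_O2'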
+ def normalise_multiple_options(option, sep=","):
+     return [x for x in ",".join(option).split(sep) if x]
+
+ def prunePaths(workDir):
+     for x in ["PATH", "LD_LIBRARY_PATH", "DYLD_LIBRARY_PATH"]:
+         if x not in os.environ:
+             continue
+         workDirEscaped = re.escape("%s" % workDir) + "[^:]*:?"
+         os.environ[x] = re.sub(workDirEscaped, "", os.environ[x])
+     for x in list(os.environ.keys()):
+         if x.endswith("_VERSION") and x != "ALIBUILD_VERSION":
+             os.environ.pop(x)
+
+ def validateSpec(spec):
+     if not spec:
+         raise SpecError("Empty recipe.")
+     if type(spec) != OrderedDict:
+         raise SpecError("Not a YAML key / value.")
+     if "package" not in spec:
+         raise SpecError("Missing package field in header.")
+
+ # Use this to check if a given spec is compatible with the given default
+ def validateDefaults(finalPkgSpec, defaults):
+     if "valid_defaults" not in finalPkgSpec:
+         return (True, "", [])
+     validDefaults = asList(finalPkgSpec["valid_defaults"])
+     nonStringDefaults = [x for x in validDefaults if not type(x) == str]
+     if nonStringDefaults:
+         return (False, "valid_defaults needs to be a string or a list of strings. Found %s." % nonStringDefaults, [])
+     if defaults in validDefaults:
+         return (True, "", validDefaults)
+     return (False, "Cannot compile %s with `%s' default. Valid defaults are\n%s" %
+                    (finalPkgSpec["package"],
+                     defaults,
+                     "\n".join([" - " + x for x in validDefaults])), validDefaults)
+
+
+ def doDetectArch(hasOsRelease, osReleaseLines, platformTuple, platformSystem, platformProcessor):
+     if platformSystem == "Darwin":
+         processor = platformProcessor
+         if not processor:
+             if platform.machine() == "x86_64":
+                 processor = "x86-64"
+             else:
+                 processor = "arm64"
+         return "osx_%s" % processor.replace("_", "-")
+     distribution, version, flavour = platformTuple
+     distribution = distribution.lower()
+     # If platform.dist does not return something sensible,
+     # let's try with /etc/os-release
+     if distribution not in ["ubuntu", "red hat enterprise linux", "redhat", "centos", "almalinux", "rockylinux"] and hasOsRelease:
+         for x in osReleaseLines:
+             key, is_prop, val = x.partition("=")
+             if not is_prop:
+                 continue
+             val = val.strip("\n \"")
+             if key == "ID":
+                 distribution = val.lower()
+             if key == "VERSION_ID":
+                 version = val
+
+     if distribution == "ubuntu":
+         major, _, minor = version.partition(".")
+         version = major + minor
+     elif distribution == "debian":
+         # http://askubuntu.com/questions/445487/which-ubuntu-version-is-equivalent-to-debian-squeeze
+         debian_ubuntu = {"7": "1204", "8": "1404", "9": "1604", "10": "1804", "11": "2004"}
+         if version in debian_ubuntu:
+             distribution = "ubuntu"
+             version = debian_ubuntu[version]
+     elif distribution in ["redhat", "red hat enterprise linux", "centos", "almalinux", "rockylinux"]:
+         distribution = "slc"
+
+     processor = platformProcessor
+     if not processor:
+         # Sometimes platform.processor returns an empty string
+         processor = getoutput(("uname", "-m")).strip()
+
+     return "{distro}{version}_{machine}".format(
+         distro=distribution, version=version.split(".")[0],
+         machine=processor.replace("_", "-"))
+
+ # Try to guess a good platform. This does not try to cover all possibly
+ # compatible Linux distributions, but tries to get the common, obvious ones
+ # right. If you use an Unknownbuntu which is compatible with Ubuntu 15.10,
+ # you will still have to give an explicit platform string.
+ #
+ # FIXME: we should have a fallback for lsb_release, since platform.dist
+ # is going away.
+ def detectArch():
+     try:
+         with open("/etc/os-release") as osr:
+             osReleaseLines = osr.readlines()
+         hasOsRelease = True
+     except (IOError, OSError):
+         osReleaseLines = []
+         hasOsRelease = False
+     try:
+         if platform.system() == "Darwin":
+             if platform.machine() == "x86_64":
+                 return "osx_x86-64"
+             else:
+                 return "osx_arm64"
+     except:
+         pass
+     try:
+         import distro
+         platformTuple = distro.linux_distribution()
+         platformSystem = platform.system()
+         platformProcessor = platform.processor()
+         if not platformProcessor or " " in platformProcessor:
+             platformProcessor = platform.machine()
+         return doDetectArch(hasOsRelease, osReleaseLines, platformTuple, platformSystem, platformProcessor)
+     except:
+         return doDetectArch(hasOsRelease, osReleaseLines, ["unknown", "", ""], "", "")
+
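For example, feeding doDetectArch synthetic /etc/os-release lines (invented) shows how the architecture string is assembled:

from alibuild_helpers.utilities import doDetectArch

os_release = ['ID=ubuntu\n', 'VERSION_ID="22.04"\n']
doDetectArch(True, os_release, ("", "", ""), "Linux", "x86_64")
# -> 'ubuntu2204_x86-64'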
+ def filterByArchitectureDefaults(arch, defaults, requires):
+     for r in requires:
+         require, matcher = ":" in r and r.split(":", 1) or (r, ".*")
+         if matcher.startswith("defaults="):
+             wanted = matcher[len("defaults="):]
+             if re.match(wanted, defaults):
+                 yield require
+         elif re.match(matcher, arch):
+             yield require
+
+ def disabledByArchitectureDefaults(arch, defaults, requires):
+     for r in requires:
+         require, matcher = ":" in r and r.split(":", 1) or (r, ".*")
+         if matcher.startswith("defaults="):
+             wanted = matcher[len("defaults="):]
+             if not re.match(wanted, defaults):
+                 yield require
+         elif not re.match(matcher, arch):
+             yield require
+
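A sketch of the "name:regex" and "name:defaults=regex" matcher syntax these two generators implement (requirements invented):

from alibuild_helpers.utilities import (filterByArchitectureDefaults,
                                        disabledByArchitectureDefaults)

requires = ["zlib", "osx-deps:osx.*", "DebugTools:defaults=.*debug"]
list(filterByArchitectureDefaults("slc9_x86-64", "o2", requires))
# -> ['zlib']
list(disabledByArchitectureDefaults("slc9_x86-64", "o2", requires))
# -> ['osx-deps', 'DebugTools']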
+ def readDefaults(configDir, defaults, error, architecture):
+     defaultsFilename = resolveDefaultsFilename(defaults, configDir)
+     if not defaultsFilename:
+         error("Default `%s' does not exist. Viable options:\n%s" %
+               (defaults or "<no defaults specified>",
+                "\n".join("- " + basename(x).replace("defaults-", "").replace(".sh", "")
+                          for x in glob(join(configDir, "defaults-*.sh")))))
+     err, defaultsMeta, defaultsBody = parseRecipe(getRecipeReader(defaultsFilename))
+     if err:
+         error(err)
+         sys.exit(1)
+     archDefaults = "%s/defaults-%s.sh" % (configDir, architecture)
+     archMeta = {}
+     archBody = ""
+     if exists(archDefaults):
+         err, archMeta, archBody = parseRecipe(getRecipeReader(archDefaults))
+         if err:
+             error(err)
+             sys.exit(1)
+     for x in ["env", "disable", "overrides"]:
+         defaultsMeta.setdefault(x, {}).update(archMeta.get(x, {}))
+     defaultsBody += "\n# Architecture defaults\n" + archBody
+     return (defaultsMeta, defaultsBody)
+
+
+ def getRecipeReader(url: str, dist=None):
+     m = re.search(r'^dist:(.*)@([^@]+)$', url)
+     if m and dist:
+         return GitReader(url, dist)
+     else:
+         return FileReader(url)
+
+ # Read a recipe from a file
+ class FileReader(object):
+     def __init__(self, url) -> None:
+         self.url = url
+     def __call__(self):
+         return open(self.url).read()
+
+ # Read a recipe from a git repository using git show.
+ class GitReader(object):
+     def __init__(self, url, configDir) -> None:
+         self.url, self.configDir = url, configDir
+     def __call__(self):
+         m = re.search(r'^dist:(.*)@([^@]+)$', self.url)
+         fn, gh = m.groups()
+         err, d = git(("show", "{gh}:{fn}.sh".format(gh=gh, fn=fn.lower())),
+                      directory=self.configDir)
+         if err:
+             raise RuntimeError("Cannot read recipe {fn} from reference {gh}.\n"
+                                "Make sure you run first (this will not alter your recipes):\n"
+                                "  cd {dist} && git remote update -p && git fetch --tags"
+                                .format(dist=self.configDir, gh=gh, fn=fn))
+         return d
+
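The reader chosen depends on the URL shape; a sketch (paths invented):

from alibuild_helpers.utilities import getRecipeReader, FileReader, GitReader

# A plain path yields a FileReader...
isinstance(getRecipeReader("alidist/root.sh"), FileReader)           # True
# ...while "dist:<recipe>@<git ref>" plus a checkout directory yields a
# GitReader, which reads the recipe via `git show` when called.
isinstance(getRecipeReader("dist:root@v1.0", "alidist"), GitReader)  # True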
+ def yamlLoad(s):
+     class YamlSafeOrderedLoader(yaml.SafeLoader):
+         pass
+     def construct_mapping(loader, node):
+         loader.flatten_mapping(node)
+         return OrderedDict(loader.construct_pairs(node))
+     YamlSafeOrderedLoader.add_constructor(yaml.resolver.BaseResolver.DEFAULT_MAPPING_TAG,
+                                           construct_mapping)
+     return yaml.load(s, YamlSafeOrderedLoader)
+
+ def yamlDump(s):
+     class YamlOrderedDumper(yaml.SafeDumper):
+         pass
+     def represent_ordereddict(dumper, data):
+         rep = []
+         for k, v in data.items():
+             k = dumper.represent_data(k)
+             v = dumper.represent_data(v)
+             rep.append((k, v))
+         return yaml.nodes.MappingNode(u'tag:yaml.org,2002:map', rep)
+     YamlOrderedDumper.add_representer(OrderedDict, represent_ordereddict)
+     return yaml.dump(s, Dumper=YamlOrderedDumper)
+
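These wrappers keep mapping order stable across load and dump, which matters because specs feed into hashing; a round-trip sketch:

from alibuild_helpers.utilities import yamlLoad, yamlDump

spec = yamlLoad("package: ROOT\nversion: v6-32-08\nrequires: [zlib]\n")
type(spec).__name__   # 'OrderedDict'
print(yamlDump(spec))  # keys come back in the original document order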
+ def parseRecipe(reader):
+     assert(reader.__call__)
+     err, spec, recipe = (None, None, None)
+     try:
+         d = reader()
+         header, recipe = d.split("---", 1)
+         spec = yamlLoad(header)
+         validateSpec(spec)
+     except RuntimeError as e:
+         err = str(e)
+     except IOError as e:
+         err = str(e)
+     except SpecError as e:
+         err = "Malformed header for %s\n%s" % (reader.url, str(e))
+     except yaml.scanner.ScannerError as e:
+         err = "Unable to parse %s\n%s" % (reader.url, str(e))
+     except yaml.parser.ParserError as e:
+         err = "Unable to parse %s\n%s" % (reader.url, str(e))
+     except ValueError:
+         err = "Unable to parse %s. Header missing." % reader.url
+     return err, spec, recipe
+
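A recipe is a YAML header and a shell body separated by "---"; a minimal sketch with an in-memory reader (hypothetical, standing in for FileReader/GitReader):

from alibuild_helpers.utilities import parseRecipe

class StringReader:
    """Hypothetical reader: any callable with a .url attribute works."""
    url = "<in-memory>"
    def __call__(self):
        return "package: zlib\nversion: v1.2.13\n---\n./configure && make install\n"

err, spec, recipe = parseRecipe(StringReader())
assert err is None
spec["package"], spec["version"]  # ('zlib', 'v1.2.13')
recipe.strip()                    # './configure && make install'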
+ # (Almost pure part of the defaults parsing)
+ # Override defaultsGetter for unit tests.
+ def parseDefaults(disable, defaultsGetter, log):
+     defaultsMeta, defaultsBody = defaultsGetter()
+     # Defaults are actually special packages. They can override metadata
+     # of any other package and they can disable other packages. For
+     # example they could decide to switch from ROOT 5 to ROOT 6 and they
+     # could disable alien for O2. For this reason we need to parse their
+     # metadata early and extract the override and disable data.
+     defaultsDisable = asList(defaultsMeta.get("disable", []))
+     for x in defaultsDisable:
+         log("Package %s has been disabled by current default.", x)
+     disable.extend(defaultsDisable)
+     if type(defaultsMeta.get("overrides", OrderedDict())) != OrderedDict:
+         return ("overrides should be a dictionary", None, None)
+     overrides, taps = OrderedDict(), {}
+     commonEnv = {"env": defaultsMeta["env"]} if "env" in defaultsMeta else {}
+     overrides["defaults-release"] = commonEnv
+     for k, v in defaultsMeta.get("overrides", {}).items():
+         f = k.split("@", 1)[0].lower()
+         if "@" in k:
+             taps[f] = "dist:" + k
+         overrides[f] = dict(**(v or {}))
+     return (None, overrides, taps)
+
+ def checkForFilename(taps: dict, pkg: str, d: str):
+     return taps.get(pkg, join(d, f"{pkg}.sh"))
+
+ def getPkgDirs(configDir):
+     configPath = os.environ.get("BITS_PATH", "").rstrip(":") + ":"
+     pkgDirs = [join(configDir, d) for d in configPath.lstrip(":").split(":")]
+     return pkgDirs
+
+ def resolveFilename(taps: dict, pkg: str, configDir: str):
+     for d in getPkgDirs(configDir):
+         filename = checkForFilename(taps, pkg, d)
+         if os.path.exists(filename):
+             return (filename, os.path.abspath(d))
+     return (None, None)
+
+ def resolveDefaultsFilename(defaults, configDir) -> Optional[str]:
+     for d in getPkgDirs(configDir):
+         filename = join(d, f"defaults-{defaults}.sh")
+         if os.path.exists(filename):
+             return filename
+     return None
+
+ def getPackageList(packages, specs, configDir, preferSystem, noSystem,
+                    architecture, disable, defaults, performPreferCheck, performRequirementCheck,
+                    performValidateDefaults, overrides, taps: dict, log, force_rebuild=()):
+     systemPackages = set()
+     ownPackages = set()
+     failedRequirements = set()
+     testCache = {}
+     requirementsCache = {}
+     trackingEnvCache = {}
+     packages = packages[:]
+     validDefaults = []  # empty list: all OK; None: no valid default; non-empty list: list of valid ones
+     while packages:
+         p = packages.pop(0)
+         if p in specs or (p == "defaults-release" and ("defaults-" + defaults) in specs):
+             continue
+
+         # We rewrite all defaults to "defaults-release", so load the correct
+         # defaults package here.
+         # The reason for this rewriting is (I assume) so that packages that are
+         # not overridden by some defaults can be shared with other defaults, since
+         # they will end up with the same hash. The defaults must be called
+         # "defaults-release" for this to work, since the defaults are a dependency
+         # and all dependencies' names go into a package's hash.
+         pkg_filename = ("defaults-" + defaults) if p == "defaults-release" else p.lower()
+
+         filename, pkgdir = resolveFilename(taps, pkg_filename, configDir)
+
+         dieOnError(not filename, "Package %s not found in %s" % (p, configDir))
+         assert(filename is not None)
+
+         err, spec, recipe = parseRecipe(getRecipeReader(filename, configDir))
+         dieOnError(err, err)
+         # Unless there was an error, both spec and recipe should be valid;
+         # otherwise the error should have been caught above.
+         assert(spec is not None)
+         assert(recipe is not None)
+         dieOnError(spec["package"].lower() != pkg_filename,
+                    "%s.sh has different package field: %s" % (p, spec["package"]))
+         spec["pkgdir"] = pkgdir
+
+         if p == "defaults-release":
+             # Re-rewrite the defaults' name to "defaults-release". Everything auto-
+             # depends on "defaults-release", so we need something with that name.
+             spec["package"] = "defaults-release"
+
+             # Never run the defaults' recipe, to match previous behaviour.
+             # Warn if a non-trivial recipe is found (i.e., one with any non-comment lines).
+             for line in map(str.strip, recipe.splitlines()):
+                 if line and not line.startswith("#"):
+                     warning("%s.sh contains a recipe, which will be ignored", pkg_filename)
+             recipe = ""
+
+         dieOnError(spec["package"] != p,
+                    "%s should be spelt %s." % (p, spec["package"]))
+
+         # If an override fully matches a package, we apply it. This means
+         # you can have multiple overrides being applied for a given package.
+         for override in overrides:
+             # We downcase the regex in parseDefaults(), so downcase the package name
+             # as well. FIXME: This is probably a bad idea; we should use
+             # re.IGNORECASE instead or just match case-sensitively.
+             if not re.fullmatch(override, p.lower()):
+                 continue
+             log("Overrides for package %s: %s", spec["package"], overrides[override])
+             spec.update(overrides.get(override, {}) or {})
+
+         # If --always-prefer-system is passed or if prefer_system is set to true
+         # inside the recipe, use the script specified in the prefer_system_check
+         # stanza to see if we can use the system version of the package.
+         systemRE = spec.get("prefer_system", "(?!.*)")
+         try:
+             systemREMatches = re.match(systemRE, architecture)
+         except TypeError:
+             dieOnError(True, "Malformed entry prefer_system: %s in %s" % (systemRE, spec["package"]))
+
+         noSystemList = []
+         if noSystem == "*":
+             noSystemList = [spec["package"]]
+         elif noSystem is not None:
+             noSystemList = noSystem.split(",")
+         systemExcluded = (spec["package"] in noSystemList)
+         allowSystemPackageUpload = spec.get("allow_system_package_upload", False)
+         # Fill track_env with the actual results of executing each tracking script.
+         for env, trackingCode in spec.get("track_env", {}).items():
+             key = spec["package"] + env
+             if key not in trackingEnvCache:
+                 status, out = performPreferCheck(spec, trackingCode)
+                 dieOnError(status, "Error while executing track_env for {}: {} => {}".format(key, trackingCode, out))
+                 trackingEnvCache[key] = out
+             spec["track_env"][env] = trackingEnvCache[key]
+
+         if (not systemExcluded or allowSystemPackageUpload) and (preferSystem or systemREMatches):
+             requested_version = resolve_version(spec, defaults, "unavailable", "unavailable")
+             cmd = "REQUESTED_VERSION={version}\n{check}".format(
+                 version=quote(requested_version),
+                 check=spec.get("prefer_system_check", "false"),
+             ).strip()
+             if spec["package"] not in testCache:
+                 testCache[spec["package"]] = performPreferCheck(spec, cmd)
+             err, output = testCache[spec["package"]]
+             if err:
+                 # prefer_system_check errored; this means we must build the package ourselves.
+                 ownPackages.add(spec["package"])
+             else:
+                 # prefer_system_check succeeded; this means we should use the system package.
+                 match = re.search(r"^alibuild_system_replace:(?P<key>.*)$", output, re.MULTILINE)
+                 if not match and systemExcluded:
+                     # No replacement spec name given, and the package is excluded
+                     # from system use, so build it ourselves anyway.
+                     ownPackages.add(spec["package"])
+                 elif not match and not systemExcluded:
+                     # No replacement spec name given. Fall back to old system package
+                     # behaviour and just disable the package.
+                     systemPackages.add(spec["package"])
+                     disable.append(spec["package"])
+                 elif match:
+                     # The check printed the name of a replacement; use it.
+                     key = match.group("key").strip()
+                     replacement = None
+                     for replacement_matcher in spec["prefer_system_replacement_specs"]:
+                         if re.match(replacement_matcher, key):
+                             replacement = spec["prefer_system_replacement_specs"][replacement_matcher]
+                             break
+                     if replacement:
+                         # We must keep the package name the same, since it is used to
+                         # specify dependencies.
+                         replacement["package"] = spec["package"]
+                         # The version is required for all specs. What we put there will
+                         # influence the package's hash, so allow the user to override it.
+                         replacement.setdefault("version", requested_version)
+                         spec = replacement
+                         # Allows generalising the version based on the actual key provided.
+                         spec["version"] = spec["version"].replace("%(key)s", key)
+                         # We need the key to inject the version into the replacement recipe later.
+                         spec["key"] = key
+                         recipe = replacement.get("recipe", "")
+                         # If there's an explicitly-specified recipe, we're still building
+                         # the package. If not, aliBuild will still "build" it, but it's
+                         # basically instantaneous, so report to the user that we're taking
+                         # it from the system.
+                         if recipe:
+                             ownPackages.add(spec["package"])
+                         else:
+                             systemPackages.add(spec["package"])
+                     else:
+                         warning(f"Could not find named replacement spec for {spec['package']}: {key}, "
+                                 "falling back to building the package ourselves.")
+
+         dieOnError(("system_requirement" in spec) and recipe.strip("\n\t "),
+                    "System requirements %s cannot have a recipe" % spec["package"])
+         if re.match(spec.get("system_requirement", "(?!.*)"), architecture):
+             cmd = spec.get("system_requirement_check", "false")
+             if spec["package"] not in requirementsCache:
+                 requirementsCache[spec["package"]] = performRequirementCheck(spec, cmd.strip())
+
+             err, output = requirementsCache[spec["package"]]
+             if err:
+                 failedRequirements.update([spec["package"]])
+                 spec["version"] = "failed"
+             else:
+                 disable.append(spec["package"])
+
+         spec["disabled"] = list(disable)
+         if spec["package"] in disable:
+             continue
+
+         # Check whether the package is compatible with the specified defaults.
+         if validDefaults is not None:
+             (ok, msg, valid) = performValidateDefaults(spec)
+             if valid:
+                 validDefaults = [v for v in validDefaults if v in valid] if validDefaults else valid[:]
+                 if not validDefaults:
+                     validDefaults = None  # no valid default works for all current packages
+
+         # For the moment we treat build_requires just as requires.
+         fn = lambda what: disabledByArchitectureDefaults(architecture, defaults, spec.get(what, []))
+         spec["disabled"] += [x for x in fn("requires")]
+         spec["disabled"] += [x for x in fn("build_requires")]
+         fn = lambda what: filterByArchitectureDefaults(architecture, defaults, spec.get(what, []))
+         spec["requires"] = [x for x in fn("requires") if x not in disable]
+         spec["build_requires"] = [x for x in fn("build_requires") if x not in disable]
+         if spec["package"] != "defaults-release":
+             spec["build_requires"].append("defaults-release")
+         spec["runtime_requires"] = spec["requires"]
+         spec["requires"] = spec["runtime_requires"] + spec["build_requires"]
+         # Check that version is a string.
+         dieOnError(not isinstance(spec["version"], str),
+                    "In recipe \"%s\": version must be a string" % p)
+         spec["tag"] = spec.get("tag", spec["version"])
+         spec["version"] = spec["version"].replace("/", "_")
+         spec["recipe"] = recipe.strip("\n")
+         if spec["package"] in force_rebuild:
+             spec["force_rebuild"] = True
+         specs[spec["package"]] = spec
+         packages += spec["requires"]
+     return (systemPackages, ownPackages, failedRequirements, validDefaults)
+
+
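For reference, the contract between a recipe's prefer_system_check and the code above: exit status zero means "use the system package", and the script may name a replacement spec by printing a marker line. A sketch of how that marker is picked out (output invented):

import re

output = "checking for system zlib... ok\nalibuild_system_replace:zlib-from-system\n"
m = re.search(r"^alibuild_system_replace:(?P<key>.*)$", output, re.MULTILINE)
m.group("key")  # 'zlib-from-system'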
+ class Hasher:
+     def __init__(self) -> None:
+         self.h = hashlib.sha1()
+     def __call__(self, txt):
+         if not type(txt) == bytes:
+             txt = txt.encode('utf-8', 'ignore')
+         self.h.update(txt)
+     def hexdigest(self):
+         return self.h.hexdigest()
+     def copy(self):
+         new_hasher = Hasher()
+         new_hasher.h = self.h.copy()
+         return new_hasher
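A short sketch of how the incremental hasher is used; copy() forks the digest so a shared prefix is hashed only once:

from alibuild_helpers.utilities import Hasher

h = Hasher()
h("package: ROOT\n")  # str input is encoded to UTF-8 automatically
fork = h.copy()
h("defaults: release\n")
fork("defaults: o2\n")
h.hexdigest() != fork.hexdigest()  # True: the digests diverge after the fork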