relenv 0.21.2__py3-none-any.whl → 0.22.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. relenv/__init__.py +14 -2
  2. relenv/__main__.py +12 -6
  3. relenv/_resources/xz/config.h +148 -0
  4. relenv/_resources/xz/readme.md +4 -0
  5. relenv/build/__init__.py +28 -30
  6. relenv/build/common/__init__.py +50 -0
  7. relenv/build/common/_sysconfigdata_template.py +72 -0
  8. relenv/build/common/builder.py +907 -0
  9. relenv/build/common/builders.py +163 -0
  10. relenv/build/common/download.py +324 -0
  11. relenv/build/common/install.py +609 -0
  12. relenv/build/common/ui.py +432 -0
  13. relenv/build/darwin.py +128 -14
  14. relenv/build/linux.py +292 -74
  15. relenv/build/windows.py +123 -169
  16. relenv/buildenv.py +48 -17
  17. relenv/check.py +10 -5
  18. relenv/common.py +492 -165
  19. relenv/create.py +147 -7
  20. relenv/fetch.py +16 -4
  21. relenv/manifest.py +15 -7
  22. relenv/python-versions.json +350 -0
  23. relenv/pyversions.py +817 -30
  24. relenv/relocate.py +101 -55
  25. relenv/runtime.py +457 -282
  26. relenv/toolchain.py +9 -3
  27. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/METADATA +1 -1
  28. relenv-0.22.1.dist-info/RECORD +48 -0
  29. tests/__init__.py +2 -0
  30. tests/_pytest_typing.py +45 -0
  31. tests/conftest.py +42 -36
  32. tests/test_build.py +426 -9
  33. tests/test_common.py +373 -48
  34. tests/test_create.py +149 -6
  35. tests/test_downloads.py +19 -15
  36. tests/test_fips_photon.py +6 -3
  37. tests/test_module_imports.py +44 -0
  38. tests/test_pyversions_runtime.py +177 -0
  39. tests/test_relocate.py +45 -39
  40. tests/test_relocate_module.py +257 -0
  41. tests/test_runtime.py +1968 -6
  42. tests/test_verify_build.py +477 -34
  43. relenv/build/common.py +0 -1707
  44. relenv-0.21.2.dist-info/RECORD +0 -35
  45. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/WHEEL +0 -0
  46. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/entry_points.txt +0 -0
  47. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/licenses/LICENSE.md +0 -0
  48. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/licenses/NOTICE +0 -0
  49. {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/top_level.txt +0 -0
relenv/build/common.py DELETED
@@ -1,1707 +0,0 @@
1
- # Copyright 2022-2025 Broadcom.
2
- # SPDX-License-Identifier: Apache-2
3
- """
4
- Build process common methods.
5
- """
6
- import logging
7
- import os.path
8
- import hashlib
9
- import pathlib
10
- import glob
11
- import shutil
12
- import tarfile
13
- import tempfile
14
- import time
15
- import subprocess
16
- import random
17
- import sys
18
- import io
19
- import os
20
- import multiprocessing
21
- import pprint
22
- import re
23
- from html.parser import HTMLParser
24
-
25
-
26
- from relenv.common import (
27
- DATA_DIR,
28
- LINUX,
29
- MODULE_DIR,
30
- RelenvException,
31
- build_arch,
32
- download_url,
33
- extract_archive,
34
- format_shebang,
35
- get_download_location,
36
- get_toolchain,
37
- get_triplet,
38
- runcmd,
39
- work_dirs,
40
- fetch_url,
41
- Version,
42
- )
43
- import relenv.relocate
44
-
45
-
46
- CHECK_VERSIONS_SUPPORT = True
47
- try:
48
- from packaging.version import InvalidVersion, parse
49
- from looseversion import LooseVersion
50
- except ImportError:
51
- CHECK_VERSIONS_SUPPORT = False
52
-
53
- log = logging.getLogger(__name__)
54
-
55
-
56
- GREEN = "\033[0;32m"
57
- YELLOW = "\033[1;33m"
58
- RED = "\033[0;31m"
59
- END = "\033[0m"
60
- MOVEUP = "\033[F"
61
-
62
-
63
- CICD = "CI" in os.environ
64
- NODOWLOAD = False
65
-
66
-
67
- RELENV_PTH = (
68
- "import os; "
69
- "import sys; "
70
- "from importlib import util; "
71
- "from pathlib import Path; "
72
- "spec = util.spec_from_file_location("
73
- "'relenv.runtime', str(Path(__file__).parent / 'site-packages' / 'relenv' / 'runtime.py')"
74
- "); "
75
- "mod = util.module_from_spec(spec); "
76
- "sys.modules['relenv.runtime'] = mod; "
77
- "spec.loader.exec_module(mod); mod.bootstrap();"
78
- )
79
-
80
-
81
- SYSCONFIGDATA = """
82
- import pathlib, sys, platform, os, logging
83
-
84
- log = logging.getLogger(__name__)
85
-
86
- def build_arch():
87
- machine = platform.machine()
88
- return machine.lower()
89
-
90
- def get_triplet(machine=None, plat=None):
91
- if not plat:
92
- plat = sys.platform
93
- if not machine:
94
- machine = build_arch()
95
- if plat == "darwin":
96
- return f"{machine}-macos"
97
- elif plat == "win32":
98
- return f"{machine}-win"
99
- elif plat == "linux":
100
- return f"{machine}-linux-gnu"
101
- else:
102
- raise RelenvException("Unknown platform {}".format(platform))
103
-
104
-
105
-
106
- pydir = pathlib.Path(__file__).resolve().parent
107
- if sys.platform == "win32":
108
- DEFAULT_DATA_DIR = pathlib.Path.home() / "AppData" / "Local" / "relenv"
109
- else:
110
- DEFAULT_DATA_DIR = pathlib.Path.home() / ".local" / "relenv"
111
-
112
- if "RELENV_DATA" in os.environ:
113
- DATA_DIR = pathlib.Path(os.environ["RELENV_DATA"]).resolve()
114
- else:
115
- DATA_DIR = DEFAULT_DATA_DIR
116
-
117
- buildroot = pydir.parent.parent
118
-
119
- toolchain = DATA_DIR / "toolchain" / get_triplet()
120
-
121
- build_time_vars = {}
122
- for key in _build_time_vars:
123
- val = _build_time_vars[key]
124
- orig = val
125
- if isinstance(val, str):
126
- val = val.format(
127
- BUILDROOT=buildroot,
128
- TOOLCHAIN=toolchain,
129
- )
130
- build_time_vars[key] = val
131
- """
132
-
133
-
134
- def print_ui(events, processes, fails, flipstat=None):
135
- """
136
- Prints the UI during the relenv building process.
137
-
138
- :param events: A dictionary of events that are updated during the build process
139
- :type events: dict
140
- :param processes: A dictionary of build processes
141
- :type processes: dict
142
- :param fails: A list of processes that have failed
143
- :type fails: list
144
- :param flipstat: A dictionary of process statuses, defaults to {}
145
- :type flipstat: dict, optional
146
- """
147
- if flipstat is None:
148
- flipstat = {}
149
- if CICD:
150
- sys.stdout.flush()
151
- return
152
- uiline = []
153
- for name in events:
154
- if not events[name].is_set():
155
- status = " {}.".format(YELLOW)
156
- elif name in processes:
157
- now = time.time()
158
- if name not in flipstat:
159
- flipstat[name] = (0, now)
160
- if flipstat[name][1] < now:
161
- flipstat[name] = (1 - flipstat[name][0], now + random.random())
162
- status = " {}{}".format(GREEN, " " if flipstat[name][0] == 1 else ".")
163
- elif name in fails:
164
- status = " {}\u2718".format(RED)
165
- else:
166
- status = " {}\u2718".format(GREEN)
167
- uiline.append(status)
168
- uiline.append(" " + END)
169
- sys.stdout.write("\r")
170
- sys.stdout.write("".join(uiline))
171
- sys.stdout.flush()
172
-
173
-
174
- def verify_checksum(file, checksum):
175
- """
176
- Verify the checksum of a files.
177
-
178
- :param file: The path to the file to check.
179
- :type file: str
180
- :param checksum: The checksum to verify against
181
- :type checksum: str
182
-
183
- :raises RelenvException: If the checksum verification failed
184
-
185
- :return: True if it succeeded, or False if the checksum was None
186
- :rtype: bool
187
- """
188
- if checksum is None:
189
- log.error("Can't verify checksum because none was given")
190
- return False
191
- with open(file, "rb") as fp:
192
- file_checksum = hashlib.sha1(fp.read()).hexdigest()
193
- if checksum != file_checksum:
194
- raise RelenvException(
195
- f"sha1 checksum verification failed. expected={checksum} found={file_checksum}"
196
- )
197
- return True
198
-
199
-
200
- def all_dirs(root, recurse=True):
201
- """
202
- Get all directories under and including the given root.
203
-
204
- :param root: The root directory to traverse
205
- :type root: str
206
- :param recurse: Whether to recursively search for directories, defaults to True
207
- :type recurse: bool, optional
208
-
209
- :return: A list of directories found
210
- :rtype: list
211
- """
212
- paths = [root]
213
- for root, dirs, files in os.walk(root):
214
- for name in dirs:
215
- paths.append(os.path.join(root, name))
216
- return paths
217
-
218
-
219
- def populate_env(dirs, env):
220
- pass
221
-
222
-
223
- def build_default(env, dirs, logfp):
224
- """
225
- The default build function if none is given during the build process.
226
-
227
- :param env: The environment dictionary
228
- :type env: dict
229
- :param dirs: The working directories
230
- :type dirs: ``relenv.build.common.Dirs``
231
- :param logfp: A handle for the log file
232
- :type logfp: file
233
- """
234
- cmd = [
235
- "./configure",
236
- "--prefix={}".format(dirs.prefix),
237
- ]
238
- if env["RELENV_HOST"].find("linux") > -1:
239
- cmd += [
240
- "--build={}".format(env["RELENV_BUILD"]),
241
- "--host={}".format(env["RELENV_HOST"]),
242
- ]
243
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
244
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
245
- runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp)
246
-
247
-
248
- def build_openssl_fips(env, dirs, logfp):
249
- return build_openssl(env, dirs, logfp, fips=True)
250
-
251
-
252
- def build_openssl(env, dirs, logfp, fips=False):
253
- """
254
- Build openssl.
255
-
256
- :param env: The environment dictionary
257
- :type env: dict
258
- :param dirs: The working directories
259
- :type dirs: ``relenv.build.common.Dirs``
260
- :param logfp: A handle for the log file
261
- :type logfp: file
262
- """
263
- arch = "aarch64"
264
- if sys.platform == "darwin":
265
- plat = "darwin64"
266
- if env["RELENV_HOST_ARCH"] == "x86_64":
267
- arch = "x86_64-cc"
268
- elif env["RELENV_HOST_ARCH"] == "arm64":
269
- arch = "arm64-cc"
270
- else:
271
- raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}")
272
- extended_cmd = []
273
- else:
274
- plat = "linux"
275
- if env["RELENV_HOST_ARCH"] == "x86_64":
276
- arch = "x86_64"
277
- elif env["RELENV_HOST_ARCH"] == "aarch64":
278
- arch = "aarch64"
279
- else:
280
- raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}")
281
- extended_cmd = [
282
- "-Wl,-z,noexecstack",
283
- ]
284
- if fips:
285
- extended_cmd.append("enable-fips")
286
- cmd = [
287
- "./Configure",
288
- f"{plat}-{arch}",
289
- f"--prefix={dirs.prefix}",
290
- "--openssldir=/etc/ssl",
291
- "--libdir=lib",
292
- "--api=1.1.1",
293
- "--shared",
294
- "--with-rand-seed=os,egd",
295
- "enable-md2",
296
- "enable-egd",
297
- "no-idea",
298
- ]
299
- cmd.extend(extended_cmd)
300
- runcmd(
301
- cmd,
302
- env=env,
303
- stderr=logfp,
304
- stdout=logfp,
305
- )
306
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
307
- if fips:
308
- shutil.copy(
309
- pathlib.Path("providers") / "fips.so",
310
- pathlib.Path(dirs.prefix) / "lib" / "ossl-modules",
311
- )
312
- else:
313
- runcmd(["make", "install_sw"], env=env, stderr=logfp, stdout=logfp)
314
-
315
-
316
- def build_sqlite(env, dirs, logfp):
317
- """
318
- Build sqlite.
319
-
320
- :param env: The environment dictionary
321
- :type env: dict
322
- :param dirs: The working directories
323
- :type dirs: ``relenv.build.common.Dirs``
324
- :param logfp: A handle for the log file
325
- :type logfp: file
326
- """
327
- # extra_cflags=('-Os '
328
- # '-DSQLITE_ENABLE_FTS5 '
329
- # '-DSQLITE_ENABLE_FTS4 '
330
- # '-DSQLITE_ENABLE_FTS3_PARENTHESIS '
331
- # '-DSQLITE_ENABLE_JSON1 '
332
- # '-DSQLITE_ENABLE_RTREE '
333
- # '-DSQLITE_TCL=0 '
334
- # )
335
- # configure_pre=[
336
- # '--enable-threadsafe',
337
- # '--enable-shared=no',
338
- # '--enable-static=yes',
339
- # '--disable-readline',
340
- # '--disable-dependency-tracking',
341
- # ]
342
- cmd = [
343
- "./configure",
344
- # "--with-shared",
345
- # "--without-static",
346
- "--enable-threadsafe",
347
- "--disable-readline",
348
- "--disable-dependency-tracking",
349
- "--prefix={}".format(dirs.prefix),
350
- # "--enable-add-ons=nptl,ports",
351
- ]
352
- if env["RELENV_HOST"].find("linux") > -1:
353
- cmd += [
354
- "--build={}".format(env["RELENV_BUILD_ARCH"]),
355
- "--host={}".format(env["RELENV_HOST"]),
356
- ]
357
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
358
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
359
- runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp)
360
-
361
-
362
- def update_ensurepip(directory):
363
- """
364
- Update bundled dependencies for ensurepip (pip & setuptools).
365
- """
366
- # ensurepip bundle location
367
- bundle_dir = directory / "ensurepip" / "_bundled"
368
-
369
- # Make sure the destination directory exists
370
- bundle_dir.mkdir(parents=True, exist_ok=True)
371
-
372
- # Detect existing whl. Later versions of python don't include setuptools. We
373
- # only want to update whl files that python expects to be there
374
- pip_version = "25.2"
375
- setuptools_version = "80.9.0"
376
- update_pip = False
377
- update_setuptools = False
378
- for file in bundle_dir.glob("*.whl"):
379
-
380
- log.debug("Checking whl: %s", str(file))
381
- if file.name.startswith("pip-"):
382
- found_version = file.name.split("-")[1]
383
- log.debug("Found version %s", found_version)
384
- if Version(found_version) >= Version(pip_version):
385
- log.debug("Found correct pip version or newer: %s", found_version)
386
- else:
387
- file.unlink()
388
- update_pip = True
389
- if file.name.startswith("setuptools-"):
390
- found_version = file.name.split("-")[1]
391
- log.debug("Found version %s", found_version)
392
- if Version(found_version) >= Version(setuptools_version):
393
- log.debug(
394
- "Found correct setuptools version or newer: %s", found_version
395
- )
396
- else:
397
- file.unlink()
398
- update_setuptools = True
399
-
400
- # Download whl files and update __init__.py
401
- init_file = directory / "ensurepip" / "__init__.py"
402
- if update_pip:
403
- whl = f"pip-{pip_version}-py3-none-any.whl"
404
- whl_path = "b7/3f/945ef7ab14dc4f9d7f40288d2df998d1837ee0888ec3659c813487572faa"
405
- url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}"
406
- download_url(url=url, dest=bundle_dir)
407
- assert (bundle_dir / whl).exists()
408
-
409
- # Update __init__.py
410
- old = "^_PIP_VERSION.*"
411
- new = f'_PIP_VERSION = "{pip_version}"'
412
- patch_file(path=init_file, old=old, new=new)
413
-
414
- # setuptools
415
- if update_setuptools:
416
- whl = f"setuptools-{setuptools_version}-py3-none-any.whl"
417
- whl_path = "a3/dc/17031897dae0efacfea57dfd3a82fdd2a2aeb58e0ff71b77b87e44edc772"
418
- url = f"https://files.pythonhosted.org/packages/{whl_path}/{whl}"
419
- download_url(url=url, dest=bundle_dir)
420
- assert (bundle_dir / whl).exists()
421
-
422
- # setuptools
423
- old = "^_SETUPTOOLS_VERSION.*"
424
- new = f'_SETUPTOOLS_VERSION = "{setuptools_version}"'
425
- patch_file(path=init_file, old=old, new=new)
426
-
427
- log.debug("ensurepip __init__.py contents:")
428
- log.debug(init_file.read_text())
429
-
430
-
431
- def patch_file(path, old, new):
432
- """
433
- Search a file line by line for a string to replace.
434
-
435
- :param path: Location of the file to search
436
- :type path: str
437
- :param old: The value that will be replaced
438
- :type path: str
439
- :param new: The value that will replace the 'old' value.
440
- :type path: str
441
- """
442
- log.debug("Patching file: %s", path)
443
- import re
444
-
445
- with open(path, "r") as fp:
446
- content = fp.read()
447
- new_content = ""
448
- for line in content.splitlines():
449
- line = re.sub(old, new, line)
450
- new_content += line + "\n"
451
- with open(path, "w") as fp:
452
- fp.write(new_content)
453
-
454
-
455
- def tarball_version(href):
456
- if href.endswith("tar.gz"):
457
- try:
458
- x = href.split("-", 1)[1][:-7]
459
- if x != "latest":
460
- return x
461
- except IndexError:
462
- return None
463
-
464
-
465
- def sqlite_version(href):
466
- if "releaselog" in href:
467
- link = href.split("/")[1][:-5]
468
- return "{:d}{:02d}{:02d}00".format(*[int(_) for _ in link.split("_")])
469
-
470
-
471
- def github_version(href):
472
- if "tag/" in href:
473
- return href.split("/v")[-1]
474
-
475
-
476
- def krb_version(href):
477
- if re.match(r"\d\.\d\d/", href):
478
- return href[:-1]
479
-
480
-
481
- def python_version(href):
482
- if re.match(r"(\d+\.)+\d/", href):
483
- return href[:-1]
484
-
485
-
486
- def uuid_version(href):
487
- if "download" in href and "latest" not in href:
488
- return href[:-16].rsplit("/")[-1].replace("libuuid-", "")
489
-
490
-
491
- def parse_links(text):
492
- class HrefParser(HTMLParser):
493
- hrefs = []
494
-
495
- def handle_starttag(self, tag, attrs):
496
- if tag == "a":
497
- link = dict(attrs).get("href", "")
498
- if link:
499
- self.hrefs.append(link)
500
-
501
- parser = HrefParser()
502
- parser.feed(text)
503
- return parser.hrefs
504
-
505
-
506
- def check_files(name, location, func, current):
507
- fp = io.BytesIO()
508
- fetch_url(location, fp)
509
- fp.seek(0)
510
- text = fp.read().decode()
511
- loose = False
512
- try:
513
- current = parse(current)
514
- except InvalidVersion:
515
- current = LooseVersion(current)
516
- loose = True
517
-
518
- versions = []
519
- for _ in parse_links(text):
520
- version = func(_)
521
- if version:
522
- if loose:
523
- versions.append(LooseVersion(version))
524
- else:
525
- try:
526
- versions.append(parse(version))
527
- except InvalidVersion:
528
- pass
529
-
530
- versions.sort()
531
- compare_versions(name, current, versions)
532
-
533
-
534
- def compare_versions(name, current, versions):
535
- for version in versions:
536
- try:
537
- if version > current:
538
- print(f"Found new version of {name} {version} > {current}")
539
- except TypeError:
540
- print(f"Unable to compare versions {version}")
541
-
542
-
543
- class Download:
544
- """
545
- A utility that holds information about content to be downloaded.
546
-
547
- :param name: The name of the download
548
- :type name: str
549
- :param url: The url of the download
550
- :type url: str
551
- :param signature: The signature of the download, defaults to None
552
- :type signature: str
553
- :param destination: The path to download the file to
554
- :type destination: str
555
- :param version: The version of the content to download
556
- :type version: str
557
- :param sha1: The sha1 sum of the download
558
- :type sha1: str
559
-
560
- """
561
-
562
- def __init__(
563
- self,
564
- name,
565
- url,
566
- fallback_url=None,
567
- signature=None,
568
- destination="",
569
- version="",
570
- checksum=None,
571
- checkfunc=None,
572
- checkurl=None,
573
- ):
574
- self.name = name
575
- self.url_tpl = url
576
- self.fallback_url_tpl = fallback_url
577
- self.signature_tpl = signature
578
- self.destination = destination
579
- self.version = version
580
- self.checksum = checksum
581
- self.checkfunc = checkfunc
582
- self.checkurl = checkurl
583
-
584
- def copy(self):
585
- return Download(
586
- self.name,
587
- self.url_tpl,
588
- self.fallback_url_tpl,
589
- self.signature_tpl,
590
- self.destination,
591
- self.version,
592
- self.checksum,
593
- self.checkfunc,
594
- self.checkurl,
595
- )
596
-
597
- @property
598
- def url(self):
599
- return self.url_tpl.format(version=self.version)
600
-
601
- @property
602
- def fallback_url(self):
603
- if self.fallback_url_tpl:
604
- return self.fallback_url_tpl.format(version=self.version)
605
-
606
- @property
607
- def signature_url(self):
608
- return self.signature_tpl.format(version=self.version)
609
-
610
- @property
611
- def filepath(self):
612
- _, name = self.url.rsplit("/", 1)
613
- return pathlib.Path(self.destination) / name
614
-
615
- @property
616
- def formatted_url(self):
617
- return self.url.format(version=self.version)
618
-
619
- def fetch_file(self):
620
- """
621
- Download the file.
622
-
623
- :return: The path to the downloaded content, and whether it was downloaded.
624
- :rtype: tuple(str, bool)
625
- """
626
- try:
627
- return download_url(self.url, self.destination, CICD), True
628
- except Exception as exc:
629
- if self.fallback_url:
630
- print(f"Download failed {self.url} ({exc}); trying fallback url")
631
- return download_url(self.fallback_url, self.destination, CICD), True
632
- raise
633
-
634
- def fetch_signature(self, version):
635
- """
636
- Download the file signature.
637
-
638
- :return: The path to the downloaded signature.
639
- :rtype: str
640
- """
641
- return download_url(self.signature_url, self.destination, CICD)
642
-
643
- def exists(self):
644
- """
645
- True when the artifact already exists on disk.
646
-
647
- :return: True when the artifact already exists on disk
648
- :rtype: bool
649
- """
650
- return self.filepath.exists()
651
-
652
- def valid_hash(self):
653
- pass
654
-
655
- @staticmethod
656
- def validate_signature(archive, signature):
657
- """
658
- True when the archive's signature is valid.
659
-
660
- :param archive: The path to the archive to validate
661
- :type archive: str
662
- :param signature: The path to the signature to validate against
663
- :type signature: str
664
-
665
- :return: True if it validated properly, else False
666
- :rtype: bool
667
- """
668
- if signature is None:
669
- log.error("Can't check signature because none was given")
670
- return False
671
- try:
672
- runcmd(
673
- ["gpg", "--verify", signature, archive],
674
- stderr=subprocess.PIPE,
675
- stdout=subprocess.PIPE,
676
- )
677
- return True
678
- except RelenvException as exc:
679
- log.error("Signature validation failed on %s: %s", archive, exc)
680
- return False
681
-
682
- @staticmethod
683
- def validate_checksum(archive, checksum):
684
- """
685
- True when when the archive matches the sha1 hash.
686
-
687
- :param archive: The path to the archive to validate
688
- :type archive: str
689
- :param checksum: The sha1 sum to validate against
690
- :type checksum: str
691
- :return: True if the sums matched, else False
692
- :rtype: bool
693
- """
694
- try:
695
- verify_checksum(archive, checksum)
696
- return True
697
- except RelenvException as exc:
698
- log.error("sha1 validation failed on %s: %s", archive, exc)
699
- return False
700
-
701
- def __call__(self, force_download=False, show_ui=False, exit_on_failure=False):
702
- """
703
- Downloads the url and validates the signature and sha1 sum.
704
-
705
- :return: Whether or not validation succeeded
706
- :rtype: bool
707
- """
708
- os.makedirs(self.filepath.parent, exist_ok=True)
709
-
710
- downloaded = False
711
- if force_download:
712
- _, downloaded = self.fetch_file()
713
- else:
714
- file_is_valid = False
715
- dest = get_download_location(self.url, self.destination)
716
- if self.checksum and os.path.exists(dest):
717
- file_is_valid = self.validate_checksum(dest, self.checksum)
718
- if file_is_valid:
719
- log.debug("%s already downloaded, skipping.", self.url)
720
- else:
721
- _, downloaded = self.fetch_file()
722
- valid = True
723
- if downloaded:
724
- if self.signature_tpl is not None:
725
- sig, _ = self.fetch_signature()
726
- valid_sig = self.validate_signature(self.filepath, sig)
727
- valid = valid and valid_sig
728
- if self.checksum is not None:
729
- valid_checksum = self.validate_checksum(self.filepath, self.checksum)
730
- valid = valid and valid_checksum
731
-
732
- if not valid:
733
- log.warning("Checksum did not match %s: %s", self.name, self.checksum)
734
- if show_ui:
735
- sys.stderr.write(
736
- f"\nChecksum did not match {self.name}: {self.checksum}\n"
737
- )
738
- sys.stderr.flush()
739
- if exit_on_failure and not valid:
740
- sys.exit(1)
741
- return valid
742
-
743
- def check_version(self):
744
- if self.checkurl:
745
- url = self.checkurl
746
- else:
747
- url = self.url.rsplit("/", 1)[0]
748
- check_files(self.name, url, self.checkfunc, self.version)
749
-
750
-
751
- class Dirs:
752
- """
753
- A container for directories during build time.
754
-
755
- :param dirs: A collection of working directories
756
- :type dirs: ``relenv.common.WorkDirs``
757
- :param name: The name of this collection
758
- :type name: str
759
- :param arch: The architecture being worked with
760
- :type arch: str
761
- """
762
-
763
- def __init__(self, dirs, name, arch, version):
764
- # XXX name is the specific to a step where as everything
765
- # else here is generalized to the entire build
766
- self.name = name
767
- self.version = version
768
- self.arch = arch
769
- self.root = dirs.root
770
- self.build = dirs.build
771
- self.downloads = dirs.download
772
- self.logs = dirs.logs
773
- self.sources = dirs.src
774
- self.tmpbuild = tempfile.mkdtemp(prefix="{}_build".format(name))
775
-
776
- @property
777
- def toolchain(self):
778
- if sys.platform == "darwin":
779
- return get_toolchain(root=self.root)
780
- elif sys.platform == "win32":
781
- return get_toolchain(root=self.root)
782
- else:
783
- return get_toolchain(self.arch, self.root)
784
-
785
- @property
786
- def _triplet(self):
787
- if sys.platform == "darwin":
788
- return "{}-macos".format(self.arch)
789
- elif sys.platform == "win32":
790
- return "{}-win".format(self.arch)
791
- else:
792
- return "{}-linux-gnu".format(self.arch)
793
-
794
- @property
795
- def prefix(self):
796
- return self.build / f"{self.version}-{self._triplet}"
797
-
798
- def __getstate__(self):
799
- """
800
- Return an object used for pickling.
801
-
802
- :return: The picklable state
803
- """
804
- return {
805
- "name": self.name,
806
- "arch": self.arch,
807
- "root": self.root,
808
- "build": self.build,
809
- "downloads": self.downloads,
810
- "logs": self.logs,
811
- "sources": self.sources,
812
- "tmpbuild": self.tmpbuild,
813
- }
814
-
815
- def __setstate__(self, state):
816
- """
817
- Unwrap the object returned from unpickling.
818
-
819
- :param state: The state to unpickle
820
- :type state: dict
821
- """
822
- self.name = state["name"]
823
- self.arch = state["arch"]
824
- self.root = state["root"]
825
- self.downloads = state["downloads"]
826
- self.logs = state["logs"]
827
- self.sources = state["sources"]
828
- self.build = state["build"]
829
- self.tmpbuild = state["tmpbuild"]
830
-
831
- def to_dict(self):
832
- """
833
- Get a dictionary representation of the directories in this collection.
834
-
835
- :return: A dictionary of all the directories
836
- :rtype: dict
837
- """
838
- return {
839
- x: getattr(self, x)
840
- for x in [
841
- "root",
842
- "prefix",
843
- "downloads",
844
- "logs",
845
- "sources",
846
- "build",
847
- "toolchain",
848
- ]
849
- }
850
-
851
-
852
- class Builds:
853
- """
854
- Collection of builds.
855
- """
856
-
857
- def __init__(self):
858
- self.builds = {}
859
-
860
- def add(self, platform, *args, **kwargs):
861
- if "builder" in kwargs:
862
- build = kwargs.pop("builder")
863
- if args or kwargs:
864
- raise RuntimeError(
865
- "builder keyword can not be used with other kwargs or args"
866
- )
867
- else:
868
- build = Builder(*args, **kwargs)
869
- if platform not in self.builds:
870
- self.builds[platform] = build
871
- else:
872
- self.builds[platform] = build
873
- return build
874
-
875
-
876
- builds = Builds()
877
-
878
-
879
- class Builder:
880
- """
881
- Utility that handles the build process.
882
-
883
- :param root: The root of the working directories for this build
884
- :type root: str
885
- :param recipies: The instructions for the build steps
886
- :type recipes: list
887
- :param build_default: The default build function, defaults to ``build_default``
888
- :type build_default: types.FunctionType
889
- :param populate_env: The default function to populate the build environment, defaults to ``populate_env``
890
- :type populate_env: types.FunctionType
891
- :param force_download: If True, forces downloading the archives even if they exist, defaults to False
892
- :type force_download: bool
893
- :param arch: The architecture being built
894
- :type arch: str
895
- """
896
-
897
- def __init__(
898
- self,
899
- root=None,
900
- recipies=None,
901
- build_default=build_default,
902
- populate_env=populate_env,
903
- arch="x86_64",
904
- version="",
905
- ):
906
- self.root = root
907
- self.dirs = work_dirs(root)
908
- self.build_arch = build_arch()
909
- self.build_triplet = get_triplet(self.build_arch)
910
- self.arch = arch
911
- self.sources = self.dirs.src
912
- self.downloads = self.dirs.download
913
-
914
- if recipies is None:
915
- self.recipies = {}
916
- else:
917
- self.recipies = recipies
918
-
919
- self.build_default = build_default
920
- self.populate_env = populate_env
921
- self.version = version
922
- self.toolchains = get_toolchain(root=self.dirs.root)
923
- self.set_arch(self.arch)
924
-
925
- def copy(self, version, checksum):
926
- recipies = {}
927
- for name in self.recipies:
928
- _ = self.recipies[name]
929
- recipies[name] = {
930
- "build_func": _["build_func"],
931
- "wait_on": _["wait_on"],
932
- "download": _["download"].copy() if _["download"] else None,
933
- }
934
- build = Builder(
935
- self.root,
936
- recipies,
937
- self.build_default,
938
- self.populate_env,
939
- self.arch,
940
- version,
941
- )
942
- build.recipies["python"]["download"].version = version
943
- build.recipies["python"]["download"].checksum = checksum
944
- return build
945
-
946
- def set_arch(self, arch):
947
- """
948
- Set the architecture for the build.
949
-
950
- :param arch: The arch to build
951
- :type arch: str
952
- """
953
- self.arch = arch
954
- if sys.platform in ["darwin", "win32"]:
955
- self.toolchain = None
956
- else:
957
- self.toolchain = get_toolchain(self.arch, self.dirs.root)
958
-
959
- @property
960
- def triplet(self):
961
- return get_triplet(self.arch)
962
-
963
- @property
964
- def prefix(self):
965
- return self.dirs.build / f"{self.version}-{self.triplet}"
966
-
967
- @property
968
- def _triplet(self):
969
- if sys.platform == "darwin":
970
- return "{}-macos".format(self.arch)
971
- elif sys.platform == "win32":
972
- return "{}-win".format(self.arch)
973
- else:
974
- return "{}-linux-gnu".format(self.arch)
975
-
976
- def add(self, name, build_func=None, wait_on=None, download=None):
977
- """
978
- Add a step to the build process.
979
-
980
- :param name: The name of the step
981
- :type name: str
982
- :param build_func: The function that builds this step, defaults to None
983
- :type build_func: types.FunctionType, optional
984
- :param wait_on: Processes to wait on before running this step, defaults to None
985
- :type wait_on: list, optional
986
- :param download: A dictionary of download information, defaults to None
987
- :type download: dict, optional
988
- """
989
- if wait_on is None:
990
- wait_on = []
991
- if build_func is None:
992
- build_func = self.build_default
993
- if download is not None:
994
- download = Download(name, destination=self.downloads, **download)
995
- self.recipies[name] = {
996
- "build_func": build_func,
997
- "wait_on": wait_on,
998
- "download": download,
999
- }
1000
-
1001
- def run(
1002
- self, name, event, build_func, download, show_ui=False, log_level="WARNING"
1003
- ):
1004
- """
1005
- Run a build step.
1006
-
1007
- :param name: The name of the step to run
1008
- :type name: str
1009
- :param event: An event to track this process' status and alert waiting steps
1010
- :type event: ``multiprocessing.Event``
1011
- :param build_func: The function to use to build this step
1012
- :type build_func: types.FunctionType
1013
- :param download: The ``Download`` instance for this step
1014
- :type download: ``Download``
1015
-
1016
- :return: The output of the build function
1017
- """
1018
- root_log = logging.getLogger(None)
1019
- if sys.platform == "win32":
1020
- if not show_ui:
1021
- handler = logging.StreamHandler()
1022
- handler.setLevel(logging.getLevelName(log_level))
1023
- root_log.addHandler(handler)
1024
-
1025
- for handler in root_log.handlers:
1026
- if isinstance(handler, logging.StreamHandler):
1027
- handler.setFormatter(
1028
- logging.Formatter(f"%(asctime)s {name} %(message)s")
1029
- )
1030
-
1031
- if not self.dirs.build.exists():
1032
- os.makedirs(self.dirs.build, exist_ok=True)
1033
-
1034
- dirs = Dirs(self.dirs, name, self.arch, self.version)
1035
- os.makedirs(dirs.sources, exist_ok=True)
1036
- os.makedirs(dirs.logs, exist_ok=True)
1037
- os.makedirs(dirs.prefix, exist_ok=True)
1038
-
1039
- while event.is_set() is False:
1040
- time.sleep(0.3)
1041
-
1042
- logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w")
1043
- handler = logging.FileHandler(dirs.logs / f"{name}.log")
1044
- root_log.addHandler(handler)
1045
- root_log.setLevel(logging.NOTSET)
1046
-
1047
- # DEBUG: Uncomment to debug
1048
- # logfp = sys.stdout
1049
-
1050
- cwd = os.getcwd()
1051
- if download:
1052
- extract_archive(dirs.sources, str(download.filepath))
1053
- dirs.source = dirs.sources / download.filepath.name.split(".tar")[0]
1054
- os.chdir(dirs.source)
1055
- else:
1056
- os.chdir(dirs.prefix)
1057
-
1058
- if sys.platform == "win32":
1059
- env = os.environ.copy()
1060
- else:
1061
- env = {
1062
- "PATH": os.environ["PATH"],
1063
- }
1064
- env["RELENV_DEBUG"] = "1"
1065
- env["RELENV_BUILDENV"] = "1"
1066
- env["RELENV_HOST"] = self.triplet
1067
- env["RELENV_HOST_ARCH"] = self.arch
1068
- env["RELENV_BUILD"] = self.build_triplet
1069
- env["RELENV_BUILD_ARCH"] = self.build_arch
1070
- env["RELENV_PY_VERSION"] = self.recipies["python"]["download"].version
1071
- env["RELENV_PY_MAJOR_VERSION"] = env["RELENV_PY_VERSION"].rsplit(".", 1)[0]
1072
- if "RELENV_DATA" in os.environ:
1073
- env["RELENV_DATA"] = os.environ["RELENV_DATA"]
1074
- if self.build_arch != self.arch:
1075
- native_root = DATA_DIR / "native"
1076
- env["RELENV_NATIVE_PY"] = str(native_root / "bin" / "python3")
1077
-
1078
- self.populate_env(env, dirs)
1079
-
1080
- _ = dirs.to_dict()
1081
- for k in _:
1082
- log.info("Directory %s %s", k, _[k])
1083
- for k in env:
1084
- log.info("Environment %s %s", k, env[k])
1085
- try:
1086
- return build_func(env, dirs, logfp)
1087
- except Exception:
1088
- log.exception("Build failure")
1089
- sys.exit(1)
1090
- finally:
1091
- os.chdir(cwd)
1092
- log.removeHandler(handler)
1093
- logfp.close()
1094
-
1095
- def cleanup(self):
1096
- """
1097
- Clean up the build directories.
1098
- """
1099
- shutil.rmtree(self.prefix)
1100
-
1101
- def clean(self):
1102
- """
1103
- Completely clean up the remnants of a relenv build.
1104
- """
1105
- # Clean directories
1106
- for _ in [self.prefix, self.sources]:
1107
- try:
1108
- shutil.rmtree(_)
1109
- except PermissionError:
1110
- sys.stderr.write(f"Unable to remove directory: {_}")
1111
- except FileNotFoundError:
1112
- pass
1113
- # Clean files
1114
- archive = f"{self.prefix}.tar.xz"
1115
- for _ in [archive]:
1116
- try:
1117
- os.remove(_)
1118
- except FileNotFoundError:
1119
- pass
1120
-
1121
- def download_files(self, steps=None, force_download=False, show_ui=False):
1122
- """
1123
- Download all of the needed archives.
1124
-
1125
- :param steps: The steps to download archives for, defaults to None
1126
- :type steps: list, optional
1127
- """
1128
- if steps is None:
1129
- steps = list(self.recipies)
1130
-
1131
- fails = []
1132
- processes = {}
1133
- events = {}
1134
- if show_ui:
1135
- sys.stdout.write("Starting downloads \n")
1136
- log.info("Starting downloads")
1137
- if show_ui:
1138
- print_ui(events, processes, fails)
1139
- for name in steps:
1140
- download = self.recipies[name]["download"]
1141
- if download is None:
1142
- continue
1143
- event = multiprocessing.Event()
1144
- event.set()
1145
- events[name] = event
1146
- proc = multiprocessing.Process(
1147
- name=name,
1148
- target=download,
1149
- kwargs={
1150
- "force_download": force_download,
1151
- "show_ui": show_ui,
1152
- "exit_on_failure": True,
1153
- },
1154
- )
1155
- proc.start()
1156
- processes[name] = proc
1157
-
1158
- while processes:
1159
- for proc in list(processes.values()):
1160
- proc.join(0.3)
1161
- # DEBUG: Comment to debug
1162
- if show_ui:
1163
- print_ui(events, processes, fails)
1164
- if proc.exitcode is None:
1165
- continue
1166
- processes.pop(proc.name)
1167
- if proc.exitcode != 0:
1168
- fails.append(proc.name)
1169
- if show_ui:
1170
- print_ui(events, processes, fails)
1171
- sys.stdout.write("\n")
1172
- if fails and False:
1173
- if show_ui:
1174
- print_ui(events, processes, fails)
1175
- sys.stderr.write("The following failures were reported\n")
1176
- for fail in fails:
1177
- sys.stderr.write(fail + "\n")
1178
- sys.stderr.flush()
1179
- sys.exit(1)
1180
-
1181
- def build(self, steps=None, cleanup=True, show_ui=False, log_level="WARNING"):
1182
- """
1183
- Build!
1184
-
1185
- :param steps: The steps to run, defaults to None
1186
- :type steps: list, optional
1187
- :param cleanup: Whether to clean up or not, defaults to True
1188
- :type cleanup: bool, optional
1189
- """ # noqa: D400
1190
- fails = []
1191
- events = {}
1192
- waits = {}
1193
- processes = {}
1194
-
1195
- if show_ui:
1196
- sys.stdout.write("Starting builds\n")
1197
- # DEBUG: Comment to debug
1198
- print_ui(events, processes, fails)
1199
- log.info("Starting builds")
1200
-
1201
- for name in steps:
1202
- event = multiprocessing.Event()
1203
- events[name] = event
1204
- kwargs = dict(self.recipies[name])
1205
- kwargs["show_ui"] = show_ui
1206
- kwargs["log_level"] = log_level
1207
-
1208
- # Determine needed dependency recipies.
1209
- wait_on = kwargs.pop("wait_on", [])
1210
- for _ in wait_on[:]:
1211
- if _ not in steps:
1212
- wait_on.remove(_)
1213
-
1214
- waits[name] = wait_on
1215
- if not waits[name]:
1216
- event.set()
1217
-
1218
- proc = multiprocessing.Process(
1219
- name=name, target=self.run, args=(name, event), kwargs=kwargs
1220
- )
1221
- proc.start()
1222
- processes[name] = proc
1223
-
1224
- # Wait for the processes to finish and check if we should send any
1225
- # dependency events.
1226
- while processes:
1227
- for proc in list(processes.values()):
1228
- proc.join(0.3)
1229
- if show_ui:
1230
- # DEBUG: Comment to debug
1231
- print_ui(events, processes, fails)
1232
- if proc.exitcode is None:
1233
- continue
1234
- processes.pop(proc.name)
1235
- if proc.exitcode != 0:
1236
- fails.append(proc.name)
1237
- is_failure = True
1238
- else:
1239
- is_failure = False
1240
- for name in waits:
1241
- if proc.name in waits[name]:
1242
- if is_failure:
1243
- if name in processes:
1244
- processes[name].terminate()
1245
- time.sleep(0.1)
1246
- waits[name].remove(proc.name)
1247
- if not waits[name] and not events[name].is_set():
1248
- events[name].set()
1249
-
1250
- if fails:
1251
- sys.stderr.write("The following failures were reported\n")
1252
- last_outs = {}
1253
- for fail in fails:
1254
- log_file = self.dirs.logs / f"{fail}.log"
1255
- try:
1256
- with io.open(log_file) as fp:
1257
- fp.seek(0, 2)
1258
- end = fp.tell()
1259
- ind = end - 4096
1260
- if ind > 0:
1261
- fp.seek(ind)
1262
- else:
1263
- fp.seek(0)
1264
- last_out = fp.read()
1265
- if show_ui:
1266
- sys.stderr.write("=" * 20 + f" {fail} " + "=" * 20 + "\n")
1267
- sys.stderr.write(fp.read() + "\n\n")
1268
- except FileNotFoundError:
1269
- last_outs[fail] = f"Log file not found: {log_file}"
1270
- log.error("Build step %s has failed", fail)
1271
- log.error(last_out)
1272
- if show_ui:
1273
- sys.stderr.flush()
1274
- if cleanup:
1275
- log.debug("Performing cleanup.")
1276
- self.cleanup()
1277
- sys.exit(1)
1278
- if show_ui:
1279
- time.sleep(0.3)
1280
- print_ui(events, processes, fails)
1281
- sys.stdout.write("\n")
1282
- sys.stdout.flush()
1283
- if cleanup:
1284
- log.debug("Performing cleanup.")
1285
- self.cleanup()
1286
-
1287
- def check_prereqs(self):
1288
- """
1289
- Check pre-requsists for build.
1290
-
1291
- This method verifies all requrements for a successful build are satisfied.
1292
-
1293
- :return: Returns a list of string describing failed checks
1294
- :rtype: list
1295
- """
1296
- fail = []
1297
- if sys.platform == "linux":
1298
- if not self.toolchain or not self.toolchain.exists():
1299
- fail.append(
1300
- f"Toolchain for {self.arch} does not exist. Please pip install ppbt."
1301
- )
1302
- return fail
1303
-
1304
- def __call__(
1305
- self,
1306
- steps=None,
1307
- arch=None,
1308
- clean=True,
1309
- cleanup=True,
1310
- force_download=False,
1311
- download_only=False,
1312
- show_ui=False,
1313
- log_level="WARNING",
1314
- ):
1315
- """
1316
- Set the architecture, define the steps, clean if needed, download what is needed, and build.
1317
-
1318
- :param steps: The steps to run, defaults to None
1319
- :type steps: list, optional
1320
- :param arch: The architecture to build, defaults to None
1321
- :type arch: str, optional
1322
- :param clean: If true, cleans the directories first, defaults to True
1323
- :type clean: bool, optional
1324
- :param cleanup: Cleans up after build if true, defaults to True
1325
- :type cleanup: bool, optional
1326
- :param force_download: Whether or not to download the content if it already exists, defaults to True
1327
- :type force_download: bool, optional
1328
- """
1329
- log = logging.getLogger(None)
1330
- log.setLevel(logging.NOTSET)
1331
-
1332
- if not show_ui:
1333
- handler = logging.StreamHandler()
1334
- handler.setLevel(logging.getLevelName(log_level))
1335
- log.addHandler(handler)
1336
-
1337
- os.makedirs(self.dirs.logs, exist_ok=True)
1338
- handler = logging.FileHandler(self.dirs.logs / "build.log")
1339
- handler.setLevel(logging.INFO)
1340
- log.addHandler(handler)
1341
-
1342
- if arch:
1343
- self.set_arch(arch)
1344
-
1345
- if steps is None:
1346
- steps = self.recipies
1347
-
1348
- failures = self.check_prereqs()
1349
- if not download_only and failures:
1350
- for _ in failures:
1351
- sys.stderr.write(f"{_}\n")
1352
- sys.stderr.flush()
1353
- sys.exit(1)
1354
-
1355
- if clean:
1356
- self.clean()
1357
-
1358
- if self.build_arch != self.arch:
1359
- native_root = DATA_DIR / "native"
1360
- if not native_root.exists():
1361
- if "RELENV_NATIVE_PY_VERSION" in os.environ:
1362
- version = os.environ["RELENV_NATIVE_PY_VERSION"]
1363
- else:
1364
- version = self.version
1365
- from relenv.create import create
1366
-
1367
- create("native", DATA_DIR, version=version)
1368
-
1369
- # Start a process for each build passing it an event used to notify each
1370
- # process if it's dependencies have finished.
1371
- self.download_files(steps, force_download=force_download, show_ui=show_ui)
1372
- if download_only:
1373
- return
1374
- self.build(steps, cleanup, show_ui=show_ui, log_level=log_level)
1375
-
1376
- def check_versions(self):
1377
- success = True
1378
- for step in list(self.recipies):
1379
- download = self.recipies[step]["download"]
1380
- if not download:
1381
- continue
1382
- if not download.check_version():
1383
- success = False
1384
- return success
1385
-
1386
-
1387
- def patch_shebang(path, old, new):
1388
- """
1389
- Replace a file's shebang.
1390
-
1391
- :param path: The path of the file to patch
1392
- :type path: str
1393
- :param old: The old shebang, will only patch when this is found
1394
- :type old: str
1395
- :param name: The new shebang to be written
1396
- :type name: str
1397
- """
1398
- with open(path, "rb") as fp:
1399
- try:
1400
- data = fp.read(len(old.encode())).decode()
1401
- except UnicodeError:
1402
- return False
1403
- except Exception as exc:
1404
- log.warning("Unhandled exception: %r", exc)
1405
- return False
1406
- if data != old:
1407
- log.warning("Shebang doesn't match: %s %r != %r", path, old, data)
1408
- return False
1409
- data = fp.read().decode()
1410
- with open(path, "w") as fp:
1411
- fp.write(new)
1412
- fp.write(data)
1413
- with open(path, "r") as fp:
1414
- data = fp.read()
1415
- log.info("Patched shebang of %s => %r", path, data)
1416
- return True
1417
-
1418
-
1419
- def patch_shebangs(path, old, new):
1420
- """
1421
- Traverse directory and patch shebangs.
1422
-
1423
- :param path: The of the directory to traverse
1424
- :type path: str
1425
- :param old: The old shebang, will only patch when this is found
1426
- :type old: str
1427
- :param name: The new shebang to be written
1428
- :type name: str
1429
- """
1430
- for root, _dirs, files in os.walk(str(path)):
1431
- for file in files:
1432
- patch_shebang(os.path.join(root, file), old, new)
1433
-
1434
-
1435
- def install_sysdata(mod, destfile, buildroot, toolchain):
1436
- """
1437
- Create a Relenv Python environment's sysconfigdata.
1438
-
1439
- Helper method used by the `finalize` build method to create a Relenv
1440
- Python environment's sysconfigdata.
1441
-
1442
- :param mod: The module to operate on
1443
- :type mod: ``types.ModuleType``
1444
- :param destfile: Path to the file to write the data to
1445
- :type destfile: str
1446
- :param buildroot: Path to the root of the build
1447
- :type buildroot: str
1448
- :param toolchain: Path to the root of the toolchain
1449
- :type toolchain: str
1450
- """
1451
- data = {}
1452
- fbuildroot = lambda _: _.replace(str(buildroot), "{BUILDROOT}") # noqa: E731
1453
- ftoolchain = lambda _: _.replace(str(toolchain), "{TOOLCHAIN}") # noqa: E731
1454
- # XXX: keymap is not used, remove it?
1455
- # keymap = {
1456
- # "BINDIR": (fbuildroot,),
1457
- # "BINLIBDEST": (fbuildroot,),
1458
- # "CFLAGS": (fbuildroot, ftoolchain),
1459
- # "CPPLAGS": (fbuildroot, ftoolchain),
1460
- # "CXXFLAGS": (fbuildroot, ftoolchain),
1461
- # "datarootdir": (fbuildroot,),
1462
- # "exec_prefix": (fbuildroot,),
1463
- # "LDFLAGS": (fbuildroot, ftoolchain),
1464
- # "LDSHARED": (fbuildroot, ftoolchain),
1465
- # "LIBDEST": (fbuildroot,),
1466
- # "prefix": (fbuildroot,),
1467
- # "SCRIPTDIR": (fbuildroot,),
1468
- # }
1469
- for key in sorted(mod.build_time_vars):
1470
- val = mod.build_time_vars[key]
1471
- if isinstance(val, str):
1472
- for _ in (fbuildroot, ftoolchain):
1473
- val = _(val)
1474
- log.info("SYSCONFIG [%s] %s => %s", key, mod.build_time_vars[key], val)
1475
- data[key] = val
1476
-
1477
- with open(destfile, "w", encoding="utf8") as f:
1478
- f.write(
1479
- "# system configuration generated and used by" " the relenv at runtime\n"
1480
- )
1481
- f.write("_build_time_vars = ")
1482
- pprint.pprint(data, stream=f)
1483
- f.write(SYSCONFIGDATA)
1484
-
1485
-
1486
- def find_sysconfigdata(pymodules):
1487
- """
1488
- Find sysconfigdata directory for python installation.
1489
-
1490
- :param pymodules: Path to python modules (e.g. lib/python3.10)
1491
- :type pymodules: str
1492
-
1493
- :return: The name of the sysconig data module
1494
- :rtype: str
1495
- """
1496
- for root, dirs, files in os.walk(pymodules):
1497
- for file in files:
1498
- if file.find("sysconfigdata") > -1 and file.endswith(".py"):
1499
- return file[:-3]
1500
-
1501
-
1502
- def install_runtime(sitepackages):
1503
- """
1504
- Install a base relenv runtime.
1505
- """
1506
- relenv_pth = sitepackages / "relenv.pth"
1507
- with io.open(str(relenv_pth), "w") as fp:
1508
- fp.write(RELENV_PTH)
1509
-
1510
- # Lay down relenv.runtime, we'll pip install the rest later
1511
- relenv = sitepackages / "relenv"
1512
- os.makedirs(relenv, exist_ok=True)
1513
-
1514
- for name in [
1515
- "runtime.py",
1516
- "relocate.py",
1517
- "common.py",
1518
- "buildenv.py",
1519
- "__init__.py",
1520
- ]:
1521
- src = MODULE_DIR / name
1522
- dest = relenv / name
1523
- with io.open(src, "r") as rfp:
1524
- with io.open(dest, "w") as wfp:
1525
- wfp.write(rfp.read())
1526
-
1527
-
1528
- def finalize(env, dirs, logfp):
1529
- """
1530
- Run after we've fully built python.
1531
-
1532
- This method enhances the newly created python with Relenv's runtime hacks.
1533
-
1534
- :param env: The environment dictionary
1535
- :type env: dict
1536
- :param dirs: The working directories
1537
- :type dirs: ``relenv.build.common.Dirs``
1538
- :param logfp: A handle for the log file
1539
- :type logfp: file
1540
- """
1541
- # Run relok8 to make sure the rpaths are relocatable.
1542
- relenv.relocate.main(dirs.prefix, log_file_name=str(dirs.logs / "relocate.py.log"))
1543
- # Install relenv-sysconfigdata module
1544
- libdir = pathlib.Path(dirs.prefix) / "lib"
1545
-
1546
- def find_pythonlib(libdir):
1547
- for root, dirs, files in os.walk(libdir):
1548
- for _ in dirs:
1549
- if _.startswith("python"):
1550
- return _
1551
-
1552
- pymodules = libdir / find_pythonlib(libdir)
1553
-
1554
- # update ensurepip
1555
- update_ensurepip(pymodules)
1556
-
1557
- cwd = os.getcwd()
1558
- modname = find_sysconfigdata(pymodules)
1559
- path = sys.path
1560
- sys.path = [str(pymodules)]
1561
- try:
1562
- mod = __import__(str(modname))
1563
- finally:
1564
- os.chdir(cwd)
1565
- sys.path = path
1566
-
1567
- dest = pymodules / f"{modname}.py"
1568
- install_sysdata(mod, dest, dirs.prefix, dirs.toolchain)
1569
-
1570
- # Lay down site customize
1571
- bindir = pathlib.Path(dirs.prefix) / "bin"
1572
- sitepackages = pymodules / "site-packages"
1573
- install_runtime(sitepackages)
1574
-
1575
- # Install pip
1576
- python = dirs.prefix / "bin" / "python3"
1577
- if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]:
1578
- env["RELENV_CROSS"] = dirs.prefix
1579
- python = env["RELENV_NATIVE_PY"]
1580
- logfp.write("\nRUN ENSURE PIP\n")
1581
-
1582
- env.pop("RELENV_BUILDENV")
1583
-
1584
- runcmd(
1585
- [str(python), "-m", "ensurepip"],
1586
- env=env,
1587
- stderr=logfp,
1588
- stdout=logfp,
1589
- )
1590
-
1591
- # Fix the shebangs in the scripts python layed down. Order matters.
1592
- shebangs = [
1593
- "#!{}".format(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}"),
1594
- "#!{}".format(
1595
- bindir / f"python{env['RELENV_PY_MAJOR_VERSION'].split('.', 1)[0]}"
1596
- ),
1597
- ]
1598
- newshebang = format_shebang("/python3")
1599
- for shebang in shebangs:
1600
- log.info("Patch shebang %r with %r", shebang, newshebang)
1601
- patch_shebangs(
1602
- str(pathlib.Path(dirs.prefix) / "bin"),
1603
- shebang,
1604
- newshebang,
1605
- )
1606
-
1607
- if sys.platform == "linux":
1608
- pyconf = f"config-{env['RELENV_PY_MAJOR_VERSION']}-{env['RELENV_HOST']}"
1609
- patch_shebang(
1610
- str(pymodules / pyconf / "python-config.py"),
1611
- "#!{}".format(str(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}")),
1612
- format_shebang("../../../bin/python3"),
1613
- )
1614
-
1615
- shutil.copy(
1616
- pathlib.Path(dirs.toolchain)
1617
- / env["RELENV_HOST"]
1618
- / "sysroot"
1619
- / "lib"
1620
- / "libstdc++.so.6",
1621
- libdir,
1622
- )
1623
-
1624
- # Moved in python 3.13 or removed?
1625
- if (pymodules / "cgi.py").exists():
1626
- patch_shebang(
1627
- str(pymodules / "cgi.py"),
1628
- "#! /usr/local/bin/python",
1629
- format_shebang("../../bin/python3"),
1630
- )
1631
-
1632
- def runpip(pkg, upgrade=False):
1633
- logfp.write(f"\nRUN PIP {pkg} {upgrade}\n")
1634
- target = None
1635
- python = dirs.prefix / "bin" / "python3"
1636
- if sys.platform == LINUX:
1637
- if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]:
1638
- target = pymodules / "site-packages"
1639
- python = env["RELENV_NATIVE_PY"]
1640
- cmd = [
1641
- str(python),
1642
- "-m",
1643
- "pip",
1644
- "install",
1645
- str(pkg),
1646
- ]
1647
- if upgrade:
1648
- cmd.append("--upgrade")
1649
- if target:
1650
- cmd.append("--target={}".format(target))
1651
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
1652
-
1653
- runpip("wheel")
1654
- # This needs to handle running from the root of the git repo and also from
1655
- # an installed Relenv
1656
- if (MODULE_DIR.parent / ".git").exists():
1657
- runpip(MODULE_DIR.parent, upgrade=True)
1658
- else:
1659
- runpip("relenv", upgrade=True)
1660
- globs = [
1661
- "/bin/python*",
1662
- "/bin/pip*",
1663
- "/bin/relenv",
1664
- "/lib/python*/ensurepip/*",
1665
- "/lib/python*/site-packages/*",
1666
- "/include/*",
1667
- "*.so",
1668
- "/lib/*.so.*",
1669
- "*.py",
1670
- # Mac specific, factor this out
1671
- "*.dylib",
1672
- ]
1673
- archive = f"{ dirs.prefix }.tar.xz"
1674
- log.info("Archive is %s", archive)
1675
- with tarfile.open(archive, mode="w:xz") as fp:
1676
- create_archive(fp, dirs.prefix, globs, logfp)
1677
-
1678
-
1679
- def create_archive(tarfp, toarchive, globs, logfp=None):
1680
- """
1681
- Create an archive.
1682
-
1683
- :param tarfp: A pointer to the archive to be created
1684
- :type tarfp: file
1685
- :param toarchive: The path to the directory to archive
1686
- :type toarchive: str
1687
- :param globs: A list of filtering patterns to match against files to be added
1688
- :type globs: list
1689
- :param logfp: A pointer to the log file
1690
- :type logfp: file
1691
- """
1692
- log.debug("Current directory %s", os.getcwd())
1693
- log.debug("Creating archive %s", tarfp.name)
1694
- for root, _dirs, files in os.walk(toarchive):
1695
- relroot = pathlib.Path(root).relative_to(toarchive)
1696
- for f in files:
1697
- relpath = relroot / f
1698
- matches = False
1699
- for g in globs:
1700
- if glob.fnmatch.fnmatch("/" / relpath, g):
1701
- matches = True
1702
- break
1703
- if matches:
1704
- log.debug("Adding %s", relpath)
1705
- tarfp.add(relpath, relpath, recursive=False)
1706
- else:
1707
- log.debug("Skipping %s", relpath)