relenv 0.21.1__py3-none-any.whl → 0.22.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49)
  1. relenv/__init__.py +14 -2
  2. relenv/__main__.py +12 -6
  3. relenv/_resources/xz/config.h +148 -0
  4. relenv/_resources/xz/readme.md +4 -0
  5. relenv/build/__init__.py +28 -30
  6. relenv/build/common/__init__.py +50 -0
  7. relenv/build/common/_sysconfigdata_template.py +72 -0
  8. relenv/build/common/builder.py +907 -0
  9. relenv/build/common/builders.py +163 -0
  10. relenv/build/common/download.py +324 -0
  11. relenv/build/common/install.py +609 -0
  12. relenv/build/common/ui.py +432 -0
  13. relenv/build/darwin.py +128 -14
  14. relenv/build/linux.py +296 -78
  15. relenv/build/windows.py +259 -44
  16. relenv/buildenv.py +48 -17
  17. relenv/check.py +10 -5
  18. relenv/common.py +499 -163
  19. relenv/create.py +147 -7
  20. relenv/fetch.py +16 -4
  21. relenv/manifest.py +15 -7
  22. relenv/python-versions.json +329 -0
  23. relenv/pyversions.py +817 -30
  24. relenv/relocate.py +101 -55
  25. relenv/runtime.py +452 -253
  26. relenv/toolchain.py +9 -3
  27. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/METADATA +1 -1
  28. relenv-0.22.0.dist-info/RECORD +48 -0
  29. tests/__init__.py +2 -0
  30. tests/_pytest_typing.py +45 -0
  31. tests/conftest.py +42 -36
  32. tests/test_build.py +426 -9
  33. tests/test_common.py +311 -48
  34. tests/test_create.py +149 -6
  35. tests/test_downloads.py +19 -15
  36. tests/test_fips_photon.py +6 -3
  37. tests/test_module_imports.py +44 -0
  38. tests/test_pyversions_runtime.py +177 -0
  39. tests/test_relocate.py +45 -39
  40. tests/test_relocate_module.py +257 -0
  41. tests/test_runtime.py +1802 -6
  42. tests/test_verify_build.py +500 -34
  43. relenv/build/common.py +0 -1609
  44. relenv-0.21.1.dist-info/RECORD +0 -35
  45. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/WHEEL +0 -0
  46. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/entry_points.txt +0 -0
  47. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/licenses/LICENSE.md +0 -0
  48. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/licenses/NOTICE +0 -0
  49. {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/top_level.txt +0 -0
relenv/build/common.py DELETED
@@ -1,1609 +0,0 @@
1
- # Copyright 2022-2025 Broadcom.
2
- # SPDX-License-Identifier: Apache-2.0
3
- """
4
- Build process common methods.
5
- """
6
- import logging
7
- import os.path
8
- import hashlib
9
- import pathlib
10
- import glob
11
- import shutil
12
- import tarfile
13
- import tempfile
14
- import time
15
- import subprocess
16
- import random
17
- import sys
18
- import io
19
- import os
20
- import multiprocessing
21
- import pprint
22
- import re
23
- from html.parser import HTMLParser
24
-
25
-
26
- from relenv.common import (
27
- DATA_DIR,
28
- LINUX,
29
- MODULE_DIR,
30
- RelenvException,
31
- build_arch,
32
- download_url,
33
- extract_archive,
34
- format_shebang,
35
- get_download_location,
36
- get_toolchain,
37
- get_triplet,
38
- runcmd,
39
- work_dirs,
40
- fetch_url,
41
- )
42
- import relenv.relocate
43
-
44
-
45
- CHECK_VERSIONS_SUPPORT = True
46
- try:
47
- from packaging.version import InvalidVersion, parse
48
- from looseversion import LooseVersion
49
- except ImportError:
50
- CHECK_VERSIONS_SUPPORT = False
51
-
52
- log = logging.getLogger(__name__)
53
-
54
-
55
- GREEN = "\033[0;32m"
56
- YELLOW = "\033[1;33m"
57
- RED = "\033[0;31m"
58
- END = "\033[0m"
59
- MOVEUP = "\033[F"
60
-
61
-
62
- CICD = "CI" in os.environ
63
- NODOWLOAD = False
64
-
65
-
66
- RELENV_PTH = (
67
- "import os; "
68
- "import sys; "
69
- "from importlib import util; "
70
- "from pathlib import Path; "
71
- "spec = util.spec_from_file_location("
72
- "'relenv.runtime', str(Path(__file__).parent / 'site-packages' / 'relenv' / 'runtime.py')"
73
- "); "
74
- "mod = util.module_from_spec(spec); "
75
- "sys.modules['relenv.runtime'] = mod; "
76
- "spec.loader.exec_module(mod); mod.bootstrap();"
77
- )
78
-
79
-
80
- SYSCONFIGDATA = """
81
- import pathlib, sys, platform, os, logging
82
-
83
- log = logging.getLogger(__name__)
84
-
85
- def build_arch():
86
- machine = platform.machine()
87
- return machine.lower()
88
-
89
- def get_triplet(machine=None, plat=None):
90
- if not plat:
91
- plat = sys.platform
92
- if not machine:
93
- machine = build_arch()
94
- if plat == "darwin":
95
- return f"{machine}-macos"
96
- elif plat == "win32":
97
- return f"{machine}-win"
98
- elif plat == "linux":
99
- return f"{machine}-linux-gnu"
100
- else:
101
- raise RelenvException("Unknown platform {}".format(platform))
102
-
103
-
104
-
105
- pydir = pathlib.Path(__file__).resolve().parent
106
- if sys.platform == "win32":
107
- DEFAULT_DATA_DIR = pathlib.Path.home() / "AppData" / "Local" / "relenv"
108
- else:
109
- DEFAULT_DATA_DIR = pathlib.Path.home() / ".local" / "relenv"
110
-
111
- if "RELENV_DATA" in os.environ:
112
- DATA_DIR = pathlib.Path(os.environ["RELENV_DATA"]).resolve()
113
- else:
114
- DATA_DIR = DEFAULT_DATA_DIR
115
-
116
- buildroot = pydir.parent.parent
117
-
118
- toolchain = DATA_DIR / "toolchain" / get_triplet()
119
-
120
- build_time_vars = {}
121
- for key in _build_time_vars:
122
- val = _build_time_vars[key]
123
- orig = val
124
- if isinstance(val, str):
125
- val = val.format(
126
- BUILDROOT=buildroot,
127
- TOOLCHAIN=toolchain,
128
- )
129
- build_time_vars[key] = val
130
- """
131
-
132
-
133
- def print_ui(events, processes, fails, flipstat=None):
134
- """
135
- Prints the UI during the relenv building process.
136
-
137
- :param events: A dictionary of events that are updated during the build process
138
- :type events: dict
139
- :param processes: A dictionary of build processes
140
- :type processes: dict
141
- :param fails: A list of processes that have failed
142
- :type fails: list
143
- :param flipstat: A dictionary of process statuses, defaults to {}
144
- :type flipstat: dict, optional
145
- """
146
- if flipstat is None:
147
- flipstat = {}
148
- if CICD:
149
- sys.stdout.flush()
150
- return
151
- uiline = []
152
- for name in events:
153
- if not events[name].is_set():
154
- status = " {}.".format(YELLOW)
155
- elif name in processes:
156
- now = time.time()
157
- if name not in flipstat:
158
- flipstat[name] = (0, now)
159
- if flipstat[name][1] < now:
160
- flipstat[name] = (1 - flipstat[name][0], now + random.random())
161
- status = " {}{}".format(GREEN, " " if flipstat[name][0] == 1 else ".")
162
- elif name in fails:
163
- status = " {}\u2718".format(RED)
164
- else:
165
- status = " {}\u2718".format(GREEN)
166
- uiline.append(status)
167
- uiline.append(" " + END)
168
- sys.stdout.write("\r")
169
- sys.stdout.write("".join(uiline))
170
- sys.stdout.flush()
171
-
172
-
173
- def verify_checksum(file, checksum):
174
- """
175
- Verify the checksum of a file.
176
-
177
- :param file: The path to the file to check.
178
- :type file: str
179
- :param checksum: The checksum to verify against
180
- :type checksum: str
181
-
182
- :raises RelenvException: If the checksum verification failed
183
-
184
- :return: True if it succeeded, or False if the checksum was None
185
- :rtype: bool
186
- """
187
- if checksum is None:
188
- log.error("Can't verify checksum because none was given")
189
- return False
190
- with open(file, "rb") as fp:
191
- file_checksum = hashlib.sha1(fp.read()).hexdigest()
192
- if checksum != file_checksum:
193
- raise RelenvException(
194
- f"sha1 checksum verification failed. expected={checksum} found={file_checksum}"
195
- )
196
- return True
197
-
198
-
199
- def all_dirs(root, recurse=True):
200
- """
201
- Get all directories under and including the given root.
202
-
203
- :param root: The root directory to traverse
204
- :type root: str
205
- :param recurse: Whether to recursively search for directories, defaults to True
206
- :type recurse: bool, optional
207
-
208
- :return: A list of directories found
209
- :rtype: list
210
- """
211
- paths = [root]
212
- for root, dirs, files in os.walk(root):
213
- for name in dirs:
214
- paths.append(os.path.join(root, name))
215
- return paths
216
-
217
-
218
- def populate_env(dirs, env):
219
- pass
220
-
221
-
222
- def build_default(env, dirs, logfp):
223
- """
224
- The default build function if none is given during the build process.
225
-
226
- :param env: The environment dictionary
227
- :type env: dict
228
- :param dirs: The working directories
229
- :type dirs: ``relenv.build.common.Dirs``
230
- :param logfp: A handle for the log file
231
- :type logfp: file
232
- """
233
- cmd = [
234
- "./configure",
235
- "--prefix={}".format(dirs.prefix),
236
- ]
237
- if env["RELENV_HOST"].find("linux") > -1:
238
- cmd += [
239
- "--build={}".format(env["RELENV_BUILD"]),
240
- "--host={}".format(env["RELENV_HOST"]),
241
- ]
242
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
243
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
244
- runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp)
245
-
246
-
247
- def build_openssl_fips(env, dirs, logfp):
248
- return build_openssl(env, dirs, logfp, fips=True)
249
-
250
-
251
- def build_openssl(env, dirs, logfp, fips=False):
252
- """
253
- Build openssl.
254
-
255
- :param env: The environment dictionary
256
- :type env: dict
257
- :param dirs: The working directories
258
- :type dirs: ``relenv.build.common.Dirs``
259
- :param logfp: A handle for the log file
260
- :type logfp: file
261
- """
262
- arch = "aarch64"
263
- if sys.platform == "darwin":
264
- plat = "darwin64"
265
- if env["RELENV_HOST_ARCH"] == "x86_64":
266
- arch = "x86_64-cc"
267
- elif env["RELENV_HOST_ARCH"] == "arm64":
268
- arch = "arm64-cc"
269
- else:
270
- raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}")
271
- extended_cmd = []
272
- else:
273
- plat = "linux"
274
- if env["RELENV_HOST_ARCH"] == "x86_64":
275
- arch = "x86_64"
276
- elif env["RELENV_HOST_ARCH"] == "aarch64":
277
- arch = "aarch64"
278
- else:
279
- raise RelenvException(f"Unable to build {env['RELENV_HOST_ARCH']}")
280
- extended_cmd = [
281
- "-Wl,-z,noexecstack",
282
- ]
283
- if fips:
284
- extended_cmd.append("enable-fips")
285
- cmd = [
286
- "./Configure",
287
- f"{plat}-{arch}",
288
- f"--prefix={dirs.prefix}",
289
- "--openssldir=/etc/ssl",
290
- "--libdir=lib",
291
- "--api=1.1.1",
292
- "--shared",
293
- "--with-rand-seed=os,egd",
294
- "enable-md2",
295
- "enable-egd",
296
- "no-idea",
297
- ]
298
- cmd.extend(extended_cmd)
299
- runcmd(
300
- cmd,
301
- env=env,
302
- stderr=logfp,
303
- stdout=logfp,
304
- )
305
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
306
- if fips:
307
- shutil.copy(
308
- pathlib.Path("providers") / "fips.so",
309
- pathlib.Path(dirs.prefix) / "lib" / "ossl-modules",
310
- )
311
- else:
312
- runcmd(["make", "install_sw"], env=env, stderr=logfp, stdout=logfp)
313
-
314
-
315
- def build_sqlite(env, dirs, logfp):
316
- """
317
- Build sqlite.
318
-
319
- :param env: The environment dictionary
320
- :type env: dict
321
- :param dirs: The working directories
322
- :type dirs: ``relenv.build.common.Dirs``
323
- :param logfp: A handle for the log file
324
- :type logfp: file
325
- """
326
- # extra_cflags=('-Os '
327
- # '-DSQLITE_ENABLE_FTS5 '
328
- # '-DSQLITE_ENABLE_FTS4 '
329
- # '-DSQLITE_ENABLE_FTS3_PARENTHESIS '
330
- # '-DSQLITE_ENABLE_JSON1 '
331
- # '-DSQLITE_ENABLE_RTREE '
332
- # '-DSQLITE_TCL=0 '
333
- # )
334
- # configure_pre=[
335
- # '--enable-threadsafe',
336
- # '--enable-shared=no',
337
- # '--enable-static=yes',
338
- # '--disable-readline',
339
- # '--disable-dependency-tracking',
340
- # ]
341
- cmd = [
342
- "./configure",
343
- # "--with-shared",
344
- # "--without-static",
345
- "--enable-threadsafe",
346
- "--disable-readline",
347
- "--disable-dependency-tracking",
348
- "--prefix={}".format(dirs.prefix),
349
- # "--enable-add-ons=nptl,ports",
350
- ]
351
- if env["RELENV_HOST"].find("linux") > -1:
352
- cmd += [
353
- "--build={}".format(env["RELENV_BUILD_ARCH"]),
354
- "--host={}".format(env["RELENV_HOST"]),
355
- ]
356
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
357
- runcmd(["make", "-j8"], env=env, stderr=logfp, stdout=logfp)
358
- runcmd(["make", "install"], env=env, stderr=logfp, stdout=logfp)
359
-
360
-
361
- def tarball_version(href):
362
- if href.endswith("tar.gz"):
363
- try:
364
- x = href.split("-", 1)[1][:-7]
365
- if x != "latest":
366
- return x
367
- except IndexError:
368
- return None
369
-
370
-
371
- def sqlite_version(href):
372
- if "releaselog" in href:
373
- link = href.split("/")[1][:-5]
374
- return "{:d}{:02d}{:02d}00".format(*[int(_) for _ in link.split("_")])
375
-
376
-
377
- def github_version(href):
378
- if "tag/" in href:
379
- return href.split("/v")[-1]
380
-
381
-
382
- def krb_version(href):
383
- if re.match(r"\d\.\d\d/", href):
384
- return href[:-1]
385
-
386
-
387
- def python_version(href):
388
- if re.match(r"(\d+\.)+\d/", href):
389
- return href[:-1]
390
-
391
-
392
- def uuid_version(href):
393
- if "download" in href and "latest" not in href:
394
- return href[:-16].rsplit("/")[-1].replace("libuuid-", "")
395
-
396
-
397
- def parse_links(text):
398
- class HrefParser(HTMLParser):
399
- hrefs = []
400
-
401
- def handle_starttag(self, tag, attrs):
402
- if tag == "a":
403
- link = dict(attrs).get("href", "")
404
- if link:
405
- self.hrefs.append(link)
406
-
407
- parser = HrefParser()
408
- parser.feed(text)
409
- return parser.hrefs
410
-
411
-
412
- def check_files(name, location, func, current):
413
- fp = io.BytesIO()
414
- fetch_url(location, fp)
415
- fp.seek(0)
416
- text = fp.read().decode()
417
- loose = False
418
- try:
419
- current = parse(current)
420
- except InvalidVersion:
421
- current = LooseVersion(current)
422
- loose = True
423
-
424
- versions = []
425
- for _ in parse_links(text):
426
- version = func(_)
427
- if version:
428
- if loose:
429
- versions.append(LooseVersion(version))
430
- else:
431
- try:
432
- versions.append(parse(version))
433
- except InvalidVersion:
434
- pass
435
-
436
- versions.sort()
437
- compare_versions(name, current, versions)
438
-
439
-
440
- def compare_versions(name, current, versions):
441
- for version in versions:
442
- try:
443
- if version > current:
444
- print(f"Found new version of {name} {version} > {current}")
445
- except TypeError:
446
- print(f"Unable to compare versions {version}")
447
-
448
-
449
- class Download:
450
- """
451
- A utility that holds information about content to be downloaded.
452
-
453
- :param name: The name of the download
454
- :type name: str
455
- :param url: The url of the download
456
- :type url: str
457
- :param signature: The signature of the download, defaults to None
458
- :type signature: str
459
- :param destination: The path to download the file to
460
- :type destination: str
461
- :param version: The version of the content to download
462
- :type version: str
463
- :param checksum: The sha1 sum of the download
464
- :type checksum: str
465
-
466
- """
467
-
468
- def __init__(
469
- self,
470
- name,
471
- url,
472
- fallback_url=None,
473
- signature=None,
474
- destination="",
475
- version="",
476
- checksum=None,
477
- checkfunc=None,
478
- checkurl=None,
479
- ):
480
- self.name = name
481
- self.url_tpl = url
482
- self.fallback_url_tpl = fallback_url
483
- self.signature_tpl = signature
484
- self.destination = destination
485
- self.version = version
486
- self.checksum = checksum
487
- self.checkfunc = checkfunc
488
- self.checkurl = checkurl
489
-
490
- def copy(self):
491
- return Download(
492
- self.name,
493
- self.url_tpl,
494
- self.fallback_url_tpl,
495
- self.signature_tpl,
496
- self.destination,
497
- self.version,
498
- self.checksum,
499
- self.checkfunc,
500
- self.checkurl,
501
- )
502
-
503
- @property
504
- def url(self):
505
- return self.url_tpl.format(version=self.version)
506
-
507
- @property
508
- def fallback_url(self):
509
- if self.fallback_url_tpl:
510
- return self.fallback_url_tpl.format(version=self.version)
511
-
512
- @property
513
- def signature_url(self):
514
- return self.signature_tpl.format(version=self.version)
515
-
516
- @property
517
- def filepath(self):
518
- _, name = self.url.rsplit("/", 1)
519
- return pathlib.Path(self.destination) / name
520
-
521
- @property
522
- def formatted_url(self):
523
- return self.url.format(version=self.version)
524
-
525
- def fetch_file(self):
526
- """
527
- Download the file.
528
-
529
- :return: The path to the downloaded content, and whether it was downloaded.
530
- :rtype: tuple(str, bool)
531
- """
532
- try:
533
- return download_url(self.url, self.destination, CICD), True
534
- except Exception as exc:
535
- if self.fallback_url:
536
- print(f"Download failed {self.url} ({exc}); trying fallback url")
537
- return download_url(self.fallback_url, self.destination, CICD), True
538
- raise
539
-
540
- def fetch_signature(self, version):
541
- """
542
- Download the file signature.
543
-
544
- :return: The path to the downloaded signature.
545
- :rtype: str
546
- """
547
- return download_url(self.signature_url, self.destination, CICD)
548
-
549
- def exists(self):
550
- """
551
- True when the artifact already exists on disk.
552
-
553
- :return: True when the artifact already exists on disk
554
- :rtype: bool
555
- """
556
- return self.filepath.exists()
557
-
558
- def valid_hash(self):
559
- pass
560
-
561
- @staticmethod
562
- def validate_signature(archive, signature):
563
- """
564
- True when the archive's signature is valid.
565
-
566
- :param archive: The path to the archive to validate
567
- :type archive: str
568
- :param signature: The path to the signature to validate against
569
- :type signature: str
570
-
571
- :return: True if it validated properly, else False
572
- :rtype: bool
573
- """
574
- if signature is None:
575
- log.error("Can't check signature because none was given")
576
- return False
577
- try:
578
- runcmd(
579
- ["gpg", "--verify", signature, archive],
580
- stderr=subprocess.PIPE,
581
- stdout=subprocess.PIPE,
582
- )
583
- return True
584
- except RelenvException as exc:
585
- log.error("Signature validation failed on %s: %s", archive, exc)
586
- return False
587
-
588
- @staticmethod
589
- def validate_checksum(archive, checksum):
590
- """
591
- True when the archive matches the sha1 hash.
592
-
593
- :param archive: The path to the archive to validate
594
- :type archive: str
595
- :param checksum: The sha1 sum to validate against
596
- :type checksum: str
597
- :return: True if the sums matched, else False
598
- :rtype: bool
599
- """
600
- try:
601
- verify_checksum(archive, checksum)
602
- return True
603
- except RelenvException as exc:
604
- log.error("sha1 validation failed on %s: %s", archive, exc)
605
- return False
606
-
607
- def __call__(self, force_download=False, show_ui=False, exit_on_failure=False):
608
- """
609
- Downloads the url and validates the signature and sha1 sum.
610
-
611
- :return: Whether or not validation succeeded
612
- :rtype: bool
613
- """
614
- os.makedirs(self.filepath.parent, exist_ok=True)
615
-
616
- downloaded = False
617
- if force_download:
618
- _, downloaded = self.fetch_file()
619
- else:
620
- file_is_valid = False
621
- dest = get_download_location(self.url, self.destination)
622
- if self.checksum and os.path.exists(dest):
623
- file_is_valid = self.validate_checksum(dest, self.checksum)
624
- if file_is_valid:
625
- log.debug("%s already downloaded, skipping.", self.url)
626
- else:
627
- _, downloaded = self.fetch_file()
628
- valid = True
629
- if downloaded:
630
- if self.signature_tpl is not None:
631
- sig, _ = self.fetch_signature()
632
- valid_sig = self.validate_signature(self.filepath, sig)
633
- valid = valid and valid_sig
634
- if self.checksum is not None:
635
- valid_checksum = self.validate_checksum(self.filepath, self.checksum)
636
- valid = valid and valid_checksum
637
-
638
- if not valid:
639
- log.warning("Checksum did not match %s: %s", self.name, self.checksum)
640
- if show_ui:
641
- sys.stderr.write(
642
- f"\nChecksum did not match {self.name}: {self.checksum}\n"
643
- )
644
- sys.stderr.flush()
645
- if exit_on_failure and not valid:
646
- sys.exit(1)
647
- return valid
648
-
649
- def check_version(self):
650
- if self.checkurl:
651
- url = self.checkurl
652
- else:
653
- url = self.url.rsplit("/", 1)[0]
654
- check_files(self.name, url, self.checkfunc, self.version)
655
-
656
-
657
- class Dirs:
658
- """
659
- A container for directories during build time.
660
-
661
- :param dirs: A collection of working directories
662
- :type dirs: ``relenv.common.WorkDirs``
663
- :param name: The name of this collection
664
- :type name: str
665
- :param arch: The architecture being worked with
666
- :type arch: str
667
- """
668
-
669
- def __init__(self, dirs, name, arch, version):
670
- # XXX name is specific to a step whereas everything
671
- # else here is generalized to the entire build
672
- self.name = name
673
- self.version = version
674
- self.arch = arch
675
- self.root = dirs.root
676
- self.build = dirs.build
677
- self.downloads = dirs.download
678
- self.logs = dirs.logs
679
- self.sources = dirs.src
680
- self.tmpbuild = tempfile.mkdtemp(prefix="{}_build".format(name))
681
-
682
- @property
683
- def toolchain(self):
684
- if sys.platform == "darwin":
685
- return get_toolchain(root=self.root)
686
- elif sys.platform == "win32":
687
- return get_toolchain(root=self.root)
688
- else:
689
- return get_toolchain(self.arch, self.root)
690
-
691
- @property
692
- def _triplet(self):
693
- if sys.platform == "darwin":
694
- return "{}-macos".format(self.arch)
695
- elif sys.platform == "win32":
696
- return "{}-win".format(self.arch)
697
- else:
698
- return "{}-linux-gnu".format(self.arch)
699
-
700
- @property
701
- def prefix(self):
702
- return self.build / f"{self.version}-{self._triplet}"
703
-
704
- def __getstate__(self):
705
- """
706
- Return an object used for pickling.
707
-
708
- :return: The picklable state
709
- """
710
- return {
711
- "name": self.name,
712
- "arch": self.arch,
713
- "root": self.root,
714
- "build": self.build,
715
- "downloads": self.downloads,
716
- "logs": self.logs,
717
- "sources": self.sources,
718
- "tmpbuild": self.tmpbuild,
719
- }
720
-
721
- def __setstate__(self, state):
722
- """
723
- Unwrap the object returned from unpickling.
724
-
725
- :param state: The state to unpickle
726
- :type state: dict
727
- """
728
- self.name = state["name"]
729
- self.arch = state["arch"]
730
- self.root = state["root"]
731
- self.downloads = state["downloads"]
732
- self.logs = state["logs"]
733
- self.sources = state["sources"]
734
- self.build = state["build"]
735
- self.tmpbuild = state["tmpbuild"]
736
-
737
- def to_dict(self):
738
- """
739
- Get a dictionary representation of the directories in this collection.
740
-
741
- :return: A dictionary of all the directories
742
- :rtype: dict
743
- """
744
- return {
745
- x: getattr(self, x)
746
- for x in [
747
- "root",
748
- "prefix",
749
- "downloads",
750
- "logs",
751
- "sources",
752
- "build",
753
- "toolchain",
754
- ]
755
- }
756
-
757
-
758
- class Builds:
759
- """
760
- Collection of builds.
761
- """
762
-
763
- def __init__(self):
764
- self.builds = {}
765
-
766
- def add(self, platform, *args, **kwargs):
767
- if "builder" in kwargs:
768
- build = kwargs.pop("builder")
769
- if args or kwargs:
770
- raise RuntimeError(
771
- "builder keyword can not be used with other kwargs or args"
772
- )
773
- else:
774
- build = Builder(*args, **kwargs)
775
- if platform not in self.builds:
776
- self.builds[platform] = build
777
- else:
778
- self.builds[platform] = build
779
- return build
780
-
781
-
782
- builds = Builds()
783
-
784
-
785
- class Builder:
786
- """
787
- Utility that handles the build process.
788
-
789
- :param root: The root of the working directories for this build
790
- :type root: str
791
- :param recipies: The instructions for the build steps
792
- :type recipies: list
793
- :param build_default: The default build function, defaults to ``build_default``
794
- :type build_default: types.FunctionType
795
- :param populate_env: The default function to populate the build environment, defaults to ``populate_env``
796
- :type populate_env: types.FunctionType
797
- :param force_download: If True, forces downloading the archives even if they exist, defaults to False
798
- :type force_download: bool
799
- :param arch: The architecture being built
800
- :type arch: str
801
- """
802
-
803
- def __init__(
804
- self,
805
- root=None,
806
- recipies=None,
807
- build_default=build_default,
808
- populate_env=populate_env,
809
- arch="x86_64",
810
- version="",
811
- ):
812
- self.root = root
813
- self.dirs = work_dirs(root)
814
- self.build_arch = build_arch()
815
- self.build_triplet = get_triplet(self.build_arch)
816
- self.arch = arch
817
- self.sources = self.dirs.src
818
- self.downloads = self.dirs.download
819
-
820
- if recipies is None:
821
- self.recipies = {}
822
- else:
823
- self.recipies = recipies
824
-
825
- self.build_default = build_default
826
- self.populate_env = populate_env
827
- self.version = version
828
- self.toolchains = get_toolchain(root=self.dirs.root)
829
- self.set_arch(self.arch)
830
-
831
- def copy(self, version, checksum):
832
- recipies = {}
833
- for name in self.recipies:
834
- _ = self.recipies[name]
835
- recipies[name] = {
836
- "build_func": _["build_func"],
837
- "wait_on": _["wait_on"],
838
- "download": _["download"].copy() if _["download"] else None,
839
- }
840
- build = Builder(
841
- self.root,
842
- recipies,
843
- self.build_default,
844
- self.populate_env,
845
- self.arch,
846
- version,
847
- )
848
- build.recipies["python"]["download"].version = version
849
- build.recipies["python"]["download"].checksum = checksum
850
- return build
851
-
852
- def set_arch(self, arch):
853
- """
854
- Set the architecture for the build.
855
-
856
- :param arch: The arch to build
857
- :type arch: str
858
- """
859
- self.arch = arch
860
- if sys.platform in ["darwin", "win32"]:
861
- self.toolchain = None
862
- else:
863
- self.toolchain = get_toolchain(self.arch, self.dirs.root)
864
-
865
- @property
866
- def triplet(self):
867
- return get_triplet(self.arch)
868
-
869
- @property
870
- def prefix(self):
871
- return self.dirs.build / f"{self.version}-{self.triplet}"
872
-
873
- @property
874
- def _triplet(self):
875
- if sys.platform == "darwin":
876
- return "{}-macos".format(self.arch)
877
- elif sys.platform == "win32":
878
- return "{}-win".format(self.arch)
879
- else:
880
- return "{}-linux-gnu".format(self.arch)
881
-
882
- def add(self, name, build_func=None, wait_on=None, download=None):
883
- """
884
- Add a step to the build process.
885
-
886
- :param name: The name of the step
887
- :type name: str
888
- :param build_func: The function that builds this step, defaults to None
889
- :type build_func: types.FunctionType, optional
890
- :param wait_on: Processes to wait on before running this step, defaults to None
891
- :type wait_on: list, optional
892
- :param download: A dictionary of download information, defaults to None
893
- :type download: dict, optional
894
- """
895
- if wait_on is None:
896
- wait_on = []
897
- if build_func is None:
898
- build_func = self.build_default
899
- if download is not None:
900
- download = Download(name, destination=self.downloads, **download)
901
- self.recipies[name] = {
902
- "build_func": build_func,
903
- "wait_on": wait_on,
904
- "download": download,
905
- }
906
-
907
- def run(
908
- self, name, event, build_func, download, show_ui=False, log_level="WARNING"
909
- ):
910
- """
911
- Run a build step.
912
-
913
- :param name: The name of the step to run
914
- :type name: str
915
- :param event: An event to track this process' status and alert waiting steps
916
- :type event: ``multiprocessing.Event``
917
- :param build_func: The function to use to build this step
918
- :type build_func: types.FunctionType
919
- :param download: The ``Download`` instance for this step
920
- :type download: ``Download``
921
-
922
- :return: The output of the build function
923
- """
924
- root_log = logging.getLogger(None)
925
- if sys.platform == "win32":
926
- if not show_ui:
927
- handler = logging.StreamHandler()
928
- handler.setLevel(logging.getLevelName(log_level))
929
- root_log.addHandler(handler)
930
-
931
- for handler in root_log.handlers:
932
- if isinstance(handler, logging.StreamHandler):
933
- handler.setFormatter(
934
- logging.Formatter(f"%(asctime)s {name} %(message)s")
935
- )
936
-
937
- if not self.dirs.build.exists():
938
- os.makedirs(self.dirs.build, exist_ok=True)
939
-
940
- dirs = Dirs(self.dirs, name, self.arch, self.version)
941
- os.makedirs(dirs.sources, exist_ok=True)
942
- os.makedirs(dirs.logs, exist_ok=True)
943
- os.makedirs(dirs.prefix, exist_ok=True)
944
-
945
- while event.is_set() is False:
946
- time.sleep(0.3)
947
-
948
- logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w")
949
- handler = logging.FileHandler(dirs.logs / f"{name}.log")
950
- root_log.addHandler(handler)
951
- root_log.setLevel(logging.NOTSET)
952
-
953
- # DEBUG: Uncomment to debug
954
- # logfp = sys.stdout
955
-
956
- cwd = os.getcwd()
957
- if download:
958
- extract_archive(dirs.sources, str(download.filepath))
959
- dirs.source = dirs.sources / download.filepath.name.split(".tar")[0]
960
- os.chdir(dirs.source)
961
- else:
962
- os.chdir(dirs.prefix)
963
-
964
- if sys.platform == "win32":
965
- env = os.environ.copy()
966
- else:
967
- env = {
968
- "PATH": os.environ["PATH"],
969
- }
970
- env["RELENV_DEBUG"] = "1"
971
- env["RELENV_BUILDENV"] = "1"
972
- env["RELENV_HOST"] = self.triplet
973
- env["RELENV_HOST_ARCH"] = self.arch
974
- env["RELENV_BUILD"] = self.build_triplet
975
- env["RELENV_BUILD_ARCH"] = self.build_arch
976
- env["RELENV_PY_VERSION"] = self.recipies["python"]["download"].version
977
- env["RELENV_PY_MAJOR_VERSION"] = env["RELENV_PY_VERSION"].rsplit(".", 1)[0]
978
- if "RELENV_DATA" in os.environ:
979
- env["RELENV_DATA"] = os.environ["RELENV_DATA"]
980
- if self.build_arch != self.arch:
981
- native_root = DATA_DIR / "native"
982
- env["RELENV_NATIVE_PY"] = str(native_root / "bin" / "python3")
983
-
984
- self.populate_env(env, dirs)
985
-
986
- _ = dirs.to_dict()
987
- for k in _:
988
- log.info("Directory %s %s", k, _[k])
989
- for k in env:
990
- log.info("Environment %s %s", k, env[k])
991
- try:
992
- return build_func(env, dirs, logfp)
993
- except Exception:
994
- log.exception("Build failure")
995
- sys.exit(1)
996
- finally:
997
- os.chdir(cwd)
998
- log.removeHandler(handler)
999
- logfp.close()
1000
-
1001
- def cleanup(self):
1002
- """
1003
- Clean up the build directories.
1004
- """
1005
- shutil.rmtree(self.prefix)
1006
-
1007
- def clean(self):
1008
- """
1009
- Completely clean up the remnants of a relenv build.
1010
- """
1011
- # Clean directories
1012
- for _ in [self.prefix, self.sources]:
1013
- try:
1014
- shutil.rmtree(_)
1015
- except PermissionError:
1016
- sys.stderr.write(f"Unable to remove directory: {_}")
1017
- except FileNotFoundError:
1018
- pass
1019
- # Clean files
1020
- archive = f"{self.prefix}.tar.xz"
1021
- for _ in [archive]:
1022
- try:
1023
- os.remove(_)
1024
- except FileNotFoundError:
1025
- pass
1026
-
1027
- def download_files(self, steps=None, force_download=False, show_ui=False):
1028
- """
1029
- Download all of the needed archives.
1030
-
1031
- :param steps: The steps to download archives for, defaults to None
1032
- :type steps: list, optional
1033
- """
1034
- if steps is None:
1035
- steps = list(self.recipies)
1036
-
1037
- fails = []
1038
- processes = {}
1039
- events = {}
1040
- if show_ui:
1041
- sys.stdout.write("Starting downloads \n")
1042
- log.info("Starting downloads")
1043
- if show_ui:
1044
- print_ui(events, processes, fails)
1045
- for name in steps:
1046
- download = self.recipies[name]["download"]
1047
- if download is None:
1048
- continue
1049
- event = multiprocessing.Event()
1050
- event.set()
1051
- events[name] = event
1052
- proc = multiprocessing.Process(
1053
- name=name,
1054
- target=download,
1055
- kwargs={
1056
- "force_download": force_download,
1057
- "show_ui": show_ui,
1058
- "exit_on_failure": True,
1059
- },
1060
- )
1061
- proc.start()
1062
- processes[name] = proc
1063
-
1064
- while processes:
1065
- for proc in list(processes.values()):
1066
- proc.join(0.3)
1067
- # DEBUG: Comment to debug
1068
- if show_ui:
1069
- print_ui(events, processes, fails)
1070
- if proc.exitcode is None:
1071
- continue
1072
- processes.pop(proc.name)
1073
- if proc.exitcode != 0:
1074
- fails.append(proc.name)
1075
- if show_ui:
1076
- print_ui(events, processes, fails)
1077
- sys.stdout.write("\n")
1078
- if fails and False:
1079
- if show_ui:
1080
- print_ui(events, processes, fails)
1081
- sys.stderr.write("The following failures were reported\n")
1082
- for fail in fails:
1083
- sys.stderr.write(fail + "\n")
1084
- sys.stderr.flush()
1085
- sys.exit(1)
1086
-
1087
- def build(self, steps=None, cleanup=True, show_ui=False, log_level="WARNING"):
1088
- """
1089
- Build!
1090
-
1091
- :param steps: The steps to run, defaults to None
1092
- :type steps: list, optional
1093
- :param cleanup: Whether to clean up or not, defaults to True
1094
- :type cleanup: bool, optional
1095
- """ # noqa: D400
1096
- fails = []
1097
- events = {}
1098
- waits = {}
1099
- processes = {}
1100
-
1101
- if show_ui:
1102
- sys.stdout.write("Starting builds\n")
1103
- # DEBUG: Comment to debug
1104
- print_ui(events, processes, fails)
1105
- log.info("Starting builds")
1106
-
1107
- for name in steps:
1108
- event = multiprocessing.Event()
1109
- events[name] = event
1110
- kwargs = dict(self.recipies[name])
1111
- kwargs["show_ui"] = show_ui
1112
- kwargs["log_level"] = log_level
1113
-
1114
- # Determine needed dependency recipies.
1115
- wait_on = kwargs.pop("wait_on", [])
1116
- for _ in wait_on[:]:
1117
- if _ not in steps:
1118
- wait_on.remove(_)
1119
-
1120
- waits[name] = wait_on
1121
- if not waits[name]:
1122
- event.set()
1123
-
1124
- proc = multiprocessing.Process(
1125
- name=name, target=self.run, args=(name, event), kwargs=kwargs
1126
- )
1127
- proc.start()
1128
- processes[name] = proc
1129
-
1130
- # Wait for the processes to finish and check if we should send any
1131
- # dependency events.
1132
- while processes:
1133
- for proc in list(processes.values()):
1134
- proc.join(0.3)
1135
- if show_ui:
1136
- # DEBUG: Comment to debug
1137
- print_ui(events, processes, fails)
1138
- if proc.exitcode is None:
1139
- continue
1140
- processes.pop(proc.name)
1141
- if proc.exitcode != 0:
1142
- fails.append(proc.name)
1143
- is_failure = True
1144
- else:
1145
- is_failure = False
1146
- for name in waits:
1147
- if proc.name in waits[name]:
1148
- if is_failure:
1149
- if name in processes:
1150
- processes[name].terminate()
1151
- time.sleep(0.1)
1152
- waits[name].remove(proc.name)
1153
- if not waits[name] and not events[name].is_set():
1154
- events[name].set()
1155
-
1156
- if fails:
1157
- sys.stderr.write("The following failures were reported\n")
1158
- last_outs = {}
1159
- for fail in fails:
1160
- log_file = self.dirs.logs / f"{fail}.log"
1161
- try:
1162
- with io.open(log_file) as fp:
1163
- fp.seek(0, 2)
1164
- end = fp.tell()
1165
- ind = end - 4096
1166
- if ind > 0:
1167
- fp.seek(ind)
1168
- else:
1169
- fp.seek(0)
1170
- last_out = fp.read()
1171
- if show_ui:
1172
- sys.stderr.write("=" * 20 + f" {fail} " + "=" * 20 + "\n")
1173
- sys.stderr.write(fp.read() + "\n\n")
1174
- except FileNotFoundError:
1175
- last_outs[fail] = f"Log file not found: {log_file}"
1176
- log.error("Build step %s has failed", fail)
1177
- log.error(last_out)
1178
- if show_ui:
1179
- sys.stderr.flush()
1180
- if cleanup:
1181
- log.debug("Performing cleanup.")
1182
- self.cleanup()
1183
- sys.exit(1)
1184
- if show_ui:
1185
- time.sleep(0.3)
1186
- print_ui(events, processes, fails)
1187
- sys.stdout.write("\n")
1188
- sys.stdout.flush()
1189
- if cleanup:
1190
- log.debug("Performing cleanup.")
1191
- self.cleanup()
1192
-
1193
- def check_prereqs(self):
1194
- """
1195
- Check prerequisites for build.
1196
-
1197
- This method verifies that all requirements for a successful build are satisfied.
1198
-
1199
- :return: A list of strings describing failed checks
1200
- :rtype: list
1201
- """
1202
- fail = []
1203
- if sys.platform == "linux":
1204
- if not self.toolchain or not self.toolchain.exists():
1205
- fail.append(
1206
- f"Toolchain for {self.arch} does not exist. Please pip install ppbt."
1207
- )
1208
- return fail
1209
-
1210
- def __call__(
1211
- self,
1212
- steps=None,
1213
- arch=None,
1214
- clean=True,
1215
- cleanup=True,
1216
- force_download=False,
1217
- download_only=False,
1218
- show_ui=False,
1219
- log_level="WARNING",
1220
- ):
1221
- """
1222
- Set the architecture, define the steps, clean if needed, download what is needed, and build.
1223
-
1224
- :param steps: The steps to run, defaults to None
1225
- :type steps: list, optional
1226
- :param arch: The architecture to build, defaults to None
1227
- :type arch: str, optional
1228
- :param clean: If true, cleans the directories first, defaults to True
1229
- :type clean: bool, optional
1230
- :param cleanup: Cleans up after build if true, defaults to True
1231
- :type cleanup: bool, optional
1232
- :param force_download: Whether or not to download the content if it already exists, defaults to False
1233
- :type force_download: bool, optional
1234
- """
1235
- log = logging.getLogger(None)
1236
- log.setLevel(logging.NOTSET)
1237
-
1238
- if not show_ui:
1239
- handler = logging.StreamHandler()
1240
- handler.setLevel(logging.getLevelName(log_level))
1241
- log.addHandler(handler)
1242
-
1243
- os.makedirs(self.dirs.logs, exist_ok=True)
1244
- handler = logging.FileHandler(self.dirs.logs / "build.log")
1245
- handler.setLevel(logging.INFO)
1246
- log.addHandler(handler)
1247
-
1248
- if arch:
1249
- self.set_arch(arch)
1250
-
1251
- if steps is None:
1252
- steps = self.recipies
1253
-
1254
- failures = self.check_prereqs()
1255
- if not download_only and failures:
1256
- for _ in failures:
1257
- sys.stderr.write(f"{_}\n")
1258
- sys.stderr.flush()
1259
- sys.exit(1)
1260
-
1261
- if clean:
1262
- self.clean()
1263
-
1264
- if self.build_arch != self.arch:
1265
- native_root = DATA_DIR / "native"
1266
- if not native_root.exists():
1267
- if "RELENV_NATIVE_PY_VERSION" in os.environ:
1268
- version = os.environ["RELENV_NATIVE_PY_VERSION"]
1269
- else:
1270
- version = self.version
1271
- from relenv.create import create
1272
-
1273
- create("native", DATA_DIR, version=version)
1274
-
1275
- # Start a process for each build passing it an event used to notify each
1276
- # process if its dependencies have finished.
1277
- self.download_files(steps, force_download=force_download, show_ui=show_ui)
1278
- if download_only:
1279
- return
1280
- self.build(steps, cleanup, show_ui=show_ui, log_level=log_level)
1281
-
1282
- def check_versions(self):
1283
- success = True
1284
- for step in list(self.recipies):
1285
- download = self.recipies[step]["download"]
1286
- if not download:
1287
- continue
1288
- if not download.check_version():
1289
- success = False
1290
- return success
1291
-
1292
-
1293
- def patch_shebang(path, old, new):
1294
- """
1295
- Replace a file's shebang.
1296
-
1297
- :param path: The path of the file to patch
1298
- :type path: str
1299
- :param old: The old shebang, will only patch when this is found
1300
- :type old: str
1301
- :param new: The new shebang to be written
1302
- :type new: str
1303
- """
1304
- with open(path, "rb") as fp:
1305
- try:
1306
- data = fp.read(len(old.encode())).decode()
1307
- except UnicodeError:
1308
- return False
1309
- except Exception as exc:
1310
- log.warning("Unhandled exception: %r", exc)
1311
- return False
1312
- if data != old:
1313
- log.warning("Shebang doesn't match: %s %r != %r", path, old, data)
1314
- return False
1315
- data = fp.read().decode()
1316
- with open(path, "w") as fp:
1317
- fp.write(new)
1318
- fp.write(data)
1319
- with open(path, "r") as fp:
1320
- data = fp.read()
1321
- log.info("Patched shebang of %s => %r", path, data)
1322
- return True
1323
-
1324
-
1325
- def patch_shebangs(path, old, new):
1326
- """
1327
- Traverse directory and patch shebangs.
1328
-
1329
- :param path: The path of the directory to traverse
1330
- :type path: str
1331
- :param old: The old shebang, will only patch when this is found
1332
- :type old: str
1333
- :param new: The new shebang to be written
1334
- :type new: str
1335
- """
1336
- for root, _dirs, files in os.walk(str(path)):
1337
- for file in files:
1338
- patch_shebang(os.path.join(root, file), old, new)
1339
-
1340
-
1341
- def install_sysdata(mod, destfile, buildroot, toolchain):
1342
- """
1343
- Create a Relenv Python environment's sysconfigdata.
1344
-
1345
- Helper method used by the `finalize` build method to create a Relenv
1346
- Python environment's sysconfigdata.
1347
-
1348
- :param mod: The module to operate on
1349
- :type mod: ``types.ModuleType``
1350
- :param destfile: Path to the file to write the data to
1351
- :type destfile: str
1352
- :param buildroot: Path to the root of the build
1353
- :type buildroot: str
1354
- :param toolchain: Path to the root of the toolchain
1355
- :type toolchain: str
1356
- """
1357
- data = {}
1358
- fbuildroot = lambda _: _.replace(str(buildroot), "{BUILDROOT}") # noqa: E731
1359
- ftoolchain = lambda _: _.replace(str(toolchain), "{TOOLCHAIN}") # noqa: E731
1360
- # XXX: keymap is not used, remove it?
1361
- # keymap = {
1362
- # "BINDIR": (fbuildroot,),
1363
- # "BINLIBDEST": (fbuildroot,),
1364
- # "CFLAGS": (fbuildroot, ftoolchain),
1365
- # "CPPLAGS": (fbuildroot, ftoolchain),
1366
- # "CXXFLAGS": (fbuildroot, ftoolchain),
1367
- # "datarootdir": (fbuildroot,),
1368
- # "exec_prefix": (fbuildroot,),
1369
- # "LDFLAGS": (fbuildroot, ftoolchain),
1370
- # "LDSHARED": (fbuildroot, ftoolchain),
1371
- # "LIBDEST": (fbuildroot,),
1372
- # "prefix": (fbuildroot,),
1373
- # "SCRIPTDIR": (fbuildroot,),
1374
- # }
1375
- for key in sorted(mod.build_time_vars):
1376
- val = mod.build_time_vars[key]
1377
- if isinstance(val, str):
1378
- for _ in (fbuildroot, ftoolchain):
1379
- val = _(val)
1380
- log.info("SYSCONFIG [%s] %s => %s", key, mod.build_time_vars[key], val)
1381
- data[key] = val
1382
-
1383
- with open(destfile, "w", encoding="utf8") as f:
1384
- f.write(
1385
- "# system configuration generated and used by" " the relenv at runtime\n"
1386
- )
1387
- f.write("_build_time_vars = ")
1388
- pprint.pprint(data, stream=f)
1389
- f.write(SYSCONFIGDATA)
1390
-
1391
-
1392
- def find_sysconfigdata(pymodules):
1393
- """
1394
- Find sysconfigdata directory for python installation.
1395
-
1396
- :param pymodules: Path to python modules (e.g. lib/python3.10)
1397
- :type pymodules: str
1398
-
1399
- :return: The name of the sysconfig data module
1400
- :rtype: str
1401
- """
1402
- for root, dirs, files in os.walk(pymodules):
1403
- for file in files:
1404
- if file.find("sysconfigdata") > -1 and file.endswith(".py"):
1405
- return file[:-3]
1406
-
1407
-
1408
- def install_runtime(sitepackages):
1409
- """
1410
- Install a base relenv runtime.
1411
- """
1412
- relenv_pth = sitepackages / "relenv.pth"
1413
- with io.open(str(relenv_pth), "w") as fp:
1414
- fp.write(RELENV_PTH)
1415
-
1416
- # Lay down relenv.runtime, we'll pip install the rest later
1417
- relenv = sitepackages / "relenv"
1418
- os.makedirs(relenv, exist_ok=True)
1419
-
1420
- for name in [
1421
- "runtime.py",
1422
- "relocate.py",
1423
- "common.py",
1424
- "buildenv.py",
1425
- "__init__.py",
1426
- ]:
1427
- src = MODULE_DIR / name
1428
- dest = relenv / name
1429
- with io.open(src, "r") as rfp:
1430
- with io.open(dest, "w") as wfp:
1431
- wfp.write(rfp.read())
1432
-
1433
-
1434
- def finalize(env, dirs, logfp):
1435
- """
1436
- Run after we've fully built python.
1437
-
1438
- This method enhances the newly created python with Relenv's runtime hacks.
1439
-
1440
- :param env: The environment dictionary
1441
- :type env: dict
1442
- :param dirs: The working directories
1443
- :type dirs: ``relenv.build.common.Dirs``
1444
- :param logfp: A handle for the log file
1445
- :type logfp: file
1446
- """
1447
- # Run relok8 to make sure the rpaths are relocatable.
1448
- relenv.relocate.main(dirs.prefix, log_file_name=str(dirs.logs / "relocate.py.log"))
1449
- # Install relenv-sysconfigdata module
1450
- libdir = pathlib.Path(dirs.prefix) / "lib"
1451
-
1452
- def find_pythonlib(libdir):
1453
- for root, dirs, files in os.walk(libdir):
1454
- for _ in dirs:
1455
- if _.startswith("python"):
1456
- return _
1457
-
1458
- pymodules = libdir / find_pythonlib(libdir)
1459
-
1460
- cwd = os.getcwd()
1461
- modname = find_sysconfigdata(pymodules)
1462
- path = sys.path
1463
- sys.path = [str(pymodules)]
1464
- try:
1465
- mod = __import__(str(modname))
1466
- finally:
1467
- os.chdir(cwd)
1468
- sys.path = path
1469
-
1470
- dest = pymodules / f"{modname}.py"
1471
- install_sysdata(mod, dest, dirs.prefix, dirs.toolchain)
1472
-
1473
- # Lay down site customize
1474
- bindir = pathlib.Path(dirs.prefix) / "bin"
1475
- sitepackages = pymodules / "site-packages"
1476
- install_runtime(sitepackages)
1477
- # Install pip
1478
- python = dirs.prefix / "bin" / "python3"
1479
- if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]:
1480
- env["RELENV_CROSS"] = dirs.prefix
1481
- python = env["RELENV_NATIVE_PY"]
1482
- logfp.write("\nRUN ENSURE PIP\n")
1483
-
1484
- env.pop("RELENV_BUILDENV")
1485
-
1486
- runcmd(
1487
- [str(python), "-m", "ensurepip"],
1488
- env=env,
1489
- stderr=logfp,
1490
- stdout=logfp,
1491
- )
1492
-
1493
- # Fix the shebangs in the scripts python laid down. Order matters.
1494
- shebangs = [
1495
- "#!{}".format(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}"),
1496
- "#!{}".format(
1497
- bindir / f"python{env['RELENV_PY_MAJOR_VERSION'].split('.', 1)[0]}"
1498
- ),
1499
- ]
1500
- newshebang = format_shebang("/python3")
1501
- for shebang in shebangs:
1502
- log.info("Patch shebang %r with %r", shebang, newshebang)
1503
- patch_shebangs(
1504
- str(pathlib.Path(dirs.prefix) / "bin"),
1505
- shebang,
1506
- newshebang,
1507
- )
1508
-
1509
- if sys.platform == "linux":
1510
- pyconf = f"config-{env['RELENV_PY_MAJOR_VERSION']}-{env['RELENV_HOST']}"
1511
- patch_shebang(
1512
- str(pymodules / pyconf / "python-config.py"),
1513
- "#!{}".format(str(bindir / f"python{env['RELENV_PY_MAJOR_VERSION']}")),
1514
- format_shebang("../../../bin/python3"),
1515
- )
1516
-
1517
- shutil.copy(
1518
- pathlib.Path(dirs.toolchain)
1519
- / env["RELENV_HOST"]
1520
- / "sysroot"
1521
- / "lib"
1522
- / "libstdc++.so.6",
1523
- libdir,
1524
- )
1525
-
1526
- # Moved in python 3.13 or removed?
1527
- if (pymodules / "cgi.py").exists():
1528
- patch_shebang(
1529
- str(pymodules / "cgi.py"),
1530
- "#! /usr/local/bin/python",
1531
- format_shebang("../../bin/python3"),
1532
- )
1533
-
1534
- def runpip(pkg, upgrade=False):
1535
- logfp.write(f"\nRUN PIP {pkg} {upgrade}\n")
1536
- target = None
1537
- python = dirs.prefix / "bin" / "python3"
1538
- if sys.platform == LINUX:
1539
- if env["RELENV_HOST_ARCH"] != env["RELENV_BUILD_ARCH"]:
1540
- target = pymodules / "site-packages"
1541
- python = env["RELENV_NATIVE_PY"]
1542
- cmd = [
1543
- str(python),
1544
- "-m",
1545
- "pip",
1546
- "install",
1547
- str(pkg),
1548
- ]
1549
- if upgrade:
1550
- cmd.append("--upgrade")
1551
- if target:
1552
- cmd.append("--target={}".format(target))
1553
- runcmd(cmd, env=env, stderr=logfp, stdout=logfp)
1554
-
1555
- runpip("wheel")
1556
- # This needs to handle running from the root of the git repo and also from
1557
- # an installed Relenv
1558
- if (MODULE_DIR.parent / ".git").exists():
1559
- runpip(MODULE_DIR.parent, upgrade=True)
1560
- else:
1561
- runpip("relenv", upgrade=True)
1562
- globs = [
1563
- "/bin/python*",
1564
- "/bin/pip*",
1565
- "/bin/relenv",
1566
- "/lib/python*/ensurepip/*",
1567
- "/lib/python*/site-packages/*",
1568
- "/include/*",
1569
- "*.so",
1570
- "/lib/*.so.*",
1571
- "*.py",
1572
- # Mac specific, factor this out
1573
- "*.dylib",
1574
- ]
1575
- archive = f"{ dirs.prefix }.tar.xz"
1576
- log.info("Archive is %s", archive)
1577
- with tarfile.open(archive, mode="w:xz") as fp:
1578
- create_archive(fp, dirs.prefix, globs, logfp)
1579
-
1580
-
1581
- def create_archive(tarfp, toarchive, globs, logfp=None):
1582
- """
1583
- Create an archive.
1584
-
1585
- :param tarfp: A pointer to the archive to be created
1586
- :type tarfp: file
1587
- :param toarchive: The path to the directory to archive
1588
- :type toarchive: str
1589
- :param globs: A list of filtering patterns to match against files to be added
1590
- :type globs: list
1591
- :param logfp: A pointer to the log file
1592
- :type logfp: file
1593
- """
1594
- log.debug("Current directory %s", os.getcwd())
1595
- log.debug("Creating archive %s", tarfp.name)
1596
- for root, _dirs, files in os.walk(toarchive):
1597
- relroot = pathlib.Path(root).relative_to(toarchive)
1598
- for f in files:
1599
- relpath = relroot / f
1600
- matches = False
1601
- for g in globs:
1602
- if glob.fnmatch.fnmatch("/" / relpath, g):
1603
- matches = True
1604
- break
1605
- if matches:
1606
- log.debug("Adding %s", relpath)
1607
- tarfp.add(relpath, relpath, recursive=False)
1608
- else:
1609
- log.debug("Skipping %s", relpath)