relenv 0.21.2__py3-none-any.whl → 0.22.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- relenv/__init__.py +14 -2
- relenv/__main__.py +12 -6
- relenv/_resources/xz/config.h +148 -0
- relenv/_resources/xz/readme.md +4 -0
- relenv/build/__init__.py +28 -30
- relenv/build/common/__init__.py +50 -0
- relenv/build/common/_sysconfigdata_template.py +72 -0
- relenv/build/common/builder.py +907 -0
- relenv/build/common/builders.py +163 -0
- relenv/build/common/download.py +324 -0
- relenv/build/common/install.py +609 -0
- relenv/build/common/ui.py +432 -0
- relenv/build/darwin.py +128 -14
- relenv/build/linux.py +292 -74
- relenv/build/windows.py +123 -169
- relenv/buildenv.py +48 -17
- relenv/check.py +10 -5
- relenv/common.py +492 -165
- relenv/create.py +147 -7
- relenv/fetch.py +16 -4
- relenv/manifest.py +15 -7
- relenv/python-versions.json +350 -0
- relenv/pyversions.py +817 -30
- relenv/relocate.py +101 -55
- relenv/runtime.py +457 -282
- relenv/toolchain.py +9 -3
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/METADATA +1 -1
- relenv-0.22.1.dist-info/RECORD +48 -0
- tests/__init__.py +2 -0
- tests/_pytest_typing.py +45 -0
- tests/conftest.py +42 -36
- tests/test_build.py +426 -9
- tests/test_common.py +373 -48
- tests/test_create.py +149 -6
- tests/test_downloads.py +19 -15
- tests/test_fips_photon.py +6 -3
- tests/test_module_imports.py +44 -0
- tests/test_pyversions_runtime.py +177 -0
- tests/test_relocate.py +45 -39
- tests/test_relocate_module.py +257 -0
- tests/test_runtime.py +1968 -6
- tests/test_verify_build.py +477 -34
- relenv/build/common.py +0 -1707
- relenv-0.21.2.dist-info/RECORD +0 -35
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/WHEEL +0 -0
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/entry_points.txt +0 -0
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/licenses/LICENSE.md +0 -0
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/licenses/NOTICE +0 -0
- {relenv-0.21.2.dist-info → relenv-0.22.1.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,907 @@
|
|
|
1
|
+
# Copyright 2022-2025 Broadcom.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
"""
|
|
4
|
+
Builder and Builds classes for managing the build process.
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import io
|
|
9
|
+
import json
|
|
10
|
+
import logging
|
|
11
|
+
import multiprocessing
|
|
12
|
+
import os
|
|
13
|
+
import pathlib
|
|
14
|
+
import shutil
|
|
15
|
+
import sys
|
|
16
|
+
import time
|
|
17
|
+
from typing import (
|
|
18
|
+
Any,
|
|
19
|
+
Callable,
|
|
20
|
+
Dict,
|
|
21
|
+
IO,
|
|
22
|
+
List,
|
|
23
|
+
MutableMapping,
|
|
24
|
+
Optional,
|
|
25
|
+
Sequence,
|
|
26
|
+
TypedDict,
|
|
27
|
+
Union,
|
|
28
|
+
cast,
|
|
29
|
+
)
|
|
30
|
+
import tempfile
|
|
31
|
+
|
|
32
|
+
from relenv.common import (
|
|
33
|
+
DATA_DIR,
|
|
34
|
+
MODULE_DIR,
|
|
35
|
+
ConfigurationError,
|
|
36
|
+
build_arch,
|
|
37
|
+
extract_archive,
|
|
38
|
+
get_toolchain,
|
|
39
|
+
get_triplet,
|
|
40
|
+
work_dirs,
|
|
41
|
+
WorkDirs,
|
|
42
|
+
)
|
|
43
|
+
|
|
44
|
+
from .download import Download
|
|
45
|
+
from .ui import (
|
|
46
|
+
LineCountHandler,
|
|
47
|
+
load_build_stats,
|
|
48
|
+
print_ui,
|
|
49
|
+
print_ui_expanded,
|
|
50
|
+
update_build_stats,
|
|
51
|
+
BuildStats,
|
|
52
|
+
)
|
|
53
|
+
from .builders import build_default as _default_build_func
|
|
54
|
+
|
|
55
|
+
# Type alias for path-like objects
|
|
56
|
+
PathLike = Union[str, os.PathLike[str]]
|
|
57
|
+
|
|
58
|
+
log = logging.getLogger(__name__)
|
|
59
|
+
|
|
60
|
+
|
|
61
|
+
def _default_populate_env(env: MutableMapping[str, str], dirs: "Dirs") -> None:
|
|
62
|
+
"""Default populate_env implementation (does nothing).
|
|
63
|
+
|
|
64
|
+
This default implementation intentionally does nothing; specific steps may
|
|
65
|
+
provide their own implementation via the ``populate_env`` hook.
|
|
66
|
+
"""
|
|
67
|
+
_ = env
|
|
68
|
+
_ = dirs
|
|
69
|
+
|
|
70
|
+
|
|
71
|
+
def get_dependency_version(name: str, platform: str) -> Optional[Dict[str, str]]:
    """
    Look up the newest release of a dependency that supports ``platform``.

    Reads ``python-versions.json`` shipped next to the relenv module and
    returns a mapping with ``version``, ``url``, ``sha256`` plus any extra
    metadata fields (for example ``sqliteversion``).

    :param name: Dependency name (openssl, sqlite, xz)
    :param platform: Platform name (linux, darwin, win32)
    :return: Dict with version, url, sha256, and extra fields, or None
    """
    versions_file = MODULE_DIR / "python-versions.json"
    if not versions_file.exists():
        return None

    data = json.loads(versions_file.read_text())
    candidates = data.get("dependencies", {})
    if name not in candidates:
        return None

    def _numeric_key(item: Any) -> List[int]:
        # Sort by the dotted version string interpreted numerically.
        return [int(part) for part in item[0].split(".")]

    # Walk releases newest-first; the first one built for this platform wins.
    for version, info in sorted(candidates[name].items(), key=_numeric_key, reverse=True):
        if platform not in info.get("platforms", []):
            continue
        result = {
            "version": version,
            "url": info["url"],
            "sha256": info.get("sha256", ""),
        }
        # Carry along any auxiliary metadata (e.g. sqliteversion for SQLite).
        for key, value in info.items():
            if key not in ["url", "sha256", "platforms"]:
                result[key] = value
        return result

    return None
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
# Public alias for _default_populate_env for backward compatibility.
# Older callers imported ``populate_env`` from this module directly; keep the
# name exported so those imports continue to resolve.
populate_env = _default_populate_env
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
class Dirs:
    """
    A container for directories during build time.

    :param dirs: A collection of working directories
    :type dirs: ``relenv.common.WorkDirs``
    :param name: The name of this collection
    :type name: str
    :param arch: The architecture being worked with
    :type arch: str
    :param version: The python version being built
    :type version: str
    """

    def __init__(self, dirs: "WorkDirs", name: str, arch: str, version: str) -> None:
        # XXX name is the specific to a step where as everything
        # else here is generalized to the entire build
        self.name = name
        self.version = version
        self.arch = arch
        self.root = dirs.root
        self.build = dirs.build
        self.downloads = dirs.download
        self.logs = dirs.logs
        self.sources = dirs.src
        # Per-step scratch directory; never shared between steps.
        self.tmpbuild = tempfile.mkdtemp(prefix="{}_build".format(name))
        # Set later by Builder.run once the step's archive is extracted.
        self.source: Optional[pathlib.Path] = None

    @property
    def toolchain(self) -> Optional[pathlib.Path]:
        """Get the toolchain directory path for the current platform."""
        if sys.platform in ("darwin", "win32"):
            # These platforms have no arch-specific toolchain layout.
            return get_toolchain(root=self.root)
        return get_toolchain(self.arch, self.root)

    @property
    def _triplet(self) -> str:
        # Platform suffix used to name the build prefix directory.
        if sys.platform == "darwin":
            return "{}-macos".format(self.arch)
        elif sys.platform == "win32":
            return "{}-win".format(self.arch)
        else:
            return "{}-linux-gnu".format(self.arch)

    @property
    def prefix(self) -> pathlib.Path:
        """Get the build prefix directory path."""
        return self.build / f"{self.version}-{self._triplet}"

    def __getstate__(self) -> Dict[str, Any]:
        """
        Return an object used for pickling.

        :return: The picklable state
        """
        return {
            "name": self.name,
            # BUG FIX: "version" and "source" were previously dropped during
            # pickling, which broke the ``prefix`` property (AttributeError on
            # ``self.version``) on any unpickled instance.
            "version": self.version,
            "arch": self.arch,
            "root": self.root,
            "build": self.build,
            "downloads": self.downloads,
            "logs": self.logs,
            "sources": self.sources,
            "tmpbuild": self.tmpbuild,
            "source": self.source,
        }

    def __setstate__(self, state: Dict[str, Any]) -> None:
        """
        Unwrap the object returned from unpickling.

        :param state: The state to unpickle
        :type state: dict
        """
        self.name = state["name"]
        # ``get`` defaults keep compatibility with states pickled by older
        # releases that did not include these keys.
        self.version = state.get("version", "")
        self.arch = state["arch"]
        self.root = state["root"]
        self.downloads = state["downloads"]
        self.logs = state["logs"]
        self.sources = state["sources"]
        self.build = state["build"]
        self.tmpbuild = state["tmpbuild"]
        self.source = state.get("source", None)

    def to_dict(self) -> Dict[str, Any]:
        """
        Get a dictionary representation of the directories in this collection.

        :return: A dictionary of all the directories
        :rtype: dict
        """
        return {
            x: getattr(self, x)
            for x in [
                "root",
                "prefix",
                "downloads",
                "logs",
                "sources",
                "build",
                "toolchain",
            ]
        }
|
|
221
|
+
|
|
222
|
+
|
|
223
|
+
class Recipe(TypedDict):
    """Typed description of a build recipe entry.

    Instances of this shape live in ``Builder.recipies`` and describe one
    build step: the callable that performs it, the names of steps it must
    wait for, and (optionally) the archive to download before running.
    """

    # Callable invoked as ``build_func(env, dirs, logfp)`` to perform the step.
    build_func: Callable[[MutableMapping[str, str], Dirs, IO[str]], None]
    # Names of sibling recipe steps that must complete before this one starts.
    wait_on: List[str]
    # Archive to fetch before building, or None when nothing is downloaded.
    download: Optional[Download]
|
|
229
|
+
|
|
230
|
+
|
|
231
|
+
class Builder:
|
|
232
|
+
"""
|
|
233
|
+
Utility that handles the build process.
|
|
234
|
+
|
|
235
|
+
:param root: The root of the working directories for this build
|
|
236
|
+
:type root: str
|
|
237
|
+
:param recipies: The instructions for the build steps
|
|
238
|
+
:type recipes: list
|
|
239
|
+
:param build_default: The default build function, defaults to ``build_default``
|
|
240
|
+
:type build_default: types.FunctionType
|
|
241
|
+
:param populate_env: The default function to populate the build environment, defaults to ``populate_env``
|
|
242
|
+
:type populate_env: types.FunctionType
|
|
243
|
+
:param force_download: If True, forces downloading the archives even if they exist, defaults to False
|
|
244
|
+
:type force_download: bool
|
|
245
|
+
:param arch: The architecture being built
|
|
246
|
+
:type arch: str
|
|
247
|
+
"""
|
|
248
|
+
|
|
249
|
+
def __init__(
|
|
250
|
+
self,
|
|
251
|
+
root: Optional[PathLike] = None,
|
|
252
|
+
recipies: Optional[Dict[str, Recipe]] = None,
|
|
253
|
+
build_default: Optional[
|
|
254
|
+
Callable[[MutableMapping[str, str], Dirs, IO[str]], None]
|
|
255
|
+
] = None,
|
|
256
|
+
populate_env: Optional[Callable[[MutableMapping[str, str], Dirs], None]] = None,
|
|
257
|
+
arch: str = "x86_64",
|
|
258
|
+
version: str = "",
|
|
259
|
+
) -> None:
|
|
260
|
+
self.root = root
|
|
261
|
+
self.dirs: WorkDirs = work_dirs(root)
|
|
262
|
+
self.build_arch = build_arch()
|
|
263
|
+
self.build_triplet = get_triplet(self.build_arch)
|
|
264
|
+
self.arch = arch
|
|
265
|
+
self.sources = self.dirs.src
|
|
266
|
+
self.downloads = self.dirs.download
|
|
267
|
+
|
|
268
|
+
if recipies is None:
|
|
269
|
+
self.recipies: Dict[str, Recipe] = {}
|
|
270
|
+
else:
|
|
271
|
+
self.recipies = recipies
|
|
272
|
+
|
|
273
|
+
# Use dependency injection with sensible defaults
|
|
274
|
+
self.build_default: Callable[
|
|
275
|
+
[MutableMapping[str, str], Dirs, IO[str]], None
|
|
276
|
+
] = (build_default if build_default is not None else _default_build_func)
|
|
277
|
+
|
|
278
|
+
# Use the default populate_env if none provided
|
|
279
|
+
self.populate_env: Callable[[MutableMapping[str, str], Dirs], None] = (
|
|
280
|
+
populate_env if populate_env is not None else _default_populate_env
|
|
281
|
+
)
|
|
282
|
+
|
|
283
|
+
self.version = version
|
|
284
|
+
self.set_arch(self.arch)
|
|
285
|
+
|
|
286
|
+
def copy(self, version: str, checksum: Optional[str]) -> "Builder":
|
|
287
|
+
"""Create a copy of this Builder with a different version."""
|
|
288
|
+
recipies: Dict[str, Recipe] = {}
|
|
289
|
+
for name in self.recipies:
|
|
290
|
+
recipe = self.recipies[name]
|
|
291
|
+
recipies[name] = {
|
|
292
|
+
"build_func": recipe["build_func"],
|
|
293
|
+
"wait_on": list(recipe["wait_on"]),
|
|
294
|
+
"download": recipe["download"].copy() if recipe["download"] else None,
|
|
295
|
+
}
|
|
296
|
+
build = Builder(
|
|
297
|
+
self.root,
|
|
298
|
+
recipies,
|
|
299
|
+
self.build_default,
|
|
300
|
+
self.populate_env,
|
|
301
|
+
self.arch,
|
|
302
|
+
version,
|
|
303
|
+
)
|
|
304
|
+
python_download = build.recipies["python"].get("download")
|
|
305
|
+
if python_download is None:
|
|
306
|
+
raise ConfigurationError("Python recipe is missing a download entry")
|
|
307
|
+
python_download.version = version
|
|
308
|
+
python_download.checksum = checksum
|
|
309
|
+
return build
|
|
310
|
+
|
|
311
|
+
def set_arch(self, arch: str) -> None:
|
|
312
|
+
"""
|
|
313
|
+
Set the architecture for the build.
|
|
314
|
+
|
|
315
|
+
:param arch: The arch to build
|
|
316
|
+
:type arch: str
|
|
317
|
+
"""
|
|
318
|
+
self.arch = arch
|
|
319
|
+
self._toolchain: Optional[pathlib.Path] = None
|
|
320
|
+
|
|
321
|
+
@property
|
|
322
|
+
def toolchain(self) -> Optional[pathlib.Path]:
|
|
323
|
+
"""Lazily fetch toolchain only when needed."""
|
|
324
|
+
if self._toolchain is None and sys.platform == "linux":
|
|
325
|
+
from relenv.common import get_toolchain
|
|
326
|
+
|
|
327
|
+
self._toolchain = get_toolchain(self.arch, self.dirs.root)
|
|
328
|
+
return self._toolchain
|
|
329
|
+
|
|
330
|
+
@property
|
|
331
|
+
def triplet(self) -> str:
|
|
332
|
+
"""Get the target triplet for the current architecture."""
|
|
333
|
+
return get_triplet(self.arch)
|
|
334
|
+
|
|
335
|
+
@property
|
|
336
|
+
def prefix(self) -> pathlib.Path:
|
|
337
|
+
"""Get the build prefix directory path."""
|
|
338
|
+
return self.dirs.build / f"{self.version}-{self.triplet}"
|
|
339
|
+
|
|
340
|
+
@property
|
|
341
|
+
def _triplet(self) -> str:
|
|
342
|
+
if sys.platform == "darwin":
|
|
343
|
+
return "{}-macos".format(self.arch)
|
|
344
|
+
elif sys.platform == "win32":
|
|
345
|
+
return "{}-win".format(self.arch)
|
|
346
|
+
else:
|
|
347
|
+
return "{}-linux-gnu".format(self.arch)
|
|
348
|
+
|
|
349
|
+
def add(
|
|
350
|
+
self,
|
|
351
|
+
name: str,
|
|
352
|
+
build_func: Optional[Callable[..., Any]] = None,
|
|
353
|
+
wait_on: Optional[Sequence[str]] = None,
|
|
354
|
+
download: Optional[Dict[str, Any]] = None,
|
|
355
|
+
) -> None:
|
|
356
|
+
"""
|
|
357
|
+
Add a step to the build process.
|
|
358
|
+
|
|
359
|
+
:param name: The name of the step
|
|
360
|
+
:type name: str
|
|
361
|
+
:param build_func: The function that builds this step, defaults to None
|
|
362
|
+
:type build_func: types.FunctionType, optional
|
|
363
|
+
:param wait_on: Processes to wait on before running this step, defaults to None
|
|
364
|
+
:type wait_on: list, optional
|
|
365
|
+
:param download: A dictionary of download information, defaults to None
|
|
366
|
+
:type download: dict, optional
|
|
367
|
+
"""
|
|
368
|
+
if wait_on is None:
|
|
369
|
+
wait_on_list: List[str] = []
|
|
370
|
+
else:
|
|
371
|
+
wait_on_list = list(wait_on)
|
|
372
|
+
if build_func is None:
|
|
373
|
+
build_func = self.build_default
|
|
374
|
+
download_obj: Optional[Download] = None
|
|
375
|
+
if download is not None:
|
|
376
|
+
download_obj = Download(name, destination=self.downloads, **download)
|
|
377
|
+
self.recipies[name] = {
|
|
378
|
+
"build_func": build_func,
|
|
379
|
+
"wait_on": wait_on_list,
|
|
380
|
+
"download": download_obj,
|
|
381
|
+
}
|
|
382
|
+
|
|
383
|
+
    def run(
        self,
        name: str,
        event: "multiprocessing.synchronize.Event",
        build_func: Callable[..., Any],
        download: Optional[Download],
        show_ui: bool = False,
        log_level: str = "WARNING",
        line_counts: Optional[MutableMapping[str, int]] = None,
    ) -> Any:
        """
        Run a build step.

        Executed in a child process spawned by ``build``. Blocks until
        ``event`` is set (all dependency steps finished), prepares the step's
        directories and environment, then invokes ``build_func``. Any
        exception from the build function terminates the process with exit
        code 1, which the parent interprets as a step failure.

        :param name: The name of the step to run
        :type name: str
        :param event: An event to track this process' status and alert waiting steps
        :type event: ``multiprocessing.Event``
        :param build_func: The function to use to build this step
        :type build_func: types.FunctionType
        :param download: The ``Download`` instance for this step
        :type download: ``Download``
        :param show_ui: Suppresses per-process stream logging when True
        :param log_level: Level name for the stream handler when no UI is shown
        :param line_counts: Optional shared dict for tracking log line counts
        :type line_counts: Optional[MutableMapping[str, int]]

        :return: The output of the build function
        """
        root_log = logging.getLogger(None)
        if sys.platform == "win32":
            # NOTE(review): Windows child processes do not inherit the parent's
            # logging configuration (spawn start method), so handlers are
            # re-attached here — TODO confirm this is the intent.
            if not show_ui:
                handler = logging.StreamHandler()
                handler.setLevel(logging.getLevelName(log_level))
                root_log.addHandler(handler)

            # Prefix console output with the step name so interleaved
            # multi-process logs stay attributable.
            for handler in root_log.handlers:
                if isinstance(handler, logging.StreamHandler):
                    handler.setFormatter(
                        logging.Formatter(f"%(asctime)s {name} %(message)s")
                    )

        if not self.dirs.build.exists():
            os.makedirs(self.dirs.build, exist_ok=True)

        dirs = Dirs(self.dirs, name, self.arch, self.version)
        os.makedirs(dirs.sources, exist_ok=True)
        os.makedirs(dirs.logs, exist_ok=True)
        os.makedirs(dirs.prefix, exist_ok=True)

        # Block until every dependency step has signalled completion.
        while event.is_set() is False:
            time.sleep(0.3)

        # Two sinks for this step: ``logfp`` receives subprocess output from
        # the build function, the FileHandler receives logging records.
        logfp = io.open(os.path.join(dirs.logs, "{}.log".format(name)), "w")
        handler = logging.FileHandler(dirs.logs / f"{name}.log")
        root_log.addHandler(handler)
        root_log.setLevel(logging.NOTSET)

        # Add line count handler if tracking is enabled
        line_count_handler: Optional[LineCountHandler] = None
        if line_counts is not None:
            line_count_handler = LineCountHandler(name, line_counts)
            root_log.addHandler(line_count_handler)

        # DEBUG: Uncomment to debug
        # logfp = sys.stdout

        cwd = os.getcwd()
        if download:
            # Unpack the step's archive and run from the extracted tree;
            # assumes the top-level directory matches the archive name up to
            # the ".tar" suffix — TODO confirm for all dependency tarballs.
            extract_archive(dirs.sources, str(download.filepath))
            dirs.source = dirs.sources / download.filepath.name.split(".tar")[0]
            os.chdir(dirs.source)
        else:
            os.chdir(dirs.prefix)

        # On POSIX the build environment is minimal (PATH only) to keep
        # builds reproducible; Windows builds need the full environment.
        if sys.platform == "win32":
            env = os.environ.copy()
        else:
            env = {
                "PATH": os.environ["PATH"],
            }
        env["RELENV_DEBUG"] = "1"
        env["RELENV_BUILDENV"] = "1"
        env["RELENV_HOST"] = self.triplet
        env["RELENV_HOST_ARCH"] = self.arch
        env["RELENV_BUILD"] = self.build_triplet
        env["RELENV_BUILD_ARCH"] = self.build_arch
        python_download = self.recipies["python"].get("download")
        if python_download is None:
            raise ConfigurationError("Python recipe is missing download configuration")
        env["RELENV_PY_VERSION"] = python_download.version
        env["RELENV_PY_MAJOR_VERSION"] = env["RELENV_PY_VERSION"].rsplit(".", 1)[0]
        if "RELENV_DATA" in os.environ:
            env["RELENV_DATA"] = os.environ["RELENV_DATA"]
        if self.build_arch != self.arch:
            # Cross builds need a native interpreter to drive the build.
            native_root = DATA_DIR / "native"
            env["RELENV_NATIVE_PY"] = str(native_root / "bin" / "python3")

        # Let the per-platform hook add compiler flags etc.
        self.populate_env(env, dirs)

        _ = dirs.to_dict()
        for k in _:
            log.info("Directory %s %s", k, _[k])
        for k in env:
            log.info("Environment %s %s", k, env[k])
        try:
            result = build_func(env, dirs, logfp)
            # Update build stats with final line count on success
            if line_count_handler is not None and line_counts is not None:
                if name in line_counts:
                    final_count = line_counts[name]
                    update_build_stats(name, final_count)
            return result
        except Exception:
            # Exit code 1 marks this step failed for the parent process.
            log.exception("Build failure")
            sys.exit(1)
        finally:
            os.chdir(cwd)
            if line_count_handler is not None:
                root_log.removeHandler(line_count_handler)
            root_log.removeHandler(handler)
            logfp.close()
|
|
502
|
+
|
|
503
|
+
def cleanup(self) -> None:
|
|
504
|
+
"""
|
|
505
|
+
Clean up the build directories.
|
|
506
|
+
"""
|
|
507
|
+
shutil.rmtree(self.prefix)
|
|
508
|
+
|
|
509
|
+
def clean(self) -> None:
|
|
510
|
+
"""
|
|
511
|
+
Completely clean up the remnants of a relenv build.
|
|
512
|
+
"""
|
|
513
|
+
# Clean directories
|
|
514
|
+
for _ in [self.prefix, self.sources]:
|
|
515
|
+
try:
|
|
516
|
+
shutil.rmtree(_)
|
|
517
|
+
except PermissionError:
|
|
518
|
+
sys.stderr.write(f"Unable to remove directory: {_}")
|
|
519
|
+
except FileNotFoundError:
|
|
520
|
+
pass
|
|
521
|
+
# Clean files
|
|
522
|
+
archive = f"{self.prefix}.tar.xz"
|
|
523
|
+
for _ in [archive]:
|
|
524
|
+
try:
|
|
525
|
+
os.remove(_)
|
|
526
|
+
except FileNotFoundError:
|
|
527
|
+
pass
|
|
528
|
+
|
|
529
|
+
def download_files(
|
|
530
|
+
self,
|
|
531
|
+
steps: Optional[Sequence[str]] = None,
|
|
532
|
+
force_download: bool = False,
|
|
533
|
+
show_ui: bool = False,
|
|
534
|
+
expanded_ui: bool = False,
|
|
535
|
+
) -> None:
|
|
536
|
+
"""
|
|
537
|
+
Download all of the needed archives.
|
|
538
|
+
|
|
539
|
+
:param steps: The steps to download archives for, defaults to None
|
|
540
|
+
:type steps: list, optional
|
|
541
|
+
:param expanded_ui: Whether to use expanded UI with progress bars
|
|
542
|
+
:type expanded_ui: bool, optional
|
|
543
|
+
"""
|
|
544
|
+
step_names = list(steps) if steps is not None else list(self.recipies)
|
|
545
|
+
|
|
546
|
+
fails: List[str] = []
|
|
547
|
+
processes: Dict[str, multiprocessing.Process] = {}
|
|
548
|
+
events: Dict[str, Any] = {}
|
|
549
|
+
|
|
550
|
+
# For downloads, we don't track line counts but can still use expanded UI format
|
|
551
|
+
manager = multiprocessing.Manager()
|
|
552
|
+
line_counts: MutableMapping[str, int] = manager.dict()
|
|
553
|
+
build_stats: Dict[str, BuildStats] = {}
|
|
554
|
+
|
|
555
|
+
if show_ui:
|
|
556
|
+
if not expanded_ui:
|
|
557
|
+
sys.stdout.write("Starting downloads \n")
|
|
558
|
+
log.info("Starting downloads")
|
|
559
|
+
if show_ui and not expanded_ui:
|
|
560
|
+
print_ui(events, processes, fails)
|
|
561
|
+
for name in step_names:
|
|
562
|
+
download = self.recipies[name]["download"]
|
|
563
|
+
if download is None:
|
|
564
|
+
continue
|
|
565
|
+
event = multiprocessing.Event()
|
|
566
|
+
event.set()
|
|
567
|
+
events[name] = event
|
|
568
|
+
|
|
569
|
+
# Create progress callback if using expanded UI
|
|
570
|
+
def make_progress_callback(
|
|
571
|
+
step_name: str, shared_dict: MutableMapping[str, int]
|
|
572
|
+
) -> Callable[[int, int], None]:
|
|
573
|
+
def progress_callback(downloaded: int, total: int) -> None:
|
|
574
|
+
shared_dict[step_name] = downloaded
|
|
575
|
+
shared_dict[f"{step_name}_total"] = total
|
|
576
|
+
|
|
577
|
+
return progress_callback
|
|
578
|
+
|
|
579
|
+
download_kwargs: Dict[str, Any] = {
|
|
580
|
+
"force_download": force_download,
|
|
581
|
+
"show_ui": show_ui,
|
|
582
|
+
"exit_on_failure": True,
|
|
583
|
+
}
|
|
584
|
+
if expanded_ui:
|
|
585
|
+
download_kwargs["progress_callback"] = make_progress_callback(
|
|
586
|
+
name, line_counts
|
|
587
|
+
)
|
|
588
|
+
|
|
589
|
+
proc = multiprocessing.Process(
|
|
590
|
+
name=name,
|
|
591
|
+
target=download,
|
|
592
|
+
kwargs=download_kwargs,
|
|
593
|
+
)
|
|
594
|
+
proc.start()
|
|
595
|
+
processes[name] = proc
|
|
596
|
+
|
|
597
|
+
while processes:
|
|
598
|
+
for proc in list(processes.values()):
|
|
599
|
+
proc.join(0.3)
|
|
600
|
+
# DEBUG: Comment to debug
|
|
601
|
+
if show_ui:
|
|
602
|
+
if expanded_ui:
|
|
603
|
+
print_ui_expanded(
|
|
604
|
+
events,
|
|
605
|
+
processes,
|
|
606
|
+
fails,
|
|
607
|
+
line_counts,
|
|
608
|
+
build_stats,
|
|
609
|
+
"download",
|
|
610
|
+
)
|
|
611
|
+
else:
|
|
612
|
+
print_ui(events, processes, fails)
|
|
613
|
+
if proc.exitcode is None:
|
|
614
|
+
continue
|
|
615
|
+
processes.pop(proc.name)
|
|
616
|
+
if proc.exitcode != 0:
|
|
617
|
+
fails.append(proc.name)
|
|
618
|
+
if show_ui:
|
|
619
|
+
if expanded_ui:
|
|
620
|
+
print_ui_expanded(
|
|
621
|
+
events, processes, fails, line_counts, build_stats, "download"
|
|
622
|
+
)
|
|
623
|
+
else:
|
|
624
|
+
print_ui(events, processes, fails)
|
|
625
|
+
sys.stdout.write("\n")
|
|
626
|
+
if fails and False:
|
|
627
|
+
if show_ui:
|
|
628
|
+
print_ui(events, processes, fails)
|
|
629
|
+
sys.stderr.write("The following failures were reported\n")
|
|
630
|
+
for fail in fails:
|
|
631
|
+
sys.stderr.write(fail + "\n")
|
|
632
|
+
sys.stderr.flush()
|
|
633
|
+
sys.exit(1)
|
|
634
|
+
|
|
635
|
+
def build(
|
|
636
|
+
self,
|
|
637
|
+
steps: Optional[Sequence[str]] = None,
|
|
638
|
+
cleanup: bool = True,
|
|
639
|
+
show_ui: bool = False,
|
|
640
|
+
log_level: str = "WARNING",
|
|
641
|
+
expanded_ui: bool = False,
|
|
642
|
+
) -> None:
|
|
643
|
+
"""
|
|
644
|
+
Build!
|
|
645
|
+
|
|
646
|
+
:param steps: The steps to run, defaults to None
|
|
647
|
+
:type steps: list, optional
|
|
648
|
+
:param cleanup: Whether to clean up or not, defaults to True
|
|
649
|
+
:type cleanup: bool, optional
|
|
650
|
+
:param expanded_ui: Whether to use expanded UI with progress bars
|
|
651
|
+
:type expanded_ui: bool, optional
|
|
652
|
+
""" # noqa: D400
|
|
653
|
+
fails: List[str] = []
|
|
654
|
+
events: Dict[str, Any] = {}
|
|
655
|
+
waits: Dict[str, List[str]] = {}
|
|
656
|
+
processes: Dict[str, multiprocessing.Process] = {}
|
|
657
|
+
|
|
658
|
+
# Set up shared line counts and load build stats for expanded UI
|
|
659
|
+
manager = multiprocessing.Manager()
|
|
660
|
+
line_counts: MutableMapping[str, int] = manager.dict()
|
|
661
|
+
build_stats: Dict[str, BuildStats] = {}
|
|
662
|
+
if expanded_ui:
|
|
663
|
+
build_stats = load_build_stats()
|
|
664
|
+
|
|
665
|
+
if show_ui:
|
|
666
|
+
if expanded_ui:
|
|
667
|
+
# Expanded UI will print its own header
|
|
668
|
+
pass
|
|
669
|
+
else:
|
|
670
|
+
sys.stdout.write("Starting builds\n")
|
|
671
|
+
# DEBUG: Comment to debug
|
|
672
|
+
print_ui(events, processes, fails)
|
|
673
|
+
log.info("Starting builds")
|
|
674
|
+
|
|
675
|
+
step_names = list(steps) if steps is not None else list(self.recipies)
|
|
676
|
+
|
|
677
|
+
for name in step_names:
|
|
678
|
+
event = multiprocessing.Event()
|
|
679
|
+
events[name] = event
|
|
680
|
+
recipe = self.recipies[name]
|
|
681
|
+
kwargs = dict(recipe)
|
|
682
|
+
kwargs["show_ui"] = show_ui
|
|
683
|
+
kwargs["log_level"] = log_level
|
|
684
|
+
kwargs["line_counts"] = line_counts
|
|
685
|
+
|
|
686
|
+
# Determine needed dependency recipies.
|
|
687
|
+
wait_on_seq = cast(List[str], kwargs.pop("wait_on", []))
|
|
688
|
+
wait_on_list = list(wait_on_seq)
|
|
689
|
+
for dependency in wait_on_list[:]:
|
|
690
|
+
if dependency not in step_names:
|
|
691
|
+
wait_on_list.remove(dependency)
|
|
692
|
+
|
|
693
|
+
waits[name] = wait_on_list
|
|
694
|
+
if not waits[name]:
|
|
695
|
+
event.set()
|
|
696
|
+
|
|
697
|
+
proc = multiprocessing.Process(
|
|
698
|
+
name=name, target=self.run, args=(name, event), kwargs=kwargs
|
|
699
|
+
)
|
|
700
|
+
proc.start()
|
|
701
|
+
processes[name] = proc
|
|
702
|
+
|
|
703
|
+
# Wait for the processes to finish and check if we should send any
|
|
704
|
+
# dependency events.
|
|
705
|
+
while processes:
|
|
706
|
+
for proc in list(processes.values()):
|
|
707
|
+
proc.join(0.3)
|
|
708
|
+
if show_ui:
|
|
709
|
+
# DEBUG: Comment to debug
|
|
710
|
+
if expanded_ui:
|
|
711
|
+
print_ui_expanded(
|
|
712
|
+
events, processes, fails, line_counts, build_stats, "build"
|
|
713
|
+
)
|
|
714
|
+
else:
|
|
715
|
+
print_ui(events, processes, fails)
|
|
716
|
+
if proc.exitcode is None:
|
|
717
|
+
continue
|
|
718
|
+
processes.pop(proc.name)
|
|
719
|
+
if proc.exitcode != 0:
|
|
720
|
+
fails.append(proc.name)
|
|
721
|
+
is_failure = True
|
|
722
|
+
else:
|
|
723
|
+
is_failure = False
|
|
724
|
+
for name in waits:
|
|
725
|
+
if proc.name in waits[name]:
|
|
726
|
+
if is_failure:
|
|
727
|
+
if name in processes:
|
|
728
|
+
processes[name].terminate()
|
|
729
|
+
time.sleep(0.1)
|
|
730
|
+
waits[name].remove(proc.name)
|
|
731
|
+
if not waits[name] and not events[name].is_set():
|
|
732
|
+
events[name].set()
|
|
733
|
+
|
|
734
|
+
if fails:
|
|
735
|
+
sys.stderr.write("The following failures were reported\n")
|
|
736
|
+
last_outs = {}
|
|
737
|
+
for fail in fails:
|
|
738
|
+
log_file = self.dirs.logs / f"{fail}.log"
|
|
739
|
+
try:
|
|
740
|
+
with io.open(log_file) as fp:
|
|
741
|
+
fp.seek(0, 2)
|
|
742
|
+
end = fp.tell()
|
|
743
|
+
ind = end - 4096
|
|
744
|
+
if ind > 0:
|
|
745
|
+
fp.seek(ind)
|
|
746
|
+
else:
|
|
747
|
+
fp.seek(0)
|
|
748
|
+
last_out = fp.read()
|
|
749
|
+
if show_ui:
|
|
750
|
+
sys.stderr.write("=" * 20 + f" {fail} " + "=" * 20 + "\n")
|
|
751
|
+
sys.stderr.write(fp.read() + "\n\n")
|
|
752
|
+
except FileNotFoundError:
|
|
753
|
+
last_outs[fail] = f"Log file not found: {log_file}"
|
|
754
|
+
log.error("Build step %s has failed", fail)
|
|
755
|
+
log.error(last_out)
|
|
756
|
+
if show_ui:
|
|
757
|
+
sys.stderr.flush()
|
|
758
|
+
if cleanup:
|
|
759
|
+
log.debug("Performing cleanup.")
|
|
760
|
+
self.cleanup()
|
|
761
|
+
sys.exit(1)
|
|
762
|
+
if show_ui:
|
|
763
|
+
time.sleep(0.3)
|
|
764
|
+
if expanded_ui:
|
|
765
|
+
print_ui_expanded(
|
|
766
|
+
events, processes, fails, line_counts, build_stats, "build"
|
|
767
|
+
)
|
|
768
|
+
else:
|
|
769
|
+
print_ui(events, processes, fails)
|
|
770
|
+
sys.stdout.write("\n")
|
|
771
|
+
sys.stdout.flush()
|
|
772
|
+
if cleanup:
|
|
773
|
+
log.debug("Performing cleanup.")
|
|
774
|
+
self.cleanup()
|
|
775
|
+
|
|
776
|
+
def check_prereqs(self) -> List[str]:
|
|
777
|
+
"""
|
|
778
|
+
Check pre-requsists for build.
|
|
779
|
+
|
|
780
|
+
This method verifies all requrements for a successful build are satisfied.
|
|
781
|
+
|
|
782
|
+
:return: Returns a list of string describing failed checks
|
|
783
|
+
:rtype: list
|
|
784
|
+
"""
|
|
785
|
+
fail: List[str] = []
|
|
786
|
+
if sys.platform == "linux":
|
|
787
|
+
if not self.toolchain or not self.toolchain.exists():
|
|
788
|
+
fail.append(
|
|
789
|
+
f"Toolchain for {self.arch} does not exist. Please pip install ppbt."
|
|
790
|
+
)
|
|
791
|
+
return fail
|
|
792
|
+
|
|
793
|
+
def __call__(
    self,
    steps: Optional[Sequence[str]] = None,
    arch: Optional[str] = None,
    clean: bool = True,
    cleanup: bool = True,
    force_download: bool = False,
    download_only: bool = False,
    show_ui: bool = False,
    log_level: str = "WARNING",
    expanded_ui: bool = False,
) -> None:
    """
    Set the architecture, define the steps, clean if needed, download what is needed, and build.

    :param steps: The steps to run, defaults to None (meaning every known recipe)
    :type steps: list, optional
    :param arch: The architecture to build, defaults to None
    :type arch: str, optional
    :param clean: If true, cleans the directories first, defaults to True
    :type clean: bool, optional
    :param cleanup: Cleans up after build if true, defaults to True
    :type cleanup: bool, optional
    :param force_download: Whether or not to download the content if it already exists, defaults to False
    :type force_download: bool, optional
    :param download_only: Stop after downloading sources, skipping the build, defaults to False
    :type download_only: bool, optional
    :param show_ui: Show the interactive progress UI instead of console log output, defaults to False
    :type show_ui: bool, optional
    :param log_level: Console log handler level used when show_ui is False, defaults to "WARNING"
    :type log_level: str, optional
    :param expanded_ui: Whether to use expanded UI with progress bars
    :type expanded_ui: bool, optional
    """
    # Configure the root logger at NOTSET so the attached handlers decide
    # what actually gets emitted.
    log = logging.getLogger(None)
    log.setLevel(logging.NOTSET)

    # Console logging is only attached when the progress UI is disabled.
    stream_handler: Optional[logging.Handler] = None
    if not show_ui:
        stream_handler = logging.StreamHandler()
        stream_handler.setLevel(logging.getLevelName(log_level))
        log.addHandler(stream_handler)

    # Always capture INFO and above to build.log under the logs directory.
    os.makedirs(self.dirs.logs, exist_ok=True)
    file_handler = logging.FileHandler(self.dirs.logs / "build.log")
    file_handler.setLevel(logging.INFO)
    log.addHandler(file_handler)

    if arch:
        self.set_arch(arch)

    # No explicit steps means: run every recipe this builder knows about.
    step_names = list(steps) if steps is not None else list(self.recipies)

    # Prerequisite failures abort immediately -- unless we are only
    # downloading sources, which needs no toolchain.
    failures = self.check_prereqs()
    if not download_only and failures:
        for _ in failures:
            sys.stderr.write(f"{_}\n")
        sys.stderr.flush()
        sys.exit(1)

    if clean:
        self.clean()

    # Cross-compiling: make sure a native-architecture relenv environment
    # exists under DATA_DIR, creating one if necessary.
    if self.build_arch != self.arch:
        native_root = DATA_DIR / "native"
        if not native_root.exists():
            if "RELENV_NATIVE_PY_VERSION" in os.environ:
                version = os.environ["RELENV_NATIVE_PY_VERSION"]
            else:
                version = self.version
            # Local import, presumably to avoid an import cycle -- TODO confirm.
            from relenv.create import create

            create("native", DATA_DIR, version=version)

    # Start a process for each build passing it an event used to notify each
    # process if it's dependencies have finished.
    try:
        self.download_files(
            step_names,
            force_download=force_download,
            show_ui=show_ui,
            expanded_ui=expanded_ui,
        )
        if download_only:
            return
        self.build(
            step_names,
            cleanup,
            show_ui=show_ui,
            log_level=log_level,
            expanded_ui=expanded_ui,
        )
    finally:
        # Detach the handlers added above so repeated invocations do not
        # accumulate duplicate handlers on the root logger.
        log.removeHandler(file_handler)
        if stream_handler is not None:
            log.removeHandler(stream_handler)
|
|
883
|
+
|
|
884
|
+
|
|
885
|
+
class Builds:
    """Collection of platform-specific builders."""

    def __init__(self) -> None:
        """Initialize an empty collection of builders."""
        # Maps a platform name to the Builder registered for it.
        self.builds: Dict[str, Builder] = {}

    def add(self, platform: str, *args: Any, **kwargs: Any) -> Builder:
        """
        Add a builder for a specific platform.

        Either pass a pre-constructed builder via the ``builder`` keyword
        (which must be the only argument), or pass the positional and keyword
        arguments used to construct a new ``Builder``.
        """
        if "builder" not in kwargs:
            # No explicit builder supplied; construct one from the arguments.
            build = Builder(*args, **kwargs)
        else:
            candidate = kwargs.pop("builder")
            # A pre-built builder must be the sole argument.
            if args or kwargs:
                raise RuntimeError(
                    "builder keyword can not be used with other kwargs or args"
                )
            build = cast(Builder, candidate)
        self.builds[platform] = build
        return build
|
|
905
|
+
|
|
906
|
+
|
|
907
|
+
# Module-level registry of platform builders; presumably populated elsewhere
# via builds.add(...) -- confirm against the platform build modules.
builds = Builds()
|