relenv 0.21.1__py3-none-any.whl → 0.22.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- relenv/__init__.py +14 -2
- relenv/__main__.py +12 -6
- relenv/_resources/xz/config.h +148 -0
- relenv/_resources/xz/readme.md +4 -0
- relenv/build/__init__.py +28 -30
- relenv/build/common/__init__.py +50 -0
- relenv/build/common/_sysconfigdata_template.py +72 -0
- relenv/build/common/builder.py +907 -0
- relenv/build/common/builders.py +163 -0
- relenv/build/common/download.py +324 -0
- relenv/build/common/install.py +609 -0
- relenv/build/common/ui.py +432 -0
- relenv/build/darwin.py +128 -14
- relenv/build/linux.py +296 -78
- relenv/build/windows.py +259 -44
- relenv/buildenv.py +48 -17
- relenv/check.py +10 -5
- relenv/common.py +499 -163
- relenv/create.py +147 -7
- relenv/fetch.py +16 -4
- relenv/manifest.py +15 -7
- relenv/python-versions.json +329 -0
- relenv/pyversions.py +817 -30
- relenv/relocate.py +101 -55
- relenv/runtime.py +452 -253
- relenv/toolchain.py +9 -3
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/METADATA +1 -1
- relenv-0.22.0.dist-info/RECORD +48 -0
- tests/__init__.py +2 -0
- tests/_pytest_typing.py +45 -0
- tests/conftest.py +42 -36
- tests/test_build.py +426 -9
- tests/test_common.py +311 -48
- tests/test_create.py +149 -6
- tests/test_downloads.py +19 -15
- tests/test_fips_photon.py +6 -3
- tests/test_module_imports.py +44 -0
- tests/test_pyversions_runtime.py +177 -0
- tests/test_relocate.py +45 -39
- tests/test_relocate_module.py +257 -0
- tests/test_runtime.py +1802 -6
- tests/test_verify_build.py +500 -34
- relenv/build/common.py +0 -1609
- relenv-0.21.1.dist-info/RECORD +0 -35
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/WHEEL +0 -0
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/entry_points.txt +0 -0
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/licenses/LICENSE.md +0 -0
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/licenses/NOTICE +0 -0
- {relenv-0.21.1.dist-info → relenv-0.22.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,432 @@
|
|
|
1
|
+
# Copyright 2022-2025 Broadcom.
|
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
|
3
|
+
"""
|
|
4
|
+
UI and build statistics utilities.
|
|
5
|
+
"""
|
|
6
|
+
from __future__ import annotations
|
|
7
|
+
|
|
8
|
+
import logging
|
|
9
|
+
import os
|
|
10
|
+
import pathlib
|
|
11
|
+
import sys
|
|
12
|
+
import threading
|
|
13
|
+
from typing import Dict, MutableMapping, Optional, Sequence, cast
|
|
14
|
+
|
|
15
|
+
import multiprocessing
|
|
16
|
+
|
|
17
|
+
from typing import TYPE_CHECKING, TypedDict
|
|
18
|
+
|
|
19
|
+
if TYPE_CHECKING:
|
|
20
|
+
from multiprocessing.synchronize import Event as SyncEvent
|
|
21
|
+
else:
|
|
22
|
+
SyncEvent = None
|
|
23
|
+
|
|
24
|
+
from relenv.common import DATA_DIR
|
|
25
|
+
|
|
26
|
+
from .download import CICD
|
|
27
|
+
|
|
28
|
+
|
|
29
|
+
log = logging.getLogger(__name__)
|
|
30
|
+
|
|
31
|
+
|
|
32
|
+
# ANSI color codes for terminal output
GREEN = "\033[0;32m"
YELLOW = "\033[1;33m"
RED = "\033[0;31m"
END = "\033[0m"
# ANSI "cursor up one line"; used to redraw the multi-line UI in place.
MOVEUP = "\033[F"


# Detect terminal capabilities for Unicode vs ASCII output
USE_UNICODE = True

# Allow forcing ASCII mode via environment variable (useful for testing/debugging)
if os.environ.get("RELENV_ASCII"):
    USE_UNICODE = False
elif sys.platform == "win32":
    # Check if we're in a modern terminal that supports Unicode
    # Windows Terminal and modern PowerShell support Unicode
    wt_session = os.environ.get("WT_SESSION")
    term_program = os.environ.get("TERM_PROGRAM")
    if not wt_session and not term_program:
        # Likely cmd.exe or old PowerShell, use ASCII
        USE_UNICODE = False


# Spinner frames for in-progress builds
if USE_UNICODE:
    # Modern Unicode spinner (looks great in most terminals)
    SPINNER_FRAMES = ["⠋", "⠙", "⠹", "⠸", "⠼", "⠴", "⠦", "⠧", "⠇", "⠏"]
    SYMBOL_PENDING = "◯"
    SYMBOL_RUNNING = None  # Will use spinner
    SYMBOL_SUCCESS = "✓"
    SYMBOL_FAILED = "✗"
else:
    # ASCII fallback for Windows cmd.exe
    SPINNER_FRAMES = ["|", "/", "-", "\\"]
    SYMBOL_PENDING = "o"
    SYMBOL_RUNNING = None  # Will use spinner
    SYMBOL_SUCCESS = "+"
    SYMBOL_FAILED = "X"


# Build statistics filename (path computed at runtime)
BUILD_STATS_FILENAME = "build_stats.json"
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def _get_build_stats_file() -> pathlib.Path:
    """Return the path of the persisted build statistics file.

    The path is computed lazily (instead of being a module-level
    constant) so that importing this module does not require DATA_DIR
    to be resolved at import time, following CPython conventions.

    Returns:
        Location of ``build_stats.json`` inside the relenv data directory.
    """
    stats_path = DATA_DIR / BUILD_STATS_FILENAME
    return stats_path
|
|
88
|
+
|
|
89
|
+
|
|
90
|
+
class SpinnerState:
    """Thread-safe bookkeeping of spinner animation frames.

    Each named spinner keeps its own frame counter so animation stays
    smooth and independent across repeated UI refreshes.
    """

    def __init__(self) -> None:
        """Create an empty, lock-protected frame table."""
        self._lock = threading.Lock()
        self._state: Dict[str, int] = {}

    def get(self, name: str) -> int:
        """Return the current frame index for a named spinner.

        Args:
            name: The spinner identifier

        Returns:
            The current frame index (0 if spinner hasn't been used yet)
        """
        with self._lock:
            return self._state.get(name, 0)

    def increment(self, name: str) -> None:
        """Advance the frame counter for a named spinner by one.

        Args:
            name: The spinner identifier
        """
        with self._lock:
            self._state[name] = self._state.get(name, 0) + 1

    def reset(self, name: Optional[str] = None) -> None:
        """Forget spinner state.

        Args:
            name: The spinner to reset, or None to reset all spinners
        """
        with self._lock:
            if name is None:
                self._state.clear()
            else:
                # pop() tolerates unknown names, matching the original
                # "only delete if present" behavior.
                self._state.pop(name, None)
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
# Module-level spinner state instance, shared by the UI rendering
# functions so frame counters persist between successive refreshes.
_spinner_state = SpinnerState()
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
class BuildStats(TypedDict):
    """Structure for tracking build step statistics."""

    # Exponential moving average of log-line counts across builds.
    avg_lines: int
    # Number of samples folded into the average so far.
    samples: int
    # Line count observed during the most recent build.
    last_lines: int
|
|
146
|
+
|
|
147
|
+
|
|
148
|
+
def print_ui(
    events: MutableMapping[str, "multiprocessing.synchronize.Event"],
    processes: MutableMapping[str, multiprocessing.Process],
    fails: Sequence[str],
    flipstat: Optional[Dict[str, tuple[int, float]]] = None,
) -> None:
    """
    Prints the UI during the relenv building process.

    :param events: A dictionary of events that are updated during the build process
    :type events: dict
    :param processes: A dictionary of build processes
    :type processes: dict
    :param fails: A list of processes that have failed
    :type fails: list
    :param flipstat: Deprecated parameter, no longer used
    :type flipstat: dict, optional
    """
    # No interactive terminal in CI/CD pipelines; just flush and bail.
    if CICD:
        sys.stdout.flush()
        return
    segments = []
    for name, event in events.items():
        if not event.is_set():
            # Not started yet.
            segment = " {}{}".format(YELLOW, SYMBOL_PENDING)
        elif name in processes:
            # Actively building: draw the spinner and advance its frame.
            frame = SPINNER_FRAMES[_spinner_state.get(name) % len(SPINNER_FRAMES)]
            _spinner_state.increment(name)
            segment = " {}{}".format(GREEN, frame)
        elif name in fails:
            # Finished unsuccessfully.
            segment = " {}{}".format(RED, SYMBOL_FAILED)
        else:
            # Finished successfully.
            segment = " {}{}".format(GREEN, SYMBOL_SUCCESS)
        segments.append(segment)
    segments.append(" " + END)
    sys.stdout.write("\r")
    sys.stdout.write("".join(segments))
    sys.stdout.flush()
|
|
191
|
+
|
|
192
|
+
|
|
193
|
+
def _format_progress(progress: int) -> str:
    """Render a 20-segment progress bar for a 0-100 percentage.

    :param progress: Percent complete (callers clamp to 0-100)
    :type progress: int
    :return: A leading-space ``[█████░░░]``-style bar fragment
    :rtype: str
    """
    filled = int(progress / 5)  # 20 segments = 100% / 5
    if USE_UNICODE:
        bar = "█" * filled + "░" * (20 - filled)
    else:
        bar = "#" * filled + "-" * (20 - filled)
    return f" [{bar}]"


def print_ui_expanded(
    events: MutableMapping[str, "multiprocessing.synchronize.Event"],
    processes: MutableMapping[str, multiprocessing.Process],
    fails: Sequence[str],
    line_counts: MutableMapping[str, int],
    build_stats: Dict[str, BuildStats],
    phase: str = "build",
) -> None:
    """
    Prints an expanded UI with progress bars during the relenv building process.

    :param events: A dictionary of events that are updated during the build process
    :type events: dict
    :param processes: A dictionary of build processes
    :type processes: dict
    :param fails: A list of processes that have failed
    :type fails: list
    :param line_counts: Current line counts for each step
    :type line_counts: MutableMapping[str, int]
    :param build_stats: Historical build statistics
    :type build_stats: dict
    :param phase: The current phase ("download" or "build")
    :type phase: str
    """
    # No interactive terminal in CI/CD pipelines; just flush and bail.
    if CICD:
        sys.stdout.flush()
        return

    # Track state per phase to handle download->build transitions
    if not hasattr(print_ui_expanded, "_phase_state"):
        print_ui_expanded._phase_state = {}  # type: ignore

    phase_state = print_ui_expanded._phase_state  # type: ignore

    # Number of lines = number of steps + 2 (header + separator)
    num_lines = len(events) + 2

    # If this phase has been called before, move up to overwrite previous output
    if phase in phase_state:
        prev_lines = phase_state[phase]
        # Move up by previous line count to overwrite
        sys.stdout.write(MOVEUP * prev_lines)
    else:
        # First call for this phase - if we're starting builds after downloads,
        # add a newline to separate them
        if phase == "build" and "download" in phase_state:
            sys.stdout.write("\n")

    # Store line count for this phase
    phase_state[phase] = num_lines

    # Clear line and print header
    phase_name = "Downloads" if phase == "download" else "Builds"
    sys.stdout.write("\r\033[K")  # Clear line
    sys.stdout.write(f"{phase_name}\n")
    sys.stdout.write("─" * 70 + "\n")

    # Print each step
    for name in events:
        # Determine status
        if not events[name].is_set():
            # Pending
            status_symbol = f"{YELLOW}{SYMBOL_PENDING}{END}"
            status_text = "Pending"
            progress_bar = ""
        elif name in processes:
            # Running - show spinner and progress
            frame_idx = _spinner_state.get(name) % len(SPINNER_FRAMES)
            spinner = SPINNER_FRAMES[frame_idx]
            _spinner_state.increment(name)
            status_symbol = f"{GREEN}{spinner}{END}"

            # Determine if this is download or build phase
            phase_action = "Downloading" if phase == "download" else "Building"

            # Calculate progress if we have historical data
            current_lines = line_counts.get(name, 0)
            if phase == "download":
                # For downloads, line_counts stores bytes downloaded and total bytes
                # Format: line_counts[name] = downloaded, line_counts[f"{name}_total"] = total
                downloaded = current_lines
                total = line_counts.get(f"{name}_total", 0)
                if total > 0:
                    progress = min(100, int((downloaded / total) * 100))
                    status_text = f"{phase_action} {progress:3d}%"
                    progress_bar = _format_progress(progress)
                else:
                    status_text = phase_action
                    progress_bar = ""
            else:
                # For builds, use historical line count data
                if name in build_stats and build_stats[name]["avg_lines"] > 0:
                    avg_lines = build_stats[name]["avg_lines"]
                    progress = min(100, int((current_lines / avg_lines) * 100))
                    status_text = f"{phase_action} {progress:3d}%"
                    progress_bar = _format_progress(progress)
                else:
                    status_text = phase_action
                    progress_bar = ""
        elif name in fails:
            # Failed
            status_symbol = f"{RED}{SYMBOL_FAILED}{END}"
            status_text = "Failed"
            progress_bar = ""
        else:
            # Success
            status_symbol = f"{GREEN}{SYMBOL_SUCCESS}{END}"
            status_text = "Done"
            progress_bar = ""

        # Format step name (truncate/pad to 20 chars)
        name_display = f"{name:<20}"[:20]
        status_display = f"{status_text:<12}"

        # Clear line before writing to prevent leftover text
        sys.stdout.write("\r\033[K")
        sys.stdout.write(
            f"{status_symbol} {name_display} {status_display}{progress_bar}\n"
        )

    sys.stdout.flush()
|
|
329
|
+
|
|
330
|
+
|
|
331
|
+
def load_build_stats() -> Dict[str, BuildStats]:
    """
    Load historical build statistics from disk.

    Failures to read or parse the file are non-fatal: the stats are only
    a progress-estimation aid, so a fresh (empty) mapping is returned.

    :return: Dictionary mapping step names to their statistics
    :rtype: dict
    """
    # Local import keeps the module import-light; hoisted out of the
    # try block so only actual file/parse errors are caught.
    import json

    stats_file = _get_build_stats_file()
    if not stats_file.exists():
        return {}
    try:
        # JSON is defined as UTF-8; don't depend on the platform default.
        with open(stats_file, "r", encoding="utf-8") as f:
            data = json.load(f)
        return cast(Dict[str, BuildStats], data)
    except (json.JSONDecodeError, UnicodeDecodeError, OSError):
        log.warning("Failed to load build stats, starting fresh")
        return {}
|
|
350
|
+
|
|
351
|
+
|
|
352
|
+
def save_build_stats(stats: Dict[str, BuildStats]) -> None:
    """
    Save build statistics to disk.

    Failures are logged and swallowed: stats are a best-effort
    progress-estimation aid and must never break a build.

    :param stats: Dictionary mapping step names to their statistics
    :type stats: dict
    """
    # Local import keeps the module import-light; hoisted out of the
    # try block so only filesystem errors are caught below.
    import json

    try:
        stats_file = _get_build_stats_file()
        stats_file.parent.mkdir(parents=True, exist_ok=True)
        # JSON is defined as UTF-8; don't depend on the platform default.
        with open(stats_file, "w", encoding="utf-8") as f:
            json.dump(stats, f, indent=2)
    except OSError:
        log.warning("Failed to save build stats")
|
|
368
|
+
|
|
369
|
+
|
|
370
|
+
def update_build_stats(step_name: str, line_count: int) -> None:
    """
    Record a new line-count sample for a build step.

    New samples are folded in with an exponential moving average that
    weights the latest observation at 0.7.

    :param step_name: Name of the build step
    :type step_name: str
    :param line_count: Number of log lines for this build
    :type line_count: int
    """
    stats = load_build_stats()
    previous = stats.get(step_name)
    if previous is None:
        # First observation for this step: seed the average directly.
        stats[step_name] = BuildStats(
            avg_lines=line_count, samples=1, last_lines=line_count
        )
    else:
        # Exponential moving average: 70% new value, 30% old average
        blended = int(0.7 * line_count + 0.3 * previous["avg_lines"])
        stats[step_name] = BuildStats(
            avg_lines=blended,
            samples=previous["samples"] + 1,
            last_lines=line_count,
        )
    save_build_stats(stats)
|
|
396
|
+
|
|
397
|
+
|
|
398
|
+
class LineCountHandler(logging.Handler):
    """
    Logging handler that tallies emitted records for progress tracking.

    Every record bumps a per-step counter in a shared multiprocessing
    dict, which the UI reads to estimate build progress in real time.
    """

    def __init__(self, step_name: str, shared_dict: MutableMapping[str, int]) -> None:
        """
        Initialize the line count handler.

        :param step_name: Name of the build step being tracked
        :type step_name: str
        :param shared_dict: Multiprocessing-safe dict for sharing counts
        :type shared_dict: MutableMapping[str, int]
        """
        super().__init__()
        self.step_name = step_name
        self.shared_dict = shared_dict

    def emit(self, record: logging.LogRecord) -> None:
        """
        Count each log record as one line.

        :param record: The log record to process
        :type record: logging.LogRecord
        """
        try:
            self.shared_dict[self.step_name] = (
                self.shared_dict.get(self.step_name, 0) + 1
            )
        except Exception:
            # Never let UI bookkeeping break the build itself.
            pass
|
relenv/build/darwin.py
CHANGED
|
@@ -1,17 +1,36 @@
|
|
|
1
1
|
# Copyright 2025 Broadcom.
|
|
2
2
|
# SPDX-License-Identifier: Apache-2
|
|
3
|
+
# mypy: ignore-errors
|
|
3
4
|
"""
|
|
4
5
|
The darwin build process.
|
|
5
6
|
"""
|
|
6
|
-
import
|
|
7
|
+
from __future__ import annotations
|
|
7
8
|
|
|
8
|
-
|
|
9
|
-
|
|
9
|
+
import glob
|
|
10
|
+
import io
|
|
11
|
+
import os
|
|
12
|
+
import pathlib
|
|
13
|
+
import shutil
|
|
14
|
+
import tarfile
|
|
15
|
+
import time
|
|
16
|
+
import urllib.request
|
|
17
|
+
from typing import IO, MutableMapping
|
|
18
|
+
|
|
19
|
+
from ..common import DARWIN, MACOS_DEVELOPMENT_TARGET, arches, runcmd
|
|
20
|
+
from .common import (
|
|
21
|
+
Dirs,
|
|
22
|
+
build_openssl,
|
|
23
|
+
build_sqlite,
|
|
24
|
+
builds,
|
|
25
|
+
finalize,
|
|
26
|
+
get_dependency_version,
|
|
27
|
+
update_sbom_checksums,
|
|
28
|
+
)
|
|
10
29
|
|
|
11
30
|
ARCHES = arches[DARWIN]
|
|
12
31
|
|
|
13
32
|
|
|
14
|
-
def populate_env(env, dirs):
|
|
33
|
+
def populate_env(env: MutableMapping[str, str], dirs: Dirs) -> None:
|
|
15
34
|
"""
|
|
16
35
|
Make sure we have the correct environment variables set.
|
|
17
36
|
|
|
@@ -34,7 +53,66 @@ def populate_env(env, dirs):
|
|
|
34
53
|
env["CFLAGS"] = " ".join(cflags).format(prefix=dirs.prefix)
|
|
35
54
|
|
|
36
55
|
|
|
37
|
-
def
|
|
56
|
+
def update_expat(dirs: Dirs, env: MutableMapping[str, str]) -> None:
    """
    Update the bundled expat library to the latest version.

    Python ships with an older bundled expat. This function updates it
    to the latest version for security and bug fixes.

    :param dirs: Build directory layout; ``dirs.source`` is the Python
        source tree and ``dirs.tmpbuild`` a scratch directory
    :param env: Build environment variables (accepted for signature
        parity with other build hooks; not read here)
    """
    # Get version from JSON
    expat_info = get_dependency_version("expat", "darwin")
    if not expat_info:
        # No update needed, use bundled version
        return

    version = expat_info["version"]
    # Upstream release tags look like R_2_6_2 for version 2.6.2.
    version_tag = version.replace(".", "_")
    url = f"https://github.com/libexpat/libexpat/releases/download/R_{version_tag}/expat-{version}.tar.xz"

    expat_dir = pathlib.Path(dirs.source) / "Modules" / "expat"
    if not expat_dir.exists():
        # No expat directory, skip
        return

    # Download expat tarball
    tmpbuild = pathlib.Path(dirs.tmpbuild)
    tarball_path = tmpbuild / f"expat-{version}.tar.xz"
    urllib.request.urlretrieve(url, str(tarball_path))

    # Extract tarball
    # NOTE(review): extractall() without an extraction filter trusts the
    # member paths in the tarball; acceptable for a pinned upstream
    # release, but consider filter="data" where supported.
    with tarfile.open(tarball_path) as tar:
        tar.extractall(path=str(tmpbuild))

    # Copy source files to Modules/expat/
    expat_source_dir = tmpbuild / f"expat-{version}" / "lib"
    updated_files = []
    for source_file in ["*.h", "*.c"]:
        for file_path in glob.glob(str(expat_source_dir / source_file)):
            target_file = expat_dir / pathlib.Path(file_path).name
            # Remove old file if it exists
            if target_file.exists():
                target_file.unlink()
            shutil.copy2(file_path, str(expat_dir))
            updated_files.append(target_file)

    # Touch all updated files to ensure make rebuilds them
    # (The tarball may contain files with newer timestamps)
    now = time.time()
    for target_file in updated_files:
        os.utime(target_file, (now, now))

    # Update SBOM with correct checksums for updated expat files
    files_to_update = {}
    for target_file in updated_files:
        # SBOM uses relative paths from Python source root
        relative_path = f"Modules/expat/{target_file.name}"
        files_to_update[relative_path] = target_file

    update_sbom_checksums(dirs.source, files_to_update)
|
|
113
|
+
|
|
114
|
+
|
|
115
|
+
def build_python(env: MutableMapping[str, str], dirs: Dirs, logfp: IO[str]) -> None:
|
|
38
116
|
"""
|
|
39
117
|
Run the commands to build Python.
|
|
40
118
|
|
|
@@ -45,6 +123,9 @@ def build_python(env, dirs, logfp):
|
|
|
45
123
|
:param logfp: A handle for the log file
|
|
46
124
|
:type logfp: file
|
|
47
125
|
"""
|
|
126
|
+
# Update bundled expat to latest version
|
|
127
|
+
update_expat(dirs, env)
|
|
128
|
+
|
|
48
129
|
env["LDFLAGS"] = "-Wl,-rpath,{prefix}/lib {ldflags}".format(
|
|
49
130
|
prefix=dirs.prefix, ldflags=env["LDFLAGS"]
|
|
50
131
|
)
|
|
@@ -73,33 +154,66 @@ def build_python(env, dirs, logfp):
|
|
|
73
154
|
|
|
74
155
|
build = builds.add("darwin", populate_env=populate_env)
|
|
75
156
|
|
|
157
|
+
# Get dependency versions from JSON (with fallback to hardcoded values)
|
|
158
|
+
openssl_info = get_dependency_version("openssl", "darwin")
|
|
159
|
+
if openssl_info:
|
|
160
|
+
openssl_version = openssl_info["version"]
|
|
161
|
+
openssl_url = openssl_info["url"]
|
|
162
|
+
openssl_checksum = openssl_info["sha256"]
|
|
163
|
+
else:
|
|
164
|
+
openssl_version = "3.5.4"
|
|
165
|
+
openssl_url = "https://github.com/openssl/openssl/releases/download/openssl-{version}/openssl-{version}.tar.gz"
|
|
166
|
+
openssl_checksum = "b75daac8e10f189abe28a076ba5905d363e4801f"
|
|
167
|
+
|
|
76
168
|
build.add(
|
|
77
169
|
"openssl",
|
|
78
170
|
build_func=build_openssl,
|
|
79
171
|
download={
|
|
80
|
-
"url":
|
|
81
|
-
"version":
|
|
82
|
-
"checksum":
|
|
172
|
+
"url": openssl_url,
|
|
173
|
+
"version": openssl_version,
|
|
174
|
+
"checksum": openssl_checksum,
|
|
83
175
|
},
|
|
84
176
|
)
|
|
85
177
|
|
|
178
|
+
# Get XZ version from JSON
|
|
179
|
+
xz_info = get_dependency_version("xz", "darwin")
|
|
180
|
+
if xz_info:
|
|
181
|
+
xz_version = xz_info["version"]
|
|
182
|
+
xz_url = xz_info["url"]
|
|
183
|
+
xz_checksum = xz_info["sha256"]
|
|
184
|
+
else:
|
|
185
|
+
xz_version = "5.8.1"
|
|
186
|
+
xz_url = "http://tukaani.org/xz/xz-{version}.tar.gz"
|
|
187
|
+
xz_checksum = "ed4d5589c4cfe84e1697bd02a9954b76af336931"
|
|
188
|
+
|
|
86
189
|
build.add(
|
|
87
190
|
"XZ",
|
|
88
191
|
download={
|
|
89
|
-
"url":
|
|
90
|
-
"version":
|
|
91
|
-
"checksum":
|
|
192
|
+
"url": xz_url,
|
|
193
|
+
"version": xz_version,
|
|
194
|
+
"checksum": xz_checksum,
|
|
92
195
|
},
|
|
93
196
|
)
|
|
94
197
|
|
|
198
|
+
# Get SQLite version from JSON
|
|
199
|
+
sqlite_info = get_dependency_version("sqlite", "darwin")
|
|
200
|
+
if sqlite_info:
|
|
201
|
+
sqlite_url = sqlite_info["url"]
|
|
202
|
+
sqlite_checksum = sqlite_info["sha256"]
|
|
203
|
+
sqlite_version_num = sqlite_info.get("sqliteversion", "3500400")
|
|
204
|
+
else:
|
|
205
|
+
sqlite_version_num = "3500400"
|
|
206
|
+
sqlite_url = "https://sqlite.org/2025/sqlite-autoconf-{version}.tar.gz"
|
|
207
|
+
sqlite_checksum = "145048005c777796dd8494aa1cfed304e8c34283"
|
|
208
|
+
|
|
95
209
|
build.add(
|
|
96
210
|
name="SQLite",
|
|
97
211
|
build_func=build_sqlite,
|
|
98
212
|
download={
|
|
99
|
-
"url":
|
|
213
|
+
"url": sqlite_url,
|
|
100
214
|
"fallback_url": "https://woz.io/relenv/dependencies/sqlite-autoconf-{version}.tar.gz",
|
|
101
|
-
"version":
|
|
102
|
-
"checksum":
|
|
215
|
+
"version": sqlite_version_num,
|
|
216
|
+
"checksum": sqlite_checksum,
|
|
103
217
|
},
|
|
104
218
|
)
|
|
105
219
|
|