pysfi 0.1.10__py3-none-any.whl → 0.1.11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.10.dist-info → pysfi-0.1.11.dist-info}/METADATA +7 -7
- pysfi-0.1.11.dist-info/RECORD +60 -0
- {pysfi-0.1.10.dist-info → pysfi-0.1.11.dist-info}/entry_points.txt +12 -2
- sfi/__init__.py +1 -1
- sfi/alarmclock/alarmclock.py +40 -40
- sfi/bumpversion/__init__.py +1 -1
- sfi/cleanbuild/cleanbuild.py +155 -0
- sfi/condasetup/condasetup.py +116 -0
- sfi/docscan/__init__.py +1 -1
- sfi/docscan/docscan_gui.py +1 -1
- sfi/docscan/lang/eng.py +152 -152
- sfi/docscan/lang/zhcn.py +170 -170
- sfi/filedate/filedate.py +185 -112
- sfi/gittool/__init__.py +2 -0
- sfi/gittool/gittool.py +401 -0
- sfi/llmclient/llmclient.py +592 -0
- sfi/llmquantize/llmquantize.py +480 -0
- sfi/llmserver/llmserver.py +335 -0
- sfi/makepython/makepython.py +2 -2
- sfi/pdfsplit/pdfsplit.py +4 -4
- sfi/pyarchive/pyarchive.py +418 -0
- sfi/pyembedinstall/pyembedinstall.py +629 -0
- sfi/pylibpack/pylibpack.py +813 -269
- sfi/pylibpack/rules/numpy.json +22 -0
- sfi/pylibpack/rules/pymupdf.json +10 -0
- sfi/pylibpack/rules/pyqt5.json +19 -0
- sfi/pylibpack/rules/pyside2.json +23 -0
- sfi/pylibpack/rules/scipy.json +23 -0
- sfi/pylibpack/rules/shiboken2.json +24 -0
- sfi/pyloadergen/pyloadergen.py +271 -572
- sfi/pypack/pypack.py +822 -471
- sfi/pyprojectparse/__init__.py +0 -0
- sfi/pyprojectparse/pyprojectparse.py +500 -0
- sfi/pysourcepack/pysourcepack.py +308 -369
- sfi/quizbase/__init__.py +0 -0
- sfi/quizbase/quizbase.py +828 -0
- sfi/quizbase/quizbase_gui.py +987 -0
- sfi/regexvalidate/__init__.py +0 -0
- sfi/regexvalidate/regex_help.html +284 -0
- sfi/regexvalidate/regexvalidate.py +468 -0
- sfi/taskkill/taskkill.py +0 -2
- pysfi-0.1.10.dist-info/RECORD +0 -39
- sfi/embedinstall/embedinstall.py +0 -478
- sfi/projectparse/projectparse.py +0 -152
- {pysfi-0.1.10.dist-info → pysfi-0.1.11.dist-info}/WHEEL +0 -0
- /sfi/{embedinstall → llmquantize}/__init__.py +0 -0
- /sfi/{projectparse → pyembedinstall}/__init__.py +0 -0
|
@@ -0,0 +1,629 @@
|
|
|
1
|
+
"""Download Python embeddable package to a specific directory."""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
import argparse
|
|
6
|
+
import logging
|
|
7
|
+
import platform
|
|
8
|
+
import shutil
|
|
9
|
+
import time
|
|
10
|
+
import zipfile
|
|
11
|
+
from contextlib import suppress
|
|
12
|
+
from dataclasses import dataclass
|
|
13
|
+
from functools import cached_property
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Final
|
|
16
|
+
from urllib.error import HTTPError, URLError
|
|
17
|
+
from urllib.request import Request, urlopen
|
|
18
|
+
|
|
19
|
+
from sfi.pyprojectparse.pyprojectparse import Solution
|
|
20
|
+
|
|
21
|
+
# Root logger configured so CLI output is plain messages without level prefixes.
logging.basicConfig(level=logging.INFO, format="%(message)s")

# Working directory captured at import time; used as the default install target.
cwd = Path.cwd()
logger = logging.getLogger(__name__)

# Default cache directory for downloaded embeddable zips.
_DEFAULT_CACHE_DIR = Path.home() / ".pysfi" / ".cache" / "embed-python"


# Architecture mapping: platform.machine() (lowercased) -> python.org
# embeddable-zip architecture tag.
# NOTE(review): "x86" maps to "amd64", so 32-bit hosts receive the 64-bit
# package — confirm this is intentional (python.org also ships "win32" embeds).
ARCH_DICT: Final = {
    "amd64": "amd64",
    "x86_64": "amd64",
    "x86": "amd64",
    "arm64": "arm64",
    "aarch64": "arm64",
}

# User-Agent for HTTP requests (some mirrors reject requests without one).
USER_AGENT: Final = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@dataclass(frozen=True)
class EmbedDownloader:
    """Class for downloading Python embeddable package.

    Resolves the requested ``version`` (auto-completing a ``major.minor``
    spec to the latest available patch), ranks official/mirror URLs by
    response speed, and downloads the Windows embeddable zip into
    ``cache_dir``.
    """

    version: str  # requested version, e.g. "3.8" or "3.8.10"
    arch: str  # embeddable-zip architecture tag ("amd64" or "arm64")
    cache_dir: Path  # directory where downloaded zips are cached
    offline: bool  # when True, never hit the network
    skip_speed_test: bool  # when True, use the official URL without ranking mirrors
    timeout: int = 5  # per-request timeout in seconds

    @cached_property
    def cache_file(self) -> Path:
        """Get the cache file path for the embeddable package."""
        return self.cache_dir / f"python-{self.version}-embed-{self.arch}.zip"

    @cached_property
    def download_urls(self) -> list[str]:
        """Get the list of URLs to try for download, fastest first."""
        if self.skip_speed_test:
            url = self.get_official_url_template(self.arch).format(version=self.version)
            logger.info(f"Skipping speed test, using official URL: {url}")
            return [url]
        return self.find_available_urls(self.version, self.arch, self.timeout)

    def download(self) -> bool:
        """Download the Python embeddable package to the cache file.

        Returns True when a valid cached zip already exists or a download
        succeeds; False otherwise.
        """
        return self.has_cached_file or self.download_python_package(
            self.cache_file, self.download_urls, self.offline
        )

    @cached_property
    def has_cached_file(self) -> bool:
        """Check if a valid (non-corrupted) embeddable package is cached.

        A corrupted cache file is deleted so a fresh download can replace it.
        """
        if not self.cache_file.exists():
            return False

        is_valid = zipfile.is_zipfile(self.cache_file)
        if not is_valid:
            logger.warning(f"Corrupted cache file detected: {self.cache_file}")
            logger.info("Deleting corrupted cache file and re-downloading...")
            try:
                self.cache_file.unlink()
            except OSError as e:
                logger.warning(f"Failed to delete corrupted cache file: {e}")
        else:
            logger.debug(f"Using cached file: {self.cache_file}")

        return is_valid

    @cached_property
    def latest_version(self) -> str | None:
        """Get the latest resolved version, or None if it could not be resolved."""
        version = self.get_latest_patch_version(self.version, self.arch, self.cache_dir)
        # get_latest_patch_version falls back to the input on failure; a
        # dot-less result means the input itself was unusable.
        return version if "." in version else None

    def download_with_progress(self, url: str, dest_path: Path) -> None:
        """Download ``url`` to ``dest_path`` with progress logging.

        Raises:
            HTTPError / URLError: on network failure or incomplete download.
        """
        logger.info(f"Downloading from: {url}")

        req = Request(url, headers={"User-Agent": USER_AGENT})
        try:
            with urlopen(req, timeout=self.timeout) as response:
                total_size = int(response.headers.get("content-length", 0))
                dest_path.parent.mkdir(parents=True, exist_ok=True)

                downloaded = self.download_file_chunks(response, dest_path, total_size)
                logger.info(f"Download completed: {downloaded / 1024 / 1024:.2f} MB")
        except HTTPError as e:
            logger.error(f"HTTP error {e.code} while downloading {url}")
            raise
        except URLError as e:
            logger.error(f"Failed to download {url}: {e}")
            raise

    def download_file_chunks(self, response, dest_path: Path, total_size: int) -> int:
        """Stream ``response`` to ``dest_path`` in chunks; return bytes written.

        Raises:
            URLError: if fewer bytes than ``total_size`` were received.
        """
        downloaded = 0
        last_progress = 0
        last_log_time = 0
        chunk_size = 65536  # 64 KiB read chunks

        with dest_path.open("wb") as f:
            while chunk := response.read(chunk_size):
                f.write(chunk)
                downloaded += len(chunk)

                if total_size > 0:
                    progress = (downloaded / total_size) * 100
                    current_time = time.time()
                    # Throttle logging: only when progress crossed a whole
                    # percent AND at least 0.5s elapsed, or on completion.
                    if (
                        int(progress) > last_progress
                        and current_time - last_log_time >= 0.5
                    ) or downloaded == total_size:
                        downloaded_mb = downloaded / 1024 / 1024
                        total_mb = total_size / 1024 / 1024
                        logger.info(
                            f"Progress: {downloaded_mb:.2f} MB / {total_mb:.2f} MB ({progress:.1f}%)"
                        )
                        last_progress = int(progress)
                        last_log_time = current_time

        if total_size > 0 and downloaded != total_size:
            raise URLError(
                f"Incomplete download: expected {total_size} bytes, got {downloaded} bytes"
            )
        return downloaded

    def try_download_from_url(self, cache_file: Path, url: str) -> bool:
        """Try to download from a single URL and validate the result.

        Partial or corrupted files are removed so a retry starts clean.
        """
        try:
            self.download_with_progress(url, cache_file)
        except (URLError, HTTPError):
            # Remove partial file on download failure
            if cache_file.exists():
                with suppress(OSError):
                    cache_file.unlink()
            raise

        is_valid = zipfile.is_zipfile(cache_file)

        if is_valid:
            logger.info("Downloaded file is valid")
        else:
            logger.warning("Downloaded file is corrupted, trying next URL...")
            if cache_file.exists():
                with suppress(OSError):
                    cache_file.unlink()

        return is_valid

    def download_python_package(
        self, cache_file: Path, urls: list[str], offline: bool
    ) -> bool:
        """Download the package trying each URL in order; return success."""
        if offline:
            logger.error("Offline mode: no cached file found")
            return False

        if not urls:
            logger.error("No available URLs to download from")
            return False

        # Attempt download from each URL
        for i, url in enumerate(urls, 1):
            try:
                logger.info(f"Attempting download {i}/{len(urls)}: {url}")
                if self.try_download_from_url(cache_file, url):
                    return True
            except (URLError, HTTPError) as e:
                logger.warning(f"Download failed from {url}: {e}")
                if i < len(urls):
                    logger.info("Retrying with next available URL...")

        # All downloads failed
        logger.error(
            "Failed to download Python from all available URLs. "
            "Please check your internet connection and version."
        )
        return False

    def get_official_url_template(self, arch: str | None = None) -> str:
        """Get the official python.org URL template ({version} placeholder)."""
        arch = arch or self.arch
        return f"https://www.python.org/ftp/python/{{version}}/python-{{version}}-embed-{arch}.zip"

    def get_mirror_url_templates(self, arch: str | None = None) -> list[str]:
        """Get mirror URL templates ({version} placeholder) for ``arch``."""
        arch = arch or self.arch
        return [
            f"https://mirrors.huaweicloud.com/python/{{version}}/python-{{version}}-embed-{arch}.zip",
            f"https://mirrors.aliyun.com/python-release/{{version}}/python-{{version}}-embed-{arch}.zip",
            f"https://mirrors.tuna.tsinghua.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
            f"https://mirrors.pku.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
        ]

    def test_url_speed(
        self, url: str, timeout: int | None = None, use_get: bool = False
    ) -> float:
        """Test URL responsiveness; return elapsed seconds or inf if unreachable.

        Tries a HEAD request first and falls back to a ranged GET when the
        server rejects HEAD (HTTP 405/403).
        """
        timeout = timeout or self.timeout
        start_time = time.time()

        # First attempt HEAD request
        if not use_get:
            req = Request(url, headers={"User-Agent": USER_AGENT}, method="HEAD")
            try:
                with urlopen(req, timeout=timeout) as response:
                    if response.status == 200:
                        elapsed = time.time() - start_time
                        logger.debug(f"URL {url}: {elapsed:.3f}s")
                        return elapsed
                    logger.debug(f"URL {url}: HTTP {response.status}")
            except HTTPError as e:
                if e.code in (405, 403):
                    use_get = True  # Fall back to GET request
                else:
                    logger.debug(f"URL {url}: HTTP {e.code}")
                    return float("inf")
            except (URLError, TimeoutError) as e:
                logger.debug(f"URL {url}: {type(e).__name__}")
                return float("inf")

        # Fallback to GET request if needed
        if use_get:
            # BUGFIX: restart the timer so the failed HEAD attempt does not
            # inflate this mirror's measured speed and skew the ranking.
            start_time = time.time()
            req = Request(
                url,
                headers={"User-Agent": USER_AGENT, "Range": "bytes=0-1023"},
                method="GET",
            )
            try:
                with urlopen(req, timeout=timeout) as response:
                    if response.status in (206, 200):
                        elapsed = time.time() - start_time
                        logger.debug(f"URL {url}: {elapsed:.3f}s (GET fallback)")
                        return elapsed
                    logger.debug(f"URL {url}: HTTP {response.status} (GET fallback)")
            except (URLError, TimeoutError) as e:
                logger.debug(f"URL {url}: {type(e).__name__} (GET fallback)")

        return float("inf")

    def get_urls_to_test(
        self, version: str | None = None, arch: str | None = None
    ) -> list[str]:
        """Get the concrete list of URLs (mirrors + official) to speed-test."""
        version = version or self.version
        arch = arch or self.arch
        return [
            template.format(version=version)
            for template in [
                *self.get_mirror_url_templates(arch),
                self.get_official_url_template(arch),
            ]
        ]

    def find_available_urls(
        self,
        version: str | None = None,
        arch: str | None = None,
        timeout: int | None = None,
    ) -> list[str]:
        """Find all available URLs for downloading Python embeddable package, sorted by speed."""
        version = version or self.version
        arch = arch or self.arch
        timeout = timeout or self.timeout
        official_url = self.get_official_url_template(arch).format(version=version)

        logger.debug("Testing mirror speeds...")
        all_tests = [
            (url, self.test_url_speed(url, timeout))
            for url in self.get_urls_to_test(version, arch)
        ]
        speed_results = [
            (speed, url) for url, speed in all_tests if speed != float("inf")
        ]

        # Log results
        for speed, url in speed_results:
            logger.debug(f"  ✓ {url} ({speed:.3f}s)")
        for url, speed in all_tests:
            if speed == float("inf"):
                logger.debug(f"  ✗ {url} (unavailable)")

        if not speed_results:
            logger.warning("All mirrors failed, falling back to official URL")
            return [official_url]

        # Sort URLs by speed and return
        available_urls = [url for _, url in sorted(speed_results)]
        if available_urls and logger.isEnabledFor(logging.DEBUG):
            logger.debug(
                f"Available URLs (sorted by speed): {len(available_urls)} found"
            )
            for i, url in enumerate(available_urls, 1):
                logger.debug(f"  {i}. {url}")
        return available_urls

    # Methods moved from EmbedInstallConfig
    def is_version_complete(self, version: str) -> bool:
        """Check if version string is complete (has 3 or more parts)."""
        version_parts = version.split(".")
        return len(version_parts) >= 3

    def validate_version_format(self, version: str) -> bool:
        """Validate if version has correct major.minor format."""
        version_parts = version.split(".")
        return "." in version and len(version_parts) == 2

    def extract_version_from_filename(self, filename: str, arch: str) -> str:
        """Extract the version from a cache filename stem (no .zip suffix)."""
        return filename.replace("python-", "").replace(f"-embed-{arch}", "")

    @staticmethod
    def _version_key(version: str) -> tuple:
        """Sort key that orders version strings numerically (3.8.10 > 3.8.9).

        Each dot-separated part becomes ``(numeric_prefix, suffix)`` so
        non-numeric suffixes (e.g. "0rc1") still compare deterministically.
        """
        key = []
        for part in version.split("."):
            digits = ""
            for ch in part:
                if not ch.isdigit():
                    break
                digits += ch
            key.append((int(digits) if digits else -1, part[len(digits):]))
        return tuple(key)

    def get_cached_version(
        self, major: str, minor: str, arch: str, cache_dir: Path
    ) -> str | None:
        """Get the highest valid cached version for a given major.minor."""
        if not cache_dir.exists():
            return None

        pattern = f"python-{major}.{minor}.*-embed-{arch}.zip"
        cached_versions = [
            self.extract_version_from_filename(cache_file.stem, arch)
            for cache_file in cache_dir.glob(pattern)
            if cache_file.stem and zipfile.is_zipfile(cache_file)
        ]

        # BUGFIX: a plain max() compared version strings lexicographically,
        # ranking "3.8.9" above "3.8.10"; compare numerically instead.
        return max(cached_versions, key=self._version_key, default=None)

    def check_version_url(self, version: str, arch: str, timeout: int) -> bool:
        """Check if the official URL for ``version`` is accessible (HEAD 200)."""
        base_url = self.get_official_url_template(arch)
        test_url = base_url.format(version=version)
        req = Request(test_url, headers={"User-Agent": USER_AGENT}, method="HEAD")

        try:
            with urlopen(req, timeout=timeout) as response:
                if response.status == 200:
                    return True
                logger.debug(f"Version {version}: HTTP {response.status}")
                return False
        except HTTPError as e:
            # 404 is the expected "no such patch" answer, surfaced at info level.
            log_level = logger.info if e.code == 404 else logger.debug
            log_level(f"Version {version}: HTTP {e.code}")
        except (URLError, TimeoutError) as e:
            logger.debug(f"Version {version}: {type(e).__name__}")

        return False

    def find_latest_patch_online(
        self, major: str, minor: str, arch: str, timeout: int
    ) -> str | None:
        """Find the latest patch version by probing python.org, or None."""
        # Check patches from highest to lowest (50 down to 0)
        for patch in range(50, -1, -1):
            version = f"{major}.{minor}.{patch}"
            if self.check_version_url(version, arch, timeout):
                logger.info(f"Latest version found: {version}")
                return version
        return None

    def resolve_patch_version(
        self, major_minor: str, arch: str, cache_dir: Path, timeout: int
    ) -> str:
        """Resolve ``major_minor`` to the latest patch, or return it unchanged."""
        major, minor = major_minor.split(".")

        # Check for cached version first
        if cached_version := self.get_cached_version(major, minor, arch, cache_dir):
            logger.info(f"Using cached version: {cached_version}")
            return cached_version

        # Otherwise find latest online
        logger.info(f"Checking latest version for {major_minor}...")
        latest_version = self.find_latest_patch_online(major, minor, arch, timeout)

        if latest_version:
            return latest_version

        logger.warning(
            f"Could not find any patch version for {major_minor}, using as-is"
        )
        return major_minor

    def get_latest_patch_version(
        self, major_minor: str, arch: str, cache_dir: Path, timeout: int = 5
    ) -> str:
        """Get the latest patch version for a given major.minor version.

        Args:
            major_minor: Major.minor version (e.g., '3.13') or full version (e.g., '3.13.1')
            arch: Architecture (amd64 or arm64)
            cache_dir: Cache directory to check for cached versions
            timeout: Request timeout in seconds

        Returns:
            Full version string (e.g., '3.13.1') or original if already complete
        """
        # Return early if version is already complete or invalid format
        if self.is_version_complete(major_minor) or not self.validate_version_format(
            major_minor
        ):
            log_msg = (
                f"Version already complete: {major_minor}, skipping auto-completion"
                if self.is_version_complete(major_minor)
                else f"Invalid version format: {major_minor}, using as-is"
            )
            logger.debug(log_msg)
            return major_minor

        return self.resolve_patch_version(major_minor, arch, cache_dir, timeout)
|
|
435
|
+
|
|
436
|
+
|
|
437
|
+
@dataclass(frozen=True)
class EmbedInstaller:
    """Class for installing Python embeddable package.

    Downloads the embeddable zip (delegating to ``EmbedDownloader``) and
    extracts it into ``<root_dir>/dist/runtime``.
    """

    root_dir: Path  # project root; runtime lands under <root_dir>/dist/runtime
    version: str  # requested Python version
    cache_dir: Path  # cache directory for downloaded zips
    offline: bool  # use cached files only, never hit the network
    skip_speed_test: bool  # skip mirror ranking, use official URL directly
    keep_cache: bool  # keep the downloaded zip after extraction
    timeout: int = 5  # network timeout in seconds

    @cached_property
    def solution(self) -> Solution:
        """Get the solution from the target directory."""
        return Solution.from_directory(self.root_dir)

    @cached_property
    def downloader(self) -> EmbedDownloader:
        """Get the embed downloader for the given version and architecture."""
        return EmbedDownloader(
            version=self.version,
            arch=self.arch,
            cache_dir=self.cache_dir,
            offline=self.offline,
            skip_speed_test=self.skip_speed_test,
            timeout=self.timeout,
        )

    @cached_property
    def arch(self) -> str:
        """Get the architecture tag for this machine (default: amd64)."""
        machine_arch = platform.machine().lower()
        return ARCH_DICT.get(machine_arch, "amd64")

    @cached_property
    def runtime_dir(self) -> Path:
        """Get (and create) the runtime directory under dist/."""
        runtime_path = self.root_dir / "dist" / "runtime"
        runtime_path.mkdir(parents=True, exist_ok=True)
        return runtime_path

    @cached_property
    def python_exe_path(self) -> Path:
        """Get the path to python.exe in the target directory."""
        return self.runtime_dir / "python.exe"

    @cached_property
    def cache_file(self) -> Path:
        """Get the path to the cache file."""
        return self.downloader.cache_file

    def extract_package(self) -> bool:
        """Extract the cached package into the runtime directory.

        Returns True on success; logs and returns False on any failure.
        """
        if not zipfile.is_zipfile(self.cache_file):
            logger.error(f"Invalid zip file: {self.cache_file}")
            return False

        try:
            logger.info(f"Extracting to: {self.runtime_dir}")
            self.runtime_dir.mkdir(parents=True, exist_ok=True)

            # Open the archive once: count entries and extract in the same
            # context (the original re-opened the file via shutil).
            with zipfile.ZipFile(self.cache_file, "r") as zip_ref:
                file_count = len(zip_ref.namelist())
                zip_ref.extractall(self.runtime_dir)

            logger.info(f"Extracted {file_count} files")
            return True
        except (zipfile.BadZipFile, OSError) as e:
            logger.error(f"Failed to extract: {e}")
            return False

    def cleanup_cache(self) -> None:
        """Delete the cache file unless keep_cache/offline says otherwise."""
        if not self.keep_cache and not self.offline:
            try:
                if self.cache_file.exists():
                    logger.debug(f"Cleaning up cache: {self.cache_file}")
                    self.cache_file.unlink()
            except OSError as e:
                logger.warning(f"Failed to clean up cache file: {e}")

    def install_package(self) -> bool:
        """Download, extract, and verify the embeddable package.

        Returns True when python.exe exists in the runtime directory afterwards.
        """
        # Download the package (latest_version is resolved automatically during download)
        if not self.downloader.download():
            logger.error("Failed to download Python embeddable package")
            return False

        # Extract the package to runtime directory
        if not self.extract_package():
            logger.error("Failed to extract Python embeddable package")
            return False

        # Clean up cache if not keeping it
        self.cleanup_cache()

        # Verify installation by checking for python.exe
        success = self.python_exe_path.exists()
        if success:
            logger.info(f"Installation completed, python.exe: {self.python_exe_path}")
        else:
            logger.error("Installation failed, python.exe not found")

        return success
|
|
547
|
+
|
|
548
|
+
|
|
549
|
+
def parse_args() -> argparse.Namespace:
    """Create, configure, and run the argument parser for the CLI.

    Returns:
        Parsed command-line arguments.
    """
    parser = argparse.ArgumentParser(
        prog="pyembedinstall",
        description="Download and install Python embeddable package to a specific directory.",
    )
    parser.add_argument(
        "directory",
        type=str,
        nargs="?",
        default=str(cwd),
        help="Directory to install Python embeddable package (default: current directory)",
    )
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")
    parser.add_argument(
        "--offline",
        "-o",
        action="store_true",
        help="Offline mode (use cached files only)",
    )
    parser.add_argument(
        "--version",
        "-v",
        type=str,
        default="3.8.10",
        help="Python version to install (default: 3.8.10)",
    )
    parser.add_argument(
        "--cache-dir",
        "-C",
        type=str,
        default=str(_DEFAULT_CACHE_DIR),
        help="Cache directory for downloaded files",
    )
    parser.add_argument(
        "--keep-cache",
        "-k",
        action="store_true",
        default=True,
        help="Keep downloaded cache files",
    )
    # BUGFIX: --keep-cache is store_true with default=True, so cache cleanup
    # could never be enabled from the CLI; --clean-cache is the opposite
    # switch writing into the same destination.
    parser.add_argument(
        "--clean-cache",
        action="store_false",
        dest="keep_cache",
        help="Delete the downloaded cache file after installation",
    )
    parser.add_argument(
        "--skip-speed-test",
        "-s",
        action="store_true",
        help="Skip speed test and use official URL directly",
    )
    parser.add_argument(
        "--timeout",
        "-t",
        type=int,
        default=5,
        help="Timeout in seconds for URL speed test (default: 5)",
    )
    return parser.parse_args()
|
|
604
|
+
|
|
605
|
+
|
|
606
|
+
def main() -> None:
    """Main entry point for pyembedinstall CLI.

    Parses arguments, runs the installer, and exits with status 1 on failure.
    """
    args = parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)

    cache_dir = Path(args.cache_dir)
    cache_dir.mkdir(parents=True, exist_ok=True)

    t0 = time.perf_counter()

    if not EmbedInstaller(
        root_dir=Path(args.directory),
        version=args.version,
        cache_dir=cache_dir,
        offline=args.offline,
        skip_speed_test=args.skip_speed_test,
        keep_cache=args.keep_cache,
        timeout=args.timeout,
    ).install_package():
        # raise SystemExit directly: the exit() builtin is injected by the
        # `site` module and is not guaranteed to exist (e.g. under python -S).
        raise SystemExit(1)

    logger.info(f"Installation completed in {time.perf_counter() - t0:.4f} seconds")
|