pysfi 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/METADATA +9 -7
  2. pysfi-0.1.12.dist-info/RECORD +62 -0
  3. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/entry_points.txt +13 -2
  4. sfi/__init__.py +1 -1
  5. sfi/alarmclock/alarmclock.py +40 -40
  6. sfi/bumpversion/__init__.py +1 -1
  7. sfi/cleanbuild/cleanbuild.py +155 -0
  8. sfi/condasetup/condasetup.py +116 -0
  9. sfi/docdiff/docdiff.py +238 -0
  10. sfi/docscan/__init__.py +1 -1
  11. sfi/docscan/docscan_gui.py +1 -1
  12. sfi/docscan/lang/eng.py +152 -152
  13. sfi/docscan/lang/zhcn.py +170 -170
  14. sfi/filedate/filedate.py +185 -112
  15. sfi/gittool/__init__.py +2 -0
  16. sfi/gittool/gittool.py +401 -0
  17. sfi/llmclient/llmclient.py +592 -0
  18. sfi/llmquantize/llmquantize.py +480 -0
  19. sfi/llmserver/llmserver.py +335 -0
  20. sfi/makepython/makepython.py +2 -2
  21. sfi/pdfsplit/pdfsplit.py +4 -4
  22. sfi/pyarchive/pyarchive.py +418 -0
  23. sfi/pyembedinstall/__init__.py +0 -0
  24. sfi/pyembedinstall/pyembedinstall.py +629 -0
  25. sfi/pylibpack/pylibpack.py +813 -269
  26. sfi/pylibpack/rules/numpy.json +22 -0
  27. sfi/pylibpack/rules/pymupdf.json +10 -0
  28. sfi/pylibpack/rules/pyqt5.json +19 -0
  29. sfi/pylibpack/rules/pyside2.json +23 -0
  30. sfi/pylibpack/rules/scipy.json +23 -0
  31. sfi/pylibpack/rules/shiboken2.json +24 -0
  32. sfi/pyloadergen/pyloadergen.py +271 -572
  33. sfi/pypack/pypack.py +822 -471
  34. sfi/pyprojectparse/__init__.py +0 -0
  35. sfi/pyprojectparse/pyprojectparse.py +500 -0
  36. sfi/pysourcepack/pysourcepack.py +308 -369
  37. sfi/quizbase/__init__.py +0 -0
  38. sfi/quizbase/quizbase.py +828 -0
  39. sfi/quizbase/quizbase_gui.py +987 -0
  40. sfi/regexvalidate/__init__.py +0 -0
  41. sfi/regexvalidate/regex_help.html +284 -0
  42. sfi/regexvalidate/regexvalidate.py +468 -0
  43. sfi/taskkill/taskkill.py +0 -2
  44. pysfi-0.1.10.dist-info/RECORD +0 -39
  45. sfi/embedinstall/embedinstall.py +0 -478
  46. sfi/projectparse/projectparse.py +0 -152
  47. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/WHEEL +0 -0
  48. /sfi/{embedinstall → llmclient}/__init__.py +0 -0
  49. /sfi/{projectparse → llmquantize}/__init__.py +0 -0
@@ -1,478 +0,0 @@
1
- """Download Python embeddable package to a specific directory."""
2
-
3
- from __future__ import annotations
4
-
5
- import argparse
6
- import logging
7
- import platform
8
- import shutil
9
- import time
10
- import zipfile
11
- from pathlib import Path
12
- from typing import Final
13
- from urllib.error import HTTPError, URLError
14
- from urllib.request import Request, urlopen
15
-
16
- # Architecture mapping
17
- ARCH_MAP: Final = {
18
- "AMD64": "amd64",
19
- "x86_64": "amd64",
20
- "x86": "amd64",
21
- "i686": "amd64",
22
- "i386": "amd64",
23
- "ARM64": "arm64",
24
- "aarch64": "arm64",
25
- }
26
-
27
- USER_AGENT: Final = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
28
-
29
- logging.basicConfig(level=logging.INFO, format="%(message)s")
30
-
31
- cwd = Path.cwd()
32
- logger = logging.getLogger(__name__)
33
-
34
- # Default cache directory
35
- DEFAULT_CACHE_DIR = Path.home() / ".pysfi" / ".cache" / "embed-python"
36
-
37
-
38
def get_system_arch(manual_arch: str | None = None) -> str:
    """Resolve the architecture suffix for the embeddable package.

    Args:
        manual_arch: Explicit override; returned verbatim when provided.

    Returns:
        'amd64' or 'arm64'; falls back to 'amd64' for unknown machines.
    """
    if manual_arch:
        logger.debug(f"Using manual architecture: {manual_arch}")
        return manual_arch

    machine = platform.machine().upper()

    logger.debug(f"System machine: {machine}")

    detected = ARCH_MAP.get(machine)
    if detected is not None:
        logger.debug(f"Detected architecture: {detected}")
        return detected

    logger.warning(f"Unknown architecture: {machine}, defaulting to amd64")
    return "amd64"
55
-
56
-
57
def get_download_url(base_url_template: str, version: str, arch: str) -> str:
    """Fill a URL template's ``{version}``/``{arch}`` placeholders."""
    substitutions = {"version": version, "arch": arch}
    return base_url_template.format(**substitutions)
60
-
61
-
62
def get_official_url_template(arch: str) -> str:
    """Return the python.org URL template for *arch*, keeping ``{version}`` as a placeholder."""
    filename = "python-{version}-embed-" + arch + ".zip"
    return "https://www.python.org/ftp/python/{version}/" + filename
65
-
66
-
67
def get_latest_patch_version(major_minor: str, arch: str, cache_dir: Path, timeout: int = 5) -> str:
    """Get the latest patch version for a given major.minor version.

    Args:
        major_minor: Major.minor version (e.g., '3.13') or full version (e.g., '3.13.1')
        arch: Architecture (amd64 or arm64)
        cache_dir: Cache directory to check for cached versions
        timeout: Request timeout in seconds

    Returns:
        Full version string (e.g., '3.13.1') or original if already complete
    """
    # Check if version is already complete (has 3 or more parts)
    version_parts = major_minor.split(".")
    if len(version_parts) >= 3:
        logger.debug(f"Version already complete: {major_minor}, skipping auto-completion")
        return major_minor

    # Check if version format is valid
    if "." not in major_minor or len(version_parts) != 2:
        # BUG FIX: the old message misleadingly claimed the version would be
        # used "as: `3.13.5`"; it is in fact returned unchanged.
        logger.debug(f"Invalid version format: {major_minor}, using as-is")
        return major_minor

    major, minor = version_parts

    # Check cache for any cached version matching the major.minor
    if cache_dir.exists():
        cached_versions = []
        for cache_file in cache_dir.glob(f"python-{major}.{minor}.*-embed-{arch}.zip"):
            try:
                # Extract version from filename
                match = cache_file.stem.replace("python-", "").replace(f"-embed-{arch}", "")
                if match:
                    cached_versions.append((match, cache_file))
            except Exception:
                pass

        if cached_versions:
            # BUG FIX: compare versions numerically. The old lexicographic
            # string sort ranked '3.13.9' above '3.13.10'.
            def _numeric_key(entry: tuple[str, Path]) -> tuple[int, ...]:
                try:
                    return tuple(int(part) for part in entry[0].split("."))
                except ValueError:
                    # Malformed filename: sort it below any real version.
                    return (0,)

            cached_versions.sort(key=_numeric_key, reverse=True)
            latest_cached = cached_versions[0][0]
            logger.info(f"Using cached version: {latest_cached}")
            return latest_cached

    logger.info(f"Checking latest version for {major_minor}...")

    base_url = get_official_url_template(arch)

    # Try from highest patch number down to 0 (HEAD probe against python.org)
    for patch in range(20, -1, -1):
        version = f"{major}.{minor}.{patch}"
        test_url = base_url.format(version=version, arch=arch)

        req = Request(test_url, headers={"User-Agent": USER_AGENT}, method="HEAD")
        try:
            with urlopen(req, timeout=timeout) as response:
                if response.status == 200:
                    logger.info(f"Latest version found: {version}")
                    return version
        except HTTPError as e:
            if e.code == 404:
                logger.debug(f"Version {version} not found")
            else:
                logger.debug(f"Version {version}: HTTP {e.code}")
        except (URLError, TimeoutError):
            logger.debug(f"Version {version}: connection failed")

    logger.warning(f"Could not find any patch version for {major_minor}, using as-is")
    return major_minor
136
-
137
-
138
def get_mirror_url_templates(arch: str) -> list[str]:
    """Return download URL templates for known mirrors, with ``{version}`` placeholders."""
    filename = f"python-{{version}}-embed-{arch}.zip"
    mirror_bases = [
        "https://mirrors.huaweicloud.com/python",
        "https://mirrors.aliyun.com/python-release",
        "https://mirrors.tuna.tsinghua.edu.cn/python",
        "https://mirrors.pku.edu.cn/python",
    ]
    return [f"{base}/{{version}}/{filename}" for base in mirror_bases]
146
-
147
-
148
def test_url_speed(url: str, timeout: int = 5) -> float:
    """Measure how quickly *url* answers a HEAD request, in seconds.

    Falls back to a tiny ranged GET when the server rejects HEAD (405/403).

    Returns:
        Elapsed seconds on success; ``float("inf")`` when unreachable.
    """
    started = time.time()

    head_request = Request(url, headers={"User-Agent": USER_AGENT}, method="HEAD")
    try:
        with urlopen(head_request, timeout=timeout) as response:
            if response.status == 200:
                elapsed = time.time() - started
                logger.debug(f"URL {url}: {elapsed:.3f}s")
                return elapsed
    except HTTPError as e:
        if e.code in (405, 403):
            # Server refuses HEAD: probe with a 1 KiB ranged GET instead.
            get_request = Request(
                url,
                headers={"User-Agent": USER_AGENT, "Range": "bytes=0-1023"},
                method="GET",
            )
            try:
                with urlopen(get_request, timeout=timeout) as response:
                    if response.status in (206, 200):
                        elapsed = time.time() - started
                        logger.debug(f"URL {url}: {elapsed:.3f}s (GET fallback)")
                        return elapsed
            except (URLError, TimeoutError):
                logger.debug(f"URL {url}: failed (GET fallback)")
        else:
            logger.debug(f"URL {url}: failed (HTTP {e.code})")
    except (URLError, TimeoutError) as e:
        logger.debug(f"URL {url}: failed ({type(e).__name__})")

    # Unreachable or non-200 without fallback: treat as infinitely slow.
    return float("inf")
184
-
185
-
186
def find_available_urls(version: str, arch: str, timeout: int = 5) -> list[str]:
    """Find all available URLs for downloading Python embeddable package, sorted by speed.

    Probes every mirror plus the official URL with :func:`test_url_speed` and
    returns the reachable ones fastest-first. Falls back to the official URL
    alone when nothing responds.
    """
    official_url = get_official_url_template(arch).format(version=version)
    mirror_templates = get_mirror_url_templates(arch)
    mirror_urls = [template.format(version=version) for template in mirror_templates]
    urls_to_test = [*mirror_urls, official_url]

    logger.debug("Testing mirror speeds...")

    speed_results = []
    for url in urls_to_test:
        speed = test_url_speed(url, timeout)
        if speed != float("inf"):
            speed_results.append((speed, url))
            logger.debug(f"  ✓ {url} ({speed:.3f}s)")
        else:
            logger.debug(f"  ✗ {url} (failed)")

    if not speed_results:
        logger.warning("All mirrors failed, falling back to official URL")
        return [official_url]

    # BUG FIX: the old code sorted the list twice (an in-place .sort()
    # followed by a redundant sorted() over the already-sorted list).
    speed_results.sort(key=lambda pair: pair[0])
    available_urls = [url for _, url in speed_results]

    logger.debug(f"Available URLs (sorted by speed): {len(available_urls)} found")
    for i, url in enumerate(available_urls, 1):
        logger.debug(f"  {i}. {url}")

    return available_urls
217
-
218
-
219
def download_with_progress(url: str, dest_path: Path) -> None:
    """Download *url* to *dest_path*, logging progress about once per percent.

    Raises:
        URLError: on network failure, or when fewer bytes arrive than the
            server's Content-Length promised.
    """
    logger.info(f"Downloading from: {url}")

    request = Request(url, headers={"User-Agent": USER_AGENT})
    try:
        with urlopen(request) as response:
            total_size = int(response.headers.get("content-length", 0))
            downloaded = 0
            last_logged = 0

            dest_path.parent.mkdir(parents=True, exist_ok=True)

            with dest_path.open("wb") as out:
                # 64 KiB chunks: few syscalls without a large buffer.
                while chunk := response.read(65536):
                    out.write(chunk)
                    downloaded += len(chunk)

                    if total_size > 0:
                        progress = (downloaded / total_size) * 100
                        # Log only when the integer percentage advances (or at completion).
                        if int(progress) > last_logged or downloaded == total_size:
                            logger.info(
                                f"Progress: {downloaded / 1024 / 1024:.2f} MB / {total_size / 1024 / 1024:.2f} MB ({progress:.1f}%)"
                            )
                            last_logged = int(progress)

            # Verify downloaded file size against the advertised length.
            if total_size > 0 and downloaded != total_size:
                raise URLError(f"Incomplete download: expected {total_size} bytes, got {downloaded} bytes")

            logger.info(f"Download completed: {downloaded / 1024 / 1024:.2f} MB")
    except URLError as e:
        logger.error(f"Failed to download {url}: {e}")
        raise
257
-
258
-
259
def extract_zip(zip_path: Path, dest_dir: Path) -> None:
    """Unpack *zip_path* into *dest_dir*, creating the directory if needed."""
    logger.info(f"Extracting to: {dest_dir}")

    dest_dir.mkdir(parents=True, exist_ok=True)

    # shutil.unpack_archive delegates to an optimized C-backed implementation.
    shutil.unpack_archive(str(zip_path), str(dest_dir), "zip")

    # Reopen the archive purely to report how many members were extracted.
    with zipfile.ZipFile(zip_path, "r") as archive:
        logger.info(f"Extracted {len(archive.namelist())} files")
272
-
273
-
274
def install_embed_python(
    target_dir: Path,
    version: str,
    cache_dir: Path,
    offline: bool,
    keep_cache: bool,
    skip_speed_test: bool,
    arch: str,
    timeout: int = 5,
) -> bool:
    """Install Python embeddable package to target directory.

    Reuses a cached zip when present and valid, otherwise downloads from the
    fastest reachable mirror, then extracts into *target_dir*.

    Args:
        target_dir: Directory to extract the embeddable package into.
        version: Full Python version string, e.g. '3.13.1'.
        cache_dir: Directory holding downloaded zips.
        offline: Use cached files only; never download.
        keep_cache: Keep the downloaded zip after a successful install.
        skip_speed_test: Use the official URL directly, skipping mirror probing.
        arch: Architecture suffix ('amd64' or 'arm64').
        timeout: Timeout in seconds for mirror speed tests.

    Returns:
        True on success (even if python.exe is missing afterwards), False otherwise.
    """
    version_filename = f"python-{version}-embed-{arch}.zip"
    cache_file = cache_dir / version_filename

    # Check if cached file exists and is valid
    if cache_file.exists():
        if not zipfile.is_zipfile(cache_file):
            logger.warning(f"Corrupted cache file detected: {cache_file}")
            logger.info("Deleting corrupted cache file and re-downloading...")
            cache_file.unlink()
        else:
            logger.debug(f"Using cached file: {cache_file}")

    # Download if cache file doesn't exist or was corrupted
    if not cache_file.exists():
        if offline:
            logger.error(f"Offline mode: no cached file found for version {version}")
            return False

        if skip_speed_test:
            url = get_official_url_template(arch).format(version=version)
            logger.info(f"Skipping speed test, using official URL: {url}")
            urls = [url]
        else:
            urls = find_available_urls(version, arch, timeout)

        download_success = False
        for i, url in enumerate(urls, 1):
            try:
                logger.info(f"Attempting download {i}/{len(urls)}: {url}")
                download_with_progress(url, cache_file)

                # Validate downloaded file
                if not zipfile.is_zipfile(cache_file):
                    logger.warning("Downloaded file is corrupted, trying next URL...")
                    cache_file.unlink()
                    continue

                download_success = True
                break
            except URLError as e:
                logger.warning(f"Download failed from {url}: {e}")
                if i < len(urls):
                    logger.info("Retrying with next fastest URL...")
                else:
                    logger.error(
                        f"Failed to download Python {version} ({arch}) from all available URLs. "
                        f"Please check your internet connection and version."
                    )
                    return False

        if not download_success:
            return False

    try:
        # Final validation before extraction
        if not zipfile.is_zipfile(cache_file):
            logger.error(f"Invalid zip file: {cache_file}")
            return False

        extract_zip(cache_file, target_dir)
    except zipfile.BadZipFile as e:
        logger.error(f"Corrupted zip file: {cache_file}")
        logger.error(f"Error: {e}")
        # BUG FIX: always delete the corrupted file before retrying. The old
        # code only deleted it when keep_cache was False, so a cached zip that
        # passed is_zipfile() but failed extraction made the recursive retry
        # hit the same bad file forever (unbounded recursion).
        logger.info("Deleting corrupted cache file...")
        cache_file.unlink(missing_ok=True)
        # Retry download if not offline
        if not offline:
            logger.info("Attempting to re-download...")
            return install_embed_python(
                target_dir=target_dir,
                version=version,
                cache_dir=cache_dir,
                offline=offline,
                keep_cache=keep_cache,
                skip_speed_test=skip_speed_test,
                arch=arch,
                timeout=timeout,
            )
        return False
    except Exception as e:
        logger.error(f"Failed to extract: {e}")
        return False

    if not keep_cache and not offline:
        logger.debug(f"Cleaning up cache: {cache_file}")
        cache_file.unlink()

    python_exe = target_dir / "python.exe"
    if python_exe.exists():
        logger.info(f"Successfully installed Python {version} ({arch}) embeddable package to: {target_dir}")
        logger.info(f"Python executable: {python_exe}")
        return True

    logger.warning(f"Installation completed but python.exe not found at: {python_exe}")
    return True
382
-
383
-
384
def main() -> None:
    """CLI entry point: resolve version/arch, then install the embeddable package."""
    parser = argparse.ArgumentParser(
        prog="embedinstall",
        description="Download and install Python embeddable package to a specific directory.",
    )
    parser.add_argument(
        "directory",
        type=str,
        nargs="?",
        default=str(cwd / "dist" / "runtime"),
        help="Directory to install Python embeddable package (default: current directory)",
    )
    parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")
    parser.add_argument(
        "--offline",
        "-o",
        action="store_true",
        help="Offline mode (use cached files only)",
    )
    parser.add_argument(
        "--version",
        "-v",
        type=str,
        default="3.8.10",
        help="Python version to install (default: 3.8.10)",
    )
    parser.add_argument(
        "--cache-dir",
        "-C",
        type=str,
        default=str(DEFAULT_CACHE_DIR),
        help="Cache directory for downloaded files",
    )
    # NOTE(review): store_true with default=True can never be set False, so
    # this flag is currently a no-op — confirm whether a --no-keep-cache
    # counterpart was intended before changing the default.
    parser.add_argument(
        "--keep-cache",
        "-k",
        action="store_true",
        default=True,
        help="Keep downloaded cache files",
    )
    parser.add_argument(
        "--skip-speed-test",
        "-s",
        action="store_true",
        help="Skip speed test and use official URL directly",
    )
    parser.add_argument(
        "--arch",
        "-a",
        type=str,
        help="Manually specify architecture (amd64, arm64). Default: auto-detect",
    )
    parser.add_argument(
        "--timeout",
        "-t",
        type=int,
        default=5,
        help="Timeout in seconds for URL speed test (default: 5)",
    )

    args = parser.parse_args()

    if args.debug:
        logger.setLevel(logging.DEBUG)

    target_dir = Path(args.directory)
    target_dir.mkdir(parents=True, exist_ok=True)

    cache_dir = Path(args.cache_dir)
    t0 = time.perf_counter()
    arch = get_system_arch(args.arch)

    # Auto-complete version if only major.minor is provided
    version = get_latest_patch_version(args.version, arch, cache_dir, args.timeout)
    if "." not in version:
        # Version string is unusable; get_latest_patch_version already logged why.
        return

    logger.info(f"Installing Python {version} ({arch}) to: {target_dir}")
    logger.info(f"Cache directory: {cache_dir}")

    success = install_embed_python(
        target_dir=target_dir,
        version=version,
        cache_dir=cache_dir,
        offline=args.offline,
        keep_cache=args.keep_cache,
        skip_speed_test=args.skip_speed_test,
        arch=arch,
        timeout=args.timeout,
    )

    if not success:
        # BUG FIX: raise SystemExit instead of calling the site-injected
        # exit() builtin, which is absent under `python -S`.
        raise SystemExit(1)

    logger.info(f"Installation completed in {time.perf_counter() - t0:.4f} seconds")
@@ -1,152 +0,0 @@
1
- """Parse pyproject.toml files in directory, supports multiple projects."""
2
-
3
- from __future__ import annotations
4
-
5
- import argparse
6
- import json
7
- import logging
8
- import sys
9
- import time
10
- from pathlib import Path
11
-
12
- if sys.version_info >= (3, 11):
13
- import tomllib
14
- else:
15
- import tomli as tomllib # type: ignore
16
-
17
- __all__ = ["parse_project_data"]
18
-
19
-
20
- logging.basicConfig(level=logging.INFO, format="%(message)s")
21
- logger = logging.getLogger(__name__)
22
- cwd = Path.cwd()
23
-
24
-
25
def parse_project_data(directory: Path, recursive: bool = False) -> dict:
    """Parse pyproject.toml file(s) under *directory* and return project data.

    Args:
        directory: Directory to search for pyproject.toml.
        recursive: Also scan subdirectories when True.

    Returns:
        dict: Extracted project info per project, or {} when nothing was found.
    """
    raw = _parse_pyproject(directory, recursive=recursive)
    return _extract_project_info(raw) if raw else {}
35
-
36
-
37
def _parse_pyproject(directory: Path, recursive: bool = False) -> dict[str, dict]:
    """Parse pyproject.toml file in directory and return raw data."""
    # Maps the containing directory's stem -> parsed TOML contents.
    data = {}
    if recursive:
        # NOTE(review): two projects in different subdirectories that share a
        # directory name share a dict key here, so the later one silently
        # overwrites the earlier — confirm this is acceptable.
        for pyproject_path in directory.rglob("pyproject.toml"):
            with pyproject_path.open("rb") as f:
                data[pyproject_path.parent.stem] = tomllib.load(f)
    else:
        pyproject_path = directory / "pyproject.toml"
        if not pyproject_path.is_file():
            logger.error(f"No pyproject.toml found in {directory}")
            return {}

        with pyproject_path.open("rb") as f:
            logger.debug(f"Parsing {pyproject_path}")
            data[pyproject_path.parent.stem] = tomllib.load(f)

    logger.debug(f"Parsed {len(data)} pyproject.toml files, data: {data}")
    return data
56
-
57
-
58
def _extract_project_info(data: dict) -> dict:
    """Flatten parsed pyproject data into commonly used fields per project."""
    if not data:
        logger.error("No data to extract")
        return {}

    project_info: dict = {}
    for name, parsed in data.items():
        # Guard clause: entries without a [project] table get an empty record.
        if "project" not in parsed:
            logger.warning(f"No project information found in {name}")
            project_info.setdefault(name, {})
            continue

        project = parsed.get("project", {})
        build_system = parsed.get("build-system", {})
        project_info.setdefault(
            name,
            {
                "name": project.get("name"),
                "version": project.get("version"),
                "description": project.get("description"),
                "readme": project.get("readme"),
                "requires_python": project.get("requires-python"),
                "dependencies": project.get("dependencies", []),
                "optional_dependencies": project.get("optional-dependencies", {}),
                "scripts": project.get("scripts", {}),
                "entry_points": project.get("entry-points", {}),
                "authors": project.get("authors", []),
                "license": project.get("license"),
                "keywords": project.get("keywords", []),
                "classifiers": project.get("classifiers", []),
                "urls": project.get("urls", {}),
                "build_backend": build_system.get("build-backend"),
                "requires": build_system.get("requires", []),
            },
        )

    logger.debug(f"Extracted {len(project_info)} projects, info: {project_info}")
    return project_info
96
-
97
-
98
def _check_directory(directory: str) -> bool:
    """Return True when *directory* names an existing directory; log an error otherwise."""
    if not directory:
        logger.error("Error: No directory specified")
        return False

    dir_path = Path(directory)
    if dir_path.is_dir():
        return True

    logger.error(f"Error: {dir_path} is not a directory")
    return False
110
-
111
-
112
def main():
    """CLI entry point: parse pyproject data, then print it or dump it to JSON."""
    parser = argparse.ArgumentParser()
    parser.add_argument("--directory", "-D", type=str, default=str(cwd), help="Directory to parse")
    parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
    parser.add_argument("--recursive", "-r", action="store_true", help="Recursively parse subdirectories")
    parser.add_argument("--show", "-s", action="store_true", help="Show parsed data")
    parser.add_argument("--output", "-o", type=str, default="projects.json", help="Output file path")

    args = parser.parse_args()
    if args.debug:
        logger.setLevel(logging.DEBUG)

    if not _check_directory(args.directory):
        return

    output_path = (cwd / args.output).with_suffix(".json")
    if args.show:
        if output_path.is_file():
            # A previous run's output exists: display it instead of re-parsing.
            logger.info(f"Loading output from `{output_path}`:")
            with output_path.open("r", encoding="utf-8") as f:
                cached = json.load(f)
            logger.info(json.dumps(cached, indent=2, ensure_ascii=False, sort_keys=True))
            return
        logger.debug(f"No json file found at {output_path}, continue parsing...")

    started = time.perf_counter()
    logger.info(f"Parsing pyproject.toml in {args.directory}")
    parsed = parse_project_data(Path(args.directory), recursive=args.recursive)
    if args.show:
        logger.info(json.dumps(parsed, indent=2, ensure_ascii=False, sort_keys=True))
        return

    try:
        with output_path.open("w", encoding="utf-8") as f:
            json.dump(parsed, f, indent=2, ensure_ascii=False)
    except Exception as e:
        logger.error(f"Error writing output to {output_path}: {e}")
        return
    logger.info(f"Output written to {output_path}, took {time.perf_counter() - started:.4f}s")
File without changes
File without changes
File without changes