pysfi 0.1.7__py3-none-any.whl → 0.1.11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/METADATA +11 -9
  2. pysfi-0.1.11.dist-info/RECORD +60 -0
  3. pysfi-0.1.11.dist-info/entry_points.txt +28 -0
  4. sfi/__init__.py +1 -1
  5. sfi/alarmclock/alarmclock.py +40 -40
  6. sfi/bumpversion/__init__.py +1 -1
  7. sfi/cleanbuild/cleanbuild.py +155 -0
  8. sfi/condasetup/condasetup.py +116 -0
  9. sfi/docscan/__init__.py +1 -1
  10. sfi/docscan/docscan.py +407 -103
  11. sfi/docscan/docscan_gui.py +1282 -596
  12. sfi/docscan/lang/eng.py +152 -0
  13. sfi/docscan/lang/zhcn.py +170 -0
  14. sfi/filedate/filedate.py +185 -112
  15. sfi/gittool/__init__.py +2 -0
  16. sfi/gittool/gittool.py +401 -0
  17. sfi/llmclient/llmclient.py +592 -0
  18. sfi/llmquantize/llmquantize.py +480 -0
  19. sfi/llmserver/llmserver.py +335 -0
  20. sfi/makepython/makepython.py +31 -30
  21. sfi/pdfsplit/pdfsplit.py +173 -173
  22. sfi/pyarchive/pyarchive.py +418 -0
  23. sfi/pyembedinstall/pyembedinstall.py +629 -0
  24. sfi/pylibpack/__init__.py +0 -0
  25. sfi/pylibpack/pylibpack.py +1457 -0
  26. sfi/pylibpack/rules/numpy.json +22 -0
  27. sfi/pylibpack/rules/pymupdf.json +10 -0
  28. sfi/pylibpack/rules/pyqt5.json +19 -0
  29. sfi/pylibpack/rules/pyside2.json +23 -0
  30. sfi/pylibpack/rules/scipy.json +23 -0
  31. sfi/pylibpack/rules/shiboken2.json +24 -0
  32. sfi/pyloadergen/pyloadergen.py +512 -227
  33. sfi/pypack/__init__.py +0 -0
  34. sfi/pypack/pypack.py +1142 -0
  35. sfi/pyprojectparse/__init__.py +0 -0
  36. sfi/pyprojectparse/pyprojectparse.py +500 -0
  37. sfi/pysourcepack/pysourcepack.py +308 -0
  38. sfi/quizbase/__init__.py +0 -0
  39. sfi/quizbase/quizbase.py +828 -0
  40. sfi/quizbase/quizbase_gui.py +987 -0
  41. sfi/regexvalidate/__init__.py +0 -0
  42. sfi/regexvalidate/regex_help.html +284 -0
  43. sfi/regexvalidate/regexvalidate.py +468 -0
  44. sfi/taskkill/taskkill.py +0 -2
  45. sfi/workflowengine/__init__.py +0 -0
  46. sfi/workflowengine/workflowengine.py +444 -0
  47. pysfi-0.1.7.dist-info/RECORD +0 -31
  48. pysfi-0.1.7.dist-info/entry_points.txt +0 -15
  49. sfi/embedinstall/embedinstall.py +0 -418
  50. sfi/projectparse/projectparse.py +0 -152
  51. sfi/pypacker/fspacker.py +0 -91
  52. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/WHEEL +0 -0
  53. /sfi/{embedinstall → docscan/lang}/__init__.py +0 -0
  54. /sfi/{projectparse → llmquantize}/__init__.py +0 -0
  55. /sfi/{pypacker → pyembedinstall}/__init__.py +0 -0
sfi/embedinstall/embedinstall.py DELETED
@@ -1,418 +0,0 @@
- """Download Python embeddable package to a specific directory."""
-
- from __future__ import annotations
-
- import argparse
- import logging
- import platform
- import shutil
- import time
- import zipfile
- from pathlib import Path
- from typing import Final
- from urllib.error import HTTPError, URLError
- from urllib.request import Request, urlopen
-
- # Architecture mapping
- ARCH_MAP: Final = {
-     "AMD64": "amd64",
-     "x86_64": "amd64",
-     "x86": "amd64",
-     "i686": "amd64",
-     "i386": "amd64",
-     "ARM64": "arm64",
-     "aarch64": "arm64",
- }
-
- USER_AGENT: Final = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/91.0.4472.124 Safari/537.36"
-
- logging.basicConfig(level=logging.INFO, format="%(message)s")
-
- cwd = Path.cwd()
- logger = logging.getLogger(__name__)
-
-
- def get_system_arch(manual_arch: str | None = None) -> str:
-     """Get system architecture for Python embeddable package."""
-     if manual_arch:
-         logger.debug(f"Using manual architecture: {manual_arch}")
-         return manual_arch
-
-     machine = platform.machine().upper()
-
-     logger.debug(f"System machine: {machine}")
-
-     if machine in ARCH_MAP:
-         arch = ARCH_MAP[machine]
-         logger.debug(f"Detected architecture: {arch}")
-         return arch
-
-     logger.warning(f"Unknown architecture: {machine}, defaulting to amd64")
-     return "amd64"
-
-
- def get_download_url(base_url_template: str, version: str, arch: str) -> str:
-     """Get download URL with correct architecture."""
-     return base_url_template.format(version=version, arch=arch)
-
-
- def get_official_url_template(arch: str) -> str:
-     """Get official URL template based on architecture."""
-     return f"https://www.python.org/ftp/python/{{version}}/python-{{version}}-embed-{arch}.zip"
-
-
- def get_latest_patch_version(major_minor: str, arch: str, timeout: int = 5) -> str:
-     """Get the latest patch version for a given major.minor version.
-
-     Args:
-         major_minor: Major.minor version (e.g., '3.13') or full version (e.g., '3.13.1')
-         arch: Architecture (amd64 or arm64)
-         timeout: Request timeout in seconds
-
-     Returns:
-         Full version string (e.g., '3.13.1') or original if already complete
-     """
-     # Check if version is already complete (has 3 or more parts)
-     version_parts = major_minor.split(".")
-     if len(version_parts) >= 3:
-         logger.debug(f"Version already complete: {major_minor}, skipping auto-completion")
-         return major_minor
-
-     # Check if version format is valid
-     if "." not in major_minor or len(version_parts) != 2:
-         logger.debug(f"Invalid version format: {major_minor}, using as: `3.13.5`")
-         return major_minor
-
-     logger.info(f"Checking latest version for {major_minor}...")
-
-     base_url = get_official_url_template(arch)
-     major, minor = major_minor.split(".")
-
-     # Try from highest patch number down to 0
-     for patch in range(20, -1, -1):
-         version = f"{major}.{minor}.{patch}"
-         test_url = base_url.format(version=version, arch=arch)
-
-         req = Request(test_url, headers={"User-Agent": USER_AGENT}, method="HEAD")
-         try:
-             with urlopen(req, timeout=timeout) as response:
-                 if response.status == 200:
-                     logger.info(f"Latest version found: {version}")
-                     return version
-         except HTTPError as e:
-             if e.code == 404:
-                 logger.debug(f"Version {version} not found")
-             else:
-                 logger.debug(f"Version {version}: HTTP {e.code}")
-         except (URLError, TimeoutError):
-             logger.debug(f"Version {version}: connection failed")
-
-     logger.warning(f"Could not find any patch version for {major_minor}, using as-is")
-     return major_minor
-
-
- def get_mirror_url_templates(arch: str) -> list[str]:
-     """Get mirror URL templates based on architecture."""
-     return [
-         f"https://mirrors.huaweicloud.com/python/{{version}}/python-{{version}}-embed-{arch}.zip",
-         f"https://mirrors.aliyun.com/python-release/{{version}}/python-{{version}}-embed-{arch}.zip",
-         f"https://mirrors.tuna.tsinghua.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
-         f"https://mirrors.pku.edu.cn/python/{{version}}/python-{{version}}-embed-{arch}.zip",
-     ]
-
-
- def test_url_speed(url: str, timeout: int = 5) -> float:
-     """Test URL speed by making a HEAD request and measuring response time.
-
-     Falls back to GET request if HEAD is not supported.
-     """
-     start_time = time.time()
-
-     req = Request(url, headers={"User-Agent": USER_AGENT}, method="HEAD")
-     try:
-         with urlopen(req, timeout=timeout) as response:
-             if response.status == 200:
-                 elapsed = time.time() - start_time
-                 logger.debug(f"URL {url}: {elapsed:.3f}s")
-                 return elapsed
-     except HTTPError as e:
-         # Some servers don't support HEAD, fallback to GET with range request
-         if e.code == 405 or e.code == 403:
-             req = Request(
-                 url,
-                 headers={"User-Agent": USER_AGENT, "Range": "bytes=0-1023"},
-                 method="GET",
-             )
-             try:
-                 with urlopen(req, timeout=timeout) as response:
-                     if response.status == 206 or response.status == 200:
-                         elapsed = time.time() - start_time
-                         logger.debug(f"URL {url}: {elapsed:.3f}s (GET fallback)")
-                         return elapsed
-             except (URLError, TimeoutError):
-                 logger.debug(f"URL {url}: failed (GET fallback)")
-         else:
-             logger.debug(f"URL {url}: failed (HTTP {e.code})")
-     except (URLError, TimeoutError) as e:
-         logger.debug(f"URL {url}: failed ({type(e).__name__})")
-
-     return float("inf")
-
-
- def find_available_urls(version: str, arch: str, timeout: int = 5) -> list[str]:
-     """Find all available URLs for downloading Python embeddable package, sorted by speed."""
-     official_url = get_official_url_template(arch).format(version=version)
-     mirror_templates = get_mirror_url_templates(arch)
-     mirror_urls = [template.format(version=version) for template in mirror_templates]
-     urls_to_test = [*mirror_urls, official_url]
-
-     logger.debug("Testing mirror speeds...")
-
-     speed_results = []
-     for url in urls_to_test:
-         speed = test_url_speed(url, timeout)
-         if speed != float("inf"):
-             speed_results.append((speed, url))
-             logger.debug(f" ✓ {url} ({speed:.3f}s)")
-         else:
-             logger.debug(f" ✗ {url} (failed)")
-
-     if not speed_results:
-         logger.warning("All mirrors failed, falling back to official URL")
-         return [official_url]
-
-     speed_results.sort(key=lambda x: x[0])
-     sorted_urls = sorted(speed_results, key=lambda x: x[0])
-     available_urls = [url for _, url in sorted_urls]
-
-     logger.debug(f"Available URLs (sorted by speed): {len(available_urls)} found")
-     for i, url in enumerate(available_urls, 1):
-         logger.debug(f" {i}. {url}")
-
-     return available_urls
-
-
- def download_with_progress(url: str, dest_path: Path) -> None:
-     """Download file with progress bar and integrity check."""
-     logger.info(f"Downloading from: {url}")
-
-     req = Request(url, headers={"User-Agent": USER_AGENT})
-     try:
-         with urlopen(req) as response:
-             total_size = int(response.headers.get("content-length", 0))
-             downloaded = 0
-             last_progress = 0
-
-             dest_path.parent.mkdir(parents=True, exist_ok=True)
-
-             with dest_path.open("wb") as f:
-                 while True:
-                     chunk = response.read(65536)  # Larger chunk size for better performance
-                     if not chunk:
-                         break
-                     f.write(chunk)
-                     downloaded += len(chunk)
-
-                     if total_size > 0:
-                         progress = (downloaded / total_size) * 100
-                         # Update progress when percentage changes (every 1%)
-                         if int(progress) > last_progress or downloaded == total_size:
-                             logger.info(
-                                 f"Progress: {downloaded / 1024 / 1024:.2f} MB / {total_size / 1024 / 1024:.2f} MB ({progress:.1f}%)"
-                             )
-                             last_progress = int(progress)
-
-             # Verify downloaded file size
-             if total_size > 0 and downloaded != total_size:
-                 raise URLError(f"Incomplete download: expected {total_size} bytes, got {downloaded} bytes")
-
-             logger.info(f"Download completed: {downloaded / 1024 / 1024:.2f} MB")
-     except URLError as e:
-         logger.error(f"Failed to download {url}: {e}")
-         raise
-
-
- def extract_zip(zip_path: Path, dest_dir: Path) -> None:
-     """Extract zip file to destination directory using optimized shutil method."""
-     logger.info(f"Extracting to: {dest_dir}")
-
-     dest_dir.mkdir(parents=True, exist_ok=True)
-
-     # Use shutil.unpack_archive for better performance (uses optimized C implementation)
-     shutil.unpack_archive(str(zip_path), str(dest_dir), "zip")
-
-     # Count extracted files for logging
-     with zipfile.ZipFile(zip_path, "r") as zip_ref:
-         member_count = len(zip_ref.namelist())
-     logger.info(f"Extracted {member_count} files")
-
-
- def install_embed_python(
-     target_dir: Path,
-     version: str,
-     cache_dir: Path,
-     offline: bool,
-     keep_cache: bool,
-     skip_speed_test: bool,
-     arch: str,
-     timeout: int = 5,
- ) -> bool:
-     """Install Python embeddable package to target directory."""
-     version_filename = f"python-{version}-embed-{arch}.zip"
-     cache_file = cache_dir / version_filename
-
-     if cache_file.exists():
-         logger.debug(f"Using cached file: {cache_file}")
-     elif offline:
-         logger.error(f"Offline mode: no cached file found for version {version}")
-         return False
-     else:
-         if skip_speed_test:
-             url = get_official_url_template(arch).format(version=version)
-             logger.info(f"Skipping speed test, using official URL: {url}")
-             urls = [url]
-         else:
-             urls = find_available_urls(version, arch, timeout)
-
-         download_success = False
-         for i, url in enumerate(urls, 1):
-             try:
-                 logger.info(f"Attempting download {i}/{len(urls)}: {url}")
-                 download_with_progress(url, cache_file)
-                 download_success = True
-                 break
-             except URLError as e:
-                 logger.warning(f"Download failed from {url}: {e}")
-                 if i < len(urls):
-                     logger.info("Retrying with next fastest URL...")
-                 else:
-                     logger.error(
-                         f"Failed to download Python {version} ({arch}) from all available URLs. "
-                         f"Please check your internet connection and version."
-                     )
-                     return False
-
-         if not download_success:
-             return False
-
-     try:
-         extract_zip(cache_file, target_dir)
-     except zipfile.BadZipFile:
-         logger.error(f"Invalid zip file: {cache_file}")
-         if not keep_cache:
-             cache_file.unlink()
-         return False
-     except Exception as e:
-         logger.error(f"Failed to extract: {e}")
-         return False
-
-     if not keep_cache and not offline:
-         logger.debug(f"Cleaning up cache: {cache_file}")
-         cache_file.unlink()
-
-     python_exe = target_dir / "python.exe"
-     if python_exe.exists():
-         logger.info(f"Successfully installed Python {version} ({arch}) embeddable package to: {target_dir}")
-         logger.info(f"Python executable: {python_exe}")
-         return True
-
-     logger.warning(f"Installation completed but python.exe not found at: {python_exe}")
-     return True
-
-
- def main() -> None:
-     parser = argparse.ArgumentParser(
-         prog="embedinstall",
-         description="Download and install Python embeddable package to a specific directory.",
-     )
-     parser.add_argument(
-         "--directory",
-         "-D",
-         type=str,
-         default=str(cwd / "runtime"),
-         help="Directory to install Python embeddable package (default: current directory)",
-     )
-     parser.add_argument("--debug", "-d", action="store_true", help="Enable debug mode")
-     parser.add_argument(
-         "--offline",
-         "-o",
-         action="store_true",
-         help="Offline mode (use cached files only)",
-     )
-     parser.add_argument(
-         "--version",
-         "-v",
-         type=str,
-         default="3.8.10",
-         help="Python version to install (default: 3.8.10)",
-     )
-     parser.add_argument(
-         "--cache-dir",
-         "-C",
-         type=str,
-         default=str(cwd / ".cache" / "embedinstall"),
-         help="Cache directory for downloaded files",
-     )
-     parser.add_argument(
-         "--keep-cache",
-         "-k",
-         action="store_true",
-         default=True,
-         help="Keep downloaded cache files",
-     )
-     parser.add_argument(
-         "--skip-speed-test",
-         "-s",
-         action="store_true",
-         help="Skip speed test and use official URL directly",
-     )
-     parser.add_argument(
-         "--arch",
-         "-a",
-         type=str,
-         help="Manually specify architecture (amd64, arm64). Default: auto-detect",
-     )
-     parser.add_argument(
-         "--timeout",
-         "-t",
-         type=int,
-         default=5,
-         help="Timeout in seconds for URL speed test (default: 5)",
-     )
-
-     args = parser.parse_args()
-
-     if args.debug:
-         logger.setLevel(logging.DEBUG)
-     else:
-         logger.setLevel(logging.INFO)
-
-     target_dir = Path(args.directory)
-     cache_dir = Path(args.cache_dir)
-
-     t0 = time.perf_counter()
-     arch = get_system_arch(args.arch)
-
-     # Auto-complete version if only major.minor is provided
-     version = get_latest_patch_version(args.version, arch, args.timeout)
-     if "." not in version:
-         return
-
-     logger.info(f"Installing Python {version} ({arch}) to: {target_dir}")
-     logger.info(f"Cache directory: {cache_dir}")
-
-     success = install_embed_python(
-         target_dir=target_dir,
-         version=version,
-         cache_dir=cache_dir,
-         offline=args.offline,
-         keep_cache=args.keep_cache,
-         skip_speed_test=args.skip_speed_test,
-         arch=arch,
-         timeout=args.timeout,
-     )
-
-     if not success:
-         exit(1)
-
-     logger.info(f"Installation completed in {time.perf_counter() - t0:.4f} seconds")
sfi/projectparse/projectparse.py DELETED
@@ -1,152 +0,0 @@
- """Parse pyproject.toml files in directory, supports multiple projects."""
-
- from __future__ import annotations
-
- import argparse
- import json
- import logging
- import sys
- import time
- from pathlib import Path
-
- if sys.version_info >= (3, 11):
-     import tomllib
- else:
-     import tomli as tomllib  # type: ignore
-
- __all__ = ["parse_project_data"]
-
-
- logging.basicConfig(level=logging.INFO, format="%(message)s")
- logger = logging.getLogger(__name__)
- cwd = Path.cwd()
-
-
- def parse_project_data(directory: Path, recursive: bool = False) -> dict:
-     """Parse pyproject.toml file in directory and return project data.
-
-     Returns:
-         dict: Project data.
-     """
-     data = _parse_pyproject(directory, recursive=recursive)
-     if not data:
-         return {}
-     return _extract_project_info(data)
-
-
- def _parse_pyproject(directory: Path, recursive: bool = False) -> dict[str, dict]:
-     """Parse pyproject.toml file in directory and return raw data."""
-     data = {}
-     if recursive:
-         for pyproject_path in directory.rglob("pyproject.toml"):
-             with pyproject_path.open("rb") as f:
-                 data[pyproject_path.parent.stem] = tomllib.load(f)
-     else:
-         pyproject_path = directory / "pyproject.toml"
-         if not pyproject_path.is_file():
-             logger.error(f"No pyproject.toml found in {directory}")
-             return {}
-
-         with pyproject_path.open("rb") as f:
-             logger.debug(f"Parsing {pyproject_path}")
-             data[pyproject_path.parent.stem] = tomllib.load(f)
-
-     logger.debug(f"Parsed {len(data)} pyproject.toml files, data: {data}")
-     return data
-
-
- def _extract_project_info(data: dict) -> dict:
-     """Extract commonly used project information from parsed data."""
-     if not data:
-         logger.error("No data to extract")
-         return {}
-
-     project_info = {}
-     for key, value in data.items():
-         if "project" in value:
-             project = value.get("project", {})
-             build_system = value.get("build-system", {})
-             project_info.setdefault(
-                 key,
-                 {
-                     "name": project.get("name"),
-                     "version": project.get("version"),
-                     "description": project.get("description"),
-                     "readme": project.get("readme"),
-                     "requires_python": project.get("requires-python"),
-                     "dependencies": project.get("dependencies", []),
-                     "optional_dependencies": project.get("optional-dependencies", {}),
-                     "scripts": project.get("scripts", {}),
-                     "entry_points": project.get("entry-points", {}),
-                     "authors": project.get("authors", []),
-                     "license": project.get("license"),
-                     "keywords": project.get("keywords", []),
-                     "classifiers": project.get("classifiers", []),
-                     "urls": project.get("urls", {}),
-                     "build_backend": build_system.get("build-backend"),
-                     "requires": build_system.get("requires", []),
-                 },
-             )
-         else:
-             logger.warning(f"No project information found in {key}")
-             project_info.setdefault(key, {})
-
-     logger.debug(f"Extracted {len(project_info)} projects, info: {project_info}")
-     return project_info
-
-
- def _check_directory(directory: str) -> bool:
-     """Check if directory is valid."""
-     if not directory:
-         logger.error("Error: No directory specified")
-         return False
-
-     dir_path = Path(directory)
-     if not dir_path.is_dir():
-         logger.error(f"Error: {dir_path} is not a directory")
-         return False
-
-     return True
-
-
- def main():
-     parser = argparse.ArgumentParser()
-     parser.add_argument("--directory", "-D", type=str, default=str(cwd), help="Directory to parse")
-     parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
-     parser.add_argument("--recursive", "-r", action="store_true", help="Recursively parse subdirectories")
-     parser.add_argument("--show", "-s", action="store_true", help="Show parsed data")
-     parser.add_argument("--output", "-o", type=str, default="projects.json", help="Output file path")
-
-     args = parser.parse_args()
-     if args.debug:
-         logger.setLevel(logging.DEBUG)
-
-     if not _check_directory(args.directory):
-         return
-
-     output_path = (cwd / args.output).with_suffix(".json")
-     if args.show:
-         if output_path.is_file():
-             logger.info(f"Loading output from `{output_path}`:")
-             with output_path.open("r", encoding="utf-8") as f:
-                 output_data = json.load(f)
-             logger.info(json.dumps(output_data, indent=2, ensure_ascii=False, sort_keys=True))
-             return
-         else:
-             logger.debug(f"No json file found at {output_path}, continue parsing...")
-
-     t0 = time.perf_counter()
-     logger.info(f"Parsing pyproject.toml in {args.directory}")
-     output_data = parse_project_data(Path(args.directory), recursive=args.recursive)
-     if args.show:
-         logger.info(json.dumps(output_data, indent=2, ensure_ascii=False, sort_keys=True))
-         return
-
-     try:
-         with output_path.open("w", encoding="utf-8") as f:
-             json.dump(output_data, f, indent=2, ensure_ascii=False)
-     except Exception as e:
-         logger.error(f"Error writing output to {output_path}: {e}")
-         return
-     else:
-         logger.info(f"Output written to {output_path}, took {time.perf_counter() - t0:.4f}s")
sfi/pypacker/fspacker.py DELETED
@@ -1,91 +0,0 @@
- from __future__ import annotations
-
- import argparse
- import logging
- from pathlib import Path
-
- logging.basicConfig(level=logging.INFO, format="%(message)s")
- logger = logging.getLogger(__name__)
-
- __version__ = "0.9.30"
- __build__ = "20260113"
-
-
- def main() -> None:
-     parser = argparse.ArgumentParser()
-     parser.add_argument(
-         "options",
-         choices=("build", "b", "run", "r", "clean", "c", "version", "v"),
-         help="Operation command",
-     )
-     parser.add_argument(
-         "target",
-         type=str,
-         default=None,
-         nargs="?",
-         help="Target name to match, supports fuzzy matching, leave empty if only one.",
-     )
-     parser.add_argument(
-         "directory",
-         type=str,
-         default=str(Path.cwd()),
-         nargs="?",
-         help="Project directory path",
-     )
-
-     parser.add_argument("--archive", "-a", action="store_true", help="zip packaging mode")
-     parser.add_argument("--compile", "-c", action="store_true", help="compile mode")
-     parser.add_argument("--debug", "-d", action="store_true", help="debug mode")
-     parser.add_argument("--gui", "-g", action="store_true", help="GUI mode")
-     parser.add_argument("--nsis", "-n", action="store_true", help="nsis packaging mode")
-     parser.add_argument("--offline", "-o", action="store_true", help="offline mode")
-     parser.add_argument("--rebuild", "-rb", action="store_true", help="rebuild mode")
-     parser.add_argument("--recursive", "-r", action="store_true", default=False, help="recursive")
-     parser.add_argument("--simplify", "-s", action="store_true", default=True, help="simplify mode")
-     parser.add_argument("--use-mingw", "-mingw", action="store_true", help="use mingw to compile")
-     parser.add_argument(
-         "--use-nuitka",
-         "-nuitka",
-         action="store_true",
-         default=True,
-         help="use nuitka to compile",
-     )
-     parser.add_argument("--with-tk", "-tk", action="store_true", help="package tk library")
-     parser.add_argument("--with-js", "-js", action="store_true", help="package js")
-
-     args = parser.parse_args()
-
-     # settings.mode["archive"] = args.archive
-     # settings.mode["compile"] = args.compile
-     # settings.mode["debug"] = args.debug
-     # settings.mode["gui"] = args.gui
-     # settings.mode["nsis"] = args.nsis
-     # settings.mode["offline"] = args.offline
-     # settings.mode["rebuild"] = args.rebuild
-     # settings.mode["recursive"] = args.recursive
-     # settings.mode["simplify"] = args.simplify
-     # settings.mode["use_mingw"] = args.use_mingw
-     # settings.mode["use_nuitka"] = args.use_nuitka
-     # settings.mode["with_tk"] = args.with_tk
-     # settings.mode["with_js"] = args.with_js
-
-     if args.debug:
-         logger.setLevel(logging.DEBUG)
-     else:
-         logger.setLevel(logging.INFO)
-
-     # logger.show_header()
-
-     if args.options in {"version", "v"}:
-         logger.info(f"pypacker {__version__} (build {__build__})")
-         return
-
-     # manager = ProjectManager(Path(args.directory), match_name=args.target)
-     # if args.options in {"build", "b"}:
-     #     manager.build()
-     # elif args.options in {"run", "r"}:
-     #     manager.run()
-     # elif args.options in {"clean", "c"}:
-     #     manager.clean()
-
-     # settings.dump()