pysfi-0.1.7-py3-none-any.whl → pysfi-0.1.11-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/METADATA +11 -9
  2. pysfi-0.1.11.dist-info/RECORD +60 -0
  3. pysfi-0.1.11.dist-info/entry_points.txt +28 -0
  4. sfi/__init__.py +1 -1
  5. sfi/alarmclock/alarmclock.py +40 -40
  6. sfi/bumpversion/__init__.py +1 -1
  7. sfi/cleanbuild/cleanbuild.py +155 -0
  8. sfi/condasetup/condasetup.py +116 -0
  9. sfi/docscan/__init__.py +1 -1
  10. sfi/docscan/docscan.py +407 -103
  11. sfi/docscan/docscan_gui.py +1282 -596
  12. sfi/docscan/lang/eng.py +152 -0
  13. sfi/docscan/lang/zhcn.py +170 -0
  14. sfi/filedate/filedate.py +185 -112
  15. sfi/gittool/__init__.py +2 -0
  16. sfi/gittool/gittool.py +401 -0
  17. sfi/llmclient/llmclient.py +592 -0
  18. sfi/llmquantize/llmquantize.py +480 -0
  19. sfi/llmserver/llmserver.py +335 -0
  20. sfi/makepython/makepython.py +31 -30
  21. sfi/pdfsplit/pdfsplit.py +173 -173
  22. sfi/pyarchive/pyarchive.py +418 -0
  23. sfi/pyembedinstall/pyembedinstall.py +629 -0
  24. sfi/pylibpack/__init__.py +0 -0
  25. sfi/pylibpack/pylibpack.py +1457 -0
  26. sfi/pylibpack/rules/numpy.json +22 -0
  27. sfi/pylibpack/rules/pymupdf.json +10 -0
  28. sfi/pylibpack/rules/pyqt5.json +19 -0
  29. sfi/pylibpack/rules/pyside2.json +23 -0
  30. sfi/pylibpack/rules/scipy.json +23 -0
  31. sfi/pylibpack/rules/shiboken2.json +24 -0
  32. sfi/pyloadergen/pyloadergen.py +512 -227
  33. sfi/pypack/__init__.py +0 -0
  34. sfi/pypack/pypack.py +1142 -0
  35. sfi/pyprojectparse/__init__.py +0 -0
  36. sfi/pyprojectparse/pyprojectparse.py +500 -0
  37. sfi/pysourcepack/pysourcepack.py +308 -0
  38. sfi/quizbase/__init__.py +0 -0
  39. sfi/quizbase/quizbase.py +828 -0
  40. sfi/quizbase/quizbase_gui.py +987 -0
  41. sfi/regexvalidate/__init__.py +0 -0
  42. sfi/regexvalidate/regex_help.html +284 -0
  43. sfi/regexvalidate/regexvalidate.py +468 -0
  44. sfi/taskkill/taskkill.py +0 -2
  45. sfi/workflowengine/__init__.py +0 -0
  46. sfi/workflowengine/workflowengine.py +444 -0
  47. pysfi-0.1.7.dist-info/RECORD +0 -31
  48. pysfi-0.1.7.dist-info/entry_points.txt +0 -15
  49. sfi/embedinstall/embedinstall.py +0 -418
  50. sfi/projectparse/projectparse.py +0 -152
  51. sfi/pypacker/fspacker.py +0 -91
  52. {pysfi-0.1.7.dist-info → pysfi-0.1.11.dist-info}/WHEEL +0 -0
  53. /sfi/{embedinstall → docscan/lang}/__init__.py +0 -0
  54. /sfi/{projectparse → llmquantize}/__init__.py +0 -0
  55. /sfi/{pypacker → pyembedinstall}/__init__.py +0 -0
sfi/pypack/pypack.py ADDED
@@ -0,0 +1,1142 @@
+"""Package Workflow - Advanced Python project packaging tool with workflow orchestration.
+
+This module provides a comprehensive packaging solution that integrates pyprojectparse,
+pysourcepack, pyembedinstall, and pyloadergen tools through a workflow engine to achieve
+mixed serial and parallel execution for optimal efficiency.
+"""
+
+from __future__ import annotations
+
+import argparse
+import asyncio
+import json
+import logging
+import platform
+import shutil
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Any
+
+from sfi.pyprojectparse.pyprojectparse import Project, Solution
+from sfi.pysourcepack.pysourcepack import pack_project
+from sfi.workflowengine.workflowengine import (
+    CPUTask,
+    IOTask,
+    SerialTask,
+    WorkflowEngine,
+)
+
+logging.basicConfig(
+    level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
+)
+logger = logging.getLogger(__name__)
+cwd = Path.cwd()
+is_windows = platform.system() == "Windows"
+ext = ".exe" if is_windows else ""
+
+__version__ = "1.0.0"
+__build__ = "20260120"
+
+
+@dataclass
+class WorkflowConfig:
+    """Configuration for package workflow."""
+
+    directory: Path
+    project_name: str | None = None
+    python_version: str = "3.8.10"
+    loader_type: str = "console"
+    entry_suffix: str = ".ent"
+    generate_loader: bool = True
+    recursive: bool = False
+    offline: bool = False
+    max_concurrent: int = 4
+    debug: bool = False
+    cache_dir: Path | None = None
+    archive_format: str = "zip"
+    mirror: str = "aliyun"
+
+
+@dataclass
+class ParseProjectResult:
+    """Result from project parsing task."""
+
+    projects: dict[str, Any]
+    projects_file: str
+
+
+@dataclass
+class PackSourceResult:
+    """Result from source packing task."""
+
+    packed_projects: list[str]
+    output_dir: str
+
+
+@dataclass
+class InstallPythonResult:
+    """Result from Python installation task."""
+
+    version: str
+    target_dir: str
+    install_result: Any
+
+
+@dataclass
+class PackLibrariesResult:
+    """Result from library packing task."""
+
+    pack_result: Any
+    packages_dir: str
+
+
+class ParseProjectTask(IOTask):
+    """Task to parse project configuration using pyprojectparse."""
+
+    def __init__(self, directory: Path, recursive: bool = False, timeout: float = 60.0):
+        super().__init__("parse_project", 2.0, [], timeout)
+        self.directory = directory
+        self.recursive = recursive
+
+    async def execute(self, context: dict[str, Any]) -> Any:
+        """Execute project parsing."""
+        logger.info(f"Starting project parsing: {self.directory}")
+
+        try:
+            projects = Solution.from_directory(
+                root_dir=self.directory, recursive=self.recursive
+            )
+
+            logger.info(f"Found {len(projects)} project(s)")
+            for project in projects.values():
+                logger.info(project)
+
+            return {
+                "projects": projects,
+                "projects_file": str(self.directory / "projects.json"),
+            }
+
+        except Exception as e:
+            logger.error(f"Failed to parse projects: {e}")
+            raise
+
+
+class PackSourceTask(IOTask):
+    """Task to pack source code using pysourcepack."""
+
+    def __init__(
+        self,
+        directory: Path,
+        projects_file: Path,
+        project_name: str | None = None,
+        timeout: float = 120.0,
+    ):
+        super().__init__("pack_source", 5.0, ["parse_project"], timeout)
+        self.base_dir = directory
+        self.projects_file = projects_file
+        self.projects: dict[str, Project] = {}
+        self.project_name = project_name
+
+    def _pack_projects(self, projects: dict) -> list[str]:
+        """Pack specified projects.
+
+        Args:
+            projects: Projects data dict
+
+        Returns:
+            List of packed project names
+        """
+        project_names = (
+            [self.project_name] if self.project_name else list(projects.keys())
+        )
+        packed_projects = []
+
+        for name in project_names:
+            logger.info(f"Packing project: {name}")
+            pack_project(base_dir=self.base_dir, project_name=name, projects=projects)
+            packed_projects.append(name)
+
+        return packed_projects
+
+    async def execute(self, context: dict[str, Any]) -> Any:
+        """Execute source packing."""
+        logger.info(f"Starting source packing: {self.base_dir}")
+
+        try:
+            projects = Solution.from_directory(
+                root_dir=self.base_dir, recursive=True, update=False
+            ).projects
+
+            packed_projects = self._pack_projects(projects)
+            logger.info(f"Packed {len(packed_projects)} project(s)")
+            return {"packed_projects": packed_projects, "output_dir": "dist/src"}
+
+        except Exception as e:
+            logger.error(f"Failed to pack source: {e}")
+            raise
+
+
+class InstallPythonTask(IOTask):
+    """Task to install embedded Python using pyembedinstall."""
+
+    def __init__(
+        self,
+        version: str,
+        target_dir: Path,
+        offline: bool = False,
+        timeout: float = 300.0,
+    ):
+        super().__init__("install_python", 10.0, ["parse_project"], timeout)
+        self.version = version
+        self.target_dir = target_dir
+        self.offline = offline
+
+    def _get_architecture(self) -> str:
+        """Determine system architecture.
+
+        Returns:
+            Architecture string (amd64 or arm64)
+        """
+        arch = platform.machine().lower()
+        if arch in ("amd64", "x86_64"):
+            return "amd64"
+        elif arch in ("arm64", "aarch64"):
+            return "arm64"
+        return "amd64"
+
+    def _prepare_cache_dir(self) -> Path:
+        """Prepare cache directory for Python installation.
+
+        Returns:
+            Cache directory path
+        """
+        from sfi.pyembedinstall.pyembedinstall import _DEFAULT_CACHE_DIR
+
+        cache_dir = _DEFAULT_CACHE_DIR
+        cache_dir.mkdir(parents=True, exist_ok=True)
+        return cache_dir
+
+    def _install_python(self, cache_dir: Path, arch: str) -> Any:
+        """Install embedded Python.
+
+        Args:
+            cache_dir: Cache directory path
+            arch: System architecture
+
+        Returns:
+            Installation result
+        """
+        from sfi.pyembedinstall.pyembedinstall import (
+            EmbedInstallConfig,
+            install_embed_python,
+        )
+
+        config = EmbedInstallConfig(
+            target_dir=self.target_dir,
+            version=self.version,
+            cache_dir=cache_dir,
+            offline=self.offline,
+            keep_cache=True,
+            skip_speed_test=False,
+            arch=arch,
+        )
+
+        return install_embed_python(config)
+
+    async def execute(self, context: dict[str, Any]) -> Any:
+        """Execute Python installation."""
+        logger.info(f"Starting Python installation: {self.version}")
+
+        try:
+            cache_dir = self._prepare_cache_dir()
+            arch = self._get_architecture()
+            install_result = self._install_python(cache_dir, arch)
+
+            logger.info(f"Python {self.version} installed to {self.target_dir}")
+
+            return {
+                "version": self.version,
+                "target_dir": str(self.target_dir),
+                "install_result": install_result,
+            }
+
+        except Exception as e:
+            logger.error(f"Failed to install Python: {e}")
+            raise
+
+
+class PackLibrariesTask(IOTask):
+    """Task to pack dependencies using pylibpack."""
+
+    def __init__(
+        self,
+        project_dir: Path,
+        output_dir: Path,
+        cache_dir: Path | None = None,
+        python_version: str | None = None,
+        max_workers: int = 4,
+        archive_format: str = "zip",
+        mirror: str = "aliyun",
+        timeout: float = 300.0,
+    ):
+        super().__init__("pack_libraries", 10.0, ["parse_project"], timeout)
+        self.project_dir = project_dir
+        self.output_dir = output_dir
+        self.cache_dir = cache_dir
+        self.python_version = python_version
+        self.max_workers = max_workers
+        self.archive_format = archive_format
+        self.mirror = mirror
+
+    def _create_packer(self) -> Any:
+        """Create PyLibPack instance.
+
+        Returns:
+            PyLibPack instance
+        """
+        from sfi.pylibpack.pylibpack import PyLibPack
+
+        return PyLibPack(
+            cache_dir=self.cache_dir,
+            python_version=self.python_version,
+            mirror=self.mirror,
+        )
+
+    async def execute(self, context: dict[str, Any]) -> Any:
+        """Execute library packing."""
+        logger.info(f"Starting library packing: {self.project_dir}")
+
+        try:
+            packer = self._create_packer()
+            pack_result = packer.pack(
+                base_dir=self.project_dir,
+                output_dir=self.output_dir,
+                max_workers=self.max_workers,
+            )
+
+            logger.info(
+                f"Library packing completed: {pack_result.successful}/{pack_result.total}"
+            )
+
+            return {
+                "pack_result": pack_result,
+                "packages_dir": pack_result.output_dir,
+            }
+
+        except Exception as e:
+            logger.error(f"Failed to pack libraries: {e}")
+            raise
+
+
+class GenerateLoaderTask(CPUTask):
+    """Task to generate loader using pyloadergen."""
+
+    def __init__(
+        self,
+        base_dir: Path,
+        debug: bool = False,
+        timeout: float = 60.0,
+    ):
+        super().__init__(
+            "generate_loader", 100000, ["parse_project", "pack_source"], timeout
+        )
+        self.base_dir = base_dir
+        self.debug = debug
+
+    async def execute(self, inputs: dict[str, Any]) -> Any:
+        """Execute loader generation."""
+        try:
+            from sfi.pyloadergen.pyloadergen import generate_loader
+
+            generate_loader(self.base_dir, self.debug)
+        except Exception as e:
+            logger.error(f"Failed to generate loader: {e}")
+            raise
+
+
+class AssemblePackageTask(SerialTask):
+    """Task to assemble final package."""
+
+    def __init__(
+        self, output_dir: Path, dependencies: list[str], timeout: float = 60.0
+    ):
+        self.output_dir = output_dir
+        super().__init__("assemble_package", self._assemble, dependencies, timeout)
+
+    def _get_project_dir(self, inputs: dict[str, Any]) -> Path:
+        """Get project directory from parse result or current directory."""
+        if "parse_project" in inputs and hasattr(inputs["parse_project"], "data"):
+            parse_result = inputs["parse_project"].data
+            if isinstance(parse_result, dict) and "projects_file" in parse_result:
+                return Path(parse_result["projects_file"]).parent
+        return Path.cwd()
+
+    def _prepare_dist_dir(self) -> Path:
+        """Prepare and return distribution directory."""
+        dist_dir = (
+            self.output_dir.parent if self.output_dir.is_file() else self.output_dir
+        )
+        dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
+        dist_dir.mkdir(parents=True, exist_ok=True)
+        return dist_dir
+
+    def _copy_loaders(self, project_dir: Path, dist_dir: Path) -> int:
+        """Copy loaders and entry files to dist directory."""
+        loader_dir = project_dir / "dist"
+        if not loader_dir.exists():
+            return 0
+
+        loaders_count = 0
+        for loader in loader_dir.glob("*.ent"):
+            logger.info(f"Entry file: {loader.name}")
+            loaders_count += 1
+
+        # Support both Linux (no extension) and Windows (.exe) executables
+        for loader in loader_dir.glob("*.exe"):
+            logger.info(f"Executable: {loader.name}")
+            shutil.copy2(loader, dist_dir / loader.name)
+            loaders_count += 1
+
+        # Also copy Linux executables (files without extension)
+        for loader in loader_dir.glob("*"):
+            if loader.is_file() and not loader.name.endswith((".ent", ".exe")):
+                logger.info(f"Executable: {loader.name}")
+                shutil.copy2(loader, dist_dir / loader.name)
+                loaders_count += 1
+
+        logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")
+        return loaders_count
+
+    def _copy_libraries(self, project_dir: Path, dist_dir: Path) -> None:
+        """Copy libraries to dist/site-packages directory."""
+        libs_dir = project_dir / "dist" / "libs"
+        if not libs_dir.exists():
+            return
+
+        dest_libs_dir = dist_dir / "site-packages"
+        if dest_libs_dir.exists():
+            shutil.rmtree(dest_libs_dir)
+        shutil.copytree(libs_dir, dest_libs_dir)
+        logger.info(f"Libraries copied to {dest_libs_dir}")
+
+    def _create_metadata(self, dist_dir: Path) -> dict[str, Any]:
+        """Create package metadata file."""
+        metadata = {
+            "version": __version__,
+            "build": __build__,
+            "assembled_at": asyncio.get_event_loop().time(),
+        }
+
+        metadata_file = dist_dir / "metadata.json"
+        with open(metadata_file, "w", encoding="utf-8") as f:
+            json.dump(metadata, f, indent=2)
+
+        return metadata
+
+    def _assemble(
+        self, inputs: dict[str, Any], state: dict[str, Any]
+    ) -> dict[str, Any]:
+        """Assemble final package."""
+        logger.info("Starting package assembly")
+
+        try:
+            project_dir = self._get_project_dir(inputs)
+            dist_dir = self._prepare_dist_dir()
+
+            self._copy_loaders(project_dir, dist_dir)
+            self._copy_libraries(project_dir, dist_dir)
+            metadata = self._create_metadata(dist_dir)
+
+            logger.info(f"Package assembled: {dist_dir}")
+            return {"output_dir": str(dist_dir), "metadata": metadata}
+
+        except Exception as e:
+            logger.error(f"Failed to assemble package: {e}")
+            raise
+
+
+class PackageWorkflow:
+    """Package workflow orchestrator."""
+
+    def __init__(self, directory: Path, config: WorkflowConfig):
+        self.directory = directory
+        self.config = config
+        self.engine = WorkflowEngine(max_concurrent=config.max_concurrent)
+
+    def _add_parse_task(self) -> None:
+        """Add project parsing task to workflow."""
+        parse_task = ParseProjectTask(
+            directory=self.directory,
+            recursive=self.config.recursive,
+        )
+        self.engine.add_task(parse_task)
+
+    def _add_pack_and_install_tasks(self) -> None:
+        """Add pack source and install Python tasks to workflow."""
+        pack_task = PackSourceTask(
+            directory=self.directory,
+            projects_file=self.directory / "projects.json",
+            project_name=self.config.project_name,
+        )
+        self.engine.add_task(pack_task)
+
+        install_task = InstallPythonTask(
+            version=self.config.python_version,
+            target_dir=self.directory / "dist" / "runtime",
+            offline=self.config.offline,
+        )
+        self.engine.add_task(install_task)
+
+    def _add_pack_libraries_task(self) -> None:
+        """Add library packing task to workflow."""
+        lib_pack_task = PackLibrariesTask(
+            project_dir=self.directory,
+            output_dir=self.directory / "dist/site-packages",
+            cache_dir=self.config.cache_dir,
+            python_version=self.config.python_version,
+            max_workers=self.config.max_concurrent,
+            archive_format=self.config.archive_format,
+            mirror=self.config.mirror,
+        )
+        self.engine.add_task(lib_pack_task)
+
+    def _add_loader_task(self) -> list[str]:
+        """Add loader generation task to workflow.
+
+        Returns:
+            List of task dependencies for assembly phase
+        """
+        if not self.config.generate_loader:
+            return ["pack_source", "install_python"]
+
+        loader_task = GenerateLoaderTask(self.directory, self.config.debug)
+        self.engine.add_task(loader_task)
+
+        assembly_deps = ["pack_source", "install_python", "generate_loader"]
+        assembly_deps.append("pack_libraries")
+
+        return assembly_deps
+
+    def _add_assemble_task(self, dependencies: list[str]) -> None:
+        """Add package assembly task to workflow.
+
+        Args:
+            dependencies: List of task dependencies
+        """
+        assemble_task = AssemblePackageTask(
+            output_dir=self.directory / "dist",
+            dependencies=dependencies,
+        )
+        self.engine.add_task(assemble_task)
+
+    def build_workflow(self) -> WorkflowEngine:
+        """Build the packaging workflow."""
+        logger.info("Building packaging workflow")
+
+        self._add_parse_task()
+        self._add_pack_and_install_tasks()
+        self._add_pack_libraries_task()
+        dependencies = self._add_loader_task()
+        self._add_assemble_task(dependencies)
+
+        return self.engine
+
+    def _log_execution_summary(self, summary: dict[str, Any]) -> None:
+        """Log workflow execution summary.
+
+        Args:
+            summary: Execution summary dict
+        """
+        logger.info("=" * 50)
+        logger.info("Workflow execution summary:")
+        logger.info(f" Total tasks: {summary['total_tasks']}")
+        logger.info(f" Completed: {summary['completed']}")
+        logger.info(f" Failed: {summary['failed']}")
+        logger.info(f" Success rate: {summary['success_rate'] * 100:.1f}%")
+        logger.info(f" Total time: {summary['total_execution_time']:.2f}s")
+        logger.info("=" * 50)
+
+    async def execute(self) -> dict[str, Any]:
+        """Execute the packaging workflow.
+
+        Returns:
+            Dict with results and summary
+        """
+        logger.info("Starting packaging workflow execution")
+
+        self.build_workflow()
+
+        try:
+            results = await self.engine.execute_workflow()
+            summary = self.engine.get_execution_summary()
+            self._log_execution_summary(summary)
+
+            return {"results": results, "summary": summary}
+
+        except Exception as e:
+            logger.error(f"Workflow execution failed: {e}")
+            raise
+
+
+def _load_projects_data(directory: Path, silent: bool = False) -> dict[str, Any] | None:
+    """Load projects data from projects.json.
+
+    Args:
+        directory: Project directory containing projects.json
+        silent: If True, use warning instead of error messages
+
+    Returns:
+        Projects data dict or None if not found/error
+    """
+    projects_file = directory / "projects.json"
+
+    if not projects_file.exists():
+        level = logger.warning if silent else logger.error
+        level(f"No projects.json found in {directory}")
+        logger.info("Run 'pypack build' first to create projects.json")
+        return None
+
+    try:
+        with open(projects_file, encoding="utf-8") as f:
+            projects_data = json.load(f)
+
+        if not projects_data:
+            level = logger.info if silent else logger.error
+            level("No projects found in projects.json")
+            return None
+
+        return projects_data
+    except Exception as e:
+        logger.error(f"Failed to load projects.json: {e}")
+        return None
+
+
+def _get_project_info(project_info: Any) -> tuple[str, str, str]:
+    """Extract project info from project data.
+
+    Args:
+        project_info: Project info dict or string
+
+    Returns:
+        Tuple of (version, entry, description)
+    """
+    if isinstance(project_info, str):
+        return "N/A", "N/A", ""
+
+    version = (
+        project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
+    )
+    entry = "main.py"
+    description = (
+        project_info.get("description", "") if isinstance(project_info, dict) else ""
+    )
+    return version, entry, description
+
+
+def _print_project(project_name: str, project_info: Any, index: int) -> None:
+    """Print project information.
+
+    Args:
+        project_name: Name of the project
+        project_info: Project info dict or string
+        index: Index number for listing
+    """
+    version, entry, description = _get_project_info(project_info)
+
+    logger.info(f"\n[{index}] {project_name}")
+    logger.info(f" Version: {version}")
+    logger.info(f" Entry: {entry}")
+    if description:
+        logger.info(f" Description: {description}")
+
+
+def list_projects(directory: Path) -> None:
+    """List projects from projects.json.
+
+    Args:
+        directory: Project directory containing projects.json
+    """
+    projects_data = _load_projects_data(directory, silent=True)
+
+    if not projects_data:
+        return
+
+    logger.info(f"Found {len(projects_data)} project(s):")
+    logger.info("=" * 60)
+
+    for i, (project_name, project_info) in enumerate(projects_data.items(), 1):
+        _print_project(project_name, project_info, i)
+
+    logger.info("=" * 60)
+
+
+def _get_available_executables(directory: Path) -> list[str]:
+    """Get list of available executables in dist directory.
+
+    Args:
+        directory: Project directory
+
+    Returns:
+        List of executable names (without .exe extension)
+    """
+    dist_dir = directory / "dist"
+    if not dist_dir.exists():
+        logger.error(f"Dist directory not found: {dist_dir}")
+        logger.info("Run 'pypack build' first to build the project")
+        return []
+
+    # Support both Windows (.exe) and Linux (no extension) executables
+    exe_names = set()
+    for exe in dist_dir.glob("*.exe"):
+        if exe.is_file():
+            exe_names.add(exe.stem)
+
+    for exe in dist_dir.glob("*"):
+        if exe.is_file() and not exe.name.endswith((".ent", ".exe", ".json")):
+            exe_names.add(exe.name)
+
+    available_exes = list(exe_names)
+    if not available_exes:
+        logger.error("No executables found in dist directory")
+        logger.info("Run 'pypack build' first to build the project")
+
+    return available_exes
+
+
+def _find_matching_executables(
+    project_name: str, available_exes: list[str]
+) -> list[str]:
+    """Find executables matching project name.
+
+    Args:
+        project_name: Project name to match
+        available_exes: List of available executable names
+
+    Returns:
+        List of matching executable names
+    """
+    return [
+        exe
+        for exe in available_exes
+        if exe == project_name or exe.startswith(f"{project_name}-")
+    ]
+
+
+def _log_available_executables(available_exes: list[str]) -> None:
+    """Log available executable names.
+
+    Args:
+        available_exes: List of available executable names
+    """
+    logger.info("Available executables:")
+    for exe in available_exes:
+        logger.info(f"** {exe} **")
+
+
+def _resolve_exact_project_name(
+    project_name: str, available_exes: list[str]
+) -> str | None:
+    """Resolve executable when project name is specified.
+
+    Args:
+        project_name: Project name to resolve
+        available_exes: List of available executable names
+
+    Returns:
+        Target executable name or None
+    """
+    if project_name in available_exes:
+        return project_name
+    return None
+
+
+def _resolve_project_by_name(
+    project_name: str, available_exes: list[str]
+) -> str | None:
+    """Resolve executable by matching project name.
+
+    Args:
+        project_name: Project name to resolve
+        available_exes: List of available executable names
+
+    Returns:
+        Target executable name or None
+    """
+    matching_exes = _find_matching_executables(project_name, available_exes)
+
+    if len(matching_exes) == 1:
+        return matching_exes[0]
+    elif len(matching_exes) > 1:
+        logger.error(f"Multiple executables found for project '{project_name}':")
+        for exe in matching_exes:
+            logger.info(f" - {exe}")
+        logger.info("Please specify the full executable name (e.g., 'docscan-gui')")
+        return None
+
+    logger.error(f"No executable found for project '{project_name}'")
+    return None
+
+
+def _resolve_no_project_name(available_exes: list[str]) -> str | None:
+    """Resolve executable when no project name is specified.
+
+    Args:
+        available_exes: List of available executable names
+
+    Returns:
+        Target executable name or None
+    """
+    if len(available_exes) == 1:
+        logger.info(f"Running single executable: {available_exes[0]}")
+        return available_exes[0]
+
+    logger.error(
+        "Multiple executables found. Please specify executable name with --project"
+    )
+    _log_available_executables(available_exes)
+    return None
+
+
+def _resolve_target_executable(
+    project_name: str | None,
+    available_exes: list[str],
+    projects_data: dict[str, Any],
+) -> str | None:
+    """Resolve which executable to run based on project name.
+
+    Args:
+        project_name: Project or executable name to run
+        available_exes: List of available executable names
+        projects_data: Projects data dict
+
+    Returns:
+        Target executable name or None if not found/ambiguous
+    """
+    if not project_name:
+        return _resolve_no_project_name(available_exes)
+
+    exact_match = _resolve_exact_project_name(project_name, available_exes)
+    if exact_match:
+        return exact_match
+
+    if project_name in projects_data:
+        return _resolve_project_by_name(project_name, available_exes)
+
+    logger.error(f"Project or executable '{project_name}' not found")
+    _log_available_executables(available_exes)
+    return None
+
+
+def _execute_project(exe_path: Path, dist_dir: Path) -> None:
+    """Execute the project executable.
+
+    Args:
+        exe_path: Path to the executable
+        dist_dir: Working directory for execution
+    """
+    import subprocess
+
+    try:
+        subprocess.run([str(exe_path)], cwd=str(dist_dir), check=True)
+    except subprocess.CalledProcessError as e:
+        logger.error(f"Project execution failed with exit code {e.returncode}")
+    except KeyboardInterrupt:
+        logger.info("Project execution interrupted")
+    except FileNotFoundError:
+        logger.error(f"Failed to execute {exe_path}")
+
+
+def run_project(project_name: str | None, directory: Path) -> None:
+    """Run a built project.
+
+    Args:
+        project_name: Project name or executable name to run (e.g., 'docscan' or 'docscan-gui')
+        directory: Project directory
+    """
+    projects_data = _load_projects_data(directory)
+    if not projects_data:
+        return
+
+    available_exes = _get_available_executables(directory)
+    if not available_exes:
+        return
+
+    target_exe_name = _resolve_target_executable(
+        project_name, available_exes, projects_data
+    )
+    if not target_exe_name:
+        return
+
+    # Try both Windows (.exe) and Linux (no extension) executables
+    exe_path_with_ext = directory / "dist" / f"{target_exe_name}.exe"
+    exe_path_no_ext = directory / "dist" / target_exe_name
+
+    exe_path = None
+    if exe_path_with_ext.exists():
+        exe_path = exe_path_with_ext
+    elif exe_path_no_ext.exists():
+        exe_path = exe_path_no_ext
+
+    if not exe_path:
+        logger.error(f"Executable not found for '{target_exe_name}'")
+        logger.info("Run 'pypack build' first to build the project")
+        return
+
+    logger.info(f"Starting: {target_exe_name}")
+    logger.info(f"Executable: {exe_path}")
+    _execute_project(exe_path, directory / "dist")
+
+
+def clean_project(directory: Path) -> None:
+    """Clean build artifacts and package files.
+
+    Args:
+        directory: Project directory to clean
+    """
+    logger.info("Cleaning build artifacts...")
+
+    cleaned_dirs, cleaned_files = _clean_build_artifacts(directory)
+
+    if not cleaned_dirs and not cleaned_files:
+        logger.info("No build artifacts found")
+    else:
+        logger.info(
+            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
+        )
+
+
+def _should_clean(entry: Path) -> bool:
+    """Check if a directory or file should be cleaned.
+
+    Args:
+        entry: Path to check
+
+    Returns:
+        True if the entry should be cleaned
+    """
+    protected_dirs = {
+        ".git",
+        ".venv",
+        ".virtualenv",
+        ".vscode",
+        ".idea",
+        ".codebuddy",
+        ".qoder",
+    }
+
+    if entry.is_file() and entry.name == "projects.json":
+        return True
+
+    if not entry.is_dir():
+        return False
+
+    if entry.name.startswith(".") and entry.name in protected_dirs:
+        return False
+
+    return (
+        entry.name.startswith(".")
+        or entry.name.startswith("__")
+        or entry.name in ("build", "dist", "pysfi_build", "cbuild", "benchmarks")
+    )
+
+
+def _clean_build_artifacts(directory: Path) -> tuple[list[str], list[str]]:
+    """Clean all build artifacts from directory.
+
+    Args:
+        directory: Project directory to clean
+
+    Returns:
+        Tuple of (cleaned directories, cleaned files)
+    """
+    cleaned_dirs = []
+    cleaned_files = []
+
+    entries_to_clean = [f for f in directory.iterdir() if _should_clean(f)]
+
+    for entry in entries_to_clean:
+        if not entry.exists():
+            continue
+
+        if entry.is_dir():
+            logger.info(f"Removing directory: {entry}")
+            if _remove_directory(entry):
+                cleaned_dirs.append(str(entry))
+                logger.info(f" Removed directory: {entry}")
+            else:
+                logger.warning(f" Failed to remove {entry}")
+        elif entry.is_file():
+            logger.info(f"Removing file: {entry}")
+            if _remove_file(entry):
+                cleaned_files.append(str(entry))
+                logger.info(f" Removed file: {entry}")
+            else:
+                logger.warning(f" Failed to remove {entry}")
+
+    return cleaned_dirs, cleaned_files
+
+
+def _remove_directory(dir_path: Path) -> bool:
+    """Remove a directory safely.
+
+    Args:
+        dir_path: Directory path to remove
+
+    Returns:
+        True if removal succeeded
+    """
+    try:
+        shutil.rmtree(dir_path)
+        return True
+    except Exception:
+        return False
+
+
+def _remove_file(file_path: Path) -> bool:
+    """Remove a file safely.
+
+    Args:
+        file_path: File path to remove
+
+    Returns:
+        True if removal succeeded
+    """
+    try:
+        file_path.unlink()
+        return True
+    except Exception:
+        return False
+
+
+def parse_args() -> argparse.Namespace:
+    """Parse command line arguments.
+
+    Returns:
+        Parsed arguments namespace
+    """
+    parser = argparse.ArgumentParser(
+        prog="pypack", description="Python packaging tool with workflow orchestration"
+    )
+
+    parser.add_argument(
+        "action",
+        choices=("build", "b", "list", "l", "run", "r", "clean", "c", "version", "v"),
+        help="Action to perform",
+    )
+    parser.add_argument(
+        "project",
+        type=str,
+        nargs="?",
+        default=None,
+        help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
+    )
+    parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
+    parser.add_argument(
+        "--python-version", type=str, default="3.8.10", help="Python version to install"
+    )
+    parser.add_argument(
+        "--loader-type",
+        type=str,
+        choices=("console", "gui"),
+        default="console",
+        help="Loader type",
+    )
+    parser.add_argument(
+        "--entry-suffix",
+        type=str,
+        default=".ent",
+        help="Entry file suffix (default: .ent, alternatives: .py)",
+    )
+    parser.add_argument(
+        "--no-loader", action="store_true", help="Skip loader generation"
+    )
+    parser.add_argument(
+        "-r", "--recursive", action="store_true", help="Parse projects recursively"
+    )
+    parser.add_argument("-o", "--offline", action="store_true", help="Offline mode")
+    parser.add_argument(
+        "-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks"
+    )
+
+    # Library packing arguments
+    parser.add_argument(
+        "--cache-dir",
+        type=str,
+        default=None,
+        help="Custom cache directory for dependencies",
+    )
+    parser.add_argument(
+        "--archive-format",
+        type=str,
+        choices=("zip", "7z", "nsis"),
+        default="zip",
+        help="Archive format for dependencies",
+    )
+    parser.add_argument(
+        "--mirror",
+        type=str,
+        default="aliyun",
+        choices=("pypi", "tsinghua", "aliyun", "ustc", "douban", "tencent"),
+        help="PyPI mirror source for faster downloads",
+    )
+
+    return parser.parse_args()
+
+
+async def _execute_build_workflow(config: WorkflowConfig) -> None:
+    """Execute the build workflow."""
+    workflow = PackageWorkflow(directory=config.directory, config=config)
+
+    try:
+        await workflow.execute()
+        logger.info("Packaging completed successfully!")
+    except Exception as e:
+        logger.error(f"Packaging failed: {e}")
+        raise
+
+
+def main() -> None:
+    """Main entry point for package workflow tool."""
+    args = parse_args()
+
+    if args.debug:
+        logging.getLogger().setLevel(logging.DEBUG)
+
+    if args.action in {"version", "v"}:
+        logger.info(f"pypack {__version__} (build {__build__})")
+        return
+
+    if args.action in {"list", "l"}:
+        list_projects(cwd)
+        return
+
+    if args.action in {"run", "r"}:
+        run_project(args.project, cwd)
+        return
+
+    if args.action in {"clean", "c"}:
+        clean_project(cwd)
+        return
+
+    cache_dir = Path(args.cache_dir) if args.cache_dir else None
+    config = WorkflowConfig(
+        directory=cwd,
+        project_name=args.project,
+        python_version=args.python_version,
+        loader_type=args.loader_type,
+        entry_suffix=args.entry_suffix,
+        generate_loader=not args.no_loader,
+        recursive=args.recursive,
+        offline=args.offline,
+        max_concurrent=args.jobs,
+        debug=args.debug,
+        cache_dir=cache_dir,
+        archive_format=args.archive_format,
+        mirror=args.mirror,
+    )
+
+    asyncio.run(_execute_build_workflow(config))
+
+
+if __name__ == "__main__":
+    main()
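
The module above wires the new tools into one DAG: parse_project runs first, pack_source, install_python, and pack_libraries can then run concurrently, generate_loader follows the source pack, and assemble_package collects everything under dist/. Below is a minimal usage sketch, assuming the console script registered in pysfi-0.1.11.dist-info/entry_points.txt is named pypack as declared by prog="pypack" above (the entry-points file is not expanded in this diff, so treat the CLI spelling as an assumption):

# Sketch only; assumes the "pypack" console script and the module paths shown in this diff.
#   pypack build --python-version 3.8.10 --mirror aliyun -j 4
#   pypack list
#   pypack run docscan-gui
#   pypack clean
#
# Equivalent programmatic use, mirroring main() above:
import asyncio
from pathlib import Path

from sfi.pypack.pypack import PackageWorkflow, WorkflowConfig

config = WorkflowConfig(directory=Path.cwd(), python_version="3.8.10", mirror="aliyun")
workflow = PackageWorkflow(directory=config.directory, config=config)
# Builds the task graph and runs it: parse -> pack/install/libs -> loader -> assemble under dist/
asyncio.run(workflow.execute())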