pysfi 0.1.10__py3-none-any.whl → 0.1.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (49) hide show
  1. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/METADATA +9 -7
  2. pysfi-0.1.12.dist-info/RECORD +62 -0
  3. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/entry_points.txt +13 -2
  4. sfi/__init__.py +1 -1
  5. sfi/alarmclock/alarmclock.py +40 -40
  6. sfi/bumpversion/__init__.py +1 -1
  7. sfi/cleanbuild/cleanbuild.py +155 -0
  8. sfi/condasetup/condasetup.py +116 -0
  9. sfi/docdiff/docdiff.py +238 -0
  10. sfi/docscan/__init__.py +1 -1
  11. sfi/docscan/docscan_gui.py +1 -1
  12. sfi/docscan/lang/eng.py +152 -152
  13. sfi/docscan/lang/zhcn.py +170 -170
  14. sfi/filedate/filedate.py +185 -112
  15. sfi/gittool/__init__.py +2 -0
  16. sfi/gittool/gittool.py +401 -0
  17. sfi/llmclient/llmclient.py +592 -0
  18. sfi/llmquantize/llmquantize.py +480 -0
  19. sfi/llmserver/llmserver.py +335 -0
  20. sfi/makepython/makepython.py +2 -2
  21. sfi/pdfsplit/pdfsplit.py +4 -4
  22. sfi/pyarchive/pyarchive.py +418 -0
  23. sfi/pyembedinstall/__init__.py +0 -0
  24. sfi/pyembedinstall/pyembedinstall.py +629 -0
  25. sfi/pylibpack/pylibpack.py +813 -269
  26. sfi/pylibpack/rules/numpy.json +22 -0
  27. sfi/pylibpack/rules/pymupdf.json +10 -0
  28. sfi/pylibpack/rules/pyqt5.json +19 -0
  29. sfi/pylibpack/rules/pyside2.json +23 -0
  30. sfi/pylibpack/rules/scipy.json +23 -0
  31. sfi/pylibpack/rules/shiboken2.json +24 -0
  32. sfi/pyloadergen/pyloadergen.py +271 -572
  33. sfi/pypack/pypack.py +822 -471
  34. sfi/pyprojectparse/__init__.py +0 -0
  35. sfi/pyprojectparse/pyprojectparse.py +500 -0
  36. sfi/pysourcepack/pysourcepack.py +308 -369
  37. sfi/quizbase/__init__.py +0 -0
  38. sfi/quizbase/quizbase.py +828 -0
  39. sfi/quizbase/quizbase_gui.py +987 -0
  40. sfi/regexvalidate/__init__.py +0 -0
  41. sfi/regexvalidate/regex_help.html +284 -0
  42. sfi/regexvalidate/regexvalidate.py +468 -0
  43. sfi/taskkill/taskkill.py +0 -2
  44. pysfi-0.1.10.dist-info/RECORD +0 -39
  45. sfi/embedinstall/embedinstall.py +0 -478
  46. sfi/projectparse/projectparse.py +0 -152
  47. {pysfi-0.1.10.dist-info → pysfi-0.1.12.dist-info}/WHEEL +0 -0
  48. /sfi/{embedinstall → llmclient}/__init__.py +0 -0
  49. /sfi/{projectparse → llmquantize}/__init__.py +0 -0
sfi/pypack/pypack.py CHANGED
@@ -1,7 +1,7 @@
1
1
  """Package Workflow - Advanced Python project packaging tool with workflow orchestration.
2
2
 
3
- This module provides a comprehensive packaging solution that integrates projectparse,
4
- pysourcepack, embedinstall, and pyloadergen tools through a workflow engine to achieve
3
+ This module provides a comprehensive packaging solution that integrates pyprojectparse,
4
+ pysourcepack, pyembedinstall, and pyloadergen tools through a workflow engine to achieve
5
5
  mixed serial and parallel execution for optimal efficiency.
6
6
  """
7
7
 
@@ -13,9 +13,12 @@ import json
13
13
  import logging
14
14
  import platform
15
15
  import shutil
16
+ from dataclasses import dataclass
16
17
  from pathlib import Path
17
18
  from typing import Any
18
19
 
20
+ from sfi.pyprojectparse.pyprojectparse import Project, Solution
21
+ from sfi.pysourcepack.pysourcepack import pack_project
19
22
  from sfi.workflowengine.workflowengine import (
20
23
  CPUTask,
21
24
  IOTask,
@@ -23,15 +26,72 @@ from sfi.workflowengine.workflowengine import (
23
26
  WorkflowEngine,
24
27
  )
25
28
 
26
- logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
29
+ logging.basicConfig(
30
+ level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
31
+ )
27
32
  logger = logging.getLogger(__name__)
33
+ cwd = Path.cwd()
34
+ is_windows = platform.system() == "Windows"
35
+ ext = ".exe" if is_windows else ""
28
36
 
29
37
  __version__ = "1.0.0"
30
38
  __build__ = "20260120"
31
39
 
32
40
 
41
+ @dataclass
42
+ class WorkflowConfig:
43
+ """Configuration for package workflow."""
44
+
45
+ directory: Path
46
+ project_name: str | None = None
47
+ python_version: str = "3.8.10"
48
+ loader_type: str = "console"
49
+ entry_suffix: str = ".ent"
50
+ generate_loader: bool = True
51
+ recursive: bool = False
52
+ offline: bool = False
53
+ max_concurrent: int = 4
54
+ debug: bool = False
55
+ cache_dir: Path | None = None
56
+ archive_format: str = "zip"
57
+ mirror: str = "aliyun"
58
+
59
+
60
+ @dataclass
61
+ class ParseProjectResult:
62
+ """Result from project parsing task."""
63
+
64
+ projects: dict[str, Any]
65
+ projects_file: str
66
+
67
+
68
+ @dataclass
69
+ class PackSourceResult:
70
+ """Result from source packing task."""
71
+
72
+ packed_projects: list[str]
73
+ output_dir: str
74
+
75
+
76
+ @dataclass
77
+ class InstallPythonResult:
78
+ """Result from Python installation task."""
79
+
80
+ version: str
81
+ target_dir: str
82
+ install_result: Any
83
+
84
+
85
+ @dataclass
86
+ class PackLibrariesResult:
87
+ """Result from library packing task."""
88
+
89
+ pack_result: Any
90
+ packages_dir: str
91
+
92
+
33
93
  class ParseProjectTask(IOTask):
34
- """Task to parse project configuration using projectparse."""
94
+ """Task to parse project configuration using pyprojectparse."""
35
95
 
36
96
  def __init__(self, directory: Path, recursive: bool = False, timeout: float = 60.0):
37
97
  super().__init__("parse_project", 2.0, [], timeout)
@@ -43,23 +103,18 @@ class ParseProjectTask(IOTask):
43
103
  logger.info(f"Starting project parsing: {self.directory}")
44
104
 
45
105
  try:
46
- # Import projectparse module
47
- from sfi.projectparse.projectparse import parse_project_data
48
-
49
- # Parse project
50
- projects = parse_project_data(self.directory, recursive=self.recursive)
51
-
52
- # Save to projects.json
53
- output_file = self.directory / "projects.json"
54
- with open(output_file, "w", encoding="utf-8") as f:
55
- json.dump(projects, f, indent=2, ensure_ascii=False)
106
+ projects = Solution.from_directory(
107
+ root_dir=self.directory, recursive=self.recursive
108
+ )
56
109
 
57
110
  logger.info(f"Found {len(projects)} project(s)")
58
- for project_name, project_info in projects.items():
59
- version = project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
60
- logger.info(f" - {project_name}: {version}")
111
+ for project in projects.values():
112
+ logger.info(project)
61
113
 
62
- return {"projects": projects, "projects_file": str(output_file)}
114
+ return {
115
+ "projects": projects,
116
+ "projects_file": str(self.directory / "projects.json"),
117
+ }
63
118
 
64
119
  except Exception as e:
65
120
  logger.error(f"Failed to parse projects: {e}")
@@ -69,35 +124,51 @@ class ParseProjectTask(IOTask):
69
124
  class PackSourceTask(IOTask):
70
125
  """Task to pack source code using pysourcepack."""
71
126
 
72
- def __init__(self, directory: Path, projects_file: Path, project_name: str | None = None, timeout: float = 120.0):
127
+ def __init__(
128
+ self,
129
+ directory: Path,
130
+ projects_file: Path,
131
+ project_name: str | None = None,
132
+ timeout: float = 120.0,
133
+ ):
73
134
  super().__init__("pack_source", 5.0, ["parse_project"], timeout)
74
- self.directory = directory
135
+ self.base_dir = directory
75
136
  self.projects_file = projects_file
137
+ self.projects: dict[str, Project] = {}
76
138
  self.project_name = project_name
77
139
 
78
- async def execute(self, context: dict[str, Any]) -> Any:
79
- """Execute source packing."""
80
- logger.info(f"Starting source packing: {self.directory}")
140
+ def _pack_projects(self, projects: dict) -> list[str]:
141
+ """Pack specified projects.
81
142
 
82
- try:
83
- # Import pysourcepack module
84
- from sfi.pysourcepack.pysourcepack import load_projects, pack_project
143
+ Args:
144
+ projects: Projects data dict
85
145
 
86
- # Load projects
87
- projects = load_projects(self.projects_file)
146
+ Returns:
147
+ List of packed project names
148
+ """
149
+ project_names = (
150
+ [self.project_name] if self.project_name else list(projects.keys())
151
+ )
152
+ packed_projects = []
88
153
 
89
- # Pack specified project or all projects
90
- # projects is a dict: {project_name: project_info}
91
- project_names = [self.project_name] if self.project_name else list(projects.keys())
154
+ for name in project_names:
155
+ logger.info(f"Packing project: {name}")
156
+ pack_project(base_dir=self.base_dir, project_name=name, projects=projects)
157
+ packed_projects.append(name)
92
158
 
93
- packed_projects = []
94
- for name in project_names:
95
- logger.info(f"Packing project: {name}")
96
- pack_project(name, projects, self.directory, output_dir=Path("dist/src"))
97
- packed_projects.append(name)
159
+ return packed_projects
98
160
 
99
- logger.info(f"Packed {len(packed_projects)} project(s)")
161
+ async def execute(self, context: dict[str, Any]) -> Any:
162
+ """Execute source packing."""
163
+ logger.info(f"Starting source packing: {self.base_dir}")
164
+
165
+ try:
166
+ projects = Solution.from_directory(
167
+ root_dir=self.base_dir, recursive=True, update=False
168
+ ).projects
100
169
 
170
+ packed_projects = self._pack_projects(projects)
171
+ logger.info(f"Packed {len(packed_projects)} project(s)")
101
172
  return {"packed_projects": packed_projects, "output_dir": "dist/src"}
102
173
 
103
174
  except Exception as e:
@@ -106,7 +177,7 @@ class PackSourceTask(IOTask):
106
177
 
107
178
 
108
179
  class InstallPythonTask(IOTask):
109
- """Task to install embedded Python using embedinstall."""
180
+ """Task to install embedded Python using pyembedinstall."""
110
181
 
111
182
  def __init__(
112
183
  self,
@@ -120,37 +191,66 @@ class InstallPythonTask(IOTask):
120
191
  self.target_dir = target_dir
121
192
  self.offline = offline
122
193
 
194
+ def _get_architecture(self) -> str:
195
+ """Determine system architecture.
196
+
197
+ Returns:
198
+ Architecture string (amd64 or arm64)
199
+ """
200
+ arch = platform.machine().lower()
201
+ if arch in ("amd64", "x86_64"):
202
+ return "amd64"
203
+ elif arch in ("arm64", "aarch64"):
204
+ return "arm64"
205
+ return "amd64"
206
+
207
+ def _prepare_cache_dir(self) -> Path:
208
+ """Prepare cache directory for Python installation.
209
+
210
+ Returns:
211
+ Cache directory path
212
+ """
213
+ from sfi.pyembedinstall.pyembedinstall import _DEFAULT_CACHE_DIR
214
+
215
+ cache_dir = _DEFAULT_CACHE_DIR
216
+ cache_dir.mkdir(parents=True, exist_ok=True)
217
+ return cache_dir
218
+
219
+ def _install_python(self, cache_dir: Path, arch: str) -> Any:
220
+ """Install embedded Python.
221
+
222
+ Args:
223
+ cache_dir: Cache directory path
224
+ arch: System architecture
225
+
226
+ Returns:
227
+ Installation result
228
+ """
229
+ from sfi.pyembedinstall.pyembedinstall import (
230
+ EmbedInstallConfig,
231
+ install_embed_python,
232
+ )
233
+
234
+ config = EmbedInstallConfig(
235
+ target_dir=self.target_dir,
236
+ version=self.version,
237
+ cache_dir=cache_dir,
238
+ offline=self.offline,
239
+ keep_cache=True,
240
+ skip_speed_test=False,
241
+ arch=arch,
242
+ )
243
+
244
+ return install_embed_python(config)
245
+
123
246
  async def execute(self, context: dict[str, Any]) -> Any:
124
247
  """Execute Python installation."""
125
248
  logger.info(f"Starting Python installation: {self.version}")
126
249
 
127
250
  try:
128
- # Import embedinstall module
129
- from sfi.embedinstall.embedinstall import DEFAULT_CACHE_DIR, install_embed_python
130
-
131
- # Setup cache directory (use default cache directory)
132
- cache_dir = DEFAULT_CACHE_DIR
133
- cache_dir.mkdir(parents=True, exist_ok=True)
134
-
135
- # Determine architecture
136
- arch = platform.machine().lower()
137
- if arch in ("amd64", "x86_64"):
138
- arch = "amd64"
139
- elif arch in ("arm64", "aarch64"):
140
- arch = "arm64"
141
- else:
142
- arch = "amd64"
143
-
144
- # Install embedded Python
145
- install_result = install_embed_python(
146
- target_dir=self.target_dir,
147
- version=self.version,
148
- cache_dir=cache_dir,
149
- offline=self.offline,
150
- keep_cache=True, # Keep cache for future use
151
- skip_speed_test=False,
152
- arch=arch,
153
- )
251
+ cache_dir = self._prepare_cache_dir()
252
+ arch = self._get_architecture()
253
+ install_result = self._install_python(cache_dir, arch)
154
254
 
155
255
  logger.info(f"Python {self.version} installed to {self.target_dir}")
156
256
 
@@ -188,25 +288,35 @@ class PackLibrariesTask(IOTask):
188
288
  self.archive_format = archive_format
189
289
  self.mirror = mirror
190
290
 
291
+ def _create_packer(self) -> Any:
292
+ """Create PyLibPack instance.
293
+
294
+ Returns:
295
+ PyLibPack instance
296
+ """
297
+ from sfi.pylibpack.pylibpack import PyLibPack
298
+
299
+ return PyLibPack(
300
+ cache_dir=self.cache_dir,
301
+ python_version=self.python_version,
302
+ mirror=self.mirror,
303
+ )
304
+
191
305
  async def execute(self, context: dict[str, Any]) -> Any:
192
306
  """Execute library packing."""
193
307
  logger.info(f"Starting library packing: {self.project_dir}")
194
308
 
195
309
  try:
196
- # Import pylibpack module
197
- from sfi.pylibpack.pylibpack import PyLibPack
198
-
199
- # Initialize packer with mirror support
200
- packer = PyLibPack(cache_dir=self.cache_dir, python_version=self.python_version, mirror=self.mirror)
201
-
202
- # Pack dependencies
310
+ packer = self._create_packer()
203
311
  pack_result = packer.pack(
204
312
  base_dir=self.project_dir,
205
313
  output_dir=self.output_dir,
206
314
  max_workers=self.max_workers,
207
315
  )
208
316
 
209
- logger.info(f"Library packing completed: {pack_result.successful}/{pack_result.total}")
317
+ logger.info(
318
+ f"Library packing completed: {pack_result.successful}/{pack_result.total}"
319
+ )
210
320
 
211
321
  return {
212
322
  "pack_result": pack_result,
@@ -223,90 +333,22 @@ class GenerateLoaderTask(CPUTask):
223
333
 
224
334
  def __init__(
225
335
  self,
226
- project_name: str | None,
227
- is_debug: bool = False,
228
- compiler: str | None = None,
336
+ base_dir: Path,
337
+ debug: bool = False,
229
338
  timeout: float = 60.0,
230
- project_dir: Path | None = None, # Added for backward compatibility with tests
231
- output_dir: Path | None = None, # Added for backward compatibility with tests
232
339
  ):
233
- super().__init__("generate_loader", 100000, ["parse_project", "pack_source"], timeout)
234
- self.project_name = project_name
235
- self.is_debug = is_debug
236
- self.compiler = compiler
340
+ super().__init__(
341
+ "generate_loader", 100000, ["parse_project", "pack_source"], timeout
342
+ )
343
+ self.base_dir = base_dir
344
+ self.debug = debug
237
345
 
238
346
  async def execute(self, inputs: dict[str, Any]) -> Any:
239
347
  """Execute loader generation."""
240
348
  try:
241
- # Import pyloadergen module
242
- from sfi.pyloadergen.pyloadergen import find_compiler, generate_loader
243
-
244
- # Get projects.json path from parse_project task
245
- parse_result = inputs["parse_project"].data
246
- if not isinstance(parse_result, dict):
247
- logger.error("Parse project result is not a dictionary")
248
- raise ValueError("Invalid parse result format")
249
-
250
- projects_file = parse_result.get("projects_file")
251
- if not projects_file:
252
- logger.error("projects.json path not found in parse result")
253
- raise ValueError("Missing projects_file in parse result")
254
-
255
- projects_file = Path(projects_file)
256
- projects = parse_result.get("projects", {})
257
-
258
- # Determine project name
259
- if self.project_name and self.project_name in projects:
260
- project_name = self.project_name
261
- elif len(projects) == 1:
262
- project_name = next(iter(projects.keys()))
263
- elif self.project_name:
264
- logger.error(f"Project '{self.project_name}' not found in parsed projects")
265
- raise ValueError(f"Project not found: {self.project_name}")
266
- else:
267
- logger.error("Multiple projects found but no project name specified")
268
- raise ValueError("Please specify project name when multiple projects exist")
269
-
270
- logger.info(f"Starting loader generation for project: {project_name}")
271
-
272
- # Find compiler if not specified
273
- compiler = self.compiler
274
- if compiler is None:
275
- compiler = find_compiler()
276
- if compiler is None:
277
- logger.warning("No compiler found, loader generation may fail")
278
-
279
- # Generate loader using simplified generate_loader function
280
- success = generate_loader(
281
- project_name=project_name,
282
- projects_json_path=projects_file,
283
- is_debug=self.is_debug,
284
- compiler=compiler,
285
- )
286
-
287
- if success:
288
- # Output executable path
289
- project_dir = projects_file.parent
290
- output_dir = project_dir / "dist"
291
- output_exe = output_dir / f"{project_name}.exe"
292
- output_ent = output_dir / f"{project_name}.ent"
293
-
294
- logger.info(f"Loader generated successfully: {output_exe}")
295
- logger.info(f"Entry file: {output_ent}")
296
-
297
- return {
298
- "project_name": project_name,
299
- "output_exe": str(output_exe),
300
- "entry_file": str(output_ent),
301
- "success": True,
302
- }
303
- else:
304
- logger.error(f"Failed to generate loader for {project_name}")
305
- return {
306
- "project_name": project_name,
307
- "success": False,
308
- }
349
+ from sfi.pyloadergen.pyloadergen import generate_loader
309
350
 
351
+ generate_loader(self.base_dir, self.debug)
310
352
  except Exception as e:
311
353
  logger.error(f"Failed to generate loader: {e}")
312
354
  raise
@@ -315,172 +357,219 @@ class GenerateLoaderTask(CPUTask):
315
357
  class AssemblePackageTask(SerialTask):
316
358
  """Task to assemble final package."""
317
359
 
318
- def __init__(self, output_dir: Path, dependencies: list[str], timeout: float = 60.0):
360
+ def __init__(
361
+ self, output_dir: Path, dependencies: list[str], timeout: float = 60.0
362
+ ):
319
363
  self.output_dir = output_dir
364
+ super().__init__("assemble_package", self._assemble, dependencies, timeout)
365
+
366
+ def _get_project_dir(self, inputs: dict[str, Any]) -> Path:
367
+ """Get project directory from parse result or current directory."""
368
+ if "parse_project" in inputs and hasattr(inputs["parse_project"], "data"):
369
+ parse_result = inputs["parse_project"].data
370
+ if isinstance(parse_result, dict) and "projects_file" in parse_result:
371
+ return Path(parse_result["projects_file"]).parent
372
+ return Path.cwd()
373
+
374
+ def _prepare_dist_dir(self) -> Path:
375
+ """Prepare and return distribution directory."""
376
+ dist_dir = (
377
+ self.output_dir.parent if self.output_dir.is_file() else self.output_dir
378
+ )
379
+ dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
380
+ dist_dir.mkdir(parents=True, exist_ok=True)
381
+ return dist_dir
382
+
383
+ def _copy_loaders(self, project_dir: Path, dist_dir: Path) -> int:
384
+ """Copy loaders and entry files to dist directory."""
385
+ loader_dir = project_dir / "dist"
386
+ if not loader_dir.exists():
387
+ return 0
388
+
389
+ loaders_count = 0
390
+ for loader in loader_dir.glob("*.ent"):
391
+ logger.info(f"Entry file: {loader.name}")
392
+ loaders_count += 1
393
+
394
+ # Support both Linux (no extension) and Windows (.exe) executables
395
+ for loader in loader_dir.glob("*.exe"):
396
+ logger.info(f"Executable: {loader.name}")
397
+ shutil.copy2(loader, dist_dir / loader.name)
398
+ loaders_count += 1
399
+
400
+ # Also copy Linux executables (files without extension)
401
+ for loader in loader_dir.glob("*"):
402
+ if loader.is_file() and not loader.name.endswith((".ent", ".exe")):
403
+ logger.info(f"Executable: {loader.name}")
404
+ shutil.copy2(loader, dist_dir / loader.name)
405
+ loaders_count += 1
406
+
407
+ logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")
408
+ return loaders_count
409
+
410
+ def _copy_libraries(self, project_dir: Path, dist_dir: Path) -> None:
411
+ """Copy libraries to dist/site-packages directory."""
412
+ libs_dir = project_dir / "dist" / "libs"
413
+ if not libs_dir.exists():
414
+ return
415
+
416
+ dest_libs_dir = dist_dir / "site-packages"
417
+ if dest_libs_dir.exists():
418
+ shutil.rmtree(dest_libs_dir)
419
+ shutil.copytree(libs_dir, dest_libs_dir)
420
+ logger.info(f"Libraries copied to {dest_libs_dir}")
421
+
422
+ def _create_metadata(self, dist_dir: Path) -> dict[str, Any]:
423
+ """Create package metadata file."""
424
+ metadata = {
425
+ "version": __version__,
426
+ "build": __build__,
427
+ "assembled_at": asyncio.get_event_loop().time(),
428
+ }
429
+
430
+ metadata_file = dist_dir / "metadata.json"
431
+ with open(metadata_file, "w", encoding="utf-8") as f:
432
+ json.dump(metadata, f, indent=2)
320
433
 
321
- def assemble(inputs: dict[str, Any], state: dict[str, Any]) -> Any:
322
- """Assemble final package."""
323
- logger.info("Starting package assembly")
324
-
325
- try:
326
- # Determine project directory from the first input if available
327
- project_dir = None
328
- if "parse_project" in inputs and hasattr(inputs["parse_project"], "data"):
329
- parse_result = inputs["parse_project"].data
330
- if isinstance(parse_result, dict) and "projects_file" in parse_result:
331
- project_dir = Path(parse_result["projects_file"]).parent
332
- if not project_dir:
333
- project_dir = Path.cwd()
334
-
335
- # Create dist directory
336
- dist_dir = self.output_dir.parent if self.output_dir.is_file() else self.output_dir
337
- dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
338
- dist_dir.mkdir(parents=True, exist_ok=True)
339
-
340
- # Python runtime is already in dist/runtime from InstallPythonTask
341
- # No need to copy
342
-
343
- # Copy loaders and entry files to dist (already in dist, just log)
344
- loader_dir = project_dir / "dist"
345
- if loader_dir.exists():
346
- loaders_count = 0
347
- for loader in loader_dir.glob("*.ent"):
348
- logger.info(f"Entry file: {loader.name}")
349
- loaders_count += 1
350
- for loader in loader_dir.glob("*.exe"):
351
- logger.info(f"Executable: {loader.name}")
352
- # Copy loader to dist
353
- shutil.copy2(loader, dist_dir / loader.name)
354
- loaders_count += 1
355
- logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")
356
-
357
- # Copy libraries to dist/site-packages
358
- libs_dir = project_dir / "dist" / "libs"
359
- if libs_dir.exists():
360
- dest_libs_dir = dist_dir / "site-packages"
361
- if dest_libs_dir.exists():
362
- shutil.rmtree(dest_libs_dir)
363
- shutil.copytree(libs_dir, dest_libs_dir)
364
- logger.info(f"Libraries copied to {dest_libs_dir}")
365
-
366
- # Create package metadata
367
- metadata = {
368
- "version": __version__,
369
- "build": __build__,
370
- "assembled_at": asyncio.get_event_loop().time(),
371
- }
372
-
373
- metadata_file = dist_dir / "metadata.json"
374
- with open(metadata_file, "w", encoding="utf-8") as f:
375
- json.dump(metadata, f, indent=2)
376
-
377
- logger.info(f"Package assembled: {dist_dir}")
378
-
379
- return {"output_dir": str(dist_dir), "metadata": metadata}
380
-
381
- except Exception as e:
382
- logger.error(f"Failed to assemble package: {e}")
383
- raise
384
-
385
- super().__init__("assemble_package", assemble, dependencies, timeout)
434
+ return metadata
435
+
436
+ def _assemble(
437
+ self, inputs: dict[str, Any], state: dict[str, Any]
438
+ ) -> dict[str, Any]:
439
+ """Assemble final package."""
440
+ logger.info("Starting package assembly")
441
+
442
+ try:
443
+ project_dir = self._get_project_dir(inputs)
444
+ dist_dir = self._prepare_dist_dir()
445
+
446
+ self._copy_loaders(project_dir, dist_dir)
447
+ self._copy_libraries(project_dir, dist_dir)
448
+ metadata = self._create_metadata(dist_dir)
449
+
450
+ logger.info(f"Package assembled: {dist_dir}")
451
+ return {"output_dir": str(dist_dir), "metadata": metadata}
452
+
453
+ except Exception as e:
454
+ logger.error(f"Failed to assemble package: {e}")
455
+ raise
386
456
 
387
457
 
388
458
  class PackageWorkflow:
389
459
  """Package workflow orchestrator."""
390
460
 
391
- def __init__(self, directory: Path, config: dict[str, Any]):
461
+ def __init__(self, directory: Path, config: WorkflowConfig):
392
462
  self.directory = directory
393
463
  self.config = config
394
- self.engine = WorkflowEngine(max_concurrent=config.get("max_concurrent", 4))
464
+ self.engine = WorkflowEngine(max_concurrent=config.max_concurrent)
395
465
 
396
- def build_workflow(self) -> WorkflowEngine:
397
- """Build the packaging workflow."""
398
- logger.info("Building packaging workflow")
399
-
400
- # Phase 1: Parse project
466
+ def _add_parse_task(self) -> None:
467
+ """Add project parsing task to workflow."""
401
468
  parse_task = ParseProjectTask(
402
469
  directory=self.directory,
403
- recursive=self.config.get("recursive", False),
470
+ recursive=self.config.recursive,
404
471
  )
405
472
  self.engine.add_task(parse_task)
406
473
 
407
- # Phase 2: Pack source, install Python, and pack libraries (parallel)
474
+ def _add_pack_and_install_tasks(self) -> None:
475
+ """Add pack source and install Python tasks to workflow."""
408
476
  pack_task = PackSourceTask(
409
477
  directory=self.directory,
410
478
  projects_file=self.directory / "projects.json",
411
- project_name=self.config.get("project_name"),
479
+ project_name=self.config.project_name,
412
480
  )
413
481
  self.engine.add_task(pack_task)
414
482
 
415
483
  install_task = InstallPythonTask(
416
- version=self.config.get("python_version", "3.8.10"),
484
+ version=self.config.python_version,
417
485
  target_dir=self.directory / "dist" / "runtime",
418
- offline=self.config.get("offline", False),
486
+ offline=self.config.offline,
419
487
  )
420
488
  self.engine.add_task(install_task)
421
489
 
422
- # Pack libraries task (always enabled unless explicitly skipped)
423
- if not self.config.get("skip_pack_libraries", False):
424
- cache_dir_value = self.config.get("cache_dir")
425
- cache_dir = Path(cache_dir_value) if cache_dir_value else None
426
- lib_pack_task = PackLibrariesTask(
427
- project_dir=self.directory,
428
- output_dir=self.directory / "dist/site-packages",
429
- cache_dir=cache_dir,
430
- python_version=self.config.get("python_version", "3.8.10"),
431
- max_workers=self.config.get("max_concurrent", 4),
432
- archive_format=self.config.get("archive_format", "zip"),
433
- mirror=self.config.get("mirror", "aliyun"),
434
- )
435
- self.engine.add_task(lib_pack_task)
436
-
437
- # Phase 3: Generate loader
438
- if self.config.get("generate_loader", False):
439
- # Create loader task - project name will be determined from parse_project results
440
- loader_task = GenerateLoaderTask(
441
- project_name=self.config.get("project_name"),
442
- is_debug=self.config.get("debug", False),
443
- )
444
- self.engine.add_task(loader_task)
445
- assembly_deps = ["pack_source", "install_python", "generate_loader"]
446
- else:
447
- assembly_deps = ["pack_source", "install_python"]
490
+ def _add_pack_libraries_task(self) -> None:
491
+ """Add library packing task to workflow."""
492
+ lib_pack_task = PackLibrariesTask(
493
+ project_dir=self.directory,
494
+ output_dir=self.directory / "dist/site-packages",
495
+ cache_dir=self.config.cache_dir,
496
+ python_version=self.config.python_version,
497
+ max_workers=self.config.max_concurrent,
498
+ archive_format=self.config.archive_format,
499
+ mirror=self.config.mirror,
500
+ )
501
+ self.engine.add_task(lib_pack_task)
502
+
503
+ def _add_loader_task(self) -> list[str]:
504
+ """Add loader generation task to workflow.
505
+
506
+ Returns:
507
+ List of task dependencies for assembly phase
508
+ """
509
+ if not self.config.generate_loader:
510
+ return ["pack_source", "install_python"]
511
+
512
+ loader_task = GenerateLoaderTask(self.directory, self.config.debug)
513
+ self.engine.add_task(loader_task)
514
+
515
+ assembly_deps = ["pack_source", "install_python", "generate_loader"]
516
+ assembly_deps.append("pack_libraries")
448
517
 
449
- # Add pack_libraries to assembly dependencies (always added unless skipped)
450
- if not self.config.get("skip_pack_libraries", False):
451
- assembly_deps.append("pack_libraries")
518
+ return assembly_deps
452
519
 
453
- # Phase 4: Assemble final package
520
+ def _add_assemble_task(self, dependencies: list[str]) -> None:
521
+ """Add package assembly task to workflow.
522
+
523
+ Args:
524
+ dependencies: List of task dependencies
525
+ """
454
526
  assemble_task = AssemblePackageTask(
455
527
  output_dir=self.directory / "dist",
456
- dependencies=assembly_deps,
528
+ dependencies=dependencies,
457
529
  )
458
530
  self.engine.add_task(assemble_task)
459
531
 
532
+ def build_workflow(self) -> WorkflowEngine:
533
+ """Build the packaging workflow."""
534
+ logger.info("Building packaging workflow")
535
+
536
+ self._add_parse_task()
537
+ self._add_pack_and_install_tasks()
538
+ self._add_pack_libraries_task()
539
+ dependencies = self._add_loader_task()
540
+ self._add_assemble_task(dependencies)
541
+
460
542
  return self.engine
461
543
 
544
+ def _log_execution_summary(self, summary: dict[str, Any]) -> None:
545
+ """Log workflow execution summary.
546
+
547
+ Args:
548
+ summary: Execution summary dict
549
+ """
550
+ logger.info("=" * 50)
551
+ logger.info("Workflow execution summary:")
552
+ logger.info(f" Total tasks: {summary['total_tasks']}")
553
+ logger.info(f" Completed: {summary['completed']}")
554
+ logger.info(f" Failed: {summary['failed']}")
555
+ logger.info(f" Success rate: {summary['success_rate'] * 100:.1f}%")
556
+ logger.info(f" Total time: {summary['total_execution_time']:.2f}s")
557
+ logger.info("=" * 50)
558
+
462
559
  async def execute(self) -> dict[str, Any]:
463
- """Execute the packaging workflow."""
560
+ """Execute the packaging workflow.
561
+
562
+ Returns:
563
+ Dict with results and summary
564
+ """
464
565
  logger.info("Starting packaging workflow execution")
465
566
 
466
- # Build workflow
467
567
  self.build_workflow()
468
568
 
469
- # Execute workflow
470
569
  try:
471
570
  results = await self.engine.execute_workflow()
472
-
473
- # Get summary
474
571
  summary = self.engine.get_execution_summary()
475
-
476
- logger.info("=" * 50)
477
- logger.info("Workflow execution summary:")
478
- logger.info(f" Total tasks: {summary['total_tasks']}")
479
- logger.info(f" Completed: {summary['completed']}")
480
- logger.info(f" Failed: {summary['failed']}")
481
- logger.info(f" Success rate: {summary['success_rate'] * 100:.1f}%")
482
- logger.info(f" Total time: {summary['total_execution_time']:.2f}s")
483
- logger.info("=" * 50)
572
+ self._log_execution_summary(summary)
484
573
 
485
574
  return {"results": results, "summary": summary}
486
575
 
@@ -489,156 +578,313 @@ class PackageWorkflow:
489
578
  raise
490
579
 
491
580
 
492
- def list_projects(directory: Path) -> None:
493
- """List projects from projects.json.
581
+ def _load_projects_data(directory: Path, silent: bool = False) -> dict[str, Any] | None:
582
+ """Load projects data from projects.json.
494
583
 
495
584
  Args:
496
585
  directory: Project directory containing projects.json
586
+ silent: If True, use warning instead of error messages
587
+
588
+ Returns:
589
+ Projects data dict or None if not found/error
497
590
  """
498
591
  projects_file = directory / "projects.json"
499
592
 
500
593
  if not projects_file.exists():
501
- logger.warning(f"No projects.json found in {directory}")
594
+ level = logger.warning if silent else logger.error
595
+ level(f"No projects.json found in {directory}")
502
596
  logger.info("Run 'pypack build' first to create projects.json")
503
- return
597
+ return None
504
598
 
505
599
  try:
506
600
  with open(projects_file, encoding="utf-8") as f:
507
601
  projects_data = json.load(f)
508
602
 
509
- # projects_data is a dict: {project_name: project_info}
510
603
  if not projects_data:
511
- logger.info("No projects found in projects.json")
512
- return
604
+ level = logger.info if silent else logger.error
605
+ level("No projects found in projects.json")
606
+ return None
513
607
 
514
- logger.info(f"Found {len(projects_data)} project(s):")
515
- logger.info("=" * 60)
516
- for i, (project_name, project_info) in enumerate(projects_data.items(), 1):
517
- # Handle both dict and str (in case of malformed data)
518
- if isinstance(project_info, str):
519
- version = "N/A"
520
- entry = "N/A"
521
- description = ""
522
- else:
523
- version = project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
524
- entry = "main.py" # Default entry file
525
- description = project_info.get("description", "") if isinstance(project_info, dict) else ""
608
+ return projects_data
609
+ except Exception as e:
610
+ logger.error(f"Failed to load projects.json: {e}")
611
+ return None
526
612
 
527
- logger.info(f"\n[{i}] {project_name}")
528
- logger.info(f" Version: {version}")
529
- logger.info(f" Entry: {entry}")
530
- if description:
531
- logger.info(f" Description: {description}")
532
613
 
533
- logger.info("=" * 60)
614
+ def _get_project_info(project_info: Any) -> tuple[str, str, str]:
615
+ """Extract project info from project data.
534
616
 
535
- except Exception as e:
536
- logger.error(f"Failed to load projects.json: {e}")
617
+ Args:
618
+ project_info: Project info dict or string
537
619
 
620
+ Returns:
621
+ Tuple of (version, entry, description)
622
+ """
623
+ if isinstance(project_info, str):
624
+ return "N/A", "N/A", ""
625
+
626
+ version = (
627
+ project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
628
+ )
629
+ entry = "main.py"
630
+ description = (
631
+ project_info.get("description", "") if isinstance(project_info, dict) else ""
632
+ )
633
+ return version, entry, description
538
634
 
539
- def run_project(project_name: str | None, directory: Path) -> None:
540
- """Run a built project.
635
+
636
def _print_project(project_name: str, project_info: Any, index: int) -> None:
    """Log one project listing entry at info level.

    Args:
        project_name: Name of the project
        project_info: Project info dict or string
        index: Index number for listing
    """
    version, entry, description = _get_project_info(project_info)

    lines = [
        f"\n[{index}] {project_name}",
        f" Version: {version}",
        f" Entry: {entry}",
    ]
    if description:
        lines.append(f" Description: {description}")

    for line in lines:
        logger.info(line)
651
+
652
+
653
def list_projects(directory: Path) -> None:
    """List projects from projects.json.

    Args:
        directory: Project directory containing projects.json
    """
    data = _load_projects_data(directory, silent=True)
    if not data:
        # Missing/empty file was already reported by the loader.
        return

    logger.info(f"Found {len(data)} project(s):")
    logger.info("=" * 60)

    for index, (name, info) in enumerate(data.items(), start=1):
        _print_project(name, info, index)

    logger.info("=" * 60)
671
+
672
+
673
+ def _get_available_executables(directory: Path) -> list[str]:
674
+ """Get list of available executables in dist directory.
675
+
676
+ Args:
677
+ directory: Project directory
678
+
679
+ Returns:
680
+ List of executable names (without .exe extension)
681
+ """
682
+ dist_dir = directory / "dist"
683
+ if not dist_dir.exists():
684
+ logger.error(f"Dist directory not found: {dist_dir}")
685
+ logger.info("Run 'pypack build' first to build the project")
686
+ return []
687
+
688
+ # Support both Windows (.exe) and Linux (no extension) executables
689
+ exe_names = set()
690
+ for exe in dist_dir.glob("*.exe"):
691
+ if exe.is_file():
692
+ exe_names.add(exe.stem)
693
+
694
+ for exe in dist_dir.glob("*"):
695
+ if exe.is_file() and not exe.name.endswith((".ent", ".exe", ".json")):
696
+ exe_names.add(exe.name)
697
+
698
+ available_exes = list(exe_names)
699
+ if not available_exes:
700
+ logger.error("No executables found in dist directory")
701
+ logger.info("Run 'pypack build' first to build the project")
702
+
703
+ return available_exes
704
+
705
+
706
+ def _find_matching_executables(
707
+ project_name: str, available_exes: list[str]
708
+ ) -> list[str]:
709
+ """Find executables matching project name.
710
+
711
+ Args:
712
+ project_name: Project name to match
713
+ available_exes: List of available executable names
714
+
715
+ Returns:
716
+ List of matching executable names
717
+ """
718
+ return [
719
+ exe
720
+ for exe in available_exes
721
+ if exe == project_name or exe.startswith(f"{project_name}-")
722
+ ]
723
+
724
+
725
def _log_available_executables(available_exes: list[str]) -> None:
    """Log every available executable name at info level.

    Args:
        available_exes: List of available executable names
    """
    logger.info("Available executables:")
    for name in available_exes:
        logger.info(f"** {name} **")
734
+
735
+
736
+ def _resolve_exact_project_name(
737
+ project_name: str, available_exes: list[str]
738
+ ) -> str | None:
739
+ """Resolve executable when project name is specified.
740
+
741
+ Args:
742
+ project_name: Project name to resolve
743
+ available_exes: List of available executable names
744
+
745
+ Returns:
746
+ Target executable name or None
747
+ """
748
+ if project_name in available_exes:
749
+ return project_name
750
+ return None
751
+
752
+
753
def _resolve_project_by_name(
    project_name: str, available_exes: list[str]
) -> str | None:
    """Resolve executable by matching project name.

    Args:
        project_name: Project name to resolve
        available_exes: List of available executable names

    Returns:
        Target executable name, or None when nothing (or more than one
        thing) matches — ambiguity is reported to the user.
    """
    matches = _find_matching_executables(project_name, available_exes)

    if not matches:
        logger.error(f"No executable found for project '{project_name}'")
        return None

    if len(matches) > 1:
        logger.error(f"Multiple executables found for project '{project_name}':")
        for exe in matches:
            logger.info(f" - {exe}")
        logger.info("Please specify the full executable name (e.g., 'docscan-gui')")
        return None

    return matches[0]
778
+
779
+
780
def _resolve_no_project_name(available_exes: list[str]) -> str | None:
    """Resolve executable when no project name is specified.

    Args:
        available_exes: List of available executable names

    Returns:
        The sole executable name, or None when the choice is ambiguous.
    """
    if len(available_exes) != 1:
        # More than one candidate: the user must disambiguate.
        logger.error(
            "Multiple executables found. Please specify executable name with --project"
        )
        _log_available_executables(available_exes)
        return None

    only = available_exes[0]
    logger.info(f"Running single executable: {only}")
    return only
798
+
799
+
800
def _resolve_target_executable(
    project_name: str | None,
    available_exes: list[str],
    projects_data: dict[str, Any],
) -> str | None:
    """Resolve which executable to run based on project name.

    Resolution order: exact executable name, then project name known to
    projects.json; with no name given, the single available executable.

    Args:
        project_name: Project or executable name to run
        available_exes: List of available executable names
        projects_data: Projects data dict

    Returns:
        Target executable name or None if not found/ambiguous
    """
    if project_name:
        target = _resolve_exact_project_name(project_name, available_exes)
        if target:
            return target

        if project_name in projects_data:
            return _resolve_project_by_name(project_name, available_exes)

        logger.error(f"Project or executable '{project_name}' not found")
        _log_available_executables(available_exes)
        return None

    return _resolve_no_project_name(available_exes)
828
+
829
+
830
def _execute_project(exe_path: Path, dist_dir: Path) -> None:
    """Execute the project executable.

    Runs the binary with *dist_dir* as its working directory so it can
    locate runtime and entry files; expected failures are logged rather
    than propagated.

    Args:
        exe_path: Path to the executable
        dist_dir: Working directory for execution
    """
    import subprocess

    command = [str(exe_path)]
    try:
        subprocess.run(command, cwd=str(dist_dir), check=True)
    except FileNotFoundError:
        logger.error(f"Failed to execute {exe_path}")
    except subprocess.CalledProcessError as e:
        logger.error(f"Project execution failed with exit code {e.returncode}")
    except KeyboardInterrupt:
        logger.info("Project execution interrupted")
557
847
 
558
- # projects_data is a dict: {project_name: project_info}
559
- if not projects_data:
560
- logger.error("No projects found in projects.json")
561
- return
562
848
 
563
- # Find all available executables in dist directory
564
- dist_dir = directory / "dist"
565
- if not dist_dir.exists():
566
- logger.error(f"Dist directory not found: {dist_dir}")
567
- logger.info("Run 'pypack build' first to build the project")
568
- return
849
def run_project(project_name: str | None, directory: Path) -> None:
    """Run a built project.

    Args:
        project_name: Project name or executable name to run (e.g., 'docscan' or 'docscan-gui')
        directory: Project directory
    """
    projects_data = _load_projects_data(directory)
    if not projects_data:
        return

    available_exes = _get_available_executables(directory)
    if not available_exes:
        return

    target = _resolve_target_executable(project_name, available_exes, projects_data)
    if not target:
        return

    # Prefer the Windows-style name; fall back to the bare Linux binary.
    dist_dir = directory / "dist"
    exe_path = None
    for candidate in (dist_dir / f"{target}.exe", dist_dir / target):
        if candidate.exists():
            exe_path = candidate
            break

    if exe_path is None:
        logger.error(f"Executable not found for '{target}'")
        logger.info("Run 'pypack build' first to build the project")
        return

    logger.info(f"Starting: {target}")
    logger.info(f"Executable: {exe_path}")
    _execute_project(exe_path, dist_dir)
642
888
 
643
889
 
644
890
def clean_project(directory: Path) -> None:
    """Clean build artifacts from a project directory.

    Args:
        directory: Project directory to clean
    """
    logger.info("Cleaning build artifacts...")

    removed_dirs, removed_files = _clean_build_artifacts(directory)

    if removed_dirs or removed_files:
        logger.info(
            f"Cleaned {len(removed_dirs)} directories and {len(removed_files)} file(s)"
        )
    else:
        logger.info("No build artifacts found")
906
+
907
+
908
+ def _should_clean(entry: Path) -> bool:
909
+ """Check if a directory or file should be cleaned.
910
+
911
+ Args:
912
+ entry: Path to check
913
+
914
+ Returns:
915
+ True if the entry should be cleaned
916
+ """
917
+ protected_dirs = {
918
+ ".git",
919
+ ".venv",
920
+ ".virtualenv",
921
+ ".vscode",
922
+ ".idea",
923
+ ".codebuddy",
924
+ ".qoder",
925
+ }
926
+
927
+ if entry.is_file() and entry.name == "projects.json":
928
+ return True
929
+
930
+ if not entry.is_dir():
931
+ return False
932
+
933
+ if entry.name.startswith(".") and entry.name in protected_dirs:
934
+ return False
935
+
936
+ return (
937
+ entry.name.startswith(".")
938
+ or entry.name.startswith("__")
939
+ or entry.name in ("build", "dist", "pysfi_build", "cbuild", "benchmarks")
940
+ )
941
+
942
+
943
def _clean_build_artifacts(directory: Path) -> tuple[list[str], list[str]]:
    """Clean all build artifacts from directory.

    Args:
        directory: Project directory to clean

    Returns:
        Tuple of (cleaned directories, cleaned files)
    """
    cleaned_dirs: list[str] = []
    cleaned_files: list[str] = []

    # Snapshot the candidates first so removal does not disturb iteration.
    targets = [item for item in directory.iterdir() if _should_clean(item)]

    for target in targets:
        if not target.exists():
            continue

        if target.is_dir():
            logger.info(f"Removing directory: {target}")
            if _remove_directory(target):
                cleaned_dirs.append(str(target))
                logger.info(f" Removed directory: {target}")
            else:
                logger.warning(f" Failed to remove {target}")
        elif target.is_file():
            logger.info(f"Removing file: {target}")
            if _remove_file(target):
                cleaned_files.append(str(target))
                logger.info(f" Removed file: {target}")
            else:
                logger.warning(f" Failed to remove {target}")

    return cleaned_dirs, cleaned_files
691
977
 
692
978
 
693
- def main() -> None:
694
- """Main entry point for package workflow tool."""
695
- parser = argparse.ArgumentParser(prog="pypack", description="Python packaging tool with workflow orchestration")
979
+ def _remove_directory(dir_path: Path) -> bool:
980
+ """Remove a directory safely.
981
+
982
+ Args:
983
+ dir_path: Directory path to remove
984
+
985
+ Returns:
986
+ True if removal succeeded
987
+ """
988
+ try:
989
+ shutil.rmtree(dir_path)
990
+ return True
991
+ except Exception:
992
+ return False
993
+
994
+
995
+ def _remove_file(file_path: Path) -> bool:
996
+ """Remove a file safely.
997
+
998
+ Args:
999
+ file_path: File path to remove
1000
+
1001
+ Returns:
1002
+ True if removal succeeded
1003
+ """
1004
+ try:
1005
+ file_path.unlink()
1006
+ return True
1007
+ except Exception:
1008
+ return False
1009
+
1010
+
1011
+ def parse_args() -> argparse.Namespace:
1012
+ """Parse command line arguments.
1013
+
1014
+ Returns:
1015
+ Parsed arguments namespace
1016
+ """
1017
+ parser = argparse.ArgumentParser(
1018
+ prog="pypack", description="Python packaging tool with workflow orchestration"
1019
+ )
696
1020
 
697
1021
  parser.add_argument(
698
1022
  "action",
@@ -700,26 +1024,51 @@ def main() -> None:
700
1024
  help="Action to perform",
701
1025
  )
702
1026
  parser.add_argument(
703
- "-p", "--project", type=str, default=None, help="Project or executable name (e.g., 'docscan' or 'docscan-gui')"
1027
+ "project",
1028
+ type=str,
1029
+ nargs="?",
1030
+ default=None,
1031
+ help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
704
1032
  )
705
1033
  parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
706
- parser.add_argument("--python-version", type=str, default="3.8.10", help="Python version to install")
707
- parser.add_argument("--loader-type", type=str, choices=("console", "gui"), default="console", help="Loader type")
708
1034
  parser.add_argument(
709
- "--entry-suffix", type=str, default=".ent", help="Entry file suffix (default: .ent, alternatives: .py)"
1035
+ "--python-version", type=str, default="3.8.10", help="Python version to install"
1036
+ )
1037
+ parser.add_argument(
1038
+ "--loader-type",
1039
+ type=str,
1040
+ choices=("console", "gui"),
1041
+ default="console",
1042
+ help="Loader type",
1043
+ )
1044
+ parser.add_argument(
1045
+ "--entry-suffix",
1046
+ type=str,
1047
+ default=".ent",
1048
+ help="Entry file suffix (default: .ent, alternatives: .py)",
1049
+ )
1050
+ parser.add_argument(
1051
+ "--no-loader", action="store_true", help="Skip loader generation"
1052
+ )
1053
+ parser.add_argument(
1054
+ "-r", "--recursive", action="store_true", help="Parse projects recursively"
710
1055
  )
711
- parser.add_argument("--no-loader", action="store_true", help="Skip loader generation")
712
- parser.add_argument("-r", "--recursive", action="store_true", help="Parse projects recursively")
713
1056
  parser.add_argument("-o", "--offline", action="store_true", help="Offline mode")
714
- parser.add_argument("-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks")
1057
+ parser.add_argument(
1058
+ "-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks"
1059
+ )
715
1060
 
716
1061
  # Library packing arguments
717
- parser.add_argument("--no-pack-libs", action="store_true", help="Skip packing project dependencies")
718
- parser.add_argument("--cache-dir", type=str, default=None, help="Custom cache directory for dependencies")
1062
+ parser.add_argument(
1063
+ "--cache-dir",
1064
+ type=str,
1065
+ default=None,
1066
+ help="Custom cache directory for dependencies",
1067
+ )
719
1068
  parser.add_argument(
720
1069
  "--archive-format",
721
1070
  type=str,
722
- choices=("zip", "tar", "gztar", "bztar", "xztar"),
1071
+ choices=("zip", "7z", "nsis"),
723
1072
  default="zip",
724
1073
  help="Archive format for dependencies",
725
1074
  )
@@ -731,60 +1080,62 @@ def main() -> None:
731
1080
  help="PyPI mirror source for faster downloads",
732
1081
  )
733
1082
 
734
- args = parser.parse_args()
735
- working_dir = Path.cwd()
1083
+ return parser.parse_args()
1084
+
1085
+
1086
async def _execute_build_workflow(config: WorkflowConfig) -> None:
    """Execute the build workflow.

    Builds a PackageWorkflow from *config* and runs it, logging the
    outcome.  Failures are logged and then re-raised so the caller
    (``main`` via ``asyncio.run``) terminates with the error.

    Args:
        config: Fully populated workflow configuration; ``config.directory``
            is used as the workflow's working directory.

    Raises:
        Exception: whatever ``PackageWorkflow.execute`` raised.
    """
    workflow = PackageWorkflow(directory=config.directory, config=config)

    try:
        await workflow.execute()
        logger.info("Packaging completed successfully!")
    except Exception as e:
        logger.error(f"Packaging failed: {e}")
        raise
1096
+
1097
+
1098
def main() -> None:
    """Main entry point for package workflow tool.

    Dispatches on the parsed ``action``: informational actions
    (version/list/run/clean) short-circuit; anything else builds a
    WorkflowConfig and runs the async packaging workflow.
    """
    args = parse_args()

    if args.debug:
        # Raise the root logger so every module's debug output shows.
        logging.getLogger().setLevel(logging.DEBUG)

    if args.action in {"version", "v"}:
        logger.info(f"pypack {__version__} (build {__build__})")
        return

    # NOTE(review): `cwd` is not bound in this function — presumably a
    # module-level global (the previous version used a local
    # `working_dir = Path.cwd()`); confirm `cwd` is defined at module
    # scope, otherwise the branches below raise NameError.
    if args.action in {"list", "l"}:
        list_projects(cwd)
        return

    if args.action in {"run", "r"}:
        run_project(args.project, cwd)
        return

    if args.action in {"clean", "c"}:
        clean_project(cwd)
        return

    # Build action: translate CLI flags into the workflow configuration.
    cache_dir = Path(args.cache_dir) if args.cache_dir else None
    config = WorkflowConfig(
        directory=cwd,
        project_name=args.project,
        python_version=args.python_version,
        loader_type=args.loader_type,
        entry_suffix=args.entry_suffix,
        generate_loader=not args.no_loader,
        recursive=args.recursive,
        offline=args.offline,
        max_concurrent=args.jobs,
        debug=args.debug,
        cache_dir=cache_dir,
        archive_format=args.archive_format,
        mirror=args.mirror,
    )

    asyncio.run(_execute_build_workflow(config))
788
1139
 
789
1140
 
790
1141
  if __name__ == "__main__":