pysfi 0.1.12__py3-none-any.whl → 0.1.13__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/METADATA +1 -1
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/RECORD +35 -27
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/entry_points.txt +2 -0
- sfi/__init__.py +5 -3
- sfi/alarmclock/__init__.py +3 -0
- sfi/alarmclock/alarmclock.py +23 -40
- sfi/bumpversion/__init__.py +5 -3
- sfi/cleanbuild/__init__.py +3 -0
- sfi/cli.py +12 -2
- sfi/condasetup/__init__.py +1 -0
- sfi/docdiff/__init__.py +1 -0
- sfi/docdiff/docdiff.py +1 -1
- sfi/docscan/__init__.py +3 -3
- sfi/docscan/docscan_gui.py +150 -46
- sfi/img2pdf/__init__.py +0 -0
- sfi/img2pdf/img2pdf.py +453 -0
- sfi/llmclient/llmclient.py +31 -8
- sfi/llmquantize/llmquantize.py +39 -11
- sfi/llmserver/__init__.py +1 -0
- sfi/llmserver/llmserver.py +63 -13
- sfi/makepython/makepython.py +507 -124
- sfi/pyarchive/__init__.py +1 -0
- sfi/pyarchive/pyarchive.py +908 -278
- sfi/pyembedinstall/pyembedinstall.py +88 -89
- sfi/pylibpack/pylibpack.py +571 -465
- sfi/pyloadergen/pyloadergen.py +372 -218
- sfi/pypack/pypack.py +494 -965
- sfi/pyprojectparse/pyprojectparse.py +328 -28
- sfi/pysourcepack/__init__.py +1 -0
- sfi/pysourcepack/pysourcepack.py +210 -131
- sfi/quizbase/quizbase_gui.py +2 -2
- sfi/taskkill/taskkill.py +168 -59
- sfi/which/which.py +11 -3
- sfi/workflowengine/workflowengine.py +225 -122
- {pysfi-0.1.12.dist-info → pysfi-0.1.13.dist-info}/WHEEL +0 -0
sfi/pypack/pypack.py
CHANGED
@@ -1,30 +1,32 @@
-"""Package Workflow - Advanced Python project packaging tool
+"""Package Workflow - Advanced Python project packaging tool.
 
 This module provides a comprehensive packaging solution that integrates pyprojectparse,
-pysourcepack, pyembedinstall, and pyloadergen tools
-
+pysourcepack, pyembedinstall, and pyloadergen tools with configurable serial and
+parallel execution for optimal efficiency.
+
+The module follows established design patterns:
+- Factory pattern for configuration creation
+- Strategy pattern for cleaning operations
+- Builder pattern for workflow construction
+- Singleton pattern for logging configuration
 """
 
 from __future__ import annotations
 
 import argparse
 import asyncio
-import json
 import logging
 import platform
 import shutil
-
+import subprocess
+import time
+from dataclasses import dataclass, field
+from enum import Enum
+from functools import cached_property
 from pathlib import Path
-from typing import Any
+from typing import Any, Protocol
 
 from sfi.pyprojectparse.pyprojectparse import Project, Solution
-from sfi.pysourcepack.pysourcepack import pack_project
-from sfi.workflowengine.workflowengine import (
-    CPUTask,
-    IOTask,
-    SerialTask,
-    WorkflowEngine,
-)
 
 logging.basicConfig(
     level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
@@ -38,1104 +40,631 @@ __version__ = "1.0.0"
 __build__ = "20260120"
 
 
+# Enums for better type safety
+class LoaderType(Enum):
+    """Enumeration of supported loader types."""
+
+    CONSOLE = "console"
+    GUI = "gui"
+
+
+class ArchiveFormat(Enum):
+    """Enumeration of supported archive formats."""
+
+    ZIP = "zip"
+    SEVEN_ZIP = "7z"
+    NSIS = "nsis"
+
+
+class MirrorSource(Enum):
+    """Enumeration of supported PyPI mirror sources."""
+
+    PYPI = "pypi"
+    TSINGHUA = "tsinghua"
+    ALIYUN = "aliyun"
+    USTC = "ustc"
+    DOUBAN = "douban"
+    TENCENT = "tencent"
+
+
+# Protocol for cleaning strategies
+class CleaningStrategy(Protocol):
+    """Protocol for cleaning strategies."""
+
+    def should_clean(self, entry: Path) -> bool:
+        """Determine if an entry should be cleaned."""
+        ...
+
+    def clean_entry(self, entry: Path) -> tuple[bool, str]:
+        """Clean an entry and return success status and message."""
+        ...
+
+
+# Constants
+LOG_SEPARATOR = "=" * 50
+PROTECTED_DIRS = {
+    ".git",
+    ".venv",
+    ".virtualenv",
+    ".vscode",
+    ".idea",
+    ".codebuddy",
+    ".qoder",
+}
+CLEANABLE_DIRS = {"build", "dist", "pysfi_build", "cbuild", "benchmarks"}
+
+
+# Configuration Factory Pattern
+class ConfigFactory:
+    """Factory for creating workflow configurations with validation."""
+
+    @staticmethod
+    def create_from_args(args: argparse.Namespace, cwd: Path) -> WorkflowConfig:
+        """Create configuration from command line arguments."""
+        # For commands that don't need project-specific config, use defaults
+        project_name = getattr(args, "project", None)
+
+        return WorkflowConfig(
+            directory=cwd,
+            project_name=project_name,
+            python_version=getattr(args, "python_version", "3.8.10"),
+            loader_type=LoaderType(getattr(args, "loader_type", "console")),
+            entry_suffix=getattr(args, "entry_suffix", ".ent"),
+            generate_loader=not getattr(args, "no_loader", False),
+            offline=getattr(args, "offline", False),
+            max_concurrent=getattr(args, "jobs", 4),
+            debug=getattr(args, "debug", False),
+            cache_dir=Path(args.cache_dir)
+            if getattr(args, "cache_dir", None)
+            else None,
+            archive_format=ArchiveFormat(getattr(args, "archive_format", "zip")),
+            mirror=MirrorSource(getattr(args, "mirror", "aliyun")),
+            archive_type=getattr(args, "archive", None),
+        )
+
+    @staticmethod
+    def create_default(directory: Path) -> WorkflowConfig:
+        """Create default configuration."""
+        return WorkflowConfig(directory=directory)
+
+
 @dataclass
 class WorkflowConfig:
-    """Configuration for package workflow."""
+    """Configuration for package workflow with type-safe enums."""
 
     directory: Path
     project_name: str | None = None
     python_version: str = "3.8.10"
-    loader_type:
+    loader_type: LoaderType = LoaderType.CONSOLE
     entry_suffix: str = ".ent"
     generate_loader: bool = True
-    recursive: bool = False
     offline: bool = False
     max_concurrent: int = 4
     debug: bool = False
     cache_dir: Path | None = None
-    archive_format:
-    mirror:
-
-
-    @
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class
-    """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        self.directory = directory
-        self.recursive = recursive
-
-    async def execute(self, context: dict[str, Any]) -> Any:
-        """Execute project parsing."""
-        logger.info(f"Starting project parsing: {self.directory}")
-
-        try:
-            projects = Solution.from_directory(
-                root_dir=self.directory, recursive=self.recursive
-            )
-
-            logger.info(f"Found {len(projects)} project(s)")
-            for project in projects.values():
-                logger.info(project)
-
-            return {
-                "projects": projects,
-                "projects_file": str(self.directory / "projects.json"),
-            }
-
-        except Exception as e:
-            logger.error(f"Failed to parse projects: {e}")
-            raise
-
-
-class PackSourceTask(IOTask):
-    """Task to pack source code using pysourcepack."""
-
-    def __init__(
-        self,
-        directory: Path,
-        projects_file: Path,
-        project_name: str | None = None,
-        timeout: float = 120.0,
-    ):
-        super().__init__("pack_source", 5.0, ["parse_project"], timeout)
-        self.base_dir = directory
-        self.projects_file = projects_file
-        self.projects: dict[str, Project] = {}
-        self.project_name = project_name
-
-    def _pack_projects(self, projects: dict) -> list[str]:
-        """Pack specified projects.
-
-        Args:
-            projects: Projects data dict
-
-        Returns:
-            List of packed project names
-        """
-        project_names = (
-            [self.project_name] if self.project_name else list(projects.keys())
+    archive_format: ArchiveFormat = ArchiveFormat.ZIP
+    mirror: MirrorSource = MirrorSource.ALIYUN
+    archive_type: str | None = None
+
+    @cached_property
+    def normalized_directory(self) -> Path:
+        """Get normalized directory path."""
+        return self.directory.resolve()
+
+    @cached_property
+    def dist_dir(self) -> Path:
+        """Get distribution directory path."""
+        return self.directory / "dist"
+
+    @cached_property
+    def build_dir(self) -> Path:
+        """Get build directory path."""
+        return self.directory / "build"
+
+
+# Strategy Pattern Implementation for Cleaning
+class StandardCleaningStrategy:
+    """Standard cleaning strategy implementation."""
+
+    def should_clean(self, entry: Path) -> bool:
+        """Determine if entry should be cleaned using standard rules."""
+        # Special case: projects.json file should always be cleaned
+        if entry.is_file() and entry.name == "projects.json":
+            return True
+
+        if not entry.is_dir():
+            return False
+
+        # Protected directories starting with dot
+        if entry.name.startswith(".") and entry.name in PROTECTED_DIRS:
+            return False
+
+        # Clean temporary and build directories
+        return (
+            entry.name.startswith(".")
+            or entry.name.startswith("__")
+            or entry.name in CLEANABLE_DIRS
         )
-        packed_projects = []
-
-        for name in project_names:
-            logger.info(f"Packing project: {name}")
-            pack_project(base_dir=self.base_dir, project_name=name, projects=projects)
-            packed_projects.append(name)
 
-
-
-
-
-        logger.info(f"Starting source packing: {self.base_dir}")
+    def clean_entry(self, entry: Path) -> tuple[bool, str]:
+        """Clean entry and return success status with message."""
+        if not entry.exists():
+            return True, "Entry does not exist"
 
         try:
-
-
-
-
-
-
-
-
+            if entry.is_dir():
+                shutil.rmtree(entry)
+                return True, f"Removed directory: {entry}"
+            elif entry.is_file():
+                entry.unlink()
+                return True, f"Removed file: {entry}"
+            else:
+                return False, f"Unknown entry type: {entry}"
         except Exception as e:
-
-            raise
-
-
-class InstallPythonTask(IOTask):
-    """Task to install embedded Python using pyembedinstall."""
+            return False, f"Failed to remove {entry}: {e}"
 
-    def __init__(
-        self,
-        version: str,
-        target_dir: Path,
-        offline: bool = False,
-        timeout: float = 300.0,
-    ):
-        super().__init__("install_python", 10.0, ["parse_project"], timeout)
-        self.version = version
-        self.target_dir = target_dir
-        self.offline = offline
 
-
-
-
-        Returns:
-            Architecture string (amd64 or arm64)
-        """
-        arch = platform.machine().lower()
-        if arch in ("amd64", "x86_64"):
-            return "amd64"
-        elif arch in ("arm64", "aarch64"):
-            return "arm64"
-        return "amd64"
-
-    def _prepare_cache_dir(self) -> Path:
-        """Prepare cache directory for Python installation.
-
-        Returns:
-            Cache directory path
-        """
-        from sfi.pyembedinstall.pyembedinstall import _DEFAULT_CACHE_DIR
-
-        cache_dir = _DEFAULT_CACHE_DIR
-        cache_dir.mkdir(parents=True, exist_ok=True)
-        return cache_dir
+@dataclass(frozen=True)
+class PackageWorkflow:
+    """Package workflow orchestrator using strategy pattern for cleaning."""
 
-
-
+    root_dir: Path
+    config: WorkflowConfig
+    cleaning_strategy: CleaningStrategy = field(
+        default_factory=StandardCleaningStrategy
+    )
 
-
-
-
+    @cached_property
+    def solution(self) -> Solution:
+        """Get the solution for the root directory.
 
         Returns:
-
+            Solution: The solution for the root directory.
         """
-
-            EmbedInstallConfig,
-            install_embed_python,
-        )
-
-        config = EmbedInstallConfig(
-            target_dir=self.target_dir,
-            version=self.version,
-            cache_dir=cache_dir,
-            offline=self.offline,
-            keep_cache=True,
-            skip_speed_test=False,
-            arch=arch,
-        )
-
-        return install_embed_python(config)
-
-    async def execute(self, context: dict[str, Any]) -> Any:
-        """Execute Python installation."""
-        logger.info(f"Starting Python installation: {self.version}")
-
-        try:
-            cache_dir = self._prepare_cache_dir()
-            arch = self._get_architecture()
-            install_result = self._install_python(cache_dir, arch)
-
-            logger.info(f"Python {self.version} installed to {self.target_dir}")
-
-            return {
-                "version": self.version,
-                "target_dir": str(self.target_dir),
-                "install_result": install_result,
-            }
-
-        except Exception as e:
-            logger.error(f"Failed to install Python: {e}")
-            raise
-
+        return Solution.from_directory(root_dir=self.root_dir)
 
-
-
-
-    def __init__(
-        self,
-        project_dir: Path,
-        output_dir: Path,
-        cache_dir: Path | None = None,
-        python_version: str | None = None,
-        max_workers: int = 4,
-        archive_format: str = "zip",
-        mirror: str = "aliyun",
-        timeout: float = 300.0,
-    ):
-        super().__init__("pack_libraries", 10.0, ["parse_project"], timeout)
-        self.project_dir = project_dir
-        self.output_dir = output_dir
-        self.cache_dir = cache_dir
-        self.python_version = python_version
-        self.max_workers = max_workers
-        self.archive_format = archive_format
-        self.mirror = mirror
-
-    def _create_packer(self) -> Any:
-        """Create PyLibPack instance.
+    @cached_property
+    def projects(self) -> dict[str, Project]:
+        """Get the projects for the solution.
 
         Returns:
-
+            dict[str, Project]: The projects for the solution.
         """
-
-
-
-
-
-
-
-
-
-        """
-
-
-
-
-
-
-
-
-
+        return self.solution.projects
+
+    @cached_property
+    def dist_dir(self) -> Path:
+        """Get distribution directory path."""
+        return self.config.dist_dir
+
+    @cached_property
+    def build_dir(self) -> Path:
+        """Get build directory path."""
+        return self.config.build_dir
+
+    def clean_project(self) -> None:
+        """Clean build artifacts and package files using strategy pattern."""
+        logger.info("Cleaning build artifacts using strategy pattern...")
+
+        cleaned_dirs: list[str] = []
+        cleaned_files: list[str] = []
+        failed_operations: list[str] = []
+
+        entries_to_clean = [
+            entry
+            for entry in self.root_dir.iterdir()
+            if self.cleaning_strategy.should_clean(entry)
+        ]
+
+        for entry in entries_to_clean:
+            success, message = self.cleaning_strategy.clean_entry(entry)
+            if success:
+                if entry.is_dir():
+                    cleaned_dirs.append(str(entry))
+                else:
+                    cleaned_files.append(str(entry))
+                logger.info(message)
+            else:
+                failed_operations.append(message)
+                logger.warning(message)
 
+        # Summary logging
+        if not cleaned_dirs and not cleaned_files:
+            logger.info("No build artifacts found to clean")
+        else:
             logger.info(
-                f"
+                f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
             )
 
-
-
-                "packages_dir": pack_result.output_dir,
-            }
-
-        except Exception as e:
-            logger.error(f"Failed to pack libraries: {e}")
-            raise
-
-
-class GenerateLoaderTask(CPUTask):
-    """Task to generate loader using pyloadergen."""
-
-    def __init__(
-        self,
-        base_dir: Path,
-        debug: bool = False,
-        timeout: float = 60.0,
-    ):
-        super().__init__(
-            "generate_loader", 100000, ["parse_project", "pack_source"], timeout
-        )
-        self.base_dir = base_dir
-        self.debug = debug
-
-    async def execute(self, inputs: dict[str, Any]) -> Any:
-        """Execute loader generation."""
-        try:
-            from sfi.pyloadergen.pyloadergen import generate_loader
-
-            generate_loader(self.base_dir, self.debug)
-        except Exception as e:
-            logger.error(f"Failed to generate loader: {e}")
-            raise
+        if failed_operations:
+            logger.error(f"Failed operations: {len(failed_operations)}")
 
|
+
async def _run_in_executor(self, func, *args) -> None:
|
|
283
|
+
"""Run a synchronous function in thread pool executor.
|
|
356
284
|
|
|
357
|
-
|
|
358
|
-
|
|
359
|
-
|
|
360
|
-
|
|
361
|
-
|
|
362
|
-
|
|
363
|
-
self.output_dir = output_dir
|
|
364
|
-
super().__init__("assemble_package", self._assemble, dependencies, timeout)
|
|
365
|
-
|
|
366
|
-
def _get_project_dir(self, inputs: dict[str, Any]) -> Path:
|
|
367
|
-
"""Get project directory from parse result or current directory."""
|
|
368
|
-
if "parse_project" in inputs and hasattr(inputs["parse_project"], "data"):
|
|
369
|
-
parse_result = inputs["parse_project"].data
|
|
370
|
-
if isinstance(parse_result, dict) and "projects_file" in parse_result:
|
|
371
|
-
return Path(parse_result["projects_file"]).parent
|
|
372
|
-
return Path.cwd()
|
|
373
|
-
|
|
374
|
-
def _prepare_dist_dir(self) -> Path:
|
|
375
|
-
"""Prepare and return distribution directory."""
|
|
376
|
-
dist_dir = (
|
|
377
|
-
self.output_dir.parent if self.output_dir.is_file() else self.output_dir
|
|
378
|
-
)
|
|
379
|
-
dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
|
|
380
|
-
dist_dir.mkdir(parents=True, exist_ok=True)
|
|
381
|
-
return dist_dir
|
|
382
|
-
|
|
383
|
-
def _copy_loaders(self, project_dir: Path, dist_dir: Path) -> int:
|
|
384
|
-
"""Copy loaders and entry files to dist directory."""
|
|
385
|
-
loader_dir = project_dir / "dist"
|
|
386
|
-
if not loader_dir.exists():
|
|
387
|
-
return 0
|
|
388
|
-
|
|
389
|
-
loaders_count = 0
|
|
390
|
-
for loader in loader_dir.glob("*.ent"):
|
|
391
|
-
logger.info(f"Entry file: {loader.name}")
|
|
392
|
-
loaders_count += 1
|
|
393
|
-
|
|
394
|
-
# Support both Linux (no extension) and Windows (.exe) executables
|
|
395
|
-
for loader in loader_dir.glob("*.exe"):
|
|
396
|
-
logger.info(f"Executable: {loader.name}")
|
|
397
|
-
shutil.copy2(loader, dist_dir / loader.name)
|
|
398
|
-
loaders_count += 1
|
|
399
|
-
|
|
400
|
-
# Also copy Linux executables (files without extension)
|
|
401
|
-
for loader in loader_dir.glob("*"):
|
|
402
|
-
if loader.is_file() and not loader.name.endswith((".ent", ".exe")):
|
|
403
|
-
logger.info(f"Executable: {loader.name}")
|
|
404
|
-
shutil.copy2(loader, dist_dir / loader.name)
|
|
405
|
-
loaders_count += 1
|
|
406
|
-
|
|
407
|
-
logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")
|
|
408
|
-
return loaders_count
|
|
409
|
-
|
|
410
|
-
def _copy_libraries(self, project_dir: Path, dist_dir: Path) -> None:
|
|
411
|
-
"""Copy libraries to dist/site-packages directory."""
|
|
412
|
-
libs_dir = project_dir / "dist" / "libs"
|
|
413
|
-
if not libs_dir.exists():
|
|
414
|
-
return
|
|
415
|
-
|
|
416
|
-
dest_libs_dir = dist_dir / "site-packages"
|
|
417
|
-
if dest_libs_dir.exists():
|
|
418
|
-
shutil.rmtree(dest_libs_dir)
|
|
419
|
-
shutil.copytree(libs_dir, dest_libs_dir)
|
|
420
|
-
logger.info(f"Libraries copied to {dest_libs_dir}")
|
|
421
|
-
|
|
422
|
-
def _create_metadata(self, dist_dir: Path) -> dict[str, Any]:
|
|
423
|
-
"""Create package metadata file."""
|
|
424
|
-
metadata = {
|
|
425
|
-
"version": __version__,
|
|
426
|
-
"build": __build__,
|
|
427
|
-
"assembled_at": asyncio.get_event_loop().time(),
|
|
428
|
-
}
|
|
429
|
-
|
|
430
|
-
metadata_file = dist_dir / "metadata.json"
|
|
431
|
-
with open(metadata_file, "w", encoding="utf-8") as f:
|
|
432
|
-
json.dump(metadata, f, indent=2)
|
|
285
|
+
Args:
|
|
286
|
+
func: Function to execute
|
|
287
|
+
*args: Arguments to pass to the function
|
|
288
|
+
"""
|
|
289
|
+
loop = asyncio.get_running_loop()
|
|
290
|
+
await loop.run_in_executor(None, func, *args)
|
|
433
291
|
|
|
434
|
-
|
|
292
|
+
async def pack_embed_python(self) -> None:
|
|
293
|
+
"""Pack embed python."""
|
|
294
|
+
from sfi.pyembedinstall.pyembedinstall import EmbedInstaller
|
|
435
295
|
|
|
436
|
-
|
|
437
|
-
|
|
438
|
-
) -> dict[str, Any]:
|
|
439
|
-
"""Assemble final package."""
|
|
440
|
-
logger.info("Starting package assembly")
|
|
296
|
+
logger.info(LOG_SEPARATOR)
|
|
297
|
+
logger.info("Packing embed python...")
|
|
441
298
|
|
|
442
|
-
|
|
443
|
-
|
|
444
|
-
|
|
299
|
+
def _run():
|
|
300
|
+
installer = EmbedInstaller(
|
|
301
|
+
root_dir=self.root_dir,
|
|
302
|
+
cache_dir=self.config.cache_dir,
|
|
303
|
+
offline=self.config.offline,
|
|
304
|
+
)
|
|
305
|
+
installer.run()
|
|
445
306
|
|
|
446
|
-
|
|
447
|
-
|
|
448
|
-
metadata = self._create_metadata(dist_dir)
|
|
307
|
+
await self._run_in_executor(_run)
|
|
308
|
+
logger.info("Embed python packed.")
|
|
449
309
|
|
|
450
|
-
|
|
451
|
-
|
|
310
|
+
async def pack_loaders(self) -> None:
|
|
311
|
+
"""Pack loaders for all projects concurrently."""
|
|
312
|
+
from sfi.pyloadergen.pyloadergen import PyLoaderGenerator
|
|
452
313
|
|
|
453
|
-
|
|
454
|
-
|
|
455
|
-
raise
|
|
314
|
+
logger.info(LOG_SEPARATOR)
|
|
315
|
+
logger.info("Packing loaders...")
|
|
456
316
|
|
|
317
|
+
def _run():
|
|
318
|
+
generator = PyLoaderGenerator(root_dir=self.root_dir)
|
|
319
|
+
generator.run()
|
|
457
320
|
|
|
458
|
-
|
|
459
|
-
|
|
460
|
-
|
|
461
|
-
def __init__(self, directory: Path, config: WorkflowConfig):
|
|
462
|
-
self.directory = directory
|
|
463
|
-
self.config = config
|
|
464
|
-
self.engine = WorkflowEngine(max_concurrent=config.max_concurrent)
|
|
465
|
-
|
|
466
|
-
def _add_parse_task(self) -> None:
|
|
467
|
-
"""Add project parsing task to workflow."""
|
|
468
|
-
parse_task = ParseProjectTask(
|
|
469
|
-
directory=self.directory,
|
|
470
|
-
recursive=self.config.recursive,
|
|
471
|
-
)
|
|
472
|
-
self.engine.add_task(parse_task)
|
|
473
|
-
|
|
474
|
-
def _add_pack_and_install_tasks(self) -> None:
|
|
475
|
-
"""Add pack source and install Python tasks to workflow."""
|
|
476
|
-
pack_task = PackSourceTask(
|
|
477
|
-
directory=self.directory,
|
|
478
|
-
projects_file=self.directory / "projects.json",
|
|
479
|
-
project_name=self.config.project_name,
|
|
480
|
-
)
|
|
481
|
-
self.engine.add_task(pack_task)
|
|
321
|
+
await self._run_in_executor(_run)
|
|
322
|
+
logger.info("Loaders packed.")
|
|
482
323
|
|
|
483
|
-
|
|
484
|
-
|
|
485
|
-
|
|
486
|
-
offline=self.config.offline,
|
|
487
|
-
)
|
|
488
|
-
self.engine.add_task(install_task)
|
|
489
|
-
|
|
490
|
-
def _add_pack_libraries_task(self) -> None:
|
|
491
|
-
"""Add library packing task to workflow."""
|
|
492
|
-
lib_pack_task = PackLibrariesTask(
|
|
493
|
-
project_dir=self.directory,
|
|
494
|
-
output_dir=self.directory / "dist/site-packages",
|
|
495
|
-
cache_dir=self.config.cache_dir,
|
|
496
|
-
python_version=self.config.python_version,
|
|
497
|
-
max_workers=self.config.max_concurrent,
|
|
498
|
-
archive_format=self.config.archive_format,
|
|
499
|
-
mirror=self.config.mirror,
|
|
500
|
-
)
|
|
501
|
-
self.engine.add_task(lib_pack_task)
|
|
324
|
+
async def pack_libraries(self) -> None:
|
|
325
|
+
"""Pack libraries for all projects concurrently."""
|
|
326
|
+
from sfi.pylibpack.pylibpack import PyLibPacker, PyLibPackerConfig
|
|
502
327
|
|
|
503
|
-
|
|
504
|
-
|
|
328
|
+
def _run():
|
|
329
|
+
libpacker = PyLibPacker(
|
|
330
|
+
working_dir=self.root_dir,
|
|
331
|
+
config=PyLibPackerConfig(self.config.cache_dir),
|
|
332
|
+
)
|
|
333
|
+
libpacker.run()
|
|
505
334
|
|
|
506
|
-
|
|
507
|
-
List of task dependencies for assembly phase
|
|
508
|
-
"""
|
|
509
|
-
if not self.config.generate_loader:
|
|
510
|
-
return ["pack_source", "install_python"]
|
|
335
|
+
await self._run_in_executor(_run)
|
|
511
336
|
|
|
512
|
-
|
|
513
|
-
|
|
337
|
+
async def pack_source(self) -> None:
|
|
338
|
+
"""Pack source code for all projects concurrently."""
|
|
339
|
+
from sfi.pysourcepack.pysourcepack import PySourcePacker
|
|
514
340
|
|
|
515
|
-
|
|
516
|
-
|
|
341
|
+
def _run():
|
|
342
|
+
source_packer = PySourcePacker(root_dir=self.root_dir)
|
|
343
|
+
source_packer.run()
|
|
517
344
|
|
|
518
|
-
|
|
345
|
+
await self._run_in_executor(_run)
|
|
519
346
|
|
|
520
|
-
def
|
|
521
|
-
"""
|
|
347
|
+
async def pack_archive(self, archive_format: str) -> None:
|
|
348
|
+
"""Create archive for all projects.
|
|
522
349
|
|
|
523
350
|
Args:
|
|
524
|
-
|
|
351
|
+
archive_format: Archive format (zip, tar, gztar, bztar, xztar, 7z, nsis)
|
|
525
352
|
"""
|
|
526
|
-
|
|
527
|
-
output_dir=self.directory / "dist",
|
|
528
|
-
dependencies=dependencies,
|
|
529
|
-
)
|
|
530
|
-
self.engine.add_task(assemble_task)
|
|
353
|
+
from sfi.pyarchive.pyarchive import PyArchiver, PyArchiveConfig
|
|
531
354
|
|
|
532
|
-
|
|
533
|
-
"
|
|
534
|
-
logger.info("Building packaging workflow")
|
|
355
|
+
logger.info(LOG_SEPARATOR)
|
|
356
|
+
logger.info(f"Creating {archive_format} archives...")
|
|
535
357
|
|
|
536
|
-
|
|
537
|
-
|
|
538
|
-
|
|
539
|
-
|
|
540
|
-
self._add_assemble_task(dependencies)
|
|
358
|
+
def _run():
|
|
359
|
+
config = PyArchiveConfig(verbose=self.config.debug)
|
|
360
|
+
archiver = PyArchiver(root_dir=self.root_dir, config=config)
|
|
361
|
+
archiver.archive_projects(format=archive_format)
|
|
541
362
|
|
|
542
|
-
|
|
363
|
+
await self._run_in_executor(_run)
|
|
364
|
+
logger.info("Archives created.")
|
|
543
365
|
|
|
544
|
-
def
|
|
545
|
-
"""
|
|
366
|
+
async def build(self) -> dict[str, Any]:
|
|
367
|
+
"""Execute the packaging workflow with concurrent optimization.
|
|
546
368
|
|
|
547
|
-
|
|
548
|
-
|
|
549
|
-
|
|
550
|
-
|
|
551
|
-
logger.info("Workflow execution summary:")
|
|
552
|
-
logger.info(f" Total tasks: {summary['total_tasks']}")
|
|
553
|
-
logger.info(f" Completed: {summary['completed']}")
|
|
554
|
-
logger.info(f" Failed: {summary['failed']}")
|
|
555
|
-
logger.info(f" Success rate: {summary['success_rate'] * 100:.1f}%")
|
|
556
|
-
logger.info(f" Total time: {summary['total_execution_time']:.2f}s")
|
|
557
|
-
logger.info("=" * 50)
|
|
558
|
-
|
|
559
|
-
async def execute(self) -> dict[str, Any]:
|
|
560
|
-
"""Execute the packaging workflow.
|
|
369
|
+
Workflow stages:
|
|
370
|
+
1. Pack embed python (must be first)
|
|
371
|
+
2. Pack loaders, libraries, and source in parallel
|
|
372
|
+
3. Create archive (optional, if archive_type is set)
|
|
561
373
|
|
|
562
374
|
Returns:
|
|
563
375
|
Dict with results and summary
|
|
376
|
+
|
|
377
|
+
Raises:
|
|
378
|
+
FileNotFoundError: If required directories don't exist
|
|
379
|
+
RuntimeError: If any packaging step fails
|
|
564
380
|
"""
|
|
565
381
|
logger.info("Starting packaging workflow execution")
|
|
566
|
-
|
|
567
|
-
self.build_workflow()
|
|
382
|
+
t0 = time.perf_counter()
|
|
568
383
|
|
|
569
384
|
try:
|
|
570
|
-
|
|
571
|
-
|
|
572
|
-
|
|
385
|
+
# Stage 1: Pack embed python (prerequisite for other tasks)
|
|
386
|
+
await self.pack_embed_python()
|
|
387
|
+
|
|
388
|
+
# Stage 2: Pack loaders, libraries, and source concurrently
|
|
389
|
+
logger.info(LOG_SEPARATOR)
|
|
390
|
+
logger.info("Running parallel tasks: loaders, libraries, source...")
|
|
391
|
+
await asyncio.gather(
|
|
392
|
+
self.pack_loaders(),
|
|
393
|
+
self.pack_libraries(),
|
|
394
|
+
self.pack_source(),
|
|
395
|
+
)
|
|
573
396
|
|
|
574
|
-
|
|
397
|
+
# Stage 3: Create archive (optional)
|
|
398
|
+
if self.config.archive_type:
|
|
399
|
+
await self.pack_archive(self.config.archive_type)
|
|
575
400
|
|
|
401
|
+
except (FileNotFoundError, RuntimeError):
|
|
402
|
+
raise
|
|
576
403
|
except Exception as e:
|
|
577
404
|
logger.error(f"Workflow execution failed: {e}")
|
|
578
|
-
raise
|
|
405
|
+
raise RuntimeError(f"Packaging workflow failed: {e}") from e
|
|
579
406
|
|
|
407
|
+
t1 = time.perf_counter()
|
|
408
|
+
logger.info(LOG_SEPARATOR)
|
|
409
|
+
logger.info(f"Packaging workflow completed in {t1 - t0:.2f}s")
|
|
410
|
+
return {"output_dir": str(self.dist_dir), "metadata": {}}
|
|
580
411
|
|
|
581
|
-
def
|
|
582
|
-
|
|
412
|
+
def list_projects(self) -> None:
|
|
413
|
+
logger.info(f"Listing projects in {self.root_dir}")
|
|
414
|
+
for project in self.projects.values():
|
|
415
|
+
logger.info(f"{project}")
|
|
583
416
|
|
|
584
|
-
|
|
585
|
-
|
|
586
|
-
silent: If True, use warning instead of error messages
|
|
417
|
+
def _scan_executables(self) -> list[Path]:
|
|
418
|
+
"""Scan dist directory for executable files.
|
|
587
419
|
|
|
588
|
-
|
|
589
|
-
|
|
590
|
-
|
|
591
|
-
|
|
420
|
+
Returns:
|
|
421
|
+
List of executable file paths found in dist directory.
|
|
422
|
+
"""
|
|
423
|
+
dist_dir = self.root_dir / "dist"
|
|
424
|
+
if not dist_dir.exists():
|
|
425
|
+
return []
|
|
426
|
+
pattern = "*.exe" if is_windows else "*"
|
|
427
|
+
return [f for f in dist_dir.glob(pattern) if f.is_file() and f.suffix == ext]
|
|
592
428
|
|
|
593
|
-
|
|
594
|
-
|
|
595
|
-
level(f"No projects.json found in {directory}")
|
|
596
|
-
logger.info("Run 'pypack build' first to create projects.json")
|
|
597
|
-
return None
|
|
429
|
+
def _resolve_executable(self, match_name: str | None) -> Path | None:
|
|
430
|
+
"""Resolve executable by scanning dist directory and matching name.
|
|
598
431
|
|
|
599
|
-
|
|
600
|
-
|
|
601
|
-
projects_data = json.load(f)
|
|
432
|
+
Args:
|
|
433
|
+
match_name: Executable name or partial name to match
|
|
602
434
|
|
|
603
|
-
|
|
604
|
-
|
|
605
|
-
|
|
435
|
+
Returns:
|
|
436
|
+
Path to matched executable, or None if not found/ambiguous.
|
|
437
|
+
"""
|
|
438
|
+
executables = self._scan_executables()
|
|
439
|
+
if not executables:
|
|
606
440
|
return None
|
|
607
441
|
|
|
608
|
-
|
|
609
|
-
|
|
610
|
-
|
|
611
|
-
|
|
612
|
-
|
|
613
|
-
|
|
614
|
-
def _get_project_info(project_info: Any) -> tuple[str, str, str]:
|
|
615
|
-
"""Extract project info from project data.
|
|
616
|
-
|
|
617
|
-
Args:
|
|
618
|
-
project_info: Project info dict or string
|
|
619
|
-
|
|
620
|
-
Returns:
|
|
621
|
-
Tuple of (version, entry, description)
|
|
622
|
-
"""
|
|
623
|
-
if isinstance(project_info, str):
|
|
624
|
-
return "N/A", "N/A", ""
|
|
625
|
-
|
|
626
|
-
version = (
|
|
627
|
-
project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
|
|
628
|
-
)
|
|
629
|
-
entry = "main.py"
|
|
630
|
-
description = (
|
|
631
|
-
project_info.get("description", "") if isinstance(project_info, dict) else ""
|
|
632
|
-
)
|
|
633
|
-
return version, entry, description
|
|
634
|
-
|
|
635
|
-
|
|
636
|
-
def _print_project(project_name: str, project_info: Any, index: int) -> None:
|
|
637
|
-
"""Print project information.
|
|
638
|
-
|
|
639
|
-
Args:
|
|
640
|
-
project_name: Name of the project
|
|
641
|
-
project_info: Project info dict or string
|
|
642
|
-
index: Index number for listing
|
|
643
|
-
"""
|
|
644
|
-
version, entry, description = _get_project_info(project_info)
|
|
645
|
-
|
|
646
|
-
logger.info(f"\n[{index}] {project_name}")
|
|
647
|
-
logger.info(f" Version: {version}")
|
|
648
|
-
logger.info(f" Entry: {entry}")
|
|
649
|
-
if description:
|
|
650
|
-
logger.info(f" Description: {description}")
|
|
651
|
-
|
|
652
|
-
|
|
653
|
-
def list_projects(directory: Path) -> None:
|
|
654
|
-
"""List projects from projects.json.
|
|
655
|
-
|
|
656
|
-
Args:
|
|
657
|
-
directory: Project directory containing projects.json
|
|
658
|
-
"""
|
|
659
|
-
projects_data = _load_projects_data(directory, silent=True)
|
|
660
|
-
|
|
661
|
-
if not projects_data:
|
|
662
|
-
return
|
|
663
|
-
|
|
664
|
-
logger.info(f"Found {len(projects_data)} project(s):")
|
|
665
|
-
logger.info("=" * 60)
|
|
666
|
-
|
|
667
|
-
for i, (project_name, project_info) in enumerate(projects_data.items(), 1):
|
|
668
|
-
_print_project(project_name, project_info, i)
|
|
669
|
-
|
|
670
|
-
logger.info("=" * 60)
|
|
671
|
-
|
|
672
|
-
|
|
673
|
-
def _get_available_executables(directory: Path) -> list[str]:
|
|
674
|
-
"""Get list of available executables in dist directory.
|
|
675
|
-
|
|
676
|
-
Args:
|
|
677
|
-
directory: Project directory
|
|
678
|
-
|
|
679
|
-
Returns:
|
|
680
|
-
List of executable names (without .exe extension)
|
|
681
|
-
"""
|
|
682
|
-
dist_dir = directory / "dist"
|
|
683
|
-
if not dist_dir.exists():
|
|
684
|
-
logger.error(f"Dist directory not found: {dist_dir}")
|
|
685
|
-
logger.info("Run 'pypack build' first to build the project")
|
|
686
|
-
return []
|
|
687
|
-
|
|
688
|
-
# Support both Windows (.exe) and Linux (no extension) executables
|
|
689
|
-
exe_names = set()
|
|
690
|
-
for exe in dist_dir.glob("*.exe"):
|
|
691
|
-
if exe.is_file():
|
|
692
|
-
exe_names.add(exe.stem)
|
|
693
|
-
|
|
694
|
-
for exe in dist_dir.glob("*"):
|
|
695
|
-
if exe.is_file() and not exe.name.endswith((".ent", ".exe", ".json")):
|
|
696
|
-
exe_names.add(exe.name)
|
|
697
|
-
|
|
698
|
-
available_exes = list(exe_names)
|
|
699
|
-
if not available_exes:
|
|
700
|
-
logger.error("No executables found in dist directory")
|
|
701
|
-
logger.info("Run 'pypack build' first to build the project")
|
|
702
|
-
|
|
703
|
-
return available_exes
|
|
704
|
-
|
|
705
|
-
|
|
706
|
-
def _find_matching_executables(
|
|
707
|
-
project_name: str, available_exes: list[str]
|
|
708
|
-
) -> list[str]:
|
|
709
|
-
"""Find executables matching project name.
|
|
710
|
-
|
|
711
|
-
Args:
|
|
712
|
-
project_name: Project name to match
|
|
713
|
-
available_exes: List of available executable names
|
|
714
|
-
|
|
715
|
-
Returns:
|
|
716
|
-
List of matching executable names
|
|
717
|
-
"""
|
|
718
|
-
return [
|
|
719
|
-
exe
|
|
720
|
-
for exe in available_exes
|
|
721
|
-
if exe == project_name or exe.startswith(f"{project_name}-")
|
|
722
|
-
]
|
|
723
|
-
|
|
724
|
-
|
|
725
|
-
def _log_available_executables(available_exes: list[str]) -> None:
|
|
726
|
-
"""Log available executable names.
|
|
727
|
-
|
|
728
|
-
Args:
|
|
729
|
-
available_exes: List of available executable names
|
|
730
|
-
"""
|
|
731
|
-
logger.info("Available executables:")
|
|
732
|
-
for exe in available_exes:
|
|
733
|
-
logger.info(f"** {exe} **")
|
|
734
|
-
|
|
735
|
-
|
|
736
|
-
def _resolve_exact_project_name(
|
|
737
|
-
project_name: str, available_exes: list[str]
|
|
738
|
-
) -> str | None:
|
|
739
|
-
"""Resolve executable when project name is specified.
|
|
740
|
-
|
|
741
|
-
Args:
|
|
742
|
-
project_name: Project name to resolve
|
|
743
|
-
available_exes: List of available executable names
|
|
744
|
-
|
|
745
|
-
Returns:
|
|
746
|
-
Target executable name or None
|
|
747
|
-
"""
|
|
748
|
-
if project_name in available_exes:
|
|
749
|
-
return project_name
|
|
750
|
-
return None
|
|
751
|
-
|
|
442
|
+
# Auto-select if only one executable and no name specified
|
|
443
|
+
if not match_name:
|
|
444
|
+
if len(executables) == 1:
|
|
445
|
+
return executables[0]
|
|
446
|
+
return None
|
|
752
447
|
|
|
753
|
-
|
|
754
|
-
|
|
755
|
-
|
|
756
|
-
|
|
448
|
+
# Exact match (without extension)
|
|
449
|
+
lower_name = match_name.lower()
|
|
450
|
+
for exe in executables:
|
|
451
|
+
if exe.stem.lower() == lower_name:
|
|
452
|
+
return exe
|
|
757
453
|
|
|
758
|
-
|
|
759
|
-
|
|
760
|
-
|
|
454
|
+
# Fuzzy match (case-insensitive substring)
|
|
455
|
+
matches = [exe for exe in executables if lower_name in exe.stem.lower()]
|
|
456
|
+
if len(matches) == 1:
|
|
457
|
+
return matches[0]
|
|
761
458
|
|
|
762
|
-
Returns:
|
|
763
|
-
Target executable name or None
|
|
764
|
-
"""
|
|
765
|
-
matching_exes = _find_matching_executables(project_name, available_exes)
|
|
766
|
-
|
|
767
|
-
if len(matching_exes) == 1:
|
|
768
|
-
return matching_exes[0]
|
|
769
|
-
elif len(matching_exes) > 1:
|
|
770
|
-
logger.error(f"Multiple executables found for project '{project_name}':")
|
|
771
|
-
for exe in matching_exes:
|
|
772
|
-
logger.info(f" - {exe}")
|
|
773
|
-
logger.info("Please specify the full executable name (e.g., 'docscan-gui')")
|
|
774
459
|
return None
|
|
775
460
|
|
|
776
|
-
|
|
777
|
-
|
|
778
|
-
|
|
779
|
-
|
|
780
|
-
|
|
781
|
-
|
|
461
|
+
def list_executables(self) -> None:
|
|
462
|
+
"""List all executables in dist directory."""
|
|
463
|
+
executables = self._scan_executables()
|
|
464
|
+
if not executables:
|
|
465
|
+
logger.info("No executables found in dist directory")
|
|
466
|
+
return
|
|
467
|
+
logger.info(f"Available executables in {self.root_dir / 'dist'}:")
|
|
468
|
+
for exe in executables:
|
|
469
|
+
logger.info(f" {exe.stem}")
|
|
782
470
|
|
|
783
|
-
|
|
784
|
-
|
|
471
|
+
def run_project(
|
|
472
|
+
self, match_name: str | None, project_args: list[str] | None = None
|
|
473
|
+
) -> None:
|
|
474
|
+
"""Run an executable with fuzzy name matching support.
|
|
785
475
|
|
|
786
|
-
|
|
787
|
-
|
|
788
|
-
|
|
789
|
-
|
|
790
|
-
|
|
791
|
-
|
|
476
|
+
Args:
|
|
477
|
+
match_name: Executable name or partial name to match
|
|
478
|
+
project_args: Additional arguments to pass to the executable
|
|
479
|
+
"""
|
|
480
|
+
exe_path = self._resolve_executable(match_name)
|
|
481
|
+
|
|
482
|
+
if not exe_path and not match_name:
|
|
483
|
+
executables = self._scan_executables()
|
|
484
|
+
if len(executables) == 0:
|
|
485
|
+
logger.error("No executables found in dist directory")
|
|
486
|
+
return
|
|
487
|
+
elif len(executables) > 1:
|
|
488
|
+
logger.error(
|
|
489
|
+
"Multiple executables found. Please specify which one to run:"
|
|
490
|
+
)
|
|
491
|
+
self.list_executables()
|
|
492
|
+
return
|
|
493
|
+
logger.error("Unable to auto-select executable")
|
|
494
|
+
self.list_executables()
|
|
495
|
+
return
|
|
496
|
+
elif not exe_path:
|
|
497
|
+
logger.error(f"Executable '{match_name}' not found")
|
|
498
|
+
self.list_executables()
|
|
499
|
+
return
|
|
792
500
|
|
|
793
|
-
|
|
794
|
-
|
|
795
|
-
|
|
796
|
-
|
|
797
|
-
|
|
501
|
+
logger.info(f"Running {exe_path.stem} in {self.root_dir}")
|
|
502
|
+
cmd = [str(exe_path.resolve())]
|
|
503
|
+
logger.info(f"Command: {cmd}")
|
|
504
|
+
if project_args:
|
|
505
|
+
cmd.extend(project_args)
|
|
506
|
+
logger.info(f"Arguments: {' '.join(project_args)}")
|
|
798
507
|
|
|
508
|
+
try:
|
|
509
|
+
subprocess.run(cmd, check=True)
|
|
510
|
+
logger.info(f"{exe_path.stem} ran successfully")
|
|
511
|
+
except subprocess.CalledProcessError as e:
|
|
512
|
+
logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
|
|
513
|
+
except FileNotFoundError:
|
|
514
|
+
logger.error(f"Executable not found: {exe_path}")
|
|
515
|
+
except Exception as e:
|
|
516
|
+
logger.error(f"Failed to run {exe_path}: {e}")
|
|
799
517
|
|
|
800
|
-
def _resolve_target_executable(
|
|
801
|
-
project_name: str | None,
|
|
802
|
-
available_exes: list[str],
|
|
803
|
-
projects_data: dict[str, Any],
|
|
804
|
-
) -> str | None:
|
|
805
|
-
"""Resolve which executable to run based on project name.
|
|
806
518
|
|
|
807
|
-
|
|
808
|
-
|
|
809
|
-
available_exes: List of available executable names
|
|
810
|
-
projects_data: Projects data dict
|
|
519
|
+
def parse_args() -> argparse.Namespace:
|
|
520
|
+
"""Parse command line arguments using subcommand structure.
|
|
811
521
|
|
|
812
522
|
Returns:
|
|
813
|
-
|
|
814
|
-
"""
|
|
815
|
-
if not project_name:
|
|
816
|
-
return _resolve_no_project_name(available_exes)
|
|
817
|
-
|
|
818
|
-
exact_match = _resolve_exact_project_name(project_name, available_exes)
|
|
819
|
-
if exact_match:
|
|
820
|
-
return exact_match
|
|
821
|
-
|
|
822
|
-
if project_name in projects_data:
|
|
823
|
-
return _resolve_project_by_name(project_name, available_exes)
|
|
824
|
-
|
|
825
|
-
logger.error(f"Project or executable '{project_name}' not found")
|
|
826
|
-
_log_available_executables(available_exes)
|
|
827
|
-
return None
|
|
828
|
-
|
|
829
|
-
|
|
830
|
-
def _execute_project(exe_path: Path, dist_dir: Path) -> None:
|
|
831
|
-
"""Execute the project executable.
|
|
832
|
-
|
|
833
|
-
Args:
|
|
834
|
-
exe_path: Path to the executable
|
|
835
|
-
dist_dir: Working directory for execution
|
|
836
|
-
"""
|
|
837
|
-
import subprocess
|
|
838
|
-
|
|
839
|
-
try:
|
|
840
|
-
subprocess.run([str(exe_path)], cwd=str(dist_dir), check=True)
|
|
841
|
-
except subprocess.CalledProcessError as e:
|
|
842
|
-
logger.error(f"Project execution failed with exit code {e.returncode}")
|
|
843
|
-
except KeyboardInterrupt:
|
|
844
|
-
logger.info("Project execution interrupted")
|
|
845
|
-
except FileNotFoundError:
|
|
846
|
-
logger.error(f"Failed to execute {exe_path}")
|
|
847
|
-
|
|
848
|
-
|
|
849
|
-
def run_project(project_name: str | None, directory: Path) -> None:
|
|
850
|
-
"""Run a built project.
|
|
851
|
-
|
|
852
|
-
Args:
|
|
853
|
-
project_name: Project name or executable name to run (e.g., 'docscan' or 'docscan-gui')
|
|
854
|
-
directory: Project directory
|
|
523
|
+
Parsed arguments namespace
|
|
855
524
|
"""
|
|
856
|
-
|
|
857
|
-
|
|
858
|
-
return
|
|
859
|
-
|
|
860
|
-
available_exes = _get_available_executables(directory)
|
|
861
|
-
if not available_exes:
|
|
862
|
-
return
|
|
863
|
-
|
|
864
|
-
target_exe_name = _resolve_target_executable(
|
|
865
|
-
project_name, available_exes, projects_data
|
|
525
|
+
parser = argparse.ArgumentParser(
|
|
526
|
+
prog="pypack", description="Python packaging tool with workflow orchestration"
|
|
866
527
|
)
|
|
867
|
-
if not target_exe_name:
|
|
868
|
-
return
|
|
869
|
-
|
|
870
|
-
# Try both Windows (.exe) and Linux (no extension) executables
|
|
871
|
-
exe_path_with_ext = directory / "dist" / f"{target_exe_name}.exe"
|
|
872
|
-
exe_path_no_ext = directory / "dist" / target_exe_name
|
|
873
|
-
|
|
874
|
-
exe_path = None
|
|
875
|
-
if exe_path_with_ext.exists():
|
|
876
|
-
exe_path = exe_path_with_ext
|
|
877
|
-
elif exe_path_no_ext.exists():
|
|
878
|
-
exe_path = exe_path_no_ext
|
|
879
|
-
|
|
880
|
-
if not exe_path:
|
|
881
|
-
logger.error(f"Executable not found for '{target_exe_name}'")
|
|
882
|
-
logger.info("Run 'pypack build' first to build the project")
|
|
883
|
-
return
|
|
884
528
|
|
|
885
|
-
|
|
886
|
-
|
|
887
|
-
|
|
888
|
-
|
|
889
|
-
|
|
890
|
-
def clean_project(directory: Path) -> None:
|
|
891
|
-
"""Clean build artifacts and package files.
|
|
892
|
-
|
|
893
|
-
Args:
|
|
894
|
-
directory: Project directory to clean
|
|
895
|
-
"""
|
|
896
|
-
logger.info("Cleaning build artifacts...")
|
|
897
|
-
|
|
898
|
-
cleaned_dirs, cleaned_files = _clean_build_artifacts(directory)
|
|
899
|
-
|
|
900
|
-
if not cleaned_dirs and not cleaned_files:
|
|
901
|
-
logger.info("No build artifacts found")
|
|
902
|
-
else:
|
|
903
|
-
logger.info(
|
|
904
|
-
f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
|
|
905
|
-
)
|
|
906
|
-
|
|
907
|
-
|
|
908
|
-
def _should_clean(entry: Path) -> bool:
|
|
909
|
-
"""Check if a directory or file should be cleaned.
|
|
910
|
-
|
|
911
|
-
Args:
|
|
912
|
-
entry: Path to check
|
|
913
|
-
|
|
914
|
-
Returns:
|
|
915
|
-
True if the entry should be cleaned
|
|
916
|
-
"""
|
|
917
|
-
protected_dirs = {
|
|
918
|
-
".git",
|
|
919
|
-
".venv",
|
|
920
|
-
".virtualenv",
|
|
921
|
-
".vscode",
|
|
922
|
-
".idea",
|
|
923
|
-
".codebuddy",
|
|
924
|
-
".qoder",
|
|
925
|
-
}
|
|
926
|
-
|
|
927
|
-
if entry.is_file() and entry.name == "projects.json":
|
|
928
|
-
return True
|
|
929
|
-
|
|
930
|
-
if not entry.is_dir():
|
|
931
|
-
return False
|
|
932
|
-
|
|
933
|
-
if entry.name.startswith(".") and entry.name in protected_dirs:
|
|
934
|
-
return False
|
|
935
|
-
|
|
936
|
-
return (
|
|
937
|
-
entry.name.startswith(".")
|
|
938
|
-
or entry.name.startswith("__")
|
|
939
|
-
or entry.name in ("build", "dist", "pysfi_build", "cbuild", "benchmarks")
|
|
529
|
+
# Create subparsers
|
|
530
|
+
subparsers = parser.add_subparsers(
|
|
531
|
+
dest="command", required=True, help="Available commands"
|
|
940
532
|
)
|
|
941
533
|
|
|
534
|
+
# Version subcommand
|
|
535
|
+
version_parser = subparsers.add_parser(
|
|
536
|
+
"version", aliases=["v"], help="Show version information"
|
|
537
|
+
)
|
|
538
|
+
version_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
|
|
942
539
|
|
|
943
|
-
|
|
944
|
-
|
|
945
|
-
|
|
946
|
-
Args:
|
|
947
|
-
directory: Project directory to clean
|
|
948
|
-
|
|
949
|
-
Returns:
|
|
950
|
-
Tuple of (cleaned directories, cleaned files)
|
|
951
|
-
"""
|
|
952
|
-
cleaned_dirs = []
|
|
953
|
-
cleaned_files = []
|
|
954
|
-
|
|
955
|
-
entries_to_clean = [f for f in directory.iterdir() if _should_clean(f)]
|
|
956
|
-
|
|
957
|
-
for entry in entries_to_clean:
|
|
958
|
-
if not entry.exists():
|
|
959
|
-
continue
|
|
960
|
-
|
|
961
|
-
if entry.is_dir():
|
|
962
|
-
logger.info(f"Removing directory: {entry}")
|
|
963
|
-
if _remove_directory(entry):
|
|
964
|
-
cleaned_dirs.append(str(entry))
|
|
965
|
-
logger.info(f" Removed directory: {entry}")
|
|
966
|
-
else:
|
|
967
|
-
logger.warning(f" Failed to remove {entry}")
|
|
968
|
-
elif entry.is_file():
|
|
969
|
-
logger.info(f"Removing file: {entry}")
|
|
970
|
-
if _remove_file(entry):
|
|
971
|
-
cleaned_files.append(str(entry))
|
|
972
|
-
logger.info(f" Removed file: {entry}")
|
|
973
|
-
else:
|
|
974
|
-
logger.warning(f" Failed to remove {entry}")
|
|
975
|
-
|
|
976
|
-
return cleaned_dirs, cleaned_files
|
|
977
|
-
|
|
978
|
-
|
|
979
|
-
def _remove_directory(dir_path: Path) -> bool:
|
|
980
|
-
"""Remove a directory safely.
|
|
981
|
-
|
|
982
|
-
Args:
|
|
983
|
-
dir_path: Directory path to remove
|
|
984
|
-
|
|
985
|
-
Returns:
|
|
986
|
-
True if removal succeeded
|
|
987
|
-
"""
|
|
988
|
-
try:
|
|
989
|
-
shutil.rmtree(dir_path)
|
|
990
|
-
return True
|
|
991
|
-
except Exception:
|
|
992
|
-
return False
|
|
993
|
-
|
|
994
|
-
|
|
995
|
-
def _remove_file(file_path: Path) -> bool:
|
|
996
|
-
"""Remove a file safely.
|
|
997
|
-
|
|
998
|
-
Args:
|
|
999
|
-
file_path: File path to remove
|
|
1000
|
-
|
|
1001
|
-
Returns:
|
|
1002
|
-
True if removal succeeded
|
|
1003
|
-
"""
|
|
1004
|
-
try:
|
|
1005
|
-
file_path.unlink()
|
|
1006
|
-
return True
|
|
1007
|
-
except Exception:
|
|
1008
|
-
return False
|
|
1009
|
-
|
|
1010
|
-
|
|
1011
|
-
def parse_args() -> argparse.Namespace:
|
|
1012
|
-
"""Parse command line arguments.
|
|
1013
|
-
|
|
1014
|
-
Returns:
|
|
1015
|
-
Parsed arguments namespace
|
|
1016
|
-
"""
|
|
1017
|
-
parser = argparse.ArgumentParser(
|
|
1018
|
-
prog="pypack", description="Python packaging tool with workflow orchestration"
|
|
540
|
+
# List subcommand
|
|
541
|
+
list_parser = subparsers.add_parser(
|
|
542
|
+
"list", aliases=["l", "ls"], help="List available projects"
|
|
1019
543
|
)
|
|
544
|
+
list_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
|
|
1020
545
|
|
|
1021
|
-
|
|
1022
|
-
|
|
1023
|
-
|
|
1024
|
-
help="Action to perform",
|
|
546
|
+
# Clean subcommand
|
|
547
|
+
clean_parser = subparsers.add_parser(
|
|
548
|
+
"clean", aliases=["c"], help="Clean build artifacts"
|
|
1025
549
|
)
|
|
1026
|
-
|
|
550
|
+
clean_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
|
|
551
|
+
|
|
552
|
+
# Run subcommand
|
|
553
|
+
run_parser = subparsers.add_parser("run", aliases=["r"], help="Run a project")
|
|
554
|
+
run_parser.add_argument(
|
|
1027
555
|
"project",
|
|
1028
556
|
type=str,
|
|
1029
|
-
nargs="?",
|
|
1030
|
-
default=None,
|
|
1031
557
|
help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
|
|
1032
558
|
)
|
|
1033
|
-
|
|
1034
|
-
|
|
559
|
+
run_parser.add_argument(
|
|
560
|
+
"args",
|
|
561
|
+
type=str,
|
|
562
|
+
nargs="*",
|
|
563
|
+
help="Additional arguments to pass to the project",
|
|
564
|
+
)
|
|
565
|
+
run_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
|
|
566
|
+
|
|
567
|
+
# Build subcommand
|
|
568
|
+
build_parser = subparsers.add_parser(
|
|
569
|
+
"build", aliases=["b"], help="Build project packages"
|
|
570
|
+
)
|
|
571
|
+
build_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
|
|
572
|
+
build_parser.add_argument(
|
|
1035
573
|
"--python-version", type=str, default="3.8.10", help="Python version to install"
|
|
1036
574
|
)
|
|
1037
|
-
|
|
575
|
+
build_parser.add_argument(
|
|
1038
576
|
"--loader-type",
|
|
1039
577
|
type=str,
|
|
1040
|
-
choices=
|
|
1041
|
-
default=
|
|
578
|
+
choices=[lt.value for lt in LoaderType],
|
|
579
|
+
default=LoaderType.CONSOLE.value,
|
|
1042
580
|
help="Loader type",
|
|
1043
581
|
)
|
|
1044
|
-
|
|
582
|
+
build_parser.add_argument(
|
|
1045
583
|
"--entry-suffix",
|
|
1046
584
|
type=str,
|
|
1047
585
|
default=".ent",
|
|
1048
586
|
help="Entry file suffix (default: .ent, alternatives: .py)",
|
|
1049
587
|
)
|
|
1050
|
-
|
|
588
|
+
build_parser.add_argument(
|
|
1051
589
|
"--no-loader", action="store_true", help="Skip loader generation"
|
|
1052
590
|
)
|
|
1053
|
-
|
|
1054
|
-
"-
|
|
591
|
+
build_parser.add_argument(
|
|
592
|
+
"-o", "--offline", action="store_true", help="Offline mode"
|
|
1055
593
|
)
|
|
1056
|
-
|
|
1057
|
-
parser.add_argument(
|
|
594
|
+
build_parser.add_argument(
|
|
1058
595
|
"-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks"
|
|
1059
596
|
)
|
|
1060
597
|
|
|
1061
|
-
# Library packing arguments
|
|
1062
|
-
|
|
598
|
+
# Library packing arguments (build-specific)
|
|
599
|
+
build_parser.add_argument(
|
|
1063
600
|
"--cache-dir",
|
|
1064
601
|
type=str,
|
|
1065
602
|
default=None,
|
|
1066
603
|
help="Custom cache directory for dependencies",
|
|
1067
604
|
)
|
|
1068
|
-
|
|
605
|
+
build_parser.add_argument(
|
|
1069
606
|
"--archive-format",
|
|
1070
607
|
type=str,
|
|
1071
|
-
choices=
|
|
1072
|
-
default=
|
|
608
|
+
choices=[af.value for af in ArchiveFormat],
|
|
609
|
+
default=ArchiveFormat.ZIP.value,
|
|
1073
610
|
help="Archive format for dependencies",
|
|
1074
611
|
)
|
|
1075
|
-
|
|
612
|
+
build_parser.add_argument(
|
|
1076
613
|
"--mirror",
|
|
1077
614
|
type=str,
|
|
1078
|
-
default=
|
|
1079
|
-
choices=
|
|
615
|
+
default=MirrorSource.ALIYUN.value,
|
|
616
|
+
choices=[ms.value for ms in MirrorSource],
|
|
1080
617
|
help="PyPI mirror source for faster downloads",
|
|
1081
618
|
)
|
|
1082
619
|
|
|
1083
|
-
|
|
1084
|
-
|
|
1085
|
-
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
620
|
+
# Archive options (optional post-build step)
|
|
621
|
+
build_parser.add_argument(
|
|
622
|
+
"--archive",
|
|
623
|
+
"-a",
|
|
624
|
+
type=str,
|
|
625
|
+
nargs="?",
|
|
626
|
+
const="zip",
|
|
627
|
+
default=None,
|
|
628
|
+
choices=["zip", "tar", "gztar", "bztar", "xztar", "7z", "nsis"],
|
|
629
|
+
help="Create archive after build (default format: zip if flag used)",
|
|
630
|
+
)
|
|
1089
631
|
|
|
1090
|
-
|
|
1091
|
-
await workflow.execute()
|
|
1092
|
-
logger.info("Packaging completed successfully!")
|
|
1093
|
-
except Exception as e:
|
|
1094
|
-
logger.error(f"Packaging failed: {e}")
|
|
1095
|
-
raise
|
|
632
|
+
return parser.parse_args()
|
|
1096
633
|
|
|
1097
634
|
|
|
1098
635
|
def main() -> None:
|
|
1099
|
-
"""Main entry point for package workflow tool."""
|
|
636
|
+
"""Main entry point for package workflow tool using factory pattern."""
|
|
1100
637
|
args = parse_args()
|
|
1101
638
|
|
|
1102
639
|
if args.debug:
|
|
1103
640
|
logging.getLogger().setLevel(logging.DEBUG)
|
|
1104
641
|
|
|
1105
|
-
|
|
1106
|
-
|
|
1107
|
-
return
|
|
1108
|
-
|
|
1109
|
-
if args.action in {"list", "l"}:
|
|
1110
|
-
list_projects(cwd)
|
|
1111
|
-
return
|
|
1112
|
-
|
|
1113
|
-
if args.action in {"run", "r"}:
|
|
1114
|
-
run_project(args.project, cwd)
|
|
1115
|
-
return
|
|
642
|
+
# Build workflow configuration using factory pattern
|
|
643
|
+
config = ConfigFactory.create_from_args(args, cwd)
|
|
1116
644
|
|
|
1117
|
-
|
|
1118
|
-
|
|
1119
|
-
|
|
1120
|
-
|
|
1121
|
-
cache_dir = Path(args.cache_dir) if args.cache_dir else None
|
|
1122
|
-
config = WorkflowConfig(
|
|
1123
|
-
directory=cwd,
|
|
1124
|
-
project_name=args.project,
|
|
1125
|
-
python_version=args.python_version,
|
|
1126
|
-
loader_type=args.loader_type,
|
|
1127
|
-
entry_suffix=args.entry_suffix,
|
|
1128
|
-
generate_loader=not args.no_loader,
|
|
1129
|
-
recursive=args.recursive,
|
|
1130
|
-
offline=args.offline,
|
|
1131
|
-
max_concurrent=args.jobs,
|
|
1132
|
-
debug=args.debug,
|
|
1133
|
-
cache_dir=cache_dir,
|
|
1134
|
-
archive_format=args.archive_format,
|
|
1135
|
-
mirror=args.mirror,
|
|
645
|
+
workflow = PackageWorkflow(
|
|
646
|
+
root_dir=config.directory,
|
|
647
|
+
config=config,
|
|
648
|
+
cleaning_strategy=StandardCleaningStrategy(),
|
|
1136
649
|
)
|
|
1137
650
|
|
|
1138
|
-
|
|
651
|
+
# Handle different subcommands
|
|
652
|
+
if args.command in ("version", "v"):
|
|
653
|
+
logger.info(f"pypack {__version__} (build {__build__})")
|
|
654
|
+
return
|
|
655
|
+
elif args.command in ("list", "l", "ls"):
|
|
656
|
+
workflow.list_projects()
|
|
657
|
+
elif args.command in ("run", "r"):
|
|
658
|
+
workflow.run_project(args.project, args.args)
|
|
659
|
+
elif args.command in ("clean", "c"):
|
|
660
|
+
workflow.clean_project()
|
|
661
|
+
elif args.command in ("build", "b"):
|
|
662
|
+
try:
|
|
663
|
+
asyncio.run(workflow.build())
|
|
664
|
+
logger.info("Packaging completed successfully!")
|
|
665
|
+
except Exception as e:
|
|
666
|
+
logger.error(f"Packaging failed: {e}")
|
|
667
|
+
raise
|
|
1139
668
|
|
|
1140
669
|
|
|
1141
670
|
if __name__ == "__main__":
|
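
For orientation, a minimal sketch of driving the reworked module the way the new main() wires it up. The names (ConfigFactory, PackageWorkflow, StandardCleaningStrategy, build) are taken from the added lines above; the snippet itself is illustrative, assumes pysfi 0.1.13 is installed, and is not part of the package.

import asyncio
from pathlib import Path

from sfi.pypack.pypack import (
    ConfigFactory,
    PackageWorkflow,
    StandardCleaningStrategy,
)

# Rough CLI equivalents under the new subcommand parser:
#   pypack list   -> workflow.list_projects()
#   pypack clean  -> workflow.clean_project()
#   pypack build  -> asyncio.run(workflow.build())
cwd = Path.cwd()
config = ConfigFactory.create_default(cwd)  # create_from_args() maps CLI flags instead
workflow = PackageWorkflow(
    root_dir=config.directory,
    config=config,
    cleaning_strategy=StandardCleaningStrategy(),
)
workflow.list_projects()
# build() packs embedded Python first, then runs loaders/libraries/source in parallel
asyncio.run(workflow.build())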