pysfi-0.1.12-py3-none-any.whl → pysfi-0.1.14-py3-none-any.whl
This diff compares publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in the public registry.
- {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/METADATA +1 -1
- pysfi-0.1.14.dist-info/RECORD +68 -0
- {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/entry_points.txt +3 -0
- sfi/__init__.py +19 -2
- sfi/alarmclock/__init__.py +3 -0
- sfi/alarmclock/alarmclock.py +23 -40
- sfi/bumpversion/__init__.py +3 -1
- sfi/bumpversion/bumpversion.py +64 -15
- sfi/cleanbuild/__init__.py +3 -0
- sfi/cleanbuild/cleanbuild.py +5 -1
- sfi/cli.py +25 -4
- sfi/condasetup/__init__.py +1 -0
- sfi/condasetup/condasetup.py +91 -76
- sfi/docdiff/__init__.py +1 -0
- sfi/docdiff/docdiff.py +3 -2
- sfi/docscan/__init__.py +1 -1
- sfi/docscan/docscan.py +78 -23
- sfi/docscan/docscan_gui.py +152 -48
- sfi/filedate/filedate.py +12 -5
- sfi/img2pdf/img2pdf.py +453 -0
- sfi/llmclient/llmclient.py +31 -8
- sfi/llmquantize/llmquantize.py +76 -37
- sfi/llmserver/__init__.py +1 -0
- sfi/llmserver/llmserver.py +63 -13
- sfi/makepython/makepython.py +1145 -201
- sfi/pdfsplit/pdfsplit.py +45 -12
- sfi/pyarchive/__init__.py +1 -0
- sfi/pyarchive/pyarchive.py +908 -278
- sfi/pyembedinstall/pyembedinstall.py +88 -89
- sfi/pylibpack/pylibpack.py +561 -463
- sfi/pyloadergen/pyloadergen.py +372 -218
- sfi/pypack/pypack.py +510 -959
- sfi/pyprojectparse/pyprojectparse.py +337 -40
- sfi/pysourcepack/__init__.py +1 -0
- sfi/pysourcepack/pysourcepack.py +210 -131
- sfi/quizbase/quizbase_gui.py +2 -2
- sfi/taskkill/taskkill.py +168 -59
- sfi/which/which.py +11 -3
- pysfi-0.1.12.dist-info/RECORD +0 -62
- sfi/workflowengine/workflowengine.py +0 -444
- {pysfi-0.1.12.dist-info → pysfi-0.1.14.dist-info}/WHEEL +0 -0
- /sfi/{workflowengine → img2pdf}/__init__.py +0 -0
sfi/pypack/pypack.py
CHANGED
@@ -1,30 +1,32 @@
-"""Package Workflow - Advanced Python project packaging tool
+"""Package Workflow - Advanced Python project packaging tool.
 
 This module provides a comprehensive packaging solution that integrates pyprojectparse,
-pysourcepack, pyembedinstall, and pyloadergen tools
-
+pysourcepack, pyembedinstall, and pyloadergen tools with configurable serial and
+parallel execution for optimal efficiency.
+
+The module follows established design patterns:
+- Factory pattern for configuration creation
+- Strategy pattern for cleaning operations
+- Builder pattern for workflow construction
+- Singleton pattern for logging configuration
 """
 
 from __future__ import annotations
 
 import argparse
 import asyncio
-import json
 import logging
 import platform
 import shutil
-
+import subprocess
+import time
+from dataclasses import dataclass, field
+from enum import Enum
+from functools import cached_property
 from pathlib import Path
-from typing import Any
+from typing import Any, Callable, Protocol
 
 from sfi.pyprojectparse.pyprojectparse import Project, Solution
-from sfi.pysourcepack.pysourcepack import pack_project
-from sfi.workflowengine.workflowengine import (
-    CPUTask,
-    IOTask,
-    SerialTask,
-    WorkflowEngine,
-)
 
 logging.basicConfig(
     level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s"
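The import changes above summarize the rewrite: the in-package sfi.workflowengine task framework (CPUTask, IOTask, SerialTask, WorkflowEngine) is gone, and pypack now drives its pipeline with plain asyncio plus subprocess and time. Concretely, the new code in the next hunk wraps each blocking pack step with loop.run_in_executor and fans independent steps out with asyncio.gather. A minimal, self-contained sketch of that pattern follows; pack_libraries and pack_source here are hypothetical stand-ins, not pypack's real steps:

import asyncio
import time


def pack_libraries() -> str:
    """Hypothetical stand-in for a blocking pack step."""
    time.sleep(1)  # simulate blocking I/O or CPU work
    return "libraries"


def pack_source() -> str:
    """Hypothetical stand-in for another blocking pack step."""
    time.sleep(1)
    return "source"


async def run_blocking(func):
    # Mirror of the diff's _run_sync_task: push a synchronous callable
    # onto the default thread-pool executor and await its result.
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, func)


async def build() -> None:
    # Mirror of the diff's build(): independent stages run concurrently.
    results = await asyncio.gather(
        run_blocking(pack_libraries),
        run_blocking(pack_source),
    )
    print(results)  # ['libraries', 'source'], after ~1s rather than ~2s


asyncio.run(build())

Passing None as the executor uses asyncio's default ThreadPoolExecutor, and gather returns results in argument order, which is why the two one-second stand-ins above finish in roughly one second total.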
@@ -38,1104 +40,653 @@ __version__ = "1.0.0"
 __build__ = "20260120"
 
 
+# Enums for better type safety
+class LoaderType(Enum):
+    """Enumeration of supported loader types."""
+
+    CONSOLE = "console"
+    GUI = "gui"
+
+
+class ArchiveFormat(Enum):
+    """Enumeration of supported archive formats."""
+
+    ZIP = "zip"
+    SEVEN_ZIP = "7z"
+    NSIS = "nsis"
+
+
+class MirrorSource(Enum):
+    """Enumeration of supported PyPI mirror sources."""
+
+    PYPI = "pypi"
+    TSINGHUA = "tsinghua"
+    ALIYUN = "aliyun"
+    USTC = "ustc"
+    DOUBAN = "douban"
+    TENCENT = "tencent"
+
+
+# Protocol for cleaning strategies
+class CleaningStrategy(Protocol):
+    """Protocol for cleaning strategies."""
+
+    def should_clean(self, entry: Path) -> bool:
+        """Determine if an entry should be cleaned."""
+        ...
+
+    def clean_entry(self, entry: Path) -> tuple[bool, str]:
+        """Clean an entry and return success status and message."""
+        ...
+
+
+# Constants
+LOG_SEPARATOR = "=" * 50
+PROTECTED_DIRS = {
+    ".git",
+    ".venv",
+    ".virtualenv",
+    ".vscode",
+    ".idea",
+    ".codebuddy",
+    ".qoder",
+}
+CLEANABLE_DIRS = {"build", "dist", "pysfi_build", "cbuild", "benchmarks"}
+
+
+# Configuration Factory Pattern
+class ConfigFactory:
+    """Factory for creating workflow configurations with validation."""
+
+    @staticmethod
+    def create_from_args(args: argparse.Namespace, cwd: Path) -> WorkflowConfig:
+        """Create configuration from command line arguments."""
+        # For commands that don't need project-specific config, use defaults
+        project_name = getattr(args, "project", None)
+
+        return WorkflowConfig(
+            directory=cwd,
+            project_name=project_name,
+            python_version=getattr(args, "python_version", "3.8.10"),
+            loader_type=LoaderType(getattr(args, "loader_type", "console")),
+            entry_suffix=getattr(args, "entry_suffix", ".ent"),
+            generate_loader=not getattr(args, "no_loader", False),
+            offline=getattr(args, "offline", False),
+            max_concurrent=getattr(args, "jobs", 4),
+            debug=getattr(args, "debug", False),
+            cache_dir=Path(args.cache_dir)
+            if getattr(args, "cache_dir", None)
+            else None,
+            archive_format=ArchiveFormat(getattr(args, "archive_format", "zip")),
+            mirror=MirrorSource(getattr(args, "mirror", "aliyun")),
+            archive_type=getattr(args, "archive", None),
+        )
+
+    @staticmethod
+    def create_default(directory: Path) -> WorkflowConfig:
+        """Create default configuration."""
+        return WorkflowConfig(directory=directory)
+
+
 @dataclass
 class WorkflowConfig:
-    """Configuration for package workflow."""
+    """Configuration for package workflow with type-safe enums."""
 
     directory: Path
     project_name: str | None = None
     python_version: str = "3.8.10"
-    loader_type:
+    loader_type: LoaderType = LoaderType.CONSOLE
     entry_suffix: str = ".ent"
     generate_loader: bool = True
-    recursive: bool = False
     offline: bool = False
     max_concurrent: int = 4
    debug: bool = False
     cache_dir: Path | None = None
-    archive_format:
-    mirror:
-
-
-    @
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-class
-    """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    def __init__(self, directory: Path, recursive: bool = False, timeout: float = 60.0):
-        super().__init__("parse_project", 2.0, [], timeout)
-        self.directory = directory
-        self.recursive = recursive
-
-    async def execute(self, context: dict[str, Any]) -> Any:
-        """Execute project parsing."""
-        logger.info(f"Starting project parsing: {self.directory}")
-
-        try:
-            projects = Solution.from_directory(
-                root_dir=self.directory, recursive=self.recursive
-            )
-
-            logger.info(f"Found {len(projects)} project(s)")
-            for project in projects.values():
-                logger.info(project)
-
-            return {
-                "projects": projects,
-                "projects_file": str(self.directory / "projects.json"),
-            }
-
-        except Exception as e:
-            logger.error(f"Failed to parse projects: {e}")
-            raise
-
-
-class PackSourceTask(IOTask):
-    """Task to pack source code using pysourcepack."""
-
-    def __init__(
-        self,
-        directory: Path,
-        projects_file: Path,
-        project_name: str | None = None,
-        timeout: float = 120.0,
-    ):
-        super().__init__("pack_source", 5.0, ["parse_project"], timeout)
-        self.base_dir = directory
-        self.projects_file = projects_file
-        self.projects: dict[str, Project] = {}
-        self.project_name = project_name
-
-    def _pack_projects(self, projects: dict) -> list[str]:
-        """Pack specified projects.
-
-        Args:
-            projects: Projects data dict
-
-        Returns:
-            List of packed project names
-        """
-        project_names = (
-            [self.project_name] if self.project_name else list(projects.keys())
+    archive_format: ArchiveFormat = ArchiveFormat.ZIP
+    mirror: MirrorSource = MirrorSource.ALIYUN
+    archive_type: str | None = None
+
+    @cached_property
+    def normalized_directory(self) -> Path:
+        """Get normalized and resolved directory path."""
+        return self.directory.resolve()
+
+    @cached_property
+    def dist_dir(self) -> Path:
+        """Get distribution directory path."""
+        return self.normalized_directory / "dist"
+
+    @cached_property
+    def build_dir(self) -> Path:
+        """Get build directory path."""
+        return self.normalized_directory / "build"
+
+
+# Strategy Pattern Implementation for Cleaning
+class StandardCleaningStrategy:
+    """Standard cleaning strategy implementation."""
+
+    def should_clean(self, entry: Path) -> bool:
+        """Determine if entry should be cleaned using standard rules."""
+        # Special case: projects.json file should always be cleaned
+        if entry.is_file():
+            return entry.name == "projects.json"
+
+        # Protected directories starting with dot
+        if entry.name.startswith(".") and entry.name in PROTECTED_DIRS:
+            return False
+
+        # Clean temporary and build directories
+        return (
+            entry.name.startswith(".")
+            or entry.name.startswith("__")
+            or entry.name in CLEANABLE_DIRS
         )
-        packed_projects = []
-
-        for name in project_names:
-            logger.info(f"Packing project: {name}")
-            pack_project(base_dir=self.base_dir, project_name=name, projects=projects)
-            packed_projects.append(name)
-
-        return packed_projects
 
-
-    """
-
+    def clean_entry(self, entry: Path) -> tuple[bool, str]:
+        """Clean entry and return success status with message."""
+        if not entry.exists():
+            return True, "Entry does not exist"
 
         try:
-
-
-
-
-
-
-
-
+            if entry.is_dir():
+                shutil.rmtree(entry)
+                return True, f"Removed directory: {entry}"
+            elif entry.is_file():
+                entry.unlink()
+                return True, f"Removed file: {entry}"
+            else:
+                return False, f"Unknown entry type: {entry}"
         except Exception as e:
-
-            raise
+            return False, f"Failed to remove {entry}: {e}"
 
 
-
-
+@dataclass(frozen=True)
+class PackageWorkflow:
+    """Package workflow orchestrator using strategy pattern for cleaning."""
 
-
-
-
-
-
-        timeout: float = 300.0,
-    ):
-        super().__init__("install_python", 10.0, ["parse_project"], timeout)
-        self.version = version
-        self.target_dir = target_dir
-        self.offline = offline
+    root_dir: Path
+    config: WorkflowConfig
+    cleaning_strategy: CleaningStrategy = field(
+        default_factory=StandardCleaningStrategy
+    )
 
-
-
+    @cached_property
+    def solution(self) -> Solution:
+        """Get the solution for the root directory.
 
         Returns:
-
+            Solution: The solution for the root directory.
         """
-
-        if arch in ("amd64", "x86_64"):
-            return "amd64"
-        elif arch in ("arm64", "aarch64"):
-            return "arm64"
-        return "amd64"
+        return Solution.from_directory(root_dir=self.root_dir)
 
-
-
+    @cached_property
+    def projects(self) -> dict[str, Project]:
+        """Get the projects for the solution.
 
         Returns:
-
+            dict[str, Project]: The projects for the solution.
         """
-
-
-
-
-
-
-
-
-
-
-
-
+        return self.solution.projects
+
+    @property
+    def dist_dir(self) -> Path:
+        """Get distribution directory path."""
+        return self.config.dist_dir
+
+    @property
+    def build_dir(self) -> Path:
+        """Get build directory path."""
+        return self.config.build_dir
+
+    def clean_project(self) -> None:
+        """Clean build artifacts and package files using strategy pattern."""
+        logger.info("Cleaning build artifacts using strategy pattern...")
+
+        entries_to_clean = [
+            entry
+            for entry in self.root_dir.iterdir()
+            if self.cleaning_strategy.should_clean(entry)
+        ]
+
+        if not entries_to_clean:
+            logger.info("No build artifacts found to clean")
+            return
 
-
-
-
-
-
-
-
+        # Track cleaning results
+        cleaned_dirs: list[str] = []
+        cleaned_files: list[str] = []
+        failed_operations: list[str] = []
+
+        for entry in entries_to_clean:
+            success, message = self.cleaning_strategy.clean_entry(entry)
+            if success:
+                if entry.is_dir():
+                    cleaned_dirs.append(str(entry))
+                else:
+                    cleaned_files.append(str(entry))
+                logger.debug(message)
+            else:
+                failed_operations.append(message)
+                logger.warning(message)
 
-
-
-
-            cache_dir=cache_dir,
-            offline=self.offline,
-            keep_cache=True,
-            skip_speed_test=False,
-            arch=arch,
+        # Summary logging
+        logger.info(
+            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
         )
 
-
-
-    async def execute(self, context: dict[str, Any]) -> Any:
-        """Execute Python installation."""
-        logger.info(f"Starting Python installation: {self.version}")
-
-        try:
-            cache_dir = self._prepare_cache_dir()
-            arch = self._get_architecture()
-            install_result = self._install_python(cache_dir, arch)
-
-            logger.info(f"Python {self.version} installed to {self.target_dir}")
-
-            return {
-                "version": self.version,
-                "target_dir": str(self.target_dir),
-                "install_result": install_result,
-            }
-
-        except Exception as e:
-            logger.error(f"Failed to install Python: {e}")
-            raise
-
+        if failed_operations:
+            logger.error(f"Failed operations: {len(failed_operations)}")
 
-
-
-
-    def __init__(
-        self,
-        project_dir: Path,
-        output_dir: Path,
-        cache_dir: Path | None = None,
-        python_version: str | None = None,
-        max_workers: int = 4,
-        archive_format: str = "zip",
-        mirror: str = "aliyun",
-        timeout: float = 300.0,
-    ):
-        super().__init__("pack_libraries", 10.0, ["parse_project"], timeout)
-        self.project_dir = project_dir
-        self.output_dir = output_dir
-        self.cache_dir = cache_dir
-        self.python_version = python_version
-        self.max_workers = max_workers
-        self.archive_format = archive_format
-        self.mirror = mirror
-
-    def _create_packer(self) -> Any:
-        """Create PyLibPack instance.
+    async def _run_sync_task(self, name: str, setup_func: Callable[[], None]) -> None:
+        """Run a synchronous task in thread pool executor.
 
-
-
+        Args:
+            name: Name of the task for logging
+            setup_func: Function that returns the task to execute
         """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            output_dir=self.output_dir,
-            max_workers=self.max_workers,
+        logger.info(LOG_SEPARATOR)
+        logger.info(f"Packing {name}...")
+
+        loop = asyncio.get_running_loop()
+        await loop.run_in_executor(None, setup_func)
+        logger.info(f"{name.capitalize()} packed.")
+
+    async def pack_embed_python(self) -> None:
+        """Pack embed python."""
+        from sfi.pyembedinstall.pyembedinstall import EmbedInstaller
+
+        def _run():
+            installer = EmbedInstaller(
+                root_dir=self.root_dir,
+                cache_dir=self.config.cache_dir,
+                offline=self.config.offline,
             )
+            installer.run()
 
-
-            f"Library packing completed: {pack_result.successful}/{pack_result.total}"
-        )
+        await self._run_sync_task("embed python", _run)
 
-
-
-
-        }
+    async def pack_loaders(self) -> None:
+        """Pack loaders for all projects concurrently."""
+        from sfi.pyloadergen.pyloadergen import PyLoaderGenerator
 
-
-
-
+        def _run():
+            generator = PyLoaderGenerator(root_dir=self.root_dir)
+            generator.run()
 
+        await self._run_sync_task("loaders", _run)
 
-
-
+    async def pack_libraries(self) -> None:
+        """Pack libraries for all projects concurrently."""
+        from sfi.pylibpack.pylibpack import PyLibPacker, PyLibPackerConfig
 
-
-
-
-
-
-
-        super().__init__(
-            "generate_loader", 100000, ["parse_project", "pack_source"], timeout
-        )
-        self.base_dir = base_dir
-        self.debug = debug
+        def _run():
+            libpacker = PyLibPacker(
+                working_dir=self.root_dir,
+                config=PyLibPackerConfig(self.config.cache_dir),
+            )
+            libpacker.run()
 
-
-        """Execute loader generation."""
-        try:
-            from sfi.pyloadergen.pyloadergen import generate_loader
+        await self._run_sync_task("libraries", _run)
 
-
-
-
-            raise
+    async def pack_source(self) -> None:
+        """Pack source code for all projects concurrently."""
+        from sfi.pysourcepack.pysourcepack import PySourcePacker
 
+        def _run():
+            source_packer = PySourcePacker(root_dir=self.root_dir)
+            source_packer.run()
 
-
-    """Task to assemble final package."""
+        await self._run_sync_task("source code", _run)
 
-    def
-
-    ):
-        self.output_dir = output_dir
-        super().__init__("assemble_package", self._assemble, dependencies, timeout)
+    async def pack_archive(self, archive_format: str) -> None:
+        """Create archive for all projects.
 
-
-
-
-
-        if isinstance(parse_result, dict) and "projects_file" in parse_result:
-            return Path(parse_result["projects_file"]).parent
-        return Path.cwd()
+        Args:
+            archive_format: Archive format (zip, tar, gztar, bztar, xztar, 7z, nsis)
+        """
+        from sfi.pyarchive.pyarchive import PyArchiveConfig, PyArchiver
 
-
-
-
-
-        )
-        dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
-        dist_dir.mkdir(parents=True, exist_ok=True)
-        return dist_dir
-
-    def _copy_loaders(self, project_dir: Path, dist_dir: Path) -> int:
-        """Copy loaders and entry files to dist directory."""
-        loader_dir = project_dir / "dist"
-        if not loader_dir.exists():
-            return 0
-
-        loaders_count = 0
-        for loader in loader_dir.glob("*.ent"):
-            logger.info(f"Entry file: {loader.name}")
-            loaders_count += 1
-
-        # Support both Linux (no extension) and Windows (.exe) executables
-        for loader in loader_dir.glob("*.exe"):
-            logger.info(f"Executable: {loader.name}")
-            shutil.copy2(loader, dist_dir / loader.name)
-            loaders_count += 1
-
-        # Also copy Linux executables (files without extension)
-        for loader in loader_dir.glob("*"):
-            if loader.is_file() and not loader.name.endswith((".ent", ".exe")):
-                logger.info(f"Executable: {loader.name}")
-                shutil.copy2(loader, dist_dir / loader.name)
-                loaders_count += 1
-
-        logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")
-        return loaders_count
-
-    def _copy_libraries(self, project_dir: Path, dist_dir: Path) -> None:
-        """Copy libraries to dist/site-packages directory."""
-        libs_dir = project_dir / "dist" / "libs"
-        if not libs_dir.exists():
-            return
+        def _run():
+            config = PyArchiveConfig(verbose=self.config.debug)
+            archiver = PyArchiver(root_dir=self.root_dir, config=config)
+            archiver.archive_projects(format=archive_format)
 
-
-        if dest_libs_dir.exists():
-            shutil.rmtree(dest_libs_dir)
-        shutil.copytree(libs_dir, dest_libs_dir)
-        logger.info(f"Libraries copied to {dest_libs_dir}")
+        await self._run_sync_task(f"{archive_format} archives", _run)
 
-    def
-        """
-        metadata = {
-            "version": __version__,
-            "build": __build__,
-            "assembled_at": asyncio.get_event_loop().time(),
-        }
+    async def build(self) -> dict[str, Any]:
+        """Execute the packaging workflow with concurrent optimization.
 
-
-
-
+        Workflow stages:
+        1. Pack embed python (must be first)
+        2. Pack loaders, libraries, and source in parallel
+        3. Create archive (optional, if archive_type is set)
 
-
+        Returns:
+            Dict with results and summary including output_dir and metadata
 
-
-
-
-        """
-        logger.info("Starting
+        Raises:
+            FileNotFoundError: If required directories don't exist
+            RuntimeError: If any packaging step fails
+        """
+        logger.info("Starting packaging workflow execution")
+        start_time = time.perf_counter()
 
         try:
-
-
-
-
-
-
+            # Stage 1: Pack embed python (prerequisite for other tasks)
+            await self.pack_embed_python()
+
+            # Stage 2: Pack loaders, libraries, and source concurrently
+            logger.info(LOG_SEPARATOR)
+            logger.info("Running parallel tasks: loaders, libraries, source...")
+            await asyncio.gather(
+                self.pack_loaders(),
+                self.pack_libraries(),
+                self.pack_source(),
+            )
 
-
-
+            # Stage 3: Create archive (optional)
+            if self.config.archive_type:
+                await self.pack_archive(self.config.archive_type)
 
-        except
-            logger.error(f"Failed to assemble package: {e}")
+        except (FileNotFoundError, RuntimeError):
             raise
+        except Exception as e:
+            logger.error(f"Workflow execution failed: {e}")
+            raise RuntimeError(f"Packaging workflow failed: {e}") from e
 
+        elapsed = time.perf_counter() - start_time
+        logger.info(LOG_SEPARATOR)
+        logger.info(f"Packaging workflow completed in {elapsed:.2f}s")
+        return {"output_dir": str(self.dist_dir), "metadata": {}}
 
-
-
-
-
-
-        self.config = config
-        self.engine = WorkflowEngine(max_concurrent=config.max_concurrent)
-
-    def _add_parse_task(self) -> None:
-        """Add project parsing task to workflow."""
-        parse_task = ParseProjectTask(
-            directory=self.directory,
-            recursive=self.config.recursive,
-        )
-        self.engine.add_task(parse_task)
-
-    def _add_pack_and_install_tasks(self) -> None:
-        """Add pack source and install Python tasks to workflow."""
-        pack_task = PackSourceTask(
-            directory=self.directory,
-            projects_file=self.directory / "projects.json",
-            project_name=self.config.project_name,
-        )
-        self.engine.add_task(pack_task)
-
-        install_task = InstallPythonTask(
-            version=self.config.python_version,
-            target_dir=self.directory / "dist" / "runtime",
-            offline=self.config.offline,
-        )
-        self.engine.add_task(install_task)
-
-    def _add_pack_libraries_task(self) -> None:
-        """Add library packing task to workflow."""
-        lib_pack_task = PackLibrariesTask(
-            project_dir=self.directory,
-            output_dir=self.directory / "dist/site-packages",
-            cache_dir=self.config.cache_dir,
-            python_version=self.config.python_version,
-            max_workers=self.config.max_concurrent,
-            archive_format=self.config.archive_format,
-            mirror=self.config.mirror,
-        )
-        self.engine.add_task(lib_pack_task)
+    def list_projects(self) -> None:
+        """List all available projects."""
+        logger.info(f"Listing projects in {self.root_dir}")
+        for project in self.projects.values():
+            logger.info(f"  - {project}")
 
-    def
-        """
+    def _scan_executables(self) -> list[Path]:
+        """Scan dist directory for executable files.
 
         Returns:
-            List of
+            List of executable file paths found in dist directory.
         """
-
-
+        dist_dir = self.dist_dir
+        if not dist_dir.exists():
+            return []
+        return [f for f in dist_dir.glob(f"*{ext}") if f.is_file()]
 
-
-
+    def _resolve_executable(self, match_name: str | None) -> Path | None:
+        """Resolve executable by scanning dist directory and matching name.
 
-
-
+        Args:
+            match_name: Executable name or partial name to match
 
-
+        Returns:
+            Path to matched executable, or None if not found/ambiguous.
+        """
+        executables = self._scan_executables()
+        if not executables:
+            return None
 
-
-
+        # Auto-select if only one executable and no name specified
+        if not match_name:
+            return executables[0] if len(executables) == 1 else None
 
-
-            dependencies: List of task dependencies
-        """
-        assemble_task = AssemblePackageTask(
-            output_dir=self.directory / "dist",
-            dependencies=dependencies,
-        )
-        self.engine.add_task(assemble_task)
+        lower_name = match_name.lower()
 
-
-
-
+        # Try exact match (without extension)
+        for exe in executables:
+            if exe.stem.lower() == lower_name:
+                return exe
 
-
-
-
-        dependencies = self._add_loader_task()
-        self._add_assemble_task(dependencies)
+        # Try fuzzy match (case-insensitive substring)
+        matches = [exe for exe in executables if lower_name in exe.stem.lower()]
+        return matches[0] if len(matches) == 1 else None
 
-
+    def list_executables(self) -> None:
+        """List all executables in dist directory."""
+        executables = self._scan_executables()
+        if not executables:
+            logger.info("No executables found in dist directory")
+            return
+        logger.info(f"Available executables in {self.dist_dir}:")
+        for exe in executables:
+            logger.info(f"  - {exe.stem}")
 
-    def
-
+    def run_project(
+        self, match_name: str | None, project_args: list[str] | None = None
+    ) -> None:
+        """Run an executable with fuzzy name matching support.
 
         Args:
-
+            match_name: Executable name or partial name to match
+            project_args: Additional arguments to pass to the executable
         """
-
-        logger.info("Workflow execution summary:")
-        logger.info(f"  Total tasks: {summary['total_tasks']}")
-        logger.info(f"  Completed: {summary['completed']}")
-        logger.info(f"  Failed: {summary['failed']}")
-        logger.info(f"  Success rate: {summary['success_rate'] * 100:.1f}%")
-        logger.info(f"  Total time: {summary['total_execution_time']:.2f}s")
-        logger.info("=" * 50)
-
-    async def execute(self) -> dict[str, Any]:
-        """Execute the packaging workflow.
+        exe_path = self._resolve_executable(match_name)
 
-
-
-
-
+        # Handle executable not found cases
+        if not exe_path:
+            self._handle_executable_not_found(match_name)
+            return
 
-
+        # Build and execute command
+        cmd = self._build_executable_command(exe_path, project_args)
 
         try:
-
-
-
-
-
-
+            subprocess.run(cmd, check=True, cwd=self.root_dir)
+            logger.info(f"{exe_path.stem} ran successfully")
+        except subprocess.CalledProcessError as e:
+            logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
+        except FileNotFoundError:
+            logger.error(f"Executable not found: {exe_path}")
         except Exception as e:
-            logger.error(f"
-            raise
-
+            logger.error(f"Failed to run {exe_path}: {e}")
 
-    def
-
+    def _handle_executable_not_found(self, match_name: str | None) -> None:
+        """Handle cases where executable cannot be found.
 
-
-
-
-
-
-
-
-
+        Args:
+            match_name: Executable name that was being searched for
+        """
+        executables = self._scan_executables()
+
+        if not match_name:
+            if len(executables) == 0:
+                logger.error("No executables found in dist directory")
+            elif len(executables) > 1:
+                logger.error(
+                    "Multiple executables found. Please specify which one to run:"
+                )
+                self.list_executables()
+            else:
+                logger.error("Unable to auto-select executable")
+        else:
+            logger.error(f"Executable '{match_name}' not found")
 
-
-
-        level(f"No projects.json found in {directory}")
-        logger.info("Run 'pypack build' first to create projects.json")
-        return None
+        if len(executables) > 0:
+            self.list_executables()
 
-
-
-
+    def _build_executable_command(
+        self, exe_path: Path, project_args: list[str] | None
+    ) -> list[str]:
+        """Build command list for executable execution.
 
-
-
-
-        return None
-
-        return projects_data
-    except Exception as e:
-        logger.error(f"Failed to load projects.json: {e}")
-        return None
+        Args:
+            exe_path: Path to executable
+            project_args: Additional arguments to pass
 
+        Returns:
+            List of command arguments
+        """
+        cmd = [str(exe_path.resolve())]
+        if project_args:
+            cmd.extend(project_args)
+            logger.info(f"Arguments: {' '.join(project_args)}")
+        logger.info(f"Running: {' '.join(cmd)}")
+        return cmd
 
-def _get_project_info(project_info: Any) -> tuple[str, str, str]:
-    """Extract project info from project data.
 
-
-
+def parse_args() -> argparse.Namespace:
+    """Parse command line arguments using subcommand structure.
 
     Returns:
-
+        Parsed arguments namespace
     """
-
-
-
-    version = (
-        project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
-    )
-    entry = "main.py"
-    description = (
-        project_info.get("description", "") if isinstance(project_info, dict) else ""
+    parser = argparse.ArgumentParser(
+        prog="pypack", description="Python packaging tool with workflow orchestration"
     )
-    return version, entry, description
-
-
-def _print_project(project_name: str, project_info: Any, index: int) -> None:
-    """Print project information.
 
-
-
-
-        index: Index number for listing
-    """
-    version, entry, description = _get_project_info(project_info)
-
-    logger.info(f"\n[{index}] {project_name}")
-    logger.info(f"  Version: {version}")
-    logger.info(f"  Entry: {entry}")
-    if description:
-        logger.info(f"  Description: {description}")
-
-
-def list_projects(directory: Path) -> None:
-    """List projects from projects.json.
-
-    Args:
-        directory: Project directory containing projects.json
-    """
-    projects_data = _load_projects_data(directory, silent=True)
-
-    if not projects_data:
-        return
-
-    logger.info(f"Found {len(projects_data)} project(s):")
-    logger.info("=" * 60)
-
-    for i, (project_name, project_info) in enumerate(projects_data.items(), 1):
-        _print_project(project_name, project_info, i)
-
-    logger.info("=" * 60)
-
-
-def _get_available_executables(directory: Path) -> list[str]:
-    """Get list of available executables in dist directory.
-
-    Args:
-        directory: Project directory
-
-    Returns:
-        List of executable names (without .exe extension)
-    """
-    dist_dir = directory / "dist"
-    if not dist_dir.exists():
-        logger.error(f"Dist directory not found: {dist_dir}")
-        logger.info("Run 'pypack build' first to build the project")
-        return []
-
-    # Support both Windows (.exe) and Linux (no extension) executables
-    exe_names = set()
-    for exe in dist_dir.glob("*.exe"):
-        if exe.is_file():
-            exe_names.add(exe.stem)
-
-    for exe in dist_dir.glob("*"):
-        if exe.is_file() and not exe.name.endswith((".ent", ".exe", ".json")):
-            exe_names.add(exe.name)
-
-    available_exes = list(exe_names)
-    if not available_exes:
-        logger.error("No executables found in dist directory")
-        logger.info("Run 'pypack build' first to build the project")
-
-    return available_exes
-
-
-def _find_matching_executables(
-    project_name: str, available_exes: list[str]
-) -> list[str]:
-    """Find executables matching project name.
-
-    Args:
-        project_name: Project name to match
-        available_exes: List of available executable names
-
-    Returns:
-        List of matching executable names
-    """
-    return [
-        exe
-        for exe in available_exes
-        if exe == project_name or exe.startswith(f"{project_name}-")
-    ]
-
-
-def _log_available_executables(available_exes: list[str]) -> None:
-    """Log available executable names.
-
-    Args:
-        available_exes: List of available executable names
-    """
-    logger.info("Available executables:")
-    for exe in available_exes:
-        logger.info(f"** {exe} **")
-
-
-def _resolve_exact_project_name(
-    project_name: str, available_exes: list[str]
-) -> str | None:
-    """Resolve executable when project name is specified.
-
-    Args:
-        project_name: Project name to resolve
-        available_exes: List of available executable names
-
-    Returns:
-        Target executable name or None
-    """
-    if project_name in available_exes:
-        return project_name
-    return None
-
-
-def _resolve_project_by_name(
-    project_name: str, available_exes: list[str]
-) -> str | None:
-    """Resolve executable by matching project name.
-
-    Args:
-        project_name: Project name to resolve
-        available_exes: List of available executable names
-
-    Returns:
-        Target executable name or None
-    """
-    matching_exes = _find_matching_executables(project_name, available_exes)
-
-    if len(matching_exes) == 1:
-        return matching_exes[0]
-    elif len(matching_exes) > 1:
-        logger.error(f"Multiple executables found for project '{project_name}':")
-        for exe in matching_exes:
-            logger.info(f"  - {exe}")
-        logger.info("Please specify the full executable name (e.g., 'docscan-gui')")
-        return None
-
-    logger.error(f"No executable found for project '{project_name}'")
-    return None
+    # Add common arguments to parent parser
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
-
-
-
-
-    Args:
-        available_exes: List of available executable names
-
-    Returns:
-        Target executable name or None
-    """
-    if len(available_exes) == 1:
-        logger.info(f"Running single executable: {available_exes[0]}")
-        return available_exes[0]
-
-    logger.error(
-        "Multiple executables found. Please specify executable name with --project"
+    # Create subparsers
+    subparsers = parser.add_subparsers(
+        dest="command", required=True, help="Available commands"
     )
-    _log_available_executables(available_exes)
-    return None
-
-
-def _resolve_target_executable(
-    project_name: str | None,
-    available_exes: list[str],
-    projects_data: dict[str, Any],
-) -> str | None:
-    """Resolve which executable to run based on project name.
-
-    Args:
-        project_name: Project or executable name to run
-        available_exes: List of available executable names
-        projects_data: Projects data dict
 
-
-
-
-
-
-
-    exact_match = _resolve_exact_project_name(project_name, available_exes)
-    if exact_match:
-        return exact_match
-
-    if project_name in projects_data:
-        return _resolve_project_by_name(project_name, available_exes)
-
-    logger.error(f"Project or executable '{project_name}' not found")
-    _log_available_executables(available_exes)
-    return None
-
-
-def _execute_project(exe_path: Path, dist_dir: Path) -> None:
-    """Execute the project executable.
-
-    Args:
-        exe_path: Path to the executable
-        dist_dir: Working directory for execution
-    """
-    import subprocess
-
-    try:
-        subprocess.run([str(exe_path)], cwd=str(dist_dir), check=True)
-    except subprocess.CalledProcessError as e:
-        logger.error(f"Project execution failed with exit code {e.returncode}")
-    except KeyboardInterrupt:
-        logger.info("Project execution interrupted")
-    except FileNotFoundError:
-        logger.error(f"Failed to execute {exe_path}")
-
-
-def run_project(project_name: str | None, directory: Path) -> None:
-    """Run a built project.
-
-    Args:
-        project_name: Project name or executable name to run (e.g., 'docscan' or 'docscan-gui')
-        directory: Project directory
-    """
-    projects_data = _load_projects_data(directory)
-    if not projects_data:
-        return
-
-    available_exes = _get_available_executables(directory)
-    if not available_exes:
-        return
-
-    target_exe_name = _resolve_target_executable(
-        project_name, available_exes, projects_data
+    # Version subcommand
+    subparsers.add_parser(
+        "version",
+        aliases=["v"],
+        help="Show version information",
+        parents=[parent_parser],
     )
-    if not target_exe_name:
-        return
-
-    # Try both Windows (.exe) and Linux (no extension) executables
-    exe_path_with_ext = directory / "dist" / f"{target_exe_name}.exe"
-    exe_path_no_ext = directory / "dist" / target_exe_name
 
-
-
-
-
-
-    if not exe_path:
-        logger.error(f"Executable not found for '{target_exe_name}'")
-        logger.info("Run 'pypack build' first to build the project")
-        return
-
-    logger.info(f"Starting: {target_exe_name}")
-    logger.info(f"Executable: {exe_path}")
-    _execute_project(exe_path, directory / "dist")
-
-
-def clean_project(directory: Path) -> None:
-    """Clean build artifacts and package files.
-
-    Args:
-        directory: Project directory to clean
-    """
-    logger.info("Cleaning build artifacts...")
-
-    cleaned_dirs, cleaned_files = _clean_build_artifacts(directory)
-
-    if not cleaned_dirs and not cleaned_files:
-        logger.info("No build artifacts found")
-    else:
-        logger.info(
-            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
-        )
-
-
-def _should_clean(entry: Path) -> bool:
-    """Check if a directory or file should be cleaned.
-
-    Args:
-        entry: Path to check
-
-    Returns:
-        True if the entry should be cleaned
-    """
-    protected_dirs = {
-        ".git",
-        ".venv",
-        ".virtualenv",
-        ".vscode",
-        ".idea",
-        ".codebuddy",
-        ".qoder",
-    }
-
-    if entry.is_file() and entry.name == "projects.json":
-        return True
-
-    if not entry.is_dir():
-        return False
-
-    if entry.name.startswith(".") and entry.name in protected_dirs:
-        return False
-
-    return (
-        entry.name.startswith(".")
-        or entry.name.startswith("__")
-        or entry.name in ("build", "dist", "pysfi_build", "cbuild", "benchmarks")
+    # List subcommand
+    subparsers.add_parser(
+        "list",
+        aliases=["l", "ls"],
+        help="List available projects",
+        parents=[parent_parser],
     )
 
-
-
-
-
-    Args:
-        directory: Project directory to clean
-
-    Returns:
-        Tuple of (cleaned directories, cleaned files)
-    """
-    cleaned_dirs = []
-    cleaned_files = []
-
-    entries_to_clean = [f for f in directory.iterdir() if _should_clean(f)]
-
-    for entry in entries_to_clean:
-        if not entry.exists():
-            continue
-
-        if entry.is_dir():
-            logger.info(f"Removing directory: {entry}")
-            if _remove_directory(entry):
-                cleaned_dirs.append(str(entry))
-                logger.info(f"  Removed directory: {entry}")
-            else:
-                logger.warning(f"  Failed to remove {entry}")
-        elif entry.is_file():
-            logger.info(f"Removing file: {entry}")
-            if _remove_file(entry):
-                cleaned_files.append(str(entry))
-                logger.info(f"  Removed file: {entry}")
-            else:
-                logger.warning(f"  Failed to remove {entry}")
-
-    return cleaned_dirs, cleaned_files
-
-
-def _remove_directory(dir_path: Path) -> bool:
-    """Remove a directory safely.
-
-    Args:
-        dir_path: Directory path to remove
-
-    Returns:
-        True if removal succeeded
-    """
-    try:
-        shutil.rmtree(dir_path)
-        return True
-    except Exception:
-        return False
-
-
-def _remove_file(file_path: Path) -> bool:
-    """Remove a file safely.
-
-    Args:
-        file_path: File path to remove
-
-    Returns:
-        True if removal succeeded
-    """
-    try:
-        file_path.unlink()
-        return True
-    except Exception:
-        return False
-
-
-def parse_args() -> argparse.Namespace:
-    """Parse command line arguments.
-
-    Returns:
-        Parsed arguments namespace
-    """
-    parser = argparse.ArgumentParser(
-        prog="pypack", description="Python packaging tool with workflow orchestration"
+    # Clean subcommand
+    subparsers.add_parser(
+        "clean", aliases=["c"], help="Clean build artifacts", parents=[parent_parser]
     )
 
-
-
-
-        help="Action to perform",
+    # Run subcommand
+    run_parser = subparsers.add_parser(
+        "run", aliases=["r"], help="Run a project", parents=[parent_parser]
     )
-
+    run_parser.add_argument(
         "project",
         type=str,
         nargs="?",
-        default=None,
         help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
     )
-
-
+    run_parser.add_argument(
+        "args",
+        type=str,
+        nargs="*",
+        help="Additional arguments to pass to the project",
+    )
+
+    # Build subcommand
+    build_parser = subparsers.add_parser(
+        "build", aliases=["b"], help="Build project packages", parents=[parent_parser]
+    )
+    build_parser.add_argument(
         "--python-version", type=str, default="3.8.10", help="Python version to install"
     )
-
+    build_parser.add_argument(
         "--loader-type",
         type=str,
-        choices=
-        default=
+        choices=[lt.value for lt in LoaderType],
+        default=LoaderType.CONSOLE.value,
         help="Loader type",
     )
-
+    build_parser.add_argument(
         "--entry-suffix",
         type=str,
        default=".ent",
         help="Entry file suffix (default: .ent, alternatives: .py)",
     )
-
+    build_parser.add_argument(
         "--no-loader", action="store_true", help="Skip loader generation"
     )
-
-        "-
+    build_parser.add_argument(
+        "-o", "--offline", action="store_true", help="Offline mode"
    )
-
-    parser.add_argument(
+    build_parser.add_argument(
        "-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks"
     )
 
-    # Library packing arguments
-
+    # Library packing arguments (build-specific)
+    build_parser.add_argument(
         "--cache-dir",
         type=str,
         default=None,
         help="Custom cache directory for dependencies",
     )
-
+    build_parser.add_argument(
         "--archive-format",
         type=str,
-        choices=
-        default=
+        choices=[af.value for af in ArchiveFormat],
+        default=ArchiveFormat.ZIP.value,
         help="Archive format for dependencies",
     )
-
+    build_parser.add_argument(
         "--mirror",
         type=str,
-        default=
-        choices=
+        default=MirrorSource.ALIYUN.value,
+        choices=[ms.value for ms in MirrorSource],
         help="PyPI mirror source for faster downloads",
     )
 
-
-
-
-
-
-
+    # Archive options (optional post-build step)
+    build_parser.add_argument(
+        "--archive",
+        "-a",
+        type=str,
+        nargs="?",
+        const="zip",
+        default=None,
+        choices=["zip", "tar", "gztar", "bztar", "xztar", "7z", "nsis"],
+        help="Create archive after build (default format: zip if flag used)",
+    )
 
-
-        await workflow.execute()
-        logger.info("Packaging completed successfully!")
-    except Exception as e:
-        logger.error(f"Packaging failed: {e}")
-        raise
+    return parser.parse_args()
 
 
 def main() -> None:
-    """Main entry point for package workflow tool."""
+    """Main entry point for package workflow tool using factory pattern."""
     args = parse_args()
 
+    # Configure logging level
     if args.debug:
         logging.getLogger().setLevel(logging.DEBUG)
 
-
-
-
-
-
-
-
-
-    if args.action in {"run", "r"}:
-        run_project(args.project, cwd)
-        return
-
-    if args.action in {"clean", "c"}:
-        clean_project(cwd)
-        return
-
-    cache_dir = Path(args.cache_dir) if args.cache_dir else None
-    config = WorkflowConfig(
-        directory=cwd,
-        project_name=args.project,
-        python_version=args.python_version,
-        loader_type=args.loader_type,
-        entry_suffix=args.entry_suffix,
-        generate_loader=not args.no_loader,
-        recursive=args.recursive,
-        offline=args.offline,
-        max_concurrent=args.jobs,
-        debug=args.debug,
-        cache_dir=cache_dir,
-        archive_format=args.archive_format,
-        mirror=args.mirror,
+    # Build workflow configuration using factory pattern
+    config = ConfigFactory.create_from_args(args, cwd)
+
+    workflow = PackageWorkflow(
+        root_dir=config.directory,
+        config=config,
+        cleaning_strategy=StandardCleaningStrategy(),
     )
 
-
+    # Command dispatch using pattern matching
+    command = args.command
+
+    if command in ("version", "v"):
+        logger.info(f"pypack {__version__} (build {__build__})")
+    elif command in ("list", "l", "ls"):
+        workflow.list_projects()
+    elif command in ("run", "r"):
+        workflow.run_project(args.project, args.args)
+    elif command in ("clean", "c"):
+        workflow.clean_project()
+    elif command in ("build", "b"):
+        try:
+            asyncio.run(workflow.build())
+            logger.info("Packaging completed successfully!")
+        except Exception as e:
+            logger.error(f"Packaging failed: {e}")
+            raise
 
 
 if __name__ == "__main__":
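Two details of the new code are easy to miss. First, CleaningStrategy is a typing.Protocol, so StandardCleaningStrategy satisfies it purely structurally: it never inherits from it, yet type-checks as the cleaning_strategy field of PackageWorkflow. A self-contained sketch of that mechanism follows; DotfileStrategy is illustrative and not part of pysfi, and runtime_checkable is added here so the isinstance check works (the diff's protocol is static-only):

from __future__ import annotations

from pathlib import Path
from typing import Protocol, runtime_checkable


@runtime_checkable
class Strategy(Protocol):
    """Structural interface, analogous to the diff's CleaningStrategy."""

    def should_clean(self, entry: Path) -> bool: ...


class DotfileStrategy:
    """Satisfies Strategy by shape alone -- no inheritance required."""

    def should_clean(self, entry: Path) -> bool:
        return entry.name.startswith(".")


def sweep(root: Path, strategy: Strategy) -> list[Path]:
    # Any object with a compatible should_clean() is accepted, which is
    # how PackageWorkflow accepts its cleaning_strategy field.
    return [entry for entry in root.iterdir() if strategy.should_clean(entry)]


print(isinstance(DotfileStrategy(), Strategy))  # True via runtime_checkable
print(sweep(Path.cwd(), DotfileStrategy()))     # dot-entries in the CWD

Second, PackageWorkflow combines @dataclass(frozen=True) with cached_property. That works because cached_property writes the computed value straight into the instance __dict__, bypassing the frozen __setattr__; it would break if the dataclass also used slots=True.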