pysfi 0.1.7__py3-none-any.whl → 0.1.10__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.7.dist-info → pysfi-0.1.10.dist-info}/METADATA +5 -3
- pysfi-0.1.10.dist-info/RECORD +39 -0
- {pysfi-0.1.7.dist-info → pysfi-0.1.10.dist-info}/entry_points.txt +4 -1
- sfi/__init__.py +1 -1
- sfi/bumpversion/__init__.py +1 -1
- sfi/docscan/__init__.py +1 -1
- sfi/docscan/docscan.py +407 -103
- sfi/docscan/docscan_gui.py +1282 -596
- sfi/docscan/lang/eng.py +152 -0
- sfi/docscan/lang/zhcn.py +170 -0
- sfi/embedinstall/embedinstall.py +77 -17
- sfi/makepython/makepython.py +29 -28
- sfi/pdfsplit/pdfsplit.py +173 -173
- sfi/pylibpack/__init__.py +0 -0
- sfi/pylibpack/pylibpack.py +913 -0
- sfi/pyloadergen/pyloadergen.py +697 -111
- sfi/pypack/__init__.py +0 -0
- sfi/pypack/pypack.py +791 -0
- sfi/pysourcepack/pysourcepack.py +369 -0
- sfi/workflowengine/__init__.py +0 -0
- sfi/workflowengine/workflowengine.py +444 -0
- pysfi-0.1.7.dist-info/RECORD +0 -31
- sfi/pypacker/fspacker.py +0 -91
- {pysfi-0.1.7.dist-info → pysfi-0.1.10.dist-info}/WHEEL +0 -0
- /sfi/{pypacker → docscan/lang}/__init__.py +0 -0
sfi/pypack/pypack.py
ADDED
|
@@ -0,0 +1,791 @@
|
|
|
1
|
+
"""Package Workflow - Advanced Python project packaging tool with workflow orchestration.
|
|
2
|
+
|
|
3
|
+
This module provides a comprehensive packaging solution that integrates projectparse,
|
|
4
|
+
pysourcepack, embedinstall, and pyloadergen tools through a workflow engine to achieve
|
|
5
|
+
mixed serial and parallel execution for optimal efficiency.
|
|
6
|
+
"""
|
|
7
|
+
|
|
8
|
+
from __future__ import annotations

import argparse
import asyncio
import json
import logging
import platform
import shutil
import time
from pathlib import Path
from typing import Any

from sfi.workflowengine.workflowengine import (
    CPUTask,
    IOTask,
    SerialTask,
    WorkflowEngine,
)
|
|
25
|
+
|
|
26
|
+
# Configure root logging once at import time so CLI output is timestamped.
# NOTE(review): basicConfig at import affects the whole process — acceptable
# for a CLI entry module, but confirm no library consumer imports this.
logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(levelname)s - %(message)s")
logger = logging.getLogger(__name__)

# Tool version metadata, reported by the `version`/`v` CLI action.
__version__ = "1.0.0"
__build__ = "20260120"
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
class ParseProjectTask(IOTask):
    """Task to parse project configuration using projectparse.

    Scans ``directory`` for project definitions, writes the parsed result
    to ``<directory>/projects.json``, and returns it for downstream tasks
    (pack_source, install_python, pack_libraries, generate_loader), all of
    which declare a dependency on this task's name ``"parse_project"``.
    """

    def __init__(self, directory: Path, recursive: bool = False, timeout: float = 60.0):
        # The second super() argument (2.0) is presumably a scheduling
        # weight/priority for the workflow engine — confirm against IOTask.
        super().__init__("parse_project", 2.0, [], timeout)
        self.directory = directory
        self.recursive = recursive

    async def execute(self, context: dict[str, Any]) -> Any:
        """Parse projects and persist them to projects.json.

        Args:
            context: Workflow execution context supplied by the engine
                (unused here).

        Returns:
            dict with keys ``"projects"`` (the parsed mapping of project
            name -> project info) and ``"projects_file"`` (path to the
            written projects.json as a string).

        Raises:
            Exception: re-raised after logging if parsing or writing fails.
        """
        logger.info(f"Starting project parsing: {self.directory}")

        try:
            # Import projectparse module lazily so importing this module
            # does not require the parser's dependencies.
            from sfi.projectparse.projectparse import parse_project_data

            # Parse project
            projects = parse_project_data(self.directory, recursive=self.recursive)

            # Save to projects.json (UTF-8, human-readable) for later
            # `pypack list` / `pypack run` invocations.
            output_file = self.directory / "projects.json"
            with open(output_file, "w", encoding="utf-8") as f:
                json.dump(projects, f, indent=2, ensure_ascii=False)

            logger.info(f"Found {len(projects)} project(s)")
            for project_name, project_info in projects.items():
                # project_info may be malformed (non-dict); fall back to N/A.
                version = project_info.get("version", "N/A") if isinstance(project_info, dict) else "N/A"
                logger.info(f" - {project_name}: {version}")

            return {"projects": projects, "projects_file": str(output_file)}

        except Exception as e:
            logger.error(f"Failed to parse projects: {e}")
            raise
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
class PackSourceTask(IOTask):
    """Task to pack source code using pysourcepack.

    Depends on ``parse_project``; packs either a single named project or
    every project listed in ``projects_file`` into ``dist/src``.
    """

    def __init__(self, directory: Path, projects_file: Path, project_name: str | None = None, timeout: float = 120.0):
        # 5.0 is presumably a scheduling weight/priority — confirm IOTask.
        super().__init__("pack_source", 5.0, ["parse_project"], timeout)
        self.directory = directory
        self.projects_file = projects_file
        # When None, all projects from projects_file are packed.
        self.project_name = project_name

    async def execute(self, context: dict[str, Any]) -> Any:
        """Pack project sources into dist/src.

        Args:
            context: Workflow execution context supplied by the engine
                (unused here).

        Returns:
            dict with ``"packed_projects"`` (list of packed project names)
            and ``"output_dir"`` (always the relative path ``"dist/src"``).

        Raises:
            Exception: re-raised after logging on any packing failure.
        """
        logger.info(f"Starting source packing: {self.directory}")

        try:
            # Import pysourcepack module lazily.
            from sfi.pysourcepack.pysourcepack import load_projects, pack_project

            # Load projects
            projects = load_projects(self.projects_file)

            # Pack specified project or all projects
            # projects is a dict: {project_name: project_info}
            project_names = [self.project_name] if self.project_name else list(projects.keys())

            packed_projects = []
            for name in project_names:
                logger.info(f"Packing project: {name}")
                # NOTE(review): output_dir is relative to the current working
                # directory, not self.directory — confirm this is intended.
                pack_project(name, projects, self.directory, output_dir=Path("dist/src"))
                packed_projects.append(name)

            logger.info(f"Packed {len(packed_projects)} project(s)")

            return {"packed_projects": packed_projects, "output_dir": "dist/src"}

        except Exception as e:
            logger.error(f"Failed to pack source: {e}")
            raise
|
|
106
|
+
|
|
107
|
+
|
|
108
|
+
class InstallPythonTask(IOTask):
    """Task to install embedded Python using embedinstall.

    Depends on ``parse_project``; installs an embedded Python runtime of
    the requested version into ``target_dir`` (the workflow uses
    ``<project>/dist/runtime``).
    """

    def __init__(
        self,
        version: str,
        target_dir: Path,
        offline: bool = False,
        timeout: float = 300.0,
    ):
        # 10.0 is presumably a scheduling weight/priority — confirm IOTask.
        super().__init__("install_python", 10.0, ["parse_project"], timeout)
        self.version = version
        self.target_dir = target_dir
        # offline=True should make embedinstall use only cached archives.
        self.offline = offline

    async def execute(self, context: dict[str, Any]) -> Any:
        """Install the embedded Python runtime.

        Args:
            context: Workflow execution context supplied by the engine
                (unused here).

        Returns:
            dict with ``"version"``, ``"target_dir"`` (string path) and
            ``"install_result"`` (whatever install_embed_python returns).

        Raises:
            Exception: re-raised after logging on installation failure.
        """
        logger.info(f"Starting Python installation: {self.version}")

        try:
            # Import embedinstall module lazily.
            from sfi.embedinstall.embedinstall import DEFAULT_CACHE_DIR, install_embed_python

            # Setup cache directory (use default cache directory)
            cache_dir = DEFAULT_CACHE_DIR
            cache_dir.mkdir(parents=True, exist_ok=True)

            # Normalize the machine architecture to the names embedinstall
            # understands. NOTE(review): unknown architectures silently fall
            # back to amd64 — a warning might be preferable; confirm intent.
            arch = platform.machine().lower()
            if arch in ("amd64", "x86_64"):
                arch = "amd64"
            elif arch in ("arm64", "aarch64"):
                arch = "arm64"
            else:
                arch = "amd64"

            # Install embedded Python
            install_result = install_embed_python(
                target_dir=self.target_dir,
                version=self.version,
                cache_dir=cache_dir,
                offline=self.offline,
                keep_cache=True,  # Keep cache for future use
                skip_speed_test=False,
                arch=arch,
            )

            logger.info(f"Python {self.version} installed to {self.target_dir}")

            return {
                "version": self.version,
                "target_dir": str(self.target_dir),
                "install_result": install_result,
            }

        except Exception as e:
            logger.error(f"Failed to install Python: {e}")
            raise
|
|
166
|
+
|
|
167
|
+
|
|
168
|
+
class PackLibrariesTask(IOTask):
    """Task to pack dependencies using pylibpack.

    Depends on ``parse_project``; downloads/packs the project's third-party
    dependencies into ``output_dir`` (the workflow uses
    ``<project>/dist/site-packages``).
    """

    def __init__(
        self,
        project_dir: Path,
        output_dir: Path,
        cache_dir: Path | None = None,
        python_version: str | None = None,
        max_workers: int = 4,
        archive_format: str = "zip",
        mirror: str = "aliyun",
        timeout: float = 300.0,
    ):
        # 10.0 is presumably a scheduling weight/priority — confirm IOTask.
        super().__init__("pack_libraries", 10.0, ["parse_project"], timeout)
        self.project_dir = project_dir
        self.output_dir = output_dir
        # None lets PyLibPack choose its own default cache location.
        self.cache_dir = cache_dir
        self.python_version = python_version
        self.max_workers = max_workers
        # NOTE(review): archive_format is stored but never passed to
        # packer.pack() below — verify whether PyLibPack reads it elsewhere
        # or the parameter is currently dead.
        self.archive_format = archive_format
        self.mirror = mirror

    async def execute(self, context: dict[str, Any]) -> Any:
        """Pack the project's dependencies.

        Args:
            context: Workflow execution context supplied by the engine
                (unused here).

        Returns:
            dict with ``"pack_result"`` (PyLibPack's result object, which
            exposes at least ``successful``, ``total`` and ``output_dir``)
            and ``"packages_dir"`` (copied from ``pack_result.output_dir``).

        Raises:
            Exception: re-raised after logging on packing failure.
        """
        logger.info(f"Starting library packing: {self.project_dir}")

        try:
            # Import pylibpack module lazily.
            from sfi.pylibpack.pylibpack import PyLibPack

            # Initialize packer with mirror support
            packer = PyLibPack(cache_dir=self.cache_dir, python_version=self.python_version, mirror=self.mirror)

            # Pack dependencies
            pack_result = packer.pack(
                base_dir=self.project_dir,
                output_dir=self.output_dir,
                max_workers=self.max_workers,
            )

            logger.info(f"Library packing completed: {pack_result.successful}/{pack_result.total}")

            return {
                "pack_result": pack_result,
                "packages_dir": pack_result.output_dir,
            }

        except Exception as e:
            logger.error(f"Failed to pack libraries: {e}")
            raise
|
|
219
|
+
|
|
220
|
+
|
|
221
|
+
class GenerateLoaderTask(CPUTask):
    """Task to generate loader using pyloadergen.

    Depends on ``parse_project`` and ``pack_source``; compiles a native
    loader executable (and ``.ent`` entry file) for one project, resolving
    the project name from the parse results when not given explicitly.
    """

    def __init__(
        self,
        project_name: str | None,
        is_debug: bool = False,
        compiler: str | None = None,
        timeout: float = 60.0,
        project_dir: Path | None = None,  # Added for backward compatibility with tests
        output_dir: Path | None = None,  # Added for backward compatibility with tests
    ):
        # NOTE(review): project_dir/output_dir are accepted but intentionally
        # not stored (compatibility shims); the real paths are derived from
        # the parse_project result in execute().
        # 100000 is presumably a scheduling weight/priority far above the
        # other tasks' 2.0–10.0 — confirm against CPUTask semantics.
        super().__init__("generate_loader", 100000, ["parse_project", "pack_source"], timeout)
        self.project_name = project_name
        self.is_debug = is_debug
        # None means auto-detect a compiler at execution time.
        self.compiler = compiler

    async def execute(self, inputs: dict[str, Any]) -> Any:
        """Generate the loader executable for the resolved project.

        Args:
            inputs: Mapping of upstream task name -> task result object;
                ``inputs["parse_project"].data`` must be the dict returned
                by ParseProjectTask.execute.

        Returns:
            dict with ``"project_name"`` and ``"success"``; on success also
            ``"output_exe"`` and ``"entry_file"`` (string paths).

        Raises:
            ValueError: if the parse result is malformed, the requested
                project is unknown, or no project name can be resolved.
            Exception: any pyloadergen failure, re-raised after logging.
        """
        try:
            # Import pyloadergen module lazily.
            from sfi.pyloadergen.pyloadergen import find_compiler, generate_loader

            # Get projects.json path from parse_project task
            parse_result = inputs["parse_project"].data
            if not isinstance(parse_result, dict):
                logger.error("Parse project result is not a dictionary")
                raise ValueError("Invalid parse result format")

            projects_file = parse_result.get("projects_file")
            if not projects_file:
                logger.error("projects.json path not found in parse result")
                raise ValueError("Missing projects_file in parse result")

            projects_file = Path(projects_file)
            projects = parse_result.get("projects", {})

            # Determine project name: explicit name wins if it exists;
            # otherwise a single parsed project is used implicitly;
            # anything else is an error.
            if self.project_name and self.project_name in projects:
                project_name = self.project_name
            elif len(projects) == 1:
                project_name = next(iter(projects.keys()))
            elif self.project_name:
                logger.error(f"Project '{self.project_name}' not found in parsed projects")
                raise ValueError(f"Project not found: {self.project_name}")
            else:
                logger.error("Multiple projects found but no project name specified")
                raise ValueError("Please specify project name when multiple projects exist")

            logger.info(f"Starting loader generation for project: {project_name}")

            # Find compiler if not specified; a missing compiler is only a
            # warning here — generate_loader decides whether it is fatal.
            compiler = self.compiler
            if compiler is None:
                compiler = find_compiler()
                if compiler is None:
                    logger.warning("No compiler found, loader generation may fail")

            # Generate loader using simplified generate_loader function
            success = generate_loader(
                project_name=project_name,
                projects_json_path=projects_file,
                is_debug=self.is_debug,
                compiler=compiler,
            )

            if success:
                # Output paths are by convention <project>/dist/<name>.exe
                # and .ent — presumably matching pyloadergen's output layout;
                # confirm against generate_loader.
                project_dir = projects_file.parent
                output_dir = project_dir / "dist"
                output_exe = output_dir / f"{project_name}.exe"
                output_ent = output_dir / f"{project_name}.ent"

                logger.info(f"Loader generated successfully: {output_exe}")
                logger.info(f"Entry file: {output_ent}")

                return {
                    "project_name": project_name,
                    "output_exe": str(output_exe),
                    "entry_file": str(output_ent),
                    "success": True,
                }
            else:
                # Failure is reported in the result rather than raised, so
                # the workflow can continue/assemble partial output.
                logger.error(f"Failed to generate loader for {project_name}")
                return {
                    "project_name": project_name,
                    "success": False,
                }

        except Exception as e:
            logger.error(f"Failed to generate loader: {e}")
            raise
|
|
313
|
+
|
|
314
|
+
|
|
315
|
+
class AssemblePackageTask(SerialTask):
    """Task to assemble the final package.

    Collects the artifacts produced by the upstream tasks (loader
    executables, ``.ent`` entry files, packed libraries) into the
    project's ``dist`` directory and writes a ``metadata.json``
    describing the build.
    """

    def __init__(self, output_dir: Path, dependencies: list[str], timeout: float = 60.0):
        self.output_dir = output_dir

        def assemble(inputs: dict[str, Any], state: dict[str, Any]) -> Any:
            """Assemble the final package.

            Args:
                inputs: Mapping of upstream task name -> result object;
                    ``inputs["parse_project"].data`` is used (when present)
                    to locate the project directory.
                state: Workflow state supplied by the engine (unused).

            Returns:
                dict with ``"output_dir"`` (string path of the assembled
                dist directory) and ``"metadata"`` (the metadata written
                to metadata.json).

            Raises:
                Exception: re-raised after logging on any assembly failure.
            """
            logger.info("Starting package assembly")

            try:
                # Determine project directory from the parse_project result
                # if available; otherwise fall back to the current directory.
                project_dir = None
                if "parse_project" in inputs and hasattr(inputs["parse_project"], "data"):
                    parse_result = inputs["parse_project"].data
                    if isinstance(parse_result, dict) and "projects_file" in parse_result:
                        project_dir = Path(parse_result["projects_file"]).parent
                if not project_dir:
                    project_dir = Path.cwd()

                # Normalize output_dir to a directory actually named "dist".
                dist_dir = self.output_dir.parent if self.output_dir.is_file() else self.output_dir
                dist_dir = dist_dir if dist_dir.name == "dist" else dist_dir.parent / "dist"
                dist_dir.mkdir(parents=True, exist_ok=True)

                # Python runtime is already in dist/runtime from
                # InstallPythonTask — no need to copy.

                # Log entry files in place and copy loader executables into
                # the dist directory.
                loader_dir = project_dir / "dist"
                if loader_dir.exists():
                    loaders_count = 0
                    for loader in loader_dir.glob("*.ent"):
                        logger.info(f"Entry file: {loader.name}")
                        loaders_count += 1
                    for loader in loader_dir.glob("*.exe"):
                        logger.info(f"Executable: {loader.name}")
                        # Copy loader to dist
                        shutil.copy2(loader, dist_dir / loader.name)
                        loaders_count += 1
                    logger.info(f"Found and copied {loaders_count} loader(s) and entry file(s)")

                # Copy packed libraries into dist/site-packages, replacing
                # any stale copy from a previous build.
                libs_dir = project_dir / "dist" / "libs"
                if libs_dir.exists():
                    dest_libs_dir = dist_dir / "site-packages"
                    if dest_libs_dir.exists():
                        shutil.rmtree(dest_libs_dir)
                    shutil.copytree(libs_dir, dest_libs_dir)
                    logger.info(f"Libraries copied to {dest_libs_dir}")

                # Create package metadata.
                # BUG FIX: previously used asyncio.get_event_loop().time(),
                # which is a monotonic clock relative to an arbitrary epoch
                # (and deprecated to call with no running loop) — useless as
                # a timestamp. Record wall-clock epoch seconds instead.
                metadata = {
                    "version": __version__,
                    "build": __build__,
                    "assembled_at": time.time(),
                }

                metadata_file = dist_dir / "metadata.json"
                with open(metadata_file, "w", encoding="utf-8") as f:
                    json.dump(metadata, f, indent=2)

                logger.info(f"Package assembled: {dist_dir}")

                return {"output_dir": str(dist_dir), "metadata": metadata}

            except Exception as e:
                logger.error(f"Failed to assemble package: {e}")
                raise

        super().__init__("assemble_package", assemble, dependencies, timeout)
|
|
386
|
+
|
|
387
|
+
|
|
388
|
+
class PackageWorkflow:
    """Package workflow orchestrator.

    Wires the individual tasks into a WorkflowEngine DAG:

    - Phase 1: parse_project
    - Phase 2: pack_source, install_python, pack_libraries (parallel,
      all depending on parse_project)
    - Phase 3: generate_loader (optional, depends on parse + pack_source)
    - Phase 4: assemble_package (depends on everything enabled above)
    """

    def __init__(self, directory: Path, config: dict[str, Any]):
        # directory: project root to build; config: flat options dict built
        # by main() from CLI arguments.
        self.directory = directory
        self.config = config
        self.engine = WorkflowEngine(max_concurrent=config.get("max_concurrent", 4))

    def build_workflow(self) -> WorkflowEngine:
        """Register all tasks on the engine and return it.

        Returns:
            The configured WorkflowEngine (also kept on ``self.engine``).
        """
        logger.info("Building packaging workflow")

        # Phase 1: Parse project
        parse_task = ParseProjectTask(
            directory=self.directory,
            recursive=self.config.get("recursive", False),
        )
        self.engine.add_task(parse_task)

        # Phase 2: Pack source, install Python, and pack libraries (parallel)
        pack_task = PackSourceTask(
            directory=self.directory,
            projects_file=self.directory / "projects.json",
            project_name=self.config.get("project_name"),
        )
        self.engine.add_task(pack_task)

        install_task = InstallPythonTask(
            version=self.config.get("python_version", "3.8.10"),
            target_dir=self.directory / "dist" / "runtime",
            offline=self.config.get("offline", False),
        )
        self.engine.add_task(install_task)

        # Pack libraries task (always enabled unless explicitly skipped)
        if not self.config.get("skip_pack_libraries", False):
            cache_dir_value = self.config.get("cache_dir")
            cache_dir = Path(cache_dir_value) if cache_dir_value else None
            lib_pack_task = PackLibrariesTask(
                project_dir=self.directory,
                output_dir=self.directory / "dist/site-packages",
                cache_dir=cache_dir,
                python_version=self.config.get("python_version", "3.8.10"),
                max_workers=self.config.get("max_concurrent", 4),
                archive_format=self.config.get("archive_format", "zip"),
                mirror=self.config.get("mirror", "aliyun"),
            )
            self.engine.add_task(lib_pack_task)

        # Phase 3: Generate loader
        if self.config.get("generate_loader", False):
            # Create loader task - project name will be determined from parse_project results
            loader_task = GenerateLoaderTask(
                project_name=self.config.get("project_name"),
                is_debug=self.config.get("debug", False),
            )
            self.engine.add_task(loader_task)
            assembly_deps = ["pack_source", "install_python", "generate_loader"]
        else:
            assembly_deps = ["pack_source", "install_python"]

        # Add pack_libraries to assembly dependencies (always added unless skipped)
        if not self.config.get("skip_pack_libraries", False):
            assembly_deps.append("pack_libraries")

        # Phase 4: Assemble final package
        assemble_task = AssemblePackageTask(
            output_dir=self.directory / "dist",
            dependencies=assembly_deps,
        )
        self.engine.add_task(assemble_task)

        return self.engine

    async def execute(self) -> dict[str, Any]:
        """Build and execute the packaging workflow.

        Returns:
            dict with ``"results"`` (per-task results from the engine) and
            ``"summary"`` (the engine's execution summary, which exposes at
            least total_tasks/completed/failed/success_rate/
            total_execution_time).

        Raises:
            Exception: re-raised after logging if the engine run fails.
        """
        logger.info("Starting packaging workflow execution")

        # Build workflow
        self.build_workflow()

        # Execute workflow
        try:
            results = await self.engine.execute_workflow()

            # Get summary
            summary = self.engine.get_execution_summary()

            logger.info("=" * 50)
            logger.info("Workflow execution summary:")
            logger.info(f" Total tasks: {summary['total_tasks']}")
            logger.info(f" Completed: {summary['completed']}")
            logger.info(f" Failed: {summary['failed']}")
            logger.info(f" Success rate: {summary['success_rate'] * 100:.1f}%")
            logger.info(f" Total time: {summary['total_execution_time']:.2f}s")
            logger.info("=" * 50)

            return {"results": results, "summary": summary}

        except Exception as e:
            logger.error(f"Workflow execution failed: {e}")
            raise
|
|
490
|
+
|
|
491
|
+
|
|
492
|
+
def list_projects(directory: Path) -> None:
    """Log a summary of the projects recorded in projects.json.

    Args:
        directory: Project directory containing projects.json
    """
    projects_file = directory / "projects.json"

    if not projects_file.exists():
        logger.warning(f"No projects.json found in {directory}")
        logger.info("Run 'pypack build' first to create projects.json")
        return

    try:
        with open(projects_file, encoding="utf-8") as f:
            data = json.load(f)

        # data is a mapping: {project_name: project_info}
        if not data:
            logger.info("No projects found in projects.json")
            return

        logger.info(f"Found {len(data)} project(s):")
        logger.info("=" * 60)
        for idx, (name, info) in enumerate(data.items(), 1):
            # Tolerate malformed entries: info may be a dict, a bare
            # string, or anything else.
            if isinstance(info, dict):
                version = info.get("version", "N/A")
                entry = "main.py"  # default entry file
                description = info.get("description", "")
            elif isinstance(info, str):
                version = "N/A"
                entry = "N/A"
                description = ""
            else:
                version = "N/A"
                entry = "main.py"  # default entry file
                description = ""

            logger.info(f"\n[{idx}] {name}")
            logger.info(f" Version: {version}")
            logger.info(f" Entry: {entry}")
            if description:
                logger.info(f" Description: {description}")

        logger.info("=" * 60)

    except Exception as e:
        logger.error(f"Failed to load projects.json: {e}")
|
|
537
|
+
|
|
538
|
+
|
|
539
|
+
def run_project(project_name: str | None, directory: Path) -> None:
    """Run a built project.

    Resolves an executable under ``<directory>/dist`` from either an exact
    executable stem, a project name from projects.json (optionally with a
    ``<project>-`` suffixed variant such as 'docscan-gui'), or — when no
    name is given — the single available executable. Every failure path
    logs and returns; nothing is raised to the caller.

    Args:
        project_name: Project name or executable name to run (e.g., 'docscan' or 'docscan-gui')
        directory: Project directory
    """
    # Load projects
    projects_file = directory / "projects.json"

    if not projects_file.exists():
        logger.error(f"No projects.json found in {directory}")
        logger.info("Run 'pypack build' first to create projects.json")
        return

    try:
        with open(projects_file, encoding="utf-8") as f:
            projects_data = json.load(f)

        # projects_data is a dict: {project_name: project_info}
        if not projects_data:
            logger.error("No projects found in projects.json")
            return

        # Find all available executables in dist directory
        dist_dir = directory / "dist"
        if not dist_dir.exists():
            logger.error(f"Dist directory not found: {dist_dir}")
            logger.info("Run 'pypack build' first to build the project")
            return

        # Candidate names are the .exe stems (Windows-style loaders —
        # presumably this tool targets Windows; confirm for other OSes).
        available_exes = [exe.stem for exe in dist_dir.glob("*.exe")]
        if not available_exes:
            logger.error("No executables found in dist directory")
            logger.info("Run 'pypack build' first to build the project")
            return

        # Determine which executable to run
        target_exe_name = None
        if project_name:
            # Check if project_name is an exact executable name (e.g., 'docscan-gui')
            if project_name in available_exes:
                target_exe_name = project_name
            # Check if project_name matches a project in projects.json
            elif project_name in projects_data:
                # Look for matching executables: the bare name or any
                # '<name>-' prefixed variant (e.g. 'docscan-gui').
                matching_exes = [
                    exe for exe in available_exes if exe == project_name or exe.startswith(f"{project_name}-")
                ]
                if len(matching_exes) == 1:
                    target_exe_name = matching_exes[0]
                elif len(matching_exes) > 1:
                    # Ambiguous: require the user to pick one explicitly.
                    logger.error(f"Multiple executables found for project '{project_name}':")
                    for exe in matching_exes:
                        logger.info(f" - {exe}")
                    logger.info("Please specify the full executable name (e.g., 'docscan-gui')")
                    return
                else:
                    logger.error(f"No executable found for project '{project_name}'")
                    return
            else:
                logger.error(f"Project or executable '{project_name}' not found")
                logger.info("Available executables:")
                for exe in available_exes:
                    logger.info(f"** {exe} **")
                return
        else:
            # No project_name specified: only unambiguous if exactly one
            # executable exists.
            if len(available_exes) == 1:
                target_exe_name = available_exes[0]
                logger.info(f"Running single executable: {target_exe_name}")
            else:
                logger.error("Multiple executables found. Please specify executable name with --project")
                logger.info("Available executables:")
                for exe in available_exes:
                    logger.info(f"** {exe} **")
                return

        # Find the executable
        exe_path = dist_dir / f"{target_exe_name}.exe"

        if not exe_path.exists():
            logger.error(f"Executable not found: {exe_path}")
            logger.info("Run 'pypack build' first to build the project")
            return

        logger.info(f"Starting: {target_exe_name}")
        logger.info(f"Executable: {exe_path}")

        import subprocess

        try:
            # Run from dist directory so the executable can find runtime and entry files
            subprocess.run([str(exe_path)], cwd=str(dist_dir), check=True)
        except subprocess.CalledProcessError as e:
            logger.error(f"Project execution failed with exit code {e.returncode}")
        except KeyboardInterrupt:
            logger.info("Project execution interrupted")
        except FileNotFoundError:
            logger.error(f"Failed to execute {exe_path}")

    except Exception as e:
        logger.error(f"Failed to run project: {e}")
|
|
642
|
+
|
|
643
|
+
|
|
644
|
+
def clean_project(directory: Path) -> None:
    """Clean build artifacts and package files.

    Removes, from the top level of *directory* only: well-known build
    output directories (``build``, ``dist``, ``pysfi_build``, ``cbuild``,
    ``benchmarks``), directories whose names start with ``.`` or ``__``,
    and the generated ``projects.json`` file. Individual removal failures
    are logged as warnings and do not abort the sweep.

    Args:
        directory: Project directory to clean.
    """
    # Directory names that are always treated as build artifacts.
    artifact_dirs = {"build", "dist", "pysfi_build", "cbuild", "benchmarks"}

    def should_clean(entry: Path) -> bool:
        """Return True if *entry* is a build artifact to remove."""
        # BUG FIX: the original wrapped a single or-chain in any([...]) and
        # named the parameter dir_path despite also matching a file.
        if entry.is_dir():
            return (
                entry.name.startswith(".")
                or entry.name.startswith("__")
                or entry.name in artifact_dirs
            )
        # The only file-level artifact is the generated projects.json.
        return entry.is_file() and entry.name == "projects.json"

    logger.info("Cleaning build artifacts...")
    cleaned_dirs: list[str] = []
    cleaned_files: list[str] = []
    entries_to_clean = [f for f in directory.iterdir() if should_clean(f)]
    for entry in entries_to_clean:
        # Re-check existence: an earlier removal (or a concurrent process)
        # may already have deleted this entry.
        if not entry.exists():
            continue
        if entry.is_dir():
            logger.info(f"Removing directory: {entry}")
            try:
                shutil.rmtree(entry)
                cleaned_dirs.append(str(entry))
                logger.info(f" Removed directory: {entry}")
            except Exception as e:
                logger.warning(f" Failed to remove {entry}: {e}")
        elif entry.is_file():
            logger.info(f"Removing file: {entry}")
            try:
                entry.unlink()
                cleaned_files.append(str(entry))
                logger.info(f" Removed file: {entry}")
            except Exception as e:
                logger.warning(f" Failed to remove {entry}: {e}")

    if not cleaned_dirs and not cleaned_files:
        logger.info("No build artifacts found")
    else:
        # Typo fix: "directorie(s)" -> "directory(ies)".
        logger.info(f"Cleaned {len(cleaned_dirs)} directory(ies) and {len(cleaned_files)} file(s)")
|
|
691
|
+
|
|
692
|
+
|
|
693
|
+
def main() -> None:
    """Main entry point for the package workflow tool.

    Parses CLI arguments, dispatches the quick actions (version, list,
    run, clean) directly, and otherwise builds a config dict and runs the
    full PackageWorkflow against the current working directory. Build
    failures are logged and re-raised (non-zero exit via the traceback).
    """
    parser = argparse.ArgumentParser(prog="pypack", description="Python packaging tool with workflow orchestration")

    parser.add_argument(
        "action",
        choices=("build", "b", "list", "l", "run", "r", "clean", "c", "version", "v"),
        help="Action to perform",
    )
    parser.add_argument(
        "-p", "--project", type=str, default=None, help="Project or executable name (e.g., 'docscan' or 'docscan-gui')"
    )
    parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
    parser.add_argument("--python-version", type=str, default="3.8.10", help="Python version to install")
    parser.add_argument("--loader-type", type=str, choices=("console", "gui"), default="console", help="Loader type")
    parser.add_argument(
        "--entry-suffix", type=str, default=".ent", help="Entry file suffix (default: .ent, alternatives: .py)"
    )
    parser.add_argument("--no-loader", action="store_true", help="Skip loader generation")
    parser.add_argument("-r", "--recursive", action="store_true", help="Parse projects recursively")
    parser.add_argument("-o", "--offline", action="store_true", help="Offline mode")
    parser.add_argument("-j", "--jobs", type=int, default=4, help="Maximum concurrent tasks")

    # Library packing arguments
    parser.add_argument("--no-pack-libs", action="store_true", help="Skip packing project dependencies")
    parser.add_argument("--cache-dir", type=str, default=None, help="Custom cache directory for dependencies")
    parser.add_argument(
        "--archive-format",
        type=str,
        choices=("zip", "tar", "gztar", "bztar", "xztar"),
        default="zip",
        help="Archive format for dependencies",
    )
    parser.add_argument(
        "--mirror",
        type=str,
        default="aliyun",
        choices=("pypi", "tsinghua", "aliyun", "ustc", "douban", "tencent"),
        help="PyPI mirror source for faster downloads",
    )

    args = parser.parse_args()
    # All actions operate on the directory pypack is invoked from.
    working_dir = Path.cwd()

    # Setup logging
    if args.debug:
        logging.getLogger().setLevel(logging.DEBUG)

    # Version command
    if args.action in {"version", "v"}:
        logger.info(f"pypack {__version__} (build {__build__})")
        return

    # List command
    if args.action in {"list", "l"}:
        list_projects(working_dir)
        return

    # Run command
    if args.action in {"run", "r"}:
        run_project(args.project, working_dir)
        return

    # Clean command
    if args.action in {"clean", "c"}:
        clean_project(working_dir)
        return

    # Build configuration consumed by PackageWorkflow.build_workflow().
    # NOTE(review): loader_type and entry_suffix are placed in config but
    # build_workflow never reads them — verify whether they are consumed
    # elsewhere or currently dead options.
    config = {
        "directory": working_dir,
        "project_name": args.project,
        "python_version": args.python_version,
        "loader_type": args.loader_type,
        "entry_suffix": args.entry_suffix,
        "generate_loader": not args.no_loader,
        "recursive": args.recursive,
        "offline": args.offline,
        "max_concurrent": args.jobs,
        "debug": args.debug,
        "skip_pack_libraries": args.no_pack_libs,
        "cache_dir": args.cache_dir,
        "archive_format": args.archive_format,
        "mirror": args.mirror,
    }

    # Create and execute workflow
    workflow = PackageWorkflow(directory=working_dir, config=config)

    try:
        asyncio.run(workflow.execute())
        logger.info("Packaging completed successfully!")
    except Exception as e:
        logger.error(f"Packaging failed: {e}")
        raise
|
|
788
|
+
|
|
789
|
+
|
|
790
|
+
# Script entry point when executed directly (also exposed as a console
# script via the package's entry_points).
if __name__ == "__main__":
    main()
|