pysfi 0.1.13__py3-none-any.whl → 0.1.14__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.13.dist-info → pysfi-0.1.14.dist-info}/METADATA +1 -1
- {pysfi-0.1.13.dist-info → pysfi-0.1.14.dist-info}/RECORD +29 -31
- {pysfi-0.1.13.dist-info → pysfi-0.1.14.dist-info}/entry_points.txt +1 -0
- sfi/__init__.py +20 -5
- sfi/alarmclock/__init__.py +3 -3
- sfi/bumpversion/__init__.py +5 -5
- sfi/bumpversion/bumpversion.py +64 -15
- sfi/cleanbuild/__init__.py +3 -3
- sfi/cleanbuild/cleanbuild.py +5 -1
- sfi/cli.py +13 -2
- sfi/condasetup/__init__.py +1 -1
- sfi/condasetup/condasetup.py +91 -76
- sfi/docdiff/__init__.py +1 -1
- sfi/docdiff/docdiff.py +3 -2
- sfi/docscan/__init__.py +3 -3
- sfi/docscan/docscan.py +78 -23
- sfi/docscan/docscan_gui.py +5 -5
- sfi/filedate/filedate.py +12 -5
- sfi/img2pdf/img2pdf.py +5 -5
- sfi/llmquantize/llmquantize.py +44 -33
- sfi/llmserver/__init__.py +1 -1
- sfi/makepython/makepython.py +880 -319
- sfi/pdfsplit/pdfsplit.py +45 -12
- sfi/pyarchive/__init__.py +1 -1
- sfi/pylibpack/pylibpack.py +5 -13
- sfi/pypack/pypack.py +127 -105
- sfi/pyprojectparse/pyprojectparse.py +11 -14
- sfi/pysourcepack/__init__.py +1 -1
- sfi/workflowengine/__init__.py +0 -0
- sfi/workflowengine/workflowengine.py +0 -547
- {pysfi-0.1.13.dist-info → pysfi-0.1.14.dist-info}/WHEEL +0 -0
sfi/pdfsplit/pdfsplit.py
CHANGED

@@ -7,7 +7,6 @@ from pathlib import Path
 import fitz
 
 logging.basicConfig(level=logging.INFO, format="%(message)s")
-cwd = Path.cwd()
 logger = logging.getLogger(__name__)
 
 
@@ -50,7 +49,9 @@ def split_by_number(input_file: Path, output_file: Path, number: int) -> None:
 
         end_page = min(current_page + pages_in_this_part, total_pages)
 
-        part_file = output_file.parent / f"{output_file.stem}_part{i + 1}{output_file.suffix}"
+        part_file = (
+            output_file.parent / f"{output_file.stem}_part{i + 1}{output_file.suffix}"
+        )
         part_doc = fitz.open()
 
         for page_num in range(current_page, end_page):
@@ -58,7 +59,9 @@ def split_by_number(input_file: Path, output_file: Path, number: int) -> None:
 
         part_doc.save(part_file)
         part_doc.close()
-        logger.info(f"Created part {i + 1}: {part_file} (pages {current_page + 1}-{end_page})")
+        logger.info(
+            f"Created part {i + 1}: {part_file} (pages {current_page + 1}-{end_page})"
+        )
 
         current_page = end_page
 
@@ -77,7 +80,10 @@ def split_by_size(input_file: Path, output_file: Path, size: int) -> None:
 
     while start_page < total_pages:
         end_page = min(start_page + size, total_pages)
-        part_file = output_file.parent / f"{output_file.stem}_part{part + 1}{output_file.suffix}"
+        part_file = (
+            output_file.parent
+            / f"{output_file.stem}_part{part + 1}{output_file.suffix}"
+        )
         part_doc = fitz.open()
 
         for page_num in range(start_page, end_page):
@@ -85,7 +91,9 @@ def split_by_size(input_file: Path, output_file: Path, size: int) -> None:
 
         part_doc.save(part_file)
         part_doc.close()
-        logger.info(f"Created part {part + 1}: {part_file} (pages {start_page + 1}-{end_page})")
+        logger.info(
+            f"Created part {part + 1}: {part_file} (pages {start_page + 1}-{end_page})"
+        )
 
         start_page = end_page
         part += 1
@@ -122,18 +130,37 @@ def split_by_range(input_file: Path, output_file: Path, range_str: str) -> None:
 
 
 def main() -> None:
+    """Main entry point for pdfsplit CLI."""
     parser = argparse.ArgumentParser(description="Split PDF files")
     parser.add_argument("input", help="Input PDF file")
-    parser.add_argument(
-
-
+    parser.add_argument(
+        "output", nargs="?", help="Output PDF file (optional for -n and -s modes)"
+    )
+    parser.add_argument(
+        "-o",
+        "--output-dir",
+        default=".",
+        help="Output directory (default: current directory)",
+    )
+    parser.add_argument(
+        "-f",
+        "--output-format",
+        help="Output file format pattern, e.g., 'split_{part:02d}.pdf'",
+    )
     parser.add_argument("-v", "--verbose", action="store_true", help="Verbose output")
 
     # Split by number, size, or range
     group = parser.add_mutually_exclusive_group(required=True)
     group.add_argument("-n", "--number", type=int, help="Number of splits")
-    group.add_argument(
-
+    group.add_argument(
+        "-s", "--size", type=int, default=1, help="Size of each split in pages"
+    )
+    group.add_argument(
+        "-r",
+        "--range",
+        type=str,
+        help="Range of pages to extract, e.g., '1,2,4-10,15-20,25-'",
+    )
 
     args = parser.parse_args()
 
@@ -142,7 +169,9 @@ def main() -> None:
 
     output_dir = Path(args.output_dir)
     if not output_dir.is_dir():
-        logger.error(f"Output directory {args.output_dir} does not exist, please check the path.")
+        logger.error(
+            f"Output directory {args.output_dir} does not exist, please check the path."
+        )
         return
 
     input_file = Path(args.input)
@@ -157,7 +186,11 @@ def main() -> None:
         return
 
     if not args.range:
-        output_file = output_dir / (input_file.stem + "_split.pdf") if not args.output else Path(args.output)
+        output_file = (
+            output_dir / (input_file.stem + "_split.pdf")
+            if not args.output
+            else Path(args.output)
+        )
     else:
        output_file = Path(args.output)
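The pdfsplit changes are mostly black-style line wrapping plus a reworked CLI, but the part-file naming is easier to see in isolation. Below is a minimal, self-contained sketch of the split-by-size loop as it now reads, assuming PyMuPDF (imported as fitz) is installed; the per-page copy loop from the module is collapsed here into a single insert_pdf call, so treat this as an illustration rather than the module's exact body.

import fitz
from pathlib import Path

def split_by_size(input_file: Path, output_file: Path, size: int) -> None:
    """Write consecutive size-page chunks as <stem>_partN<suffix> files."""
    doc = fitz.open(input_file)
    total_pages = doc.page_count
    start_page, part = 0, 0
    while start_page < total_pages:
        end_page = min(start_page + size, total_pages)
        part_file = (
            output_file.parent
            / f"{output_file.stem}_part{part + 1}{output_file.suffix}"
        )
        part_doc = fitz.open()  # new, empty PDF
        # to_page is inclusive in PyMuPDF, hence end_page - 1
        part_doc.insert_pdf(doc, from_page=start_page, to_page=end_page - 1)
        part_doc.save(part_file)
        part_doc.close()
        start_page = end_page
        part += 1
    doc.close()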
sfi/pyarchive/__init__.py
CHANGED

@@ -1 +1 @@
-
+
sfi/pylibpack/pylibpack.py
CHANGED

@@ -54,7 +54,7 @@ DEFAULT_MAX_WORKERS: Final[int] = 4
 DEFAULT_MIRROR: Final[str] = "aliyun"
 DEFAULT_OPTIMIZE: Final[bool] = True
 
-PYPI_MIRRORS = {
+PYPI_MIRRORS: Final[dict[str, str]] = {
     "pypi": "https://pypi.org/simple",
     "tsinghua": "https://pypi.tuna.tsinghua.edu.cn/simple",
     "aliyun": "https://mirrors.aliyun.com/pypi/simple/",
@@ -915,14 +915,10 @@ class LibraryCache:
     @staticmethod
     def _should_skip_dist_info(file_path: Path) -> bool:
         """Check if the file path should be skipped because it's a dist-info directory."""
-        name = file_path.name
-        if name.endswith(".dist-info"):
+        if file_path.name.endswith(".dist-info"):
             return True
         # Check if any parent directory ends with .dist-info
-        for part in file_path.parts:
-            if part.endswith(".dist-info"):
-                return True
-        return False
+        return any(part.endswith(".dist-info") for part in file_path.parts)
 
     def clear_cache(self) -> None:
         """Clear all cached packages."""
@@ -1410,14 +1406,10 @@ class PyLibPacker:
     @staticmethod
     def _should_skip_dist_info(file_path: Path) -> bool:
         """Check if the file path should be skipped because it's a dist-info directory."""
-        name = file_path.name
-        if name.endswith(".dist-info"):
+        if file_path.name.endswith(".dist-info"):
             return True
         # Check if any parent directory ends with .dist-info
-        for part in file_path.parts:
-            if part.endswith(".dist-info"):
-                return True
-        return False
+        return any(part.endswith(".dist-info") for part in file_path.parts)
 
     def run(self) -> None:
         """Pack project dependencies from base directory with concurrent processing."""
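The duplicated _should_skip_dist_info refactor above collapses an explicit loop into a single any() expression. A self-contained sketch of the new form, using only pathlib (the function name mirrors the diff; the sample paths are illustrative):

from pathlib import Path

def should_skip_dist_info(file_path: Path) -> bool:
    """Skip any path that is, or sits inside, a *.dist-info directory."""
    if file_path.name.endswith(".dist-info"):
        return True
    # Path.parts covers every component, parent directories and final name alike
    return any(part.endswith(".dist-info") for part in file_path.parts)

# should_skip_dist_info(Path("requests-2.31.0.dist-info/METADATA"))  -> True
# should_skip_dist_info(Path("requests/api.py"))                     -> False

Note that file_path.parts already includes the final component, so the leading name check is strictly redundant; it survives the refactor only as a readable fast path.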
sfi/pypack/pypack.py
CHANGED

@@ -24,7 +24,7 @@ from dataclasses import dataclass, field
 from enum import Enum
 from functools import cached_property
 from pathlib import Path
-from typing import Any, Protocol
+from typing import Any, Callable, Protocol
 
 from sfi.pyprojectparse.pyprojectparse import Project, Solution
 
@@ -148,18 +148,18 @@ class WorkflowConfig:
 
     @cached_property
     def normalized_directory(self) -> Path:
-        """Get normalized directory path."""
+        """Get normalized and resolved directory path."""
         return self.directory.resolve()
 
     @cached_property
     def dist_dir(self) -> Path:
         """Get distribution directory path."""
-        return self.directory / "dist"
+        return self.normalized_directory / "dist"
 
     @cached_property
     def build_dir(self) -> Path:
         """Get build directory path."""
-        return self.directory / "build"
+        return self.normalized_directory / "build"
 
 
 # Strategy Pattern Implementation for Cleaning
@@ -169,11 +169,8 @@ class StandardCleaningStrategy:
     def should_clean(self, entry: Path) -> bool:
         """Determine if entry should be cleaned using standard rules."""
         # Special case: projects.json file should always be cleaned
-        if entry.is_file() and entry.name == "projects.json":
-            return True
-
-        if not entry.is_dir():
-            return False
+        if entry.is_file():
+            return entry.name == "projects.json"
 
         # Protected directories starting with dot
         if entry.name.startswith(".") and entry.name in PROTECTED_DIRS:
@@ -232,12 +229,12 @@ class PackageWorkflow:
         """
         return self.solution.projects
 
-    @cached_property
+    @property
     def dist_dir(self) -> Path:
         """Get distribution directory path."""
         return self.config.dist_dir
 
-    @cached_property
+    @property
     def build_dir(self) -> Path:
         """Get build directory path."""
         return self.config.build_dir
@@ -246,16 +243,21 @@ class PackageWorkflow:
         """Clean build artifacts and package files using strategy pattern."""
         logger.info("Cleaning build artifacts using strategy pattern...")
 
-        cleaned_dirs: list[str] = []
-        cleaned_files: list[str] = []
-        failed_operations: list[str] = []
-
         entries_to_clean = [
             entry
             for entry in self.root_dir.iterdir()
             if self.cleaning_strategy.should_clean(entry)
        ]
 
+        if not entries_to_clean:
+            logger.info("No build artifacts found to clean")
+            return
+
+        # Track cleaning results
+        cleaned_dirs: list[str] = []
+        cleaned_files: list[str] = []
+        failed_operations: list[str] = []
+
         for entry in entries_to_clean:
             success, message = self.cleaning_strategy.clean_entry(entry)
             if success:
@@ -263,39 +265,37 @@ class PackageWorkflow:
                 cleaned_dirs.append(str(entry))
             else:
                 cleaned_files.append(str(entry))
-                logger.info(message)
+                logger.debug(message)
             else:
                 failed_operations.append(message)
                 logger.warning(message)
 
         # Summary logging
-
-
-
-        logger.info(
-            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
-        )
+        logger.info(
+            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
+        )
 
         if failed_operations:
             logger.error(f"Failed operations: {len(failed_operations)}")
 
-    async def
-        """Run a synchronous
+    async def _run_sync_task(self, name: str, setup_func: Callable[[], None]) -> None:
+        """Run a synchronous task in thread pool executor.
 
         Args:
-
-
+            name: Name of the task for logging
+            setup_func: Function that returns the task to execute
         """
+        logger.info(LOG_SEPARATOR)
+        logger.info(f"Packing {name}...")
+
         loop = asyncio.get_running_loop()
-        await loop.run_in_executor(None,
+        await loop.run_in_executor(None, setup_func)
+        logger.info(f"{name.capitalize()} packed.")
 
     async def pack_embed_python(self) -> None:
         """Pack embed python."""
         from sfi.pyembedinstall.pyembedinstall import EmbedInstaller
 
-        logger.info(LOG_SEPARATOR)
-        logger.info("Packing embed python...")
-
         def _run():
             installer = EmbedInstaller(
                 root_dir=self.root_dir,
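The new _run_sync_task helper is the standard asyncio recipe for offloading a blocking callable to the default thread pool, with the shared "Packing ... / ... packed." logging folded in. A stripped-down sketch of the pattern (only the asyncio and logging calls are real API; the task names and sleeps are illustrative):

import asyncio
import logging
import time

logging.basicConfig(level=logging.INFO, format="%(message)s")
logger = logging.getLogger(__name__)

async def run_sync_task(name: str, setup_func) -> None:
    """Run a blocking callable without stalling the event loop."""
    logger.info(f"Packing {name}...")
    loop = asyncio.get_running_loop()
    await loop.run_in_executor(None, setup_func)  # None = default ThreadPoolExecutor
    logger.info(f"{name.capitalize()} packed.")

async def main() -> None:
    # Two blocking sleeps overlap because each runs in its own worker thread
    await asyncio.gather(
        run_sync_task("loaders", lambda: time.sleep(1)),
        run_sync_task("libraries", lambda: time.sleep(1)),
    )

asyncio.run(main())

This is also why the per-method LOG_SEPARATOR and "Packing ..." lines disappear from pack_embed_python, pack_loaders, and the other pack_* methods in the hunks that follow: the helper now owns that logging.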
@@ -304,22 +304,17 @@ class PackageWorkflow:
             )
             installer.run()
 
-        await self.
-        logger.info("Embed python packed.")
+        await self._run_sync_task("embed python", _run)
 
     async def pack_loaders(self) -> None:
         """Pack loaders for all projects concurrently."""
         from sfi.pyloadergen.pyloadergen import PyLoaderGenerator
 
-        logger.info(LOG_SEPARATOR)
-        logger.info("Packing loaders...")
-
         def _run():
             generator = PyLoaderGenerator(root_dir=self.root_dir)
             generator.run()
 
-        await self.
-        logger.info("Loaders packed.")
+        await self._run_sync_task("loaders", _run)
 
     async def pack_libraries(self) -> None:
         """Pack libraries for all projects concurrently."""
@@ -332,7 +327,7 @@ class PackageWorkflow:
         )
         libpacker.run()
 
-        await self.
+        await self._run_sync_task("libraries", _run)
 
     async def pack_source(self) -> None:
         """Pack source code for all projects concurrently."""
@@ -342,7 +337,7 @@ class PackageWorkflow:
         source_packer = PySourcePacker(root_dir=self.root_dir)
         source_packer.run()
 
-        await self.
+        await self._run_sync_task("source code", _run)
 
     async def pack_archive(self, archive_format: str) -> None:
         """Create archive for all projects.
@@ -350,18 +345,14 @@ class PackageWorkflow:
         Args:
             archive_format: Archive format (zip, tar, gztar, bztar, xztar, 7z, nsis)
         """
-        from sfi.pyarchive.pyarchive import
-
-        logger.info(LOG_SEPARATOR)
-        logger.info(f"Creating {archive_format} archives...")
+        from sfi.pyarchive.pyarchive import PyArchiveConfig, PyArchiver
 
         def _run():
             config = PyArchiveConfig(verbose=self.config.debug)
             archiver = PyArchiver(root_dir=self.root_dir, config=config)
             archiver.archive_projects(format=archive_format)
 
-        await self.
-        logger.info("Archives created.")
+        await self._run_sync_task(f"{archive_format} archives", _run)
 
     async def build(self) -> dict[str, Any]:
         """Execute the packaging workflow with concurrent optimization.
@@ -372,14 +363,14 @@ class PackageWorkflow:
         3. Create archive (optional, if archive_type is set)
 
         Returns:
-            Dict with results and summary
+            Dict with results and summary including output_dir and metadata
 
         Raises:
             FileNotFoundError: If required directories don't exist
             RuntimeError: If any packaging step fails
         """
         logger.info("Starting packaging workflow execution")
-
+        start_time = time.perf_counter()
 
         try:
             # Stage 1: Pack embed python (prerequisite for other tasks)
@@ -404,15 +395,16 @@ class PackageWorkflow:
             logger.error(f"Workflow execution failed: {e}")
             raise RuntimeError(f"Packaging workflow failed: {e}") from e
 
-
+        elapsed = time.perf_counter() - start_time
         logger.info(LOG_SEPARATOR)
-        logger.info(f"Packaging workflow completed in {
+        logger.info(f"Packaging workflow completed in {elapsed:.2f}s")
         return {"output_dir": str(self.dist_dir), "metadata": {}}
 
     def list_projects(self) -> None:
+        """List all available projects."""
         logger.info(f"Listing projects in {self.root_dir}")
         for project in self.projects.values():
-            logger.info(f"{project}")
+            logger.info(f"  - {project}")
 
     def _scan_executables(self) -> list[Path]:
         """Scan dist directory for executable files.
@@ -420,11 +412,10 @@ class PackageWorkflow:
         Returns:
             List of executable file paths found in dist directory.
         """
-        dist_dir = self.
+        dist_dir = self.dist_dir
         if not dist_dir.exists():
             return []
-
-        return [f for f in dist_dir.glob(pattern) if f.is_file() and f.suffix == ext]
+        return [f for f in dist_dir.glob(f"*{ext}") if f.is_file()]
 
     def _resolve_executable(self, match_name: str | None) -> Path | None:
         """Resolve executable by scanning dist directory and matching name.
@@ -441,22 +432,18 @@ class PackageWorkflow:
 
         # Auto-select if only one executable and no name specified
         if not match_name:
-            if len(executables) == 1:
-                return executables[0]
-            return None
+            return executables[0] if len(executables) == 1 else None
 
-        # Exact match (without extension)
         lower_name = match_name.lower()
+
+        # Try exact match (without extension)
         for exe in executables:
             if exe.stem.lower() == lower_name:
                 return exe
 
-        #
+        # Try fuzzy match (case-insensitive substring)
         matches = [exe for exe in executables if lower_name in exe.stem.lower()]
-        if len(matches) == 1:
-            return matches[0]
-
-        return None
 
+        return matches[0] if len(matches) == 1 else None
 
     def list_executables(self) -> None:
         """List all executables in dist directory."""
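_resolve_executable now reads as two passes over the scanned executables: an exact stem match, then a fuzzy substring match that is accepted only when unambiguous. A self-contained sketch of that matching logic, with the dist-directory scan replaced by a plain list (the function name mirrors the diff; the sample paths are illustrative):

from pathlib import Path

def resolve_executable(executables: list[Path], match_name: str | None) -> Path | None:
    """Pick one executable: auto-select, exact stem match, or unique fuzzy match."""
    if not match_name:
        return executables[0] if len(executables) == 1 else None

    lower_name = match_name.lower()

    # Pass 1: exact match (without extension)
    for exe in executables:
        if exe.stem.lower() == lower_name:
            return exe

    # Pass 2: fuzzy substring match, accepted only if unambiguous
    matches = [exe for exe in executables if lower_name in exe.stem.lower()]
    return matches[0] if len(matches) == 1 else None

exes = [Path("dist/docscan.exe"), Path("dist/docscan-gui.exe")]
assert resolve_executable(exes, "docscan") == exes[0]  # exact stem match
assert resolve_executable(exes, "gui") == exes[1]      # unique fuzzy match
assert resolve_executable(exes, "doc") is None         # ambiguous, refused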
@@ -464,9 +451,9 @@ class PackageWorkflow:
         if not executables:
             logger.info("No executables found in dist directory")
             return
-        logger.info(f"Available executables in {self.
+        logger.info(f"Available executables in {self.dist_dir}:")
         for exe in executables:
-            logger.info(f"  {exe.stem}")
+            logger.info(f"  - {exe.stem}")
 
     def run_project(
         self, match_name: str | None, project_args: list[str] | None = None
@@ -479,41 +466,66 @@ class PackageWorkflow:
         """
         exe_path = self._resolve_executable(match_name)
 
-
-
+        # Handle executable not found cases
+        if not exe_path:
+            self._handle_executable_not_found(match_name)
+            return
+
+        # Build and execute command
+        cmd = self._build_executable_command(exe_path, project_args)
+
+        try:
+            subprocess.run(cmd, check=True, cwd=self.root_dir)
+            logger.info(f"{exe_path.stem} ran successfully")
+        except subprocess.CalledProcessError as e:
+            logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
+        except FileNotFoundError:
+            logger.error(f"Executable not found: {exe_path}")
+        except Exception as e:
+            logger.error(f"Failed to run {exe_path}: {e}")
+
+    def _handle_executable_not_found(self, match_name: str | None) -> None:
+        """Handle cases where executable cannot be found.
+
+        Args:
+            match_name: Executable name that was being searched for
+        """
+        executables = self._scan_executables()
+
+        if not match_name:
             if len(executables) == 0:
                 logger.error("No executables found in dist directory")
-                return
             elif len(executables) > 1:
                 logger.error(
                     "Multiple executables found. Please specify which one to run:"
                 )
                 self.list_executables()
-
-
-
-                return
-        elif not exe_path:
+            else:
+                logger.error("Unable to auto-select executable")
+        else:
             logger.error(f"Executable '{match_name}' not found")
+
+            if len(executables) > 0:
                 self.list_executables()
-            return
 
-
+    def _build_executable_command(
+        self, exe_path: Path, project_args: list[str] | None
+    ) -> list[str]:
+        """Build command list for executable execution.
+
+        Args:
+            exe_path: Path to executable
+            project_args: Additional arguments to pass
+
+        Returns:
+            List of command arguments
+        """
         cmd = [str(exe_path.resolve())]
-        logger.info(f"Command: {cmd}")
         if project_args:
             cmd.extend(project_args)
             logger.info(f"Arguments: {' '.join(project_args)}")
-
-
-            subprocess.run(cmd, check=True)
-            logger.info(f"{exe_path.stem} ran successfully")
-        except subprocess.CalledProcessError as e:
-            logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
-        except FileNotFoundError:
-            logger.error(f"Executable not found: {exe_path}")
-        except Exception as e:
-            logger.error(f"Failed to run {exe_path}: {e}")
+        logger.info(f"Running: {' '.join(cmd)}")
+        return cmd
 
 
 def parse_args() -> argparse.Namespace:
@@ -526,34 +538,44 @@ def parse_args() -> argparse.Namespace:
         prog="pypack", description="Python packaging tool with workflow orchestration"
     )
 
+    # Add common arguments to parent parser
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
+
     # Create subparsers
     subparsers = parser.add_subparsers(
         dest="command", required=True, help="Available commands"
     )
 
     # Version subcommand
-    version_parser = subparsers.add_parser(
-        "version",
+    subparsers.add_parser(
+        "version",
+        aliases=["v"],
+        help="Show version information",
+        parents=[parent_parser],
     )
-    version_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # List subcommand
-    list_parser = subparsers.add_parser(
-        "list",
+    subparsers.add_parser(
+        "list",
+        aliases=["l", "ls"],
+        help="List available projects",
+        parents=[parent_parser],
     )
-    list_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Clean subcommand
-    clean_parser = subparsers.add_parser(
-        "clean", aliases=["c"], help="Clean build artifacts"
+    subparsers.add_parser(
+        "clean", aliases=["c"], help="Clean build artifacts", parents=[parent_parser]
     )
-    clean_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Run subcommand
-    run_parser = subparsers.add_parser("run", aliases=["r"], help="Run a project")
+    run_parser = subparsers.add_parser(
+        "run", aliases=["r"], help="Run a project", parents=[parent_parser]
+    )
     run_parser.add_argument(
         "project",
         type=str,
+        nargs="?",
         help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
     )
     run_parser.add_argument(
@@ -562,13 +584,11 @@ def parse_args() -> argparse.Namespace:
         nargs="*",
         help="Additional arguments to pass to the project",
     )
-    run_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Build subcommand
     build_parser = subparsers.add_parser(
-        "build", aliases=["b"], help="Build project packages"
+        "build", aliases=["b"], help="Build project packages", parents=[parent_parser]
     )
-    build_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
     build_parser.add_argument(
         "--python-version", type=str, default="3.8.10", help="Python version to install"
     )
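The parse_args rework above replaces five copies of the --debug flag with argparse's parent-parser mechanism. A minimal runnable sketch of that pattern (subcommand names follow the diff; note the parent must be created with add_help=False so its help option does not collide with the subparsers'):

import argparse

parser = argparse.ArgumentParser(prog="pypack")

# Shared flags live on a help-less parent parser
parent_parser = argparse.ArgumentParser(add_help=False)
parent_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")

subparsers = parser.add_subparsers(dest="command", required=True)
subparsers.add_parser("version", aliases=["v"], parents=[parent_parser])
subparsers.add_parser("list", aliases=["l", "ls"], parents=[parent_parser])

args = parser.parse_args(["list", "--debug"])
print(args.command, args.debug)  # -> list True

Because aliases are registered, args.command holds whatever alias the user typed, which is why main() below matches against tuples like ("list", "l", "ls").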
@@ -636,6 +656,7 @@ def main() -> None:
     """Main entry point for package workflow tool using factory pattern."""
     args = parse_args()
 
+    # Configure logging level
     if args.debug:
         logging.getLogger().setLevel(logging.DEBUG)
 
@@ -648,17 +669,18 @@ def main() -> None:
         cleaning_strategy=StandardCleaningStrategy(),
     )
 
-    #
-    if args.command in ("version", "v"):
+    # Command dispatch using pattern matching
+    command = args.command
+
+    if command in ("version", "v"):
         logger.info(f"pypack {__version__} (build {__build__})")
-
-    elif args.command in ("list", "l", "ls"):
+    elif command in ("list", "l", "ls"):
         workflow.list_projects()
-    elif args.command in ("run", "r"):
+    elif command in ("run", "r"):
         workflow.run_project(args.project, args.args)
-    elif args.command in ("clean", "c"):
+    elif command in ("clean", "c"):
         workflow.clean_project()
-    elif args.command in ("build", "b"):
+    elif command in ("build", "b"):
         try:
             asyncio.run(workflow.build())
             logger.info("Packaging completed successfully!")