pysfi 0.1.13__py3-none-any.whl → 0.1.15__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {pysfi-0.1.13.dist-info → pysfi-0.1.15.dist-info}/METADATA +1 -1
- {pysfi-0.1.13.dist-info → pysfi-0.1.15.dist-info}/RECORD +35 -35
- {pysfi-0.1.13.dist-info → pysfi-0.1.15.dist-info}/entry_points.txt +2 -0
- sfi/__init__.py +20 -5
- sfi/alarmclock/__init__.py +3 -3
- sfi/bumpversion/__init__.py +5 -5
- sfi/bumpversion/bumpversion.py +64 -15
- sfi/cleanbuild/__init__.py +3 -3
- sfi/cleanbuild/cleanbuild.py +5 -1
- sfi/cli.py +13 -2
- sfi/condasetup/__init__.py +1 -1
- sfi/condasetup/condasetup.py +91 -76
- sfi/docdiff/__init__.py +1 -1
- sfi/docdiff/docdiff.py +3 -2
- sfi/docscan/__init__.py +3 -3
- sfi/docscan/docscan.py +78 -23
- sfi/docscan/docscan_gui.py +5 -5
- sfi/filedate/filedate.py +12 -5
- sfi/img2pdf/img2pdf.py +5 -5
- sfi/llmquantize/llmquantize.py +44 -33
- sfi/llmserver/__init__.py +1 -1
- sfi/makepython/makepython.py +880 -319
- sfi/pdfcrypt/__init__.py +30 -0
- sfi/pdfcrypt/pdfcrypt.py +435 -0
- sfi/pdfsplit/pdfsplit.py +45 -12
- sfi/pyarchive/__init__.py +1 -1
- sfi/pyarchive/pyarchive.py +1 -1
- sfi/pyembedinstall/pyembedinstall.py +1 -1
- sfi/pylibpack/pylibpack.py +5 -13
- sfi/pyloadergen/pyloadergen.py +6 -3
- sfi/pypack/pypack.py +131 -105
- sfi/pyprojectparse/pyprojectparse.py +19 -44
- sfi/pysourcepack/__init__.py +1 -1
- sfi/pysourcepack/pysourcepack.py +11 -14
- sfi/workflowengine/__init__.py +0 -0
- sfi/workflowengine/workflowengine.py +0 -547
- {pysfi-0.1.13.dist-info → pysfi-0.1.15.dist-info}/WHEEL +0 -0
sfi/pypack/pypack.py
CHANGED

@@ -24,7 +24,7 @@ from dataclasses import dataclass, field
 from enum import Enum
 from functools import cached_property
 from pathlib import Path
-from typing import Any, Protocol
+from typing import Any, Callable, Protocol
 
 from sfi.pyprojectparse.pyprojectparse import Project, Solution
 

@@ -148,18 +148,18 @@ class WorkflowConfig:
 
     @cached_property
     def normalized_directory(self) -> Path:
-        """Get normalized directory path."""
+        """Get normalized and resolved directory path."""
         return self.directory.resolve()
 
     @cached_property
     def dist_dir(self) -> Path:
         """Get distribution directory path."""
-        return self.
+        return self.normalized_directory / "dist"
 
     @cached_property
     def build_dir(self) -> Path:
         """Get build directory path."""
-        return self.
+        return self.normalized_directory / "build"
 
 
 # Strategy Pattern Implementation for Cleaning

@@ -169,11 +169,8 @@ class StandardCleaningStrategy:
     def should_clean(self, entry: Path) -> bool:
         """Determine if entry should be cleaned using standard rules."""
         # Special case: projects.json file should always be cleaned
-        if entry.is_file()
-            return
-
-        if not entry.is_dir():
-            return False
+        if entry.is_file():
+            return entry.name == "projects.json"
 
         # Protected directories starting with dot
         if entry.name.startswith(".") and entry.name in PROTECTED_DIRS:
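
With the rewrite, `should_clean` handles files in one conditional (only `projects.json` qualifies) and lets everything else fall through to the directory rules. For orientation, a minimal sketch of the strategy interface this class satisfies, inferred from the `should_clean`/`clean_entry` calls in the `clean_project` hunks below; the actual protocol definition is not part of this diff:

    from pathlib import Path
    from typing import Protocol


    class CleaningStrategy(Protocol):
        # Interface inferred from how clean_project uses the strategy;
        # the real protocol name and docstrings may differ.
        def should_clean(self, entry: Path) -> bool: ...
        def clean_entry(self, entry: Path) -> tuple[bool, str]: ...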

@@ -233,11 +230,15 @@ class PackageWorkflow:
         return self.solution.projects
 
     @cached_property
+    def sorted_projects(self) -> dict[str, Project]:
+        return dict(sorted(self.projects.items()))
+
+    @property
     def dist_dir(self) -> Path:
         """Get distribution directory path."""
         return self.config.dist_dir
 
-    @
+    @property
     def build_dir(self) -> Path:
         """Get build directory path."""
         return self.config.build_dir
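
The new `sorted_projects` is a `cached_property`, so the sort runs once per instance, while `dist_dir` and `build_dir` become plain `property` wrappers over the config. A minimal illustration of that distinction, using a hypothetical `Demo` class rather than package code:

    from functools import cached_property


    class Demo:
        @cached_property
        def sorted_items(self) -> dict[str, int]:
            print("sorting once")            # executes only on first access
            return dict(sorted({"b": 2, "a": 1}.items()))


    d = Demo()
    d.sorted_items   # prints "sorting once" and caches the result
    d.sorted_items   # served from the instance __dict__, no recomputation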

@@ -246,16 +247,21 @@ class PackageWorkflow:
         """Clean build artifacts and package files using strategy pattern."""
         logger.info("Cleaning build artifacts using strategy pattern...")
 
-        cleaned_dirs: list[str] = []
-        cleaned_files: list[str] = []
-        failed_operations: list[str] = []
-
         entries_to_clean = [
             entry
             for entry in self.root_dir.iterdir()
             if self.cleaning_strategy.should_clean(entry)
         ]
 
+        if not entries_to_clean:
+            logger.info("No build artifacts found to clean")
+            return
+
+        # Track cleaning results
+        cleaned_dirs: list[str] = []
+        cleaned_files: list[str] = []
+        failed_operations: list[str] = []
+
         for entry in entries_to_clean:
             success, message = self.cleaning_strategy.clean_entry(entry)
             if success:

@@ -263,39 +269,37 @@ class PackageWorkflow:
                 cleaned_dirs.append(str(entry))
             else:
                 cleaned_files.append(str(entry))
-                logger.
+                logger.debug(message)
             else:
                 failed_operations.append(message)
                 logger.warning(message)
 
         # Summary logging
-
-
-
-        logger.info(
-            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
-        )
+        logger.info(
+            f"Cleaned {len(cleaned_dirs)} directories and {len(cleaned_files)} file(s)"
+        )
 
         if failed_operations:
             logger.error(f"Failed operations: {len(failed_operations)}")
 
-    async def
-    """Run a synchronous
+    async def _run_sync_task(self, name: str, setup_func: Callable[[], None]) -> None:
+        """Run a synchronous task in thread pool executor.
 
         Args:
-
-
+            name: Name of the task for logging
+            setup_func: Function that returns the task to execute
         """
+        logger.info(LOG_SEPARATOR)
+        logger.info(f"Packing {name}...")
+
         loop = asyncio.get_running_loop()
-        await loop.run_in_executor(None,
+        await loop.run_in_executor(None, setup_func)
+        logger.info(f"{name.capitalize()} packed.")
 
     async def pack_embed_python(self) -> None:
         """Pack embed python."""
         from sfi.pyembedinstall.pyembedinstall import EmbedInstaller
 
-        logger.info(LOG_SEPARATOR)
-        logger.info("Packing embed python...")
-
         def _run():
             installer = EmbedInstaller(
                 root_dir=self.root_dir,
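
The new `_run_sync_task` helper centralizes the boilerplate each `pack_*` method previously repeated: the log separator, the "Packing ..." messages, and the `run_in_executor` call that keeps blocking work off the event loop. A self-contained sketch of the same pattern:

    import asyncio
    import time
    from typing import Callable


    async def run_sync_task(name: str, func: Callable[[], None]) -> None:
        # Offload the blocking callable to the default thread pool so the
        # event loop stays free while it runs.
        loop = asyncio.get_running_loop()
        await loop.run_in_executor(None, func)
        print(f"{name} finished")


    asyncio.run(run_sync_task("demo", lambda: time.sleep(0.1)))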

@@ -304,22 +308,17 @@ class PackageWorkflow:
             )
             installer.run()
 
-        await self.
-        logger.info("Embed python packed.")
+        await self._run_sync_task("embed python", _run)
 
     async def pack_loaders(self) -> None:
         """Pack loaders for all projects concurrently."""
         from sfi.pyloadergen.pyloadergen import PyLoaderGenerator
 
-        logger.info(LOG_SEPARATOR)
-        logger.info("Packing loaders...")
-
         def _run():
             generator = PyLoaderGenerator(root_dir=self.root_dir)
             generator.run()
 
-        await self.
-        logger.info("Loaders packed.")
+        await self._run_sync_task("loaders", _run)
 
     async def pack_libraries(self) -> None:
         """Pack libraries for all projects concurrently."""

@@ -332,7 +331,7 @@ class PackageWorkflow:
             )
             libpacker.run()
 
-        await self.
+        await self._run_sync_task("libraries", _run)
 
     async def pack_source(self) -> None:
         """Pack source code for all projects concurrently."""

@@ -342,7 +341,7 @@ class PackageWorkflow:
             source_packer = PySourcePacker(root_dir=self.root_dir)
             source_packer.run()
 
-        await self.
+        await self._run_sync_task("source code", _run)
 
     async def pack_archive(self, archive_format: str) -> None:
         """Create archive for all projects.

@@ -350,18 +349,14 @@ class PackageWorkflow:
         Args:
             archive_format: Archive format (zip, tar, gztar, bztar, xztar, 7z, nsis)
         """
-        from sfi.pyarchive.pyarchive import
-
-        logger.info(LOG_SEPARATOR)
-        logger.info(f"Creating {archive_format} archives...")
+        from sfi.pyarchive.pyarchive import PyArchiveConfig, PyArchiver
 
         def _run():
             config = PyArchiveConfig(verbose=self.config.debug)
             archiver = PyArchiver(root_dir=self.root_dir, config=config)
             archiver.archive_projects(format=archive_format)
 
-        await self.
-        logger.info("Archives created.")
+        await self._run_sync_task(f"{archive_format} archives", _run)
 
     async def build(self) -> dict[str, Any]:
         """Execute the packaging workflow with concurrent optimization.

@@ -372,14 +367,14 @@ class PackageWorkflow:
         3. Create archive (optional, if archive_type is set)
 
         Returns:
-            Dict with results and summary
+            Dict with results and summary including output_dir and metadata
 
         Raises:
             FileNotFoundError: If required directories don't exist
             RuntimeError: If any packaging step fails
         """
         logger.info("Starting packaging workflow execution")
-
+        start_time = time.perf_counter()
 
         try:
             # Stage 1: Pack embed python (prerequisite for other tasks)
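
The build docstring advertises "concurrent optimization": stage 1 (embed python) is a prerequisite, after which the loader, library, and source stages are independent. The stage wiring itself is outside this hunk; with coroutines shaped like the `pack_*` methods above, such staging is typically an `asyncio.gather` call, roughly:

    import asyncio


    async def build(workflow) -> None:
        # A sketch of the assumed staging, not the actual build() body.
        # Prerequisite stage runs alone...
        await workflow.pack_embed_python()
        # ...then the independent pack stages run concurrently.
        await asyncio.gather(
            workflow.pack_loaders(),
            workflow.pack_libraries(),
            workflow.pack_source(),
        )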

@@ -404,15 +399,16 @@ class PackageWorkflow:
             logger.error(f"Workflow execution failed: {e}")
             raise RuntimeError(f"Packaging workflow failed: {e}") from e
 
-
+        elapsed = time.perf_counter() - start_time
         logger.info(LOG_SEPARATOR)
-        logger.info(f"Packaging workflow completed in {
+        logger.info(f"Packaging workflow completed in {elapsed:.2f}s")
         return {"output_dir": str(self.dist_dir), "metadata": {}}
 
     def list_projects(self) -> None:
+        """List all available projects."""
         logger.info(f"Listing projects in {self.root_dir}")
-        for project in self.
-            logger.info(f"{project}")
+        for project in self.sorted_projects.values():
+            logger.info(f"  - {project}")
 
     def _scan_executables(self) -> list[Path]:
         """Scan dist directory for executable files.

@@ -420,11 +416,10 @@ class PackageWorkflow:
         Returns:
             List of executable file paths found in dist directory.
         """
-        dist_dir = self.
+        dist_dir = self.dist_dir
         if not dist_dir.exists():
             return []
-
-        return [f for f in dist_dir.glob(pattern) if f.is_file() and f.suffix == ext]
+        return [f for f in dist_dir.glob(f"*{ext}") if f.is_file()]
 
     def _resolve_executable(self, match_name: str | None) -> Path | None:
         """Resolve executable by scanning dist directory and matching name.

@@ -441,22 +436,18 @@ class PackageWorkflow:
 
         # Auto-select if only one executable and no name specified
         if not match_name:
-            if len(executables) == 1
-                return executables[0]
-            return None
+            return executables[0] if len(executables) == 1 else None
 
-        # Exact match (without extension)
         lower_name = match_name.lower()
+
+        # Try exact match (without extension)
         for exe in executables:
             if exe.stem.lower() == lower_name:
                 return exe
 
-        #
+        # Try fuzzy match (case-insensitive substring)
         matches = [exe for exe in executables if lower_name in exe.stem.lower()]
-        if len(matches) == 1
-            return matches[0]
-
-        return None
+        return matches[0] if len(matches) == 1 else None
 
     def list_executables(self) -> None:
         """List all executables in dist directory."""
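
After the rewrite, resolution reads as two steps: an exact stem match wins, otherwise a case-insensitive substring match is accepted only when unique. The same logic in isolation (hypothetical `resolve` helper and example paths, not package code):

    from pathlib import Path


    def resolve(name: str, executables: list[Path]) -> Path | None:
        lower = name.lower()
        for exe in executables:                       # exact stem match wins
            if exe.stem.lower() == lower:
                return exe
        matches = [e for e in executables if lower in e.stem.lower()]
        return matches[0] if len(matches) == 1 else None  # fuzzy match must be unique


    exes = [Path("dist/docscan.exe"), Path("dist/docscan-gui.exe")]
    assert resolve("docscan", exes) == exes[0]   # exact match
    assert resolve("gui", exes) == exes[1]       # unique substring match
    assert resolve("doc", exes) is None          # ambiguous -> None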

@@ -464,9 +455,9 @@ class PackageWorkflow:
         if not executables:
             logger.info("No executables found in dist directory")
             return
-        logger.info(f"Available executables in {self.
+        logger.info(f"Available executables in {self.dist_dir}:")
         for exe in executables:
-            logger.info(f"  {exe.stem}")
+            logger.info(f"  - {exe.stem}")
 
     def run_project(
         self, match_name: str | None, project_args: list[str] | None = None

@@ -479,41 +470,66 @@ class PackageWorkflow:
         """
         exe_path = self._resolve_executable(match_name)
 
-
-
+        # Handle executable not found cases
+        if not exe_path:
+            self._handle_executable_not_found(match_name)
+            return
+
+        # Build and execute command
+        cmd = self._build_executable_command(exe_path, project_args)
+
+        try:
+            subprocess.run(cmd, check=True, cwd=self.root_dir)
+            logger.info(f"{exe_path.stem} ran successfully")
+        except subprocess.CalledProcessError as e:
+            logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
+        except FileNotFoundError:
+            logger.error(f"Executable not found: {exe_path}")
+        except Exception as e:
+            logger.error(f"Failed to run {exe_path}: {e}")
+
+    def _handle_executable_not_found(self, match_name: str | None) -> None:
+        """Handle cases where executable cannot be found.
+
+        Args:
+            match_name: Executable name that was being searched for
+        """
+        executables = self._scan_executables()
+
+        if not match_name:
             if len(executables) == 0:
                 logger.error("No executables found in dist directory")
-                return
             elif len(executables) > 1:
                 logger.error(
                     "Multiple executables found. Please specify which one to run:"
                 )
                 self.list_executables()
-
-
-
-                return
-        elif not exe_path:
+            else:
+                logger.error("Unable to auto-select executable")
+        else:
             logger.error(f"Executable '{match_name}' not found")
+
+            if len(executables) > 0:
                 self.list_executables()
-            return
 
-
+    def _build_executable_command(
+        self, exe_path: Path, project_args: list[str] | None
+    ) -> list[str]:
+        """Build command list for executable execution.
+
+        Args:
+            exe_path: Path to executable
+            project_args: Additional arguments to pass
+
+        Returns:
+            List of command arguments
+        """
         cmd = [str(exe_path.resolve())]
-        logger.info(f"Command: {cmd}")
         if project_args:
             cmd.extend(project_args)
             logger.info(f"Arguments: {' '.join(project_args)}")
-
-
-        subprocess.run(cmd, check=True)
-        logger.info(f"{exe_path.stem} ran successfully")
-        except subprocess.CalledProcessError as e:
-            logger.error(f"{exe_path.stem} failed with exit code {e.returncode}")
-        except FileNotFoundError:
-            logger.error(f"Executable not found: {exe_path}")
-        except Exception as e:
-            logger.error(f"Failed to run {exe_path}: {e}")
+        logger.info(f"Running: {' '.join(cmd)}")
+        return cmd
 
 
 def parse_args() -> argparse.Namespace:

@@ -526,34 +542,44 @@ def parse_args() -> argparse.Namespace:
         prog="pypack", description="Python packaging tool with workflow orchestration"
     )
 
+    # Add common arguments to parent parser
+    parent_parser = argparse.ArgumentParser(add_help=False)
+    parent_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
+
     # Create subparsers
     subparsers = parser.add_subparsers(
         dest="command", required=True, help="Available commands"
    )
 
     # Version subcommand
-
-        "version",
+    subparsers.add_parser(
+        "version",
+        aliases=["v"],
+        help="Show version information",
+        parents=[parent_parser],
     )
-    version_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # List subcommand
-
-        "list",
+    subparsers.add_parser(
+        "list",
+        aliases=["l", "ls"],
+        help="List available projects",
+        parents=[parent_parser],
     )
-    list_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Clean subcommand
-
-        "clean", aliases=["c"], help="Clean build artifacts"
+    subparsers.add_parser(
+        "clean", aliases=["c"], help="Clean build artifacts", parents=[parent_parser]
     )
-    clean_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Run subcommand
-    run_parser = subparsers.add_parser(
+    run_parser = subparsers.add_parser(
+        "run", aliases=["r"], help="Run a project", parents=[parent_parser]
+    )
     run_parser.add_argument(
         "project",
         type=str,
+        nargs="?",
         help="Project or executable name (e.g., 'docscan' or 'docscan-gui')",
     )
     run_parser.add_argument(
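
Factoring `--debug` into a parent parser replaces four duplicated `add_argument` calls. The argparse mechanism, reduced to essentials:

    import argparse

    # add_help=False prevents the parent's -h from clashing with each subparser's.
    parent = argparse.ArgumentParser(add_help=False)
    parent.add_argument("--debug", "-d", action="store_true", help="Debug mode")

    parser = argparse.ArgumentParser(prog="demo")
    subparsers = parser.add_subparsers(dest="command", required=True)
    subparsers.add_parser("build", parents=[parent])   # inherits --debug
    subparsers.add_parser("clean", parents=[parent])

    args = parser.parse_args(["build", "--debug"])
    assert args.debug is True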

@@ -562,13 +588,11 @@ def parse_args() -> argparse.Namespace:
         nargs="*",
         help="Additional arguments to pass to the project",
     )
-    run_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
 
     # Build subcommand
     build_parser = subparsers.add_parser(
-        "build", aliases=["b"], help="Build project packages"
+        "build", aliases=["b"], help="Build project packages", parents=[parent_parser]
     )
-    build_parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
     build_parser.add_argument(
         "--python-version", type=str, default="3.8.10", help="Python version to install"
     )

@@ -636,6 +660,7 @@ def main() -> None:
     """Main entry point for package workflow tool using factory pattern."""
     args = parse_args()
 
+    # Configure logging level
     if args.debug:
         logging.getLogger().setLevel(logging.DEBUG)
 

@@ -648,17 +673,18 @@ def main() -> None:
         cleaning_strategy=StandardCleaningStrategy(),
     )
 
-    #
-
+    # Command dispatch using pattern matching
+    command = args.command
+
+    if command in ("version", "v"):
         logger.info(f"pypack {__version__} (build {__build__})")
-
-    elif args.command in ("list", "l", "ls"):
+    elif command in ("list", "l", "ls"):
         workflow.list_projects()
-    elif
+    elif command in ("run", "r"):
         workflow.run_project(args.project, args.args)
-    elif
+    elif command in ("clean", "c"):
         workflow.clean_project()
-    elif
+    elif command in ("build", "b"):
         try:
             asyncio.run(workflow.build())
             logger.info("Packaging completed successfully!")
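
The new comment reads "Command dispatch using pattern matching", but the dispatch is still an if/elif chain over `command`. On Python 3.10+ the same table could be written as a literal `match` statement; an equivalent sketch (not what this release ships):

    import asyncio


    def dispatch(command: str, workflow, args) -> None:
        # The same dispatch table as main()'s if/elif chain, as a match statement.
        match command:
            case "version" | "v":
                print("pypack version")
            case "list" | "l" | "ls":
                workflow.list_projects()
            case "run" | "r":
                workflow.run_project(args.project, args.args)
            case "clean" | "c":
                workflow.clean_project()
            case "build" | "b":
                asyncio.run(workflow.build())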

sfi/pyprojectparse/pyprojectparse.py
CHANGED

@@ -23,7 +23,8 @@ import time
 from dataclasses import dataclass, field
 from functools import cached_property
 from pathlib import Path
-from
+from re import Pattern
+from typing import Any, Final
 
 if sys.version_info >= (3, 11):
     import tomllib

@@ -39,12 +40,12 @@ cwd = Path.cwd()
 is_windows = platform.system() == "Windows"
 
 # Precompiled regex for dependency name extraction (optimization)
-_DEP_NAME_PATTERN = re.compile(r"^([a-zA-Z0-9._-]+)")
-_EXTRA_PATTERN = re.compile(r"\[([^\]]+)\]")
-_VERSION_PATTERN = re.compile(r"[<>=!~].*$")
+_DEP_NAME_PATTERN: Final[Pattern[str]] = re.compile(r"^([a-zA-Z0-9._-]+)")
+_EXTRA_PATTERN: Final[Pattern[str]] = re.compile(r"\[([^\]]+)\]")
+_VERSION_PATTERN: Final[Pattern[str]] = re.compile(r"[<>=!~].*$")
 
 # Qt-related keywords and dependencies for faster detection
-_QT_DEPENDENCIES: frozenset[str] = frozenset((
+_QT_DEPENDENCIES: Final[frozenset[str]] = frozenset((
     "Qt",
     "PySide",
     "PyQt",

@@ -57,10 +58,10 @@ _QT_DEPENDENCIES: frozenset[str] = frozenset((
 ))
 
 # GUI-related keywords for faster detection
-_GUI_KEYWORDS: frozenset[str] = frozenset(("gui", "desktop"))
+_GUI_KEYWORDS: Final[frozenset[str]] = frozenset(("gui", "desktop"))
 
 # Required attributes for project validation (module-level constant for performance)
-_REQUIRED_ATTRS: frozenset[str] = frozenset(("name", "version", "description"))
+_REQUIRED_ATTRS: Final[frozenset[str]] = frozenset(("name", "version", "description"))
 
 
 @dataclass(frozen=True)
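
Annotating the module constants with `typing.Final` lets type checkers flag any rebinding, and `Pattern[str]` (imported from `re`) gives the compiled regexes a precise type. The idiom in isolation, reusing one pattern from the diff:

    import re
    from re import Pattern
    from typing import Final

    # Final tells type checkers these module constants must not be rebound;
    # Pattern[str] types the compiled regex precisely.
    _DEP_NAME: Final[Pattern[str]] = re.compile(r"^([a-zA-Z0-9._-]+)")

    m = _DEP_NAME.match("requests[socks]>=2.31")
    assert m is not None and m.group(1) == "requests"
    # _DEP_NAME = re.compile(r".")  # a type checker would flag this reassignment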

@@ -456,7 +457,6 @@ class Solution:
 
     root_dir: Path
     projects: dict[str, Project]
-    update: bool = False
     start_time: float = field(default_factory=time.perf_counter)
     time_stamp: datetime.datetime = field(default_factory=datetime.datetime.now)
 

@@ -465,7 +465,6 @@ class Solution:
             f"<Solution(\n"
             f"  root_dir={self.root_dir!r},\n"
             f"  projects: {len(self.projects)},\n"
-            f"  update={self.update!r},\n"
             f"  time_used={self.elapsed_time:.4f}s,\n"
             f"  timestamp={self.time_stamp!r}\n"
             f")>"

@@ -569,9 +568,7 @@ class Solution:
         return None
 
     @classmethod
-    def from_toml_files(
-        cls, root_dir: Path, toml_files: list[Path], update: bool = False
-    ) -> Solution:
+    def from_toml_files(cls, root_dir: Path, toml_files: list[Path]) -> Solution:
         """Create a Solution instance by parsing multiple pyproject.toml files.
 
         Args:

@@ -619,12 +616,10 @@ class Solution:
 
             projects[project.name] = project
 
-        return cls(root_dir=root_dir, projects=projects
+        return cls(root_dir=root_dir, projects=projects)
 
     @classmethod
-    def from_json_data(
-        cls, root_dir: Path, json_data: dict[str, Any], update: bool = False
-    ) -> Solution:
+    def from_json_data(cls, root_dir: Path, json_data: dict[str, Any]) -> Solution:
         """Create a Solution instance from JSON data.
 
         Args:

@@ -659,10 +654,10 @@ class Solution:
         except Exception as e:
             logger.error(f"Unknown error loading project data from JSON data: {e}")
 
-        return cls(root_dir=root_dir, projects=projects
+        return cls(root_dir=root_dir, projects=projects)
 
     @classmethod
-    def from_json_file(cls, json_file: Path
+    def from_json_file(cls, json_file: Path) -> Solution:
         """Create a Solution instance from a JSON file.
 
         Args:

@@ -680,6 +675,8 @@ class Solution:
             logger.debug(f"Loading project data from {json_file}...")
             with json_file.open("r", encoding="utf-8") as f:
                 loaded_data = json.load(f)
+            logger.debug(f"\t - Loaded project data from {json_file}")
+            return cls.from_json_data(json_file.parent, loaded_data)
         except (OSError, json.JSONDecodeError, KeyError) as e:
             logger.error(f"Error loading project data from {json_file}: {e}")
             return cls(root_dir=json_file.parent, projects={})

@@ -687,11 +684,8 @@ class Solution:
             logger.error(f"Unknown error loading project data from {json_file}: {e}")
             return cls(root_dir=json_file.parent, projects={})
 
-        logger.debug(f"\t - Loaded project data from {json_file}")
-        return cls.from_json_data(json_file.parent, loaded_data, update=update)
-
     @classmethod
-    def from_directory(cls, root_dir: Path
+    def from_directory(cls, root_dir: Path) -> Solution:
         """Create a Solution instance by scanning a directory for pyproject.toml files.
 
         This method recursively searches the given directory for pyproject.toml files,

@@ -709,13 +703,9 @@ class Solution:
             logger.error(f"Error: {root_dir} is not a directory")
             return cls(root_dir=root_dir, projects={})
 
-        # Use walrus operator to avoid intermediate variable
-        if not update and (project_json := root_dir / "projects.json").is_file():
-            return cls.from_json_file(project_json, update=update)
-
         logger.debug(f"Parsing pyproject.toml in {root_dir}...")
         toml_files = list(root_dir.rglob("pyproject.toml"))
-        return cls.from_toml_files(root_dir, toml_files
+        return cls.from_toml_files(root_dir, toml_files)
 
     def _write_project_json(self):
         """Write the project data to a projects.json file for caching.

@@ -729,12 +719,6 @@ class Solution:
         """
         # Cache json_file reference to avoid repeated cached_property access
         json_file = self.json_file
-        if json_file.exists() and not self.update:
-            logger.info(
-                f"\t - Skip write project data file {json_file}, already exists"
-            )
-            return
-
         try:
             # Pre-cache raw_data access to avoid repeated property access
             serializable_data = {

@@ -746,14 +730,11 @@ class Solution:
 
             with json_file.open("w", encoding="utf-8") as f:
                 json.dump(serializable_data, f, indent=2, ensure_ascii=False)
+            logger.info(f"Output written to {json_file}")
         except (OSError, json.JSONDecodeError, KeyError) as e:
             logger.error(f"Error writing output to {json_file}: {e}")
-            return
         except Exception as e:
             logger.error(f"Unknown error writing output to {json_file}: {e}")
-            return
-        else:
-            logger.info(f"Output written to {json_file}")
 
 
 def create_parser() -> argparse.ArgumentParser:

@@ -775,12 +756,6 @@ def create_parser() -> argparse.ArgumentParser:
     parser.add_argument(
         "--debug", "-d", action="store_true", help="Enable debug logging output"
     )
-    parser.add_argument(
-        "--update",
-        "-u",
-        action="store_true",
-        help="Force update by re-parsing projects instead of using cache",
-    )
     return parser
 
 

@@ -797,4 +772,4 @@ def main() -> None:
     if args.debug:
         logger.setLevel(logging.DEBUG)
 
-    Solution.from_directory(Path(args.directory)
+    Solution.from_directory(Path(args.directory))

sfi/pysourcepack/__init__.py
CHANGED

@@ -1 +1 @@
-
+