pysfi 0.1.11__py3-none-any.whl → 0.1.13__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in the public registry the package was published to. It is provided for informational purposes only.
@@ -1,19 +1,180 @@
1
+ """PyArchive - Multi-format project archiver with configurable compression.
2
+
3
+ This module provides a comprehensive archiving solution that supports multiple
4
+ compression formats (ZIP, TAR, 7z, NSIS) with configurable compression levels
5
+ and filtering capabilities.
6
+
7
+ The module follows established design patterns:
8
+ - Dataclass pattern for configuration management with persistence
9
+ - Frozen dataclass pattern for immutable archiver instances
10
+ - Strategy pattern for different archive format implementations
11
+ - Factory pattern for archive function selection
12
+ - Builder pattern for NSIS script generation
13
+ - Singleton pattern for logging configuration
14
+ - Enum pattern for type-safe format definitions
15
+ """
16
+
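Taken together, the configuration and archiver classes introduced later in this diff are meant to be driven roughly as follows. This is a minimal sketch; the import path is an assumption, since the module's location inside the wheel is not shown in this diff.

    from pathlib import Path

    # Hypothetical import path; adjust to wherever pyarchive lives in the installed package.
    from pysfi.pyarchive import PyArchiveConfig, PyArchiver

    config = PyArchiveConfig(compression_level=9, verbose=True)
    archiver = PyArchiver(root_dir=Path("."), config=config)

    # Archive every project found under the directory as gzip-compressed tarballs.
    succeeded, total = archiver.archive_projects(format="gztar")
    print(f"{succeeded}/{total} projects archived")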
1
17
  from __future__ import annotations
2
18
 
3
19
  import argparse
20
+ import atexit
21
+ import enum
4
22
  import json
5
23
  import logging
6
24
  import shutil
7
25
  import subprocess
26
+ import tarfile
27
+ import tempfile
28
+ import zipfile
29
+ from dataclasses import dataclass, field
30
+ from functools import cached_property
8
31
  from pathlib import Path
9
- from typing import Any
32
+ from typing import Any, Final
10
33
 
11
34
  from sfi.pyprojectparse.pyprojectparse import Project, Solution
12
35
 
13
- logging.basicConfig(level=logging.INFO, format="%(levelname)s: %(message)s")
36
+ __version__ = "1.0.0"
37
+ __build__ = "20260202"
38
+ __author__ = "pysfi Development Team"
39
+
40
+ # Configuration constants
41
+ CONFIG_FILE: Final[Path] = Path.home() / ".pysfi" / "pyarchive.json"
42
+ DEFAULT_CACHE_DIR: Final[Path] = Path.home() / ".pysfi" / ".cache" / "pyarchive"
43
+
44
+ # Constants for archive operations
45
+ DEFAULT_COMPRESSION_LEVEL: Final[int] = 6
46
+ MAX_COMPRESSION_LEVEL: Final[int] = 9
47
+ MIN_COMPRESSION_LEVEL: Final[int] = 0
48
+ DEFAULT_OUTPUT_DIR: Final[str] = "build"
49
+ DEFAULT_DIST_DIR: Final[str] = "dist"
50
+ DEFAULT_MAX_WORKERS: Final[int] = 4
51
+
52
+ # File extensions for different archive formats
53
+ ARCHIVE_EXTENSIONS: Final[dict[str, str]] = {
54
+ "zip": "zip",
55
+ "tar": "tar",
56
+ "gztar": "tar.gz",
57
+ "bztar": "tar.bz2",
58
+ "xztar": "tar.xz",
59
+ "7z": "7z",
60
+ "nsis": "setup.exe",
61
+ }
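These extensions feed the output-file naming used by _prepare_archive_paths() further down, which builds "name-version.extension" inside the build directory. A stdlib-only sketch of the resulting names (project name and version here are made up):

    ARCHIVE_EXTENSIONS = {"zip": "zip", "gztar": "tar.gz", "nsis": "setup.exe"}  # excerpt of the table above

    def output_name(name: str, version: str, fmt: str) -> str:
        # Mirrors: output_dir / f"{project.name}-{project.version}.{file_extension}"
        return f"{name}-{version}.{ARCHIVE_EXTENSIONS[fmt]}"

    assert output_name("myapp", "1.2.0", "gztar") == "myapp-1.2.0.tar.gz"
    assert output_name("myapp", "1.2.0", "nsis") == "myapp-1.2.0.setup.exe"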
62
+
63
+ # NSIS script constants
64
+ NSIS_INCLUDE_FILE: Final[str] = "MUI2.nsh"
65
+ NSIS_ABORT_WARNING: Final[str] = "MUI_ABORTWARNING"
66
+ NSIS_LANGUAGE: Final[str] = "English"
67
+ NSIS_REGISTRY_KEY: Final[str] = "Software"
68
+ NSIS_PROGRAM_FILES: Final[str] = "$PROGRAMFILES"
69
+ NSIS_START_MENU: Final[str] = "$SMPROGRAMS"
70
+ NSIS_INSTALL_DIR: Final[str] = "$INSTDIR"
71
+
72
+ # NSIS page macros
73
+ NSIS_PAGES: Final[list[str]] = [
74
+ "MUI_PAGE_WELCOME",
75
+ "MUI_PAGE_COMPONENTS",
76
+ "MUI_PAGE_DIRECTORY",
77
+ "MUI_PAGE_INSTFILES",
78
+ "MUI_PAGE_FINISH",
79
+ ]
80
+
81
+ NSIS_UNPAGES: Final[list[str]] = [
82
+ "MUI_UNPAGE_WELCOME",
83
+ "MUI_UNPAGE_CONFIRM",
84
+ "MUI_UNPAGE_INSTFILES",
85
+ "MUI_UNPAGE_FINISH",
86
+ ]
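The two page lists above are expanded into !insertmacro lines by _generate_nsis_pages() later in this file. A stdlib-only sketch of that expansion:

    NSIS_PAGES = [
        "MUI_PAGE_WELCOME",
        "MUI_PAGE_COMPONENTS",
        "MUI_PAGE_DIRECTORY",
        "MUI_PAGE_INSTFILES",
        "MUI_PAGE_FINISH",
    ]

    pages_content = "\n".join(f"!insertmacro {page}" for page in NSIS_PAGES)
    print(pages_content)
    # !insertmacro MUI_PAGE_WELCOME
    # !insertmacro MUI_PAGE_COMPONENTS
    # ... and so on for the remaining pages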
87
+
88
+ # NSIS section names
89
+ NSIS_MAIN_SECTION: Final[str] = "required"
90
+ NSIS_SHORTCUT_SECTION: Final[str] = "Start Menu Shortcuts"
91
+ NSIS_UNINSTALL_SECTION: Final[str] = "Uninstall"
92
+
93
+ # File operation constants
94
+ FILE_PERMISSION_READ: Final[int] = 0o444
95
+ FILE_PERMISSION_WRITE: Final[int] = 0o666
96
+ FILE_PERMISSION_EXECUTE: Final[int] = 0o777
97
+
98
+ # System command constants
99
+ CMD_WHERE: Final[str] = "where"
100
+ CMD_WHICH: Final[str] = "which"
101
+ CMD_7Z: Final[str] = "7z"
102
+ CMD_MAKENSIS: Final[str] = "makensis"
103
+
104
+ # Temporary directory constants
105
+ TEMP_DIR_PREFIX: Final[str] = "pyarchive_"
106
+
107
+ # Logging format constants
108
+ LOG_FORMAT: Final[str] = "%(levelname)s: %(message)s"
109
+ LOG_LEVEL_INFO: Final[str] = "INFO"
110
+ LOG_LEVEL_DEBUG: Final[str] = "DEBUG"
111
+ LOG_LEVEL_ERROR: Final[str] = "ERROR"
112
+
113
+ logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
14
114
  logger = logging.getLogger(__name__)
15
115
 
16
- ARCHIVE_FORMATS: frozenset[str] = frozenset(["zip", "7z", "nsis"])
116
+
117
+ class ArchiveError(Exception):
118
+ """Base exception for archive operations."""
119
+
120
+ pass
121
+
122
+
123
+ class ArchiveFormatError(ArchiveError):
124
+ """Exception raised for unsupported archive formats."""
125
+
126
+ pass
127
+
128
+
129
+ class ArchiveCommandError(ArchiveError):
130
+ """Exception raised when external command is not available."""
131
+
132
+ pass
133
+
134
+
135
+ class ArchiveCreationError(ArchiveError):
136
+ """Exception raised when archive creation fails."""
137
+
138
+ pass
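All of these derive from ArchiveError, so callers can catch the whole family with a single handler. Note that the code added in this release still reports failures by logging and returning False; the hierarchy is defined but not raised anywhere in this diff. Illustrative use, assuming the classes are imported from this module:

    try:
        raise ArchiveCommandError("7z command not found")
    except ArchiveError as exc:
        # Also catches ArchiveFormatError and ArchiveCreationError.
        print(type(exc).__name__, exc)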
139
+
140
+
141
+ class ArchiveFormat(enum.Enum):
142
+ """Enumeration of supported archive formats."""
143
+
144
+ ZIP = "zip"
145
+ TAR = "tar"
146
+ GZTAR = "gztar"
147
+ BZTAR = "bztar"
148
+ XZTAR = "xztar"
149
+ SEVEN_ZIP = "7z"
150
+ NSIS = "nsis"
151
+
152
+ @classmethod
153
+ def all_formats(cls) -> frozenset[str]:
154
+ """Get all supported format strings."""
155
+ return frozenset(fmt.value for fmt in cls)
156
+
157
+ @property
158
+ def description(self) -> str:
159
+ """Get human-readable description of the format."""
160
+ descriptions = {
161
+ self.ZIP: "ZIP archive (DEFLATE compression)",
162
+ self.TAR: "TAR archive (no compression)",
163
+ self.GZTAR: "TAR archive with gzip compression",
164
+ self.BZTAR: "TAR archive with bzip2 compression",
165
+ self.XZTAR: "TAR archive with xz compression",
166
+ self.SEVEN_ZIP: "7-Zip archive (high compression)",
167
+ self.NSIS: "NSIS Windows installer",
168
+ }
169
+ return descriptions.get(self, f"{self.value} format")
170
+
171
+ @property
172
+ def file_extension(self) -> str:
173
+ """Get the file extension for this format."""
174
+ return ARCHIVE_EXTENSIONS.get(self.value, self.value)
175
+
176
+
177
+ ARCHIVE_FORMATS: frozenset[str] = ArchiveFormat.all_formats()
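For reference, the enum added above behaves as follows (assuming ArchiveFormat is imported from this module; the exact import path is not shown in the diff):

    fmt = ArchiveFormat.GZTAR
    print(fmt.description)     # TAR archive with gzip compression
    print(fmt.file_extension)  # tar.gz
    print(sorted(ArchiveFormat.all_formats()))
    # ['7z', 'bztar', 'gztar', 'nsis', 'tar', 'xztar', 'zip']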
17
178
 
18
179
  DEFAULT_IGNORE_PATTERNS: frozenset[str] = frozenset([
19
180
  "__pycache__",
@@ -24,7 +185,6 @@ DEFAULT_IGNORE_PATTERNS: frozenset[str] = frozenset([
24
185
  ".pytest_cache",
25
186
  ".coverage",
26
187
  "*.egg-info",
27
- "dist",
28
188
  "build",
29
189
  "*.log",
30
190
  ".DS_Store",
@@ -34,6 +194,76 @@ DEFAULT_IGNORE_PATTERNS: frozenset[str] = frozenset([
34
194
  ])
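These defaults are consumed by should_ignore() below (the existing module-level helper and the new PyArchiver method): a pattern that starts with "*." is matched against the file name, anything else against the full path. A stdlib-only sketch of that rule:

    from fnmatch import fnmatch
    from pathlib import Path

    def ignored(path: Path, patterns: set[str]) -> bool:
        for pattern in patterns:
            if pattern.startswith("*."):
                # Extension-style patterns are checked against the file name only.
                if fnmatch(path.name, pattern):
                    return True
            elif pattern in str(path) or fnmatch(str(path), pattern):
                return True
        return False

    assert ignored(Path("pkg/__pycache__/mod.cpython-312.pyc"), {"__pycache__"})
    assert ignored(Path("app/debug.log"), {"*.log"})
    assert not ignored(Path("app/main.py"), {"*.log", "__pycache__"})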
35
195
 
36
196
 
197
+ @dataclass
198
+ class PyArchiveConfig:
199
+ """Configuration for PyArchive with persistent settings."""
200
+
201
+ compression_level: int = DEFAULT_COMPRESSION_LEVEL
202
+ verbose: bool = False
203
+ preserve_permissions: bool = True
204
+ max_workers: int = DEFAULT_MAX_WORKERS
205
+ cache_dir: Path | None = None
206
+ output_dir: str = DEFAULT_OUTPUT_DIR
207
+ dist_dir: str = DEFAULT_DIST_DIR
208
+
209
+ def __post_init__(self) -> None:
210
+ """Initialize configuration and load from file if exists."""
211
+ if self.cache_dir is None:
212
+ self.cache_dir = DEFAULT_CACHE_DIR
214
+
215
+ self.cache_dir.mkdir(parents=True, exist_ok=True)
216
+
217
+ # Load existing configuration from file
218
+ if CONFIG_FILE.exists():
219
+ try:
220
+ config_data = json.loads(CONFIG_FILE.read_text(encoding="utf-8"))
221
+ # Update configuration items, keeping defaults as fallback
222
+ for key, value in config_data.items():
223
+ if hasattr(self, key) and isinstance(
224
+ value, type(getattr(self, key))
225
+ ):
226
+ setattr(self, key, value)
227
+ except (json.JSONDecodeError, TypeError, AttributeError) as e:
228
+ logger.warning(f"Could not load config from {CONFIG_FILE}: {e}")
229
+
230
+ # Validate compression level
231
+ if not MIN_COMPRESSION_LEVEL <= self.compression_level <= MAX_COMPRESSION_LEVEL:
232
+ raise ValueError(
233
+ f"Compression level must be between {MIN_COMPRESSION_LEVEL} and {MAX_COMPRESSION_LEVEL}"
234
+ )
235
+
236
+ def save(self) -> None:
237
+ """Save current configuration to file."""
238
+ CONFIG_FILE.parent.mkdir(parents=True, exist_ok=True)
239
+ config_dict = {
240
+ "compression_level": self.compression_level,
241
+ "verbose": self.verbose,
242
+ "preserve_permissions": self.preserve_permissions,
243
+ "max_workers": self.max_workers,
244
+ "cache_dir": str(self.cache_dir),
245
+ "output_dir": self.output_dir,
246
+ "dist_dir": self.dist_dir,
247
+ }
248
+ CONFIG_FILE.write_text(json.dumps(config_dict, indent=4), encoding="utf-8")
249
+
250
+
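A short example of the persistence behaviour added here, again assuming the class is importable from this module:

    config = PyArchiveConfig()   # values from ~/.pysfi/pyarchive.json override the defaults, if that file exists
    config.compression_level = 9
    config.verbose = True
    config.save()                # writes the current settings back to ~/.pysfi/pyarchive.json

Because __post_init__ applies the saved file after the constructor arguments, values from an existing config file take precedence over arguments passed at construction time.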
251
+ @dataclass
252
+ class ArchiveOptions:
253
+ """Options for archive creation (legacy compatibility)."""
254
+
255
+ compression_level: int = DEFAULT_COMPRESSION_LEVEL  # 0-9 for most formats
256
+ verbose: bool = False
257
+ preserve_permissions: bool = True
258
+
259
+ def __post_init__(self) -> None:
260
+ """Validate compression level."""
261
+ if not MIN_COMPRESSION_LEVEL <= self.compression_level <= MAX_COMPRESSION_LEVEL:
262
+ raise ValueError(
263
+ f"Compression level must be between {MIN_COMPRESSION_LEVEL} and {MAX_COMPRESSION_LEVEL}"
264
+ )
265
+
266
+
37
267
  def should_ignore(file_path: Path, ignore_patterns: set[str]) -> bool:
38
268
  """Check if a file should be ignored based on patterns."""
39
269
  from fnmatch import fnmatch
@@ -60,320 +290,692 @@ def load_projects(config_file: Path) -> dict[str, Any]:
60
290
  logger.error(f"Configuration file not found: {config_file}")
61
291
  return {}
62
292
 
63
- with open(config_file, encoding="utf-8") as f:
64
- return json.load(f)
293
+ try:
294
+ with open(config_file, encoding="utf-8") as f:
295
+ return json.load(f)
296
+ except json.JSONDecodeError as e:
297
+ logger.error(f"Invalid JSON in configuration file {config_file}: {e}")
298
+ return {}
65
299
 
66
300
 
67
- def get_project_directory(project_name: str, base_dir: Path) -> Path | None:
68
- """Locate the project directory."""
69
- project_path = base_dir / project_name
70
- if project_path.exists() and project_path.is_dir():
71
- return project_path
301
+ @dataclass(frozen=True)
302
+ class PyArchiver:
303
+ """Main archiver class for creating project archives."""
72
304
 
73
- return None
305
+ root_dir: Path
306
+ config: PyArchiveConfig = field(default_factory=PyArchiveConfig)
74
307
 
308
+ def __post_init__(self) -> None:
309
+ """Initialize archiver and register config auto-save."""
310
+ atexit.register(self.config.save)
75
311
 
76
- def check_command_available(command: str) -> bool:
77
- """Check if a command is available in the system PATH."""
78
- try:
79
- subprocess.run(
80
- ["where", command] if shutil.which("where") else ["which", command],
81
- capture_output=True,
82
- check=True,
83
- shell=True,
84
- )
85
- return True
86
- except (subprocess.CalledProcessError, FileNotFoundError):
87
- return False
312
+ @cached_property
313
+ def solution(self) -> Solution:
314
+ """Get the solution from the target directory."""
315
+ return Solution.from_directory(self.root_dir, update=True)
88
316
 
317
+ @cached_property
318
+ def projects(self) -> dict[str, Project]:
319
+ """Get all projects in the solution."""
320
+ return self.solution.projects
89
321
 
90
- def archive_zip(
91
- dist_dir: Path,
92
- output_file: Path,
93
- ignore_patterns: set[str],
94
- ) -> bool:
95
- """Create ZIP archive using Python's zipfile module."""
96
- try:
97
- import zipfile
98
-
99
- logger.info(f"Creating ZIP archive: {output_file}, using files: {dist_dir}")
100
- with zipfile.ZipFile(output_file, "w", zipfile.ZIP_DEFLATED) as zipf:
101
- for file_path in dist_dir.rglob("*"):
102
- if file_path.is_file() and not should_ignore(
103
- file_path, ignore_patterns
104
- ):
105
- arcname = file_path.relative_to(dist_dir)
106
- zipf.write(file_path, arcname)
107
- logger.debug(f"Added: {arcname}")
108
-
109
- logger.info(f"ZIP archive created successfully: {output_file}")
110
- return True
111
- except Exception as e:
112
- logger.error(f"Failed to create ZIP archive: {e}")
113
- return False
322
+ @cached_property
323
+ def default_ignore_patterns(self) -> set[str]:
324
+ """Get default ignore patterns."""
325
+ return set(DEFAULT_IGNORE_PATTERNS)
114
326
 
327
+ def should_ignore(
328
+ self, file_path: Path, ignore_patterns: set[str] | None = None
329
+ ) -> bool:
330
+ """Check if a file should be ignored based on patterns."""
331
+ from fnmatch import fnmatch
115
332
 
116
- def archive_7z(
117
- dist_dir: Path,
118
- output_file: Path,
119
- ignore_patterns: set[str],
120
- ) -> bool:
121
- """Create 7z archive using 7z command."""
122
- if not check_command_available("7z"):
123
- logger.error("7z command not found. Please install 7-Zip.")
124
- return False
333
+ patterns = ignore_patterns or self.default_ignore_patterns
125
334
 
126
- try:
127
- logger.info(f"Creating 7z archive: {output_file}")
128
- output_file.parent.mkdir(parents=True, exist_ok=True)
335
+ # Convert to string for pattern matching
336
+ file_str = str(file_path)
129
337
 
130
- # Create temp directory with filtered files
131
- import tempfile
338
+ for pattern in patterns:
339
+ if pattern.startswith("*."):
340
+ # Check just the filename
341
+ if fnmatch(file_path.name, pattern):
342
+ return True
343
+ else:
344
+ # Check full path
345
+ if pattern in file_str or fnmatch(file_str, pattern):
346
+ return True
132
347
 
133
- with tempfile.TemporaryDirectory() as temp_dir:
134
- temp_path = Path(temp_dir) / dist_dir.name
135
- shutil.copytree(
136
- dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
137
- )
348
+ return False
138
349
 
139
- cmd = ["7z", "a", "-t7z", str(output_file), str(temp_path / "*")]
140
- subprocess.run(cmd, check=True, shell=True)
350
+ def check_command_available(self, command: str) -> bool:
351
+ """Check if a command is available in the system PATH."""
352
+ try:
353
+ cmd = (
354
+ [CMD_WHERE, command]
355
+ if shutil.which(CMD_WHERE)
356
+ else [CMD_WHICH, command]
357
+ )
358
+ subprocess.run(
359
+ cmd,
360
+ capture_output=True,
361
+ check=True,
363
+ )
364
+ return True
365
+ except (subprocess.CalledProcessError, FileNotFoundError):
366
+ return False
141
367
 
142
- logger.info(f"7z archive created successfully: {output_file}")
143
- return True
144
- except subprocess.CalledProcessError as e:
145
- logger.error(f"Failed to create 7z archive: {e}")
146
- return False
147
- except Exception as e:
148
- logger.error(f"Unexpected error creating 7z archive: {e}")
149
- return False
368
+ def validate_project(self, project: Project) -> bool:
369
+ """Validate project configuration before archiving.
150
370
 
371
+ Args:
372
+ project: Project to validate
151
373
 
152
- def create_nsis_script(
153
- project: Project,
154
- dist_dir: Path,
155
- output_file: Path,
156
- ) -> Path | None:
157
- """Generate NSIS script file."""
158
- # Convert source_dir to use forward slashes for NSIS compatibility
159
- source_dir_str = str(dist_dir).replace("\\", "/")
160
-
161
- nsis_script = f"""
162
- !include "MUI2.nsh"
163
-
164
- Name "{project.name}"
165
- OutFile "{output_file.name}"
166
- InstallDir "$PROGRAMFILES\\{project.name}"
167
- InstallDirRegKey HKCU "Software\\{project.name}" ""
168
- RequestExecutionLevel admin
169
- VIProductVersion {project.version.replace(".", ",")}.0
170
- VIAddVersionKey "ProductName" "{project.name}"
171
- VIAddVersionKey "ProductVersion" "{project.version}"
172
- VIAddVersionKey "FileDescription" "{project.description}"
173
- VIAddVersionKey "FileVersion" "{project.version}"
174
-
175
- !define MUI_ABORTWARNING
176
- !insertmacro MUI_PAGE_WELCOME
177
- !insertmacro MUI_PAGE_LICENSE "LICENSE"
178
- !insertmacro MUI_PAGE_COMPONENTS
179
- !insertmacro MUI_PAGE_DIRECTORY
180
- !insertmacro MUI_PAGE_INSTFILES
181
- !insertmacro MUI_PAGE_FINISH
182
-
183
- !insertmacro MUI_UNPAGE_WELCOME
184
- !insertmacro MUI_UNPAGE_CONFIRM
185
- !insertmacro MUI_UNPAGE_INSTFILES
186
- !insertmacro MUI_UNPAGE_FINISH
187
-
188
- !insertmacro MUI_LANGUAGE "English"
189
-
190
- Section "{project.name} (required)" SecMain
191
- SectionIn RO
192
- SetOutPath "$INSTDIR"
193
- File /r "{source_dir_str}\\*.*"
194
- SectionEnd
195
-
196
- Section "Start Menu Shortcuts"
197
- CreateDirectory "$SMPROGRAMS\\{project.name}"
198
- CreateShortcut "$SMPROGRAMS\\{project.name}\\{project.name}.lnk" "$INSTDIR\\main.py"
199
- SectionEnd
200
-
201
- Section "Uninstall"
202
- Delete "$SMPROGRAMS\\{project.name}\\*.*"
203
- RMDir "$SMPROGRAMS\\{project.name}"
204
- RMDir /r "$INSTDIR"
205
- DeleteRegKey /ifempty HKCU "Software\\{project.name}"
206
- SectionEnd
207
- """
374
+ Returns:
375
+ True if project is valid, False otherwise
208
376
 
209
- script_file = output_file.parent / f"{project.name}_installer.nsi"
210
- try:
211
- with open(script_file, "w", encoding="utf-8") as f:
212
- f.write(nsis_script)
213
- logger.info(f"NSIS script generated: {script_file}")
214
- return script_file
215
- except Exception as e:
216
- logger.error(f"Failed to generate NSIS script: {e}")
217
- return None
377
+ Raises:
378
+ ValueError: If project configuration is invalid
379
+ """
380
+ if not project.name:
381
+ raise ValueError("Project name cannot be empty")
218
382
 
383
+ if not project.version:
384
+ raise ValueError("Project version cannot be empty")
219
385
 
220
- def archive_nsis(
221
- project: Project,
222
- dist_dir: Path,
223
- output_file: Path,
224
- ignore_patterns: set[str],
225
- ) -> bool:
226
- """Create NSIS installer using makensis command."""
227
- if not check_command_available("makensis"):
228
- logger.error("makensis command not found. Please install NSIS.")
229
- return False
386
+ # Validate version format (basic check)
387
+ version_parts = project.version.split(".")
388
+ if len(version_parts) < 2:
389
+ logger.warning(
390
+ f"Project {project.name} has unusual version format: {project.version}"
391
+ )
230
392
 
231
- try:
232
- logger.info(f"Creating NSIS installer: {output_file}")
233
- output_file.parent.mkdir(parents=True, exist_ok=True)
393
+ return True
234
394
 
235
- # Create temp directory with filtered files
236
- import tempfile
395
+ def get_project_directory(self, project_name: str) -> Path | None:
396
+ """Locate the project directory."""
397
+ project_path = self.root_dir / project_name
398
+ if project_path.exists() and project_path.is_dir():
399
+ return project_path
400
+ return None
237
401
 
238
- with tempfile.TemporaryDirectory() as temp_dir:
239
- temp_path = Path(temp_dir) / dist_dir.name
240
- shutil.copytree(
241
- dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
402
+ def _prepare_archive_paths(
403
+ self, project: Project, file_extension: str
404
+ ) -> tuple[Path, Path]:
405
+ """Prepare source and destination paths for archiving."""
406
+ dist_dir = self.root_dir / self.config.dist_dir
407
+ output_dir = self.root_dir / self.config.output_dir
408
+ output_dir.mkdir(parents=True, exist_ok=True)
409
+ output_file = output_dir / f"{project.name}-{project.version}.{file_extension}"
410
+ return dist_dir, output_file
411
+
412
+ def _get_archive_function_and_extension(
413
+ self, format_str: str
414
+ ) -> tuple[callable, str] | tuple[None, None]:
415
+ """Get the appropriate archive function and file extension for the given format."""
416
+ format_functions = {
417
+ "zip": (self._archive_zip, ArchiveFormat.ZIP.file_extension),
418
+ "tar": (self._archive_tar, ArchiveFormat.TAR.file_extension),
419
+ "gztar": (self._archive_gztar, ArchiveFormat.GZTAR.file_extension),
420
+ "bztar": (self._archive_bztar, ArchiveFormat.BZTAR.file_extension),
421
+ "xztar": (self._archive_xztar, ArchiveFormat.XZTAR.file_extension),
422
+ "7z": (self._archive_7z, ArchiveFormat.SEVEN_ZIP.file_extension),
423
+ "nsis": (self._archive_nsis, ArchiveFormat.NSIS.file_extension),
424
+ }
425
+ return format_functions.get(format_str, (None, None))
426
+
427
+ def archive_project(
428
+ self,
429
+ project: Project,
430
+ format: str,
431
+ ignore_patterns: set[str] | None = None,
432
+ ) -> bool:
433
+ """Archive a single project.
434
+
435
+ Args:
436
+ project: Project to archive
437
+ format: Archive format (zip, tar, gztar, etc.)
438
+ ignore_patterns: Patterns to ignore during archiving
439
+
440
+ Returns:
441
+ True if archiving succeeded, False otherwise
442
+ """
443
+ logger.info(f"Processing project: {project.name}")
444
+
445
+ # Validate project configuration
446
+ try:
447
+ self.validate_project(project)
448
+ except ValueError as e:
449
+ logger.error(f"Project validation failed: {e}")
450
+ return False
451
+
452
+ # Validate and get archive function
453
+ archive_func, file_extension = self._get_archive_function_and_extension(format)
454
+ if archive_func is None:
455
+ logger.error(f"Unsupported format: {format}")
456
+ return False
457
+
458
+ # Prepare paths
459
+ dist_dir, output_file = self._prepare_archive_paths(project, file_extension)
460
+
461
+ if not dist_dir.exists():
462
+ logger.warning(
463
+ f"Project dist directory not found: {project.name}, please build project first"
242
464
  )
465
+ return False
466
+
467
+ patterns = ignore_patterns or self.default_ignore_patterns
243
468
 
244
- script_file = create_nsis_script(
469
+ # Handle NSIS specially (requires project parameter)
470
+ if format == "nsis":
471
+ return self._archive_nsis(
245
472
  project=project,
246
- dist_dir=temp_path,
473
+ dist_dir=dist_dir,
247
474
  output_file=output_file,
475
+ ignore_patterns=patterns,
248
476
  )
249
- if not script_file or not script_file.exists():
250
- return False
251
477
 
252
- cmd = ["makensis", str(script_file)]
253
- subprocess.run(cmd, check=True, shell=True)
478
+ # Call appropriate archive function
479
+ return archive_func(
480
+ dist_dir=dist_dir,
481
+ output_file=output_file,
482
+ ignore_patterns=patterns,
483
+ )
254
484
 
255
- logger.info(f"NSIS installer created successfully: {output_file}")
256
- return True
257
- except subprocess.CalledProcessError as e:
258
- logger.error(f"Failed to create NSIS installer: {e}")
259
- return False
260
- except Exception as e:
261
- logger.error(f"Unexpected error creating NSIS installer: {e}")
262
- return False
485
+ def archive_projects(
486
+ self,
487
+ format: str,
488
+ projects_to_archive: list[str] | None = None,
489
+ ignore_patterns: set[str] | None = None,
490
+ ) -> tuple[int, int]:
491
+ """Archive multiple projects and return (success_count, total_count).
492
+
493
+ Args:
494
+ format: Archive format (zip, tar, gztar, bztar, xztar, 7z, nsis)
495
+ projects_to_archive: List of project names to archive (None = all)
496
+ ignore_patterns: Additional patterns to ignore during archiving
497
+
498
+ Returns:
499
+ Tuple of (success_count, total_count)
500
+
501
+ Note:
502
+ An unsupported format is logged as an error and (0, 0) is returned.
503
+ """
504
+ # Validate inputs
505
+ if format not in ARCHIVE_FORMATS:
506
+ logger.error(
507
+ f"Unsupported format: {format}. Supported formats: {', '.join(ARCHIVE_FORMATS)}"
508
+ )
509
+ return 0, 0
263
510
 
511
+ logger.debug(f"Archiving projects in {self.root_dir} to `{format}` format")
512
+ logger.debug(f"Compression level: {self.config.compression_level}")
513
+ if self.config.verbose:
514
+ logger.debug("Verbose mode enabled")
264
515
 
265
- def archive_project(
266
- project: Project,
267
- directory: Path,
268
- format: str,
269
- ignore_patterns: set[str],
270
- ) -> bool:
271
- """Archive a single project."""
272
- logger.info(f"Processing project: {project.name}")
516
+ # Determine projects to archive
517
+ projects_to_archive = projects_to_archive or list(self.projects.keys())
518
+ if not projects_to_archive:
519
+ logger.error("No projects to archive")
520
+ return 0, 0
273
521
 
274
- dist_dir = directory / "dist"
275
- if not dist_dir:
276
- logger.warning(
277
- f"Project dist directory not found: {project.name}, please build project first"
278
- )
279
- return False
522
+ logger.debug(f"Archiving projects: {', '.join(projects_to_archive)}")
280
523
 
281
- output_dir = directory / "build"
282
- output_dir.mkdir(parents=True, exist_ok=True)
283
- if format == "nsis":
284
- output_file = output_dir / f"{project.name}-{project.version}-setup.exe"
285
- return archive_nsis(
286
- project=project,
287
- dist_dir=dist_dir,
288
- output_file=output_file,
289
- ignore_patterns=ignore_patterns,
290
- )
291
- else:
292
- extension = "zip" if format == "zip" else "7z"
293
- output_file = output_dir / f"{project.name}-{project.version}.{extension}"
524
+ # Process projects
525
+ success_count = 0
526
+ total_count = 0
294
527
 
295
- if format == "zip":
296
- return archive_zip(
297
- dist_dir=dist_dir,
298
- output_file=output_file,
528
+ for project_name in projects_to_archive:
529
+ if project_name not in self.projects:
530
+ logger.warning(f"Project not found: {project_name}")
531
+ continue
532
+
533
+ project = self.projects[project_name]
534
+ total_count += 1
535
+
536
+ if self.archive_project(
537
+ project=project,
538
+ format=format,
299
539
  ignore_patterns=ignore_patterns,
540
+ ):
541
+ success_count += 1
542
+
543
+ # Report results
544
+ if success_count:
545
+ logger.info(
546
+ f"Archiving complete: {success_count}/{total_count} projects successfully archived"
300
547
  )
301
- elif format == "7z":
302
- return archive_7z(
303
- dist_dir=dist_dir,
304
- output_file=output_file,
305
- ignore_patterns=ignore_patterns,
548
+ else:
549
+ logger.error("Archiving failed")
550
+
551
+ return success_count, total_count
552
+
553
+ def get_archive_info(self, project: Project, format: str) -> dict[str, Any]:
554
+ """Get information about an archive without creating it.
555
+
556
+ Args:
557
+ project: Project to get archive info for
558
+ format: Archive format
559
+
560
+ Returns:
561
+ Dictionary containing archive information (name, extension, estimated_size, etc.)
562
+ """
563
+ _, file_extension = self._get_archive_function_and_extension(format)
564
+ if file_extension is None:
565
+ return {}
566
+
567
+ dist_dir, output_file = self._prepare_archive_paths(project, file_extension)
568
+
569
+ info = {
570
+ "project_name": project.name,
571
+ "project_version": project.version,
572
+ "format": format,
573
+ "output_file": str(output_file),
574
+ "output_dir": str(output_file.parent),
575
+ "dist_dir": str(dist_dir),
576
+ "dist_exists": dist_dir.exists(),
577
+ }
578
+
579
+ # Calculate source size if dist exists
580
+ if dist_dir.exists():
581
+ total_size = sum(
582
+ f.stat().st_size for f in dist_dir.rglob("*") if f.is_file()
306
583
  )
584
+ info["source_size_bytes"] = total_size
585
+ info["source_size_mb"] = round(total_size / (1024 * 1024), 2)
586
+
587
+ return info
588
+
589
+ def _archive_tar(
590
+ self,
591
+ dist_dir: Path,
592
+ output_file: Path,
593
+ ignore_patterns: set[str],
594
+ ) -> bool:
595
+ """Create TAR archive (no compression)."""
596
+ try:
597
+ logger.info(f"Creating TAR archive: {output_file}")
598
+ output_file.parent.mkdir(parents=True, exist_ok=True)
599
+
600
+ with tarfile.open(output_file, "w", format=tarfile.PAX_FORMAT) as tar:
601
+ for file_path in dist_dir.rglob("*"):
602
+ if file_path.is_file() and not self.should_ignore(
603
+ file_path, ignore_patterns
604
+ ):
605
+ arcname = file_path.relative_to(dist_dir)
606
+ tarinfo = tar.gettarinfo(str(file_path), str(arcname))
607
+
608
+ if not self.config.preserve_permissions:
609
+ # gettarinfo() already copies st_mode; override it only when permissions should not be preserved
610
+ tarinfo.mode = 0o644
611
+
612
+ with open(file_path, "rb") as f:
613
+ tar.addfile(tarinfo, f)
614
+
615
+ if self.config.verbose:
616
+ logger.debug(f"Added: {arcname}")
617
+
618
+ logger.info(f"TAR archive created successfully: {output_file}")
619
+ return True
620
+ except Exception as e:
621
+ logger.error(f"Failed to create TAR archive: {e}")
622
+ return False
623
+
624
+ def _archive_gztar(
625
+ self,
626
+ dist_dir: Path,
627
+ output_file: Path,
628
+ ignore_patterns: set[str],
629
+ ) -> bool:
630
+ """Create gzip-compressed TAR archive."""
631
+ try:
632
+ logger.info(f"Creating gzip-compressed TAR archive: {output_file}")
633
+ output_file.parent.mkdir(parents=True, exist_ok=True)
634
+
635
+ # Map compression level to gzip compresslevel (0-9)
636
+ compresslevel = max(0, min(9, self.config.compression_level))
637
+
638
+ with tarfile.open(output_file, "w:gz", compresslevel=compresslevel) as tar:
639
+ for file_path in dist_dir.rglob("*"):
640
+ if file_path.is_file() and not self.should_ignore(
641
+ file_path, ignore_patterns
642
+ ):
643
+ arcname = file_path.relative_to(dist_dir)
644
+ tar.add(str(file_path), str(arcname))
645
+ if self.config.verbose:
646
+ logger.debug(f"Added: {arcname}")
647
+
648
+ logger.info(
649
+ f"Gzip-compressed TAR archive created successfully: {output_file}"
650
+ )
651
+ return True
652
+ except Exception as e:
653
+ logger.error(f"Failed to create gzip-compressed TAR archive: {e}")
654
+ return False
655
+
656
+ def _archive_bztar(
657
+ self,
658
+ dist_dir: Path,
659
+ output_file: Path,
660
+ ignore_patterns: set[str],
661
+ ) -> bool:
662
+ """Create bzip2-compressed TAR archive."""
663
+ try:
664
+ logger.info(f"Creating bzip2-compressed TAR archive: {output_file}")
665
+ output_file.parent.mkdir(parents=True, exist_ok=True)
666
+
667
+ with tarfile.open(output_file, "w:bz2") as tar:
668
+ for file_path in dist_dir.rglob("*"):
669
+ if file_path.is_file() and not self.should_ignore(
670
+ file_path, ignore_patterns
671
+ ):
672
+ arcname = file_path.relative_to(dist_dir)
673
+ tar.add(str(file_path), str(arcname))
674
+ if self.config.verbose:
675
+ logger.debug(f"Added: {arcname}")
676
+
677
+ logger.info(
678
+ f"Bzip2-compressed TAR archive created successfully: {output_file}"
679
+ )
680
+ return True
681
+ except Exception as e:
682
+ logger.error(f"Failed to create bzip2-compressed TAR archive: {e}")
683
+ return False
684
+
685
+ def _archive_xztar(
686
+ self,
687
+ dist_dir: Path,
688
+ output_file: Path,
689
+ ignore_patterns: set[str],
690
+ ) -> bool:
691
+ """Create xz-compressed TAR archive."""
692
+ try:
693
+ logger.info(f"Creating xz-compressed TAR archive: {output_file}")
694
+ output_file.parent.mkdir(parents=True, exist_ok=True)
695
+
696
+ with tarfile.open(output_file, "w:xz") as tar:
697
+ for file_path in dist_dir.rglob("*"):
698
+ if file_path.is_file() and not self.should_ignore(
699
+ file_path, ignore_patterns
700
+ ):
701
+ arcname = file_path.relative_to(dist_dir)
702
+ tar.add(str(file_path), str(arcname))
703
+ if self.config.verbose:
704
+ logger.debug(f"Added: {arcname}")
705
+
706
+ logger.info(
707
+ f"XZ-compressed TAR archive created successfully: {output_file}"
708
+ )
709
+ return True
710
+ except Exception as e:
711
+ logger.error(f"Failed to create xz-compressed TAR archive: {e}")
712
+ return False
713
+
714
+ def _archive_zip(
715
+ self,
716
+ dist_dir: Path,
717
+ output_file: Path,
718
+ ignore_patterns: set[str],
719
+ ) -> bool:
720
+ """Create ZIP archive using Python's zipfile module."""
721
+ try:
722
+ logger.info(f"Creating ZIP archive: {output_file}")
723
+ output_file.parent.mkdir(parents=True, exist_ok=True)
724
+
725
+ # Map compression level to ZIP compression (0-9)
726
+ compresslevel = max(0, min(9, self.config.compression_level))
727
+
728
+ with zipfile.ZipFile(
729
+ output_file, "w", zipfile.ZIP_DEFLATED, compresslevel=compresslevel
730
+ ) as zipf:
731
+ for file_path in dist_dir.rglob("*"):
732
+ if file_path.is_file() and not self.should_ignore(
733
+ file_path, ignore_patterns
734
+ ):
735
+ arcname = file_path.relative_to(dist_dir)
736
+ zipf.write(file_path, arcname)
737
+ if self.config.verbose:
738
+ logger.debug(f"Added: {arcname}")
739
+
740
+ logger.info(f"ZIP archive created successfully: {output_file}")
741
+ return True
742
+ except Exception as e:
743
+ logger.error(f"Failed to create ZIP archive: {e}")
744
+ return False
745
+
746
+ def _archive_7z(
747
+ self,
748
+ dist_dir: Path,
749
+ output_file: Path,
750
+ ignore_patterns: set[str],
751
+ ) -> bool:
752
+ """Create 7z archive using 7z command."""
753
+ if not self.check_command_available(CMD_7Z):
754
+ logger.error(f"{CMD_7Z} command not found. Please install 7-Zip.")
755
+ return False
756
+
757
+ try:
758
+ logger.info(f"Creating 7z archive: {output_file}")
759
+ output_file.parent.mkdir(parents=True, exist_ok=True)
760
+
761
+ # Create temp directory with filtered files
762
+ with tempfile.TemporaryDirectory(prefix=TEMP_DIR_PREFIX) as temp_dir:
763
+ temp_path = Path(temp_dir) / dist_dir.name
764
+ shutil.copytree(
765
+ dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
766
+ )
767
+
768
+ cmd = [CMD_7Z, "a", "-t7z", str(output_file), str(temp_path / "*")]
769
+ subprocess.run(cmd, check=True, shell=True)
770
+
771
+ logger.info(f"7z archive created successfully: {output_file}")
772
+ return True
773
+ except subprocess.CalledProcessError as e:
774
+ logger.error(f"Failed to create 7z archive: {e}")
775
+ return False
776
+ except Exception as e:
777
+ logger.error(f"Unexpected error creating 7z archive: {e}")
778
+ return False
779
+
780
+ def _generate_nsis_header(self, project: Project, output_file: Path) -> str:
781
+ """Generate NSIS script header section."""
782
+ # Extract major.minor.patch from version for VIProductVersion
783
+ version_parts = project.version.split(".")
784
+ if len(version_parts) >= 3:
785
+ major, minor, patch = version_parts[0], version_parts[1], version_parts[2]
786
+ elif len(version_parts) == 2:
787
+ major, minor, patch = version_parts[0], version_parts[1], "0"
788
+ else:
789
+ major, minor, patch = version_parts[0], "0", "0"
307
790
 
308
- logger.error(f"Unsupported format: {format}")
309
- return False
310
-
311
-
312
- def archive_projects(
313
- directory: Path,
314
- format: str,
315
- projects_to_archive: list | None = None,
316
- ignore_patterns: set | None = None,
317
- ) -> None:
318
- """Archive all projects in the directory."""
319
- if format not in ARCHIVE_FORMATS:
320
- logger.error(
321
- f"Unsupported format: {format}. Supported formats: {', '.join(ARCHIVE_FORMATS)}"
791
+ # Ensure description is not empty for NSIS
792
+ description = (
793
+ project.description
794
+ if project.description
795
+ else f"{project.name} installation package"
322
796
  )
323
- return
324
- else:
325
- logger.debug(f"Archiving projects in {directory} to `{format}` format")
326
-
327
- ignore_patterns = (
328
- ignore_patterns | DEFAULT_IGNORE_PATTERNS
329
- if ignore_patterns
330
- else set(DEFAULT_IGNORE_PATTERNS)
331
- )
332
- logger.debug(f"Ignoring patterns: {', '.join(ignore_patterns)}")
333
-
334
- project_config = Solution.from_directory(root_dir=directory)
335
- projects = project_config.projects
336
-
337
- if not projects:
338
- logger.error("No projects found in configuration")
339
- return
340
- else:
341
- logger.debug(f"Found {len(projects)} projects")
342
-
343
- projects_to_archive = projects_to_archive or list(projects.keys())
344
- if not projects_to_archive:
345
- logger.error("No projects to archive")
346
- return
347
- else:
348
- logger.debug(f"Archiving projects: {', '.join(projects_to_archive)}")
349
797
 
350
- success_count = 0
351
- total_count = 0
352
- for project_name in projects_to_archive:
353
- if project_name not in projects:
354
- logger.warning(f"Project not found: {project_name}")
355
- continue
356
- project = projects[project_name]
357
- total_count += 1
358
- if archive_project(
359
- project=project,
360
- directory=directory,
361
- format=format,
362
- ignore_patterns=ignore_patterns,
363
- ):
364
- success_count += 1
365
-
366
- if success_count:
367
- logger.info(
368
- f"Archiving complete: {success_count}/{total_count} projects successfully archived"
369
- )
370
- else:
371
- logger.error("Archiving failed")
798
+ return f"""!include "{NSIS_INCLUDE_FILE}"
799
+
800
+ Name "{project.name}"
801
+ OutFile "{output_file.name}"
802
+ InstallDir "{NSIS_PROGRAM_FILES}\\{project.name}"
803
+ InstallDirRegKey HKCU "{NSIS_REGISTRY_KEY}\\{project.name}" ""
804
+ RequestExecutionLevel admin
805
+ VIProductVersion {major}.{minor}.{patch}.0
806
+ VIAddVersionKey "ProductName" "{project.name}"
807
+ VIAddVersionKey "ProductVersion" "{project.version}"
808
+ VIAddVersionKey "FileDescription" "{description}"
809
+ VIAddVersionKey "FileVersion" "{project.version}"
810
+ VIAddVersionKey "LegalCopyright" "Copyright © {project.name} Development Team"
811
+ """
812
+
813
+ def _generate_nsis_pages(self) -> str:
814
+ """Generate NSIS page definitions."""
815
+ pages_content = "\n".join([f"!insertmacro {page}" for page in NSIS_PAGES])
816
+ unpages_content = "\n".join([f"!insertmacro {page}" for page in NSIS_UNPAGES])
817
+
818
+ return f"""!define {NSIS_ABORT_WARNING}
819
+ {pages_content}
820
+
821
+ {unpages_content}
822
+
823
+ !insertmacro MUI_LANGUAGE "{NSIS_LANGUAGE}"
824
+ """
825
+
826
+ def _generate_nsis_sections(self, project: Project, source_dir_str: str) -> str:
827
+ """Generate NSIS section definitions."""
828
+
829
+ return f"""Section "{project.name} ({NSIS_MAIN_SECTION})" SecMain
830
+ SectionIn RO
831
+ SetOutPath "{NSIS_INSTALL_DIR}"
832
+ File /r "{source_dir_str}\\*.*"
833
+
834
+ ; Write the uninstaller
835
+ WriteUninstaller "$INSTDIR\\Uninstall.exe"
836
+
837
+ ; Add registry keys for Add/Remove Programs
838
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "DisplayName" "{project.name}"
839
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "DisplayVersion" "{project.version}"
840
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "DisplayIcon" "$WINDIR\\system32\\imageres.dll,0"
841
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "UninstallString" "$INSTDIR\\Uninstall.exe"
842
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "InstallLocation" "$INSTDIR"
843
+ WriteRegStr HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}" "Publisher" "{project.name} Development Team"
844
+ SectionEnd
845
+
846
+ Section "{NSIS_SHORTCUT_SECTION}"
847
+ CreateDirectory "$SMPROGRAMS\\{project.name}"
848
+ ; Create shortcut for main executable using project's exe_name
849
+ ; Since we verified the executable exists, we can create the shortcut directly
850
+ CreateShortcut "$SMPROGRAMS\\{project.name}\\{project.name}.lnk" "$INSTDIR\\{project.exe_name}" "" "$WINDIR\\system32\\shell32.dll" 15
851
+ CreateShortCut "$SMPROGRAMS\\{project.name}\\Uninstall.lnk" "$INSTDIR\\Uninstall.exe" "" "$WINDIR\\system32\\shell32.dll" 27
852
+ ; Create desktop shortcut
853
+ CreateShortCut "$DESKTOP\\{project.name}.lnk" "$INSTDIR\\{project.exe_name}" "" "$WINDIR\\system32\\shell32.dll" 15
854
+ SectionEnd
855
+
856
+ Section "{NSIS_UNINSTALL_SECTION}"
857
+ Delete "$INSTDIR\\*.*"
858
+ Delete "$SMPROGRAMS\\{project.name}\\*.*"
859
+ RMDir "$SMPROGRAMS\\{project.name}"
860
+ RMDir /r "$INSTDIR"
861
+ DeleteRegKey /ifempty HKCU "{NSIS_REGISTRY_KEY}\\{project.name}"
862
+ ; Remove desktop shortcut on uninstall
863
+ Delete "$DESKTOP\\{project.name}.lnk"
864
+ ; Remove Add/Remove Programs registry keys
865
+ DeleteRegKey HKCU "Software\\Microsoft\\Windows\\CurrentVersion\\Uninstall\\{project.name}"
866
+ SectionEnd
867
+ """
868
+
869
+ def _create_nsis_script(
870
+ self,
871
+ project: Project,
872
+ dist_dir: Path,
873
+ output_file: Path,
874
+ ) -> Path | None:
875
+ """Generate NSIS script file."""
876
+ # Convert source_dir to use forward slashes for NSIS compatibility
877
+ source_dir_str = str(dist_dir).replace("\\", "/")
878
+
879
+ # Generate script components
880
+ header = self._generate_nsis_header(project, output_file)
881
+ pages = self._generate_nsis_pages()
882
+ sections = self._generate_nsis_sections(project, source_dir_str)
883
+
884
+ # Combine all parts
885
+ nsis_script = f"""{header}
886
+ {pages}
887
+ {sections}"""
888
+
889
+ script_file = output_file.parent / f"{project.name}_installer.nsi"
890
+ try:
891
+ with open(script_file, "w", encoding="utf-8") as f:
892
+ f.write(nsis_script)
893
+ logger.info(f"NSIS script generated: {script_file}")
894
+ return script_file
895
+ except Exception as e:
896
+ logger.error(f"Failed to generate NSIS script: {e}")
897
+ return None
898
+
899
+ def _archive_nsis(
900
+ self,
901
+ project: Project,
902
+ dist_dir: Path,
903
+ output_file: Path,
904
+ ignore_patterns: set[str],
905
+ ) -> bool:
906
+ """Create NSIS installer using makensis command."""
907
+ if not self.check_command_available(CMD_MAKENSIS):
908
+ logger.error(f"{CMD_MAKENSIS} command not found. Please install NSIS.")
909
+ return False
910
+
911
+ # Check if the project has a valid executable file before proceeding
912
+ expected_exe_path = dist_dir / project.exe_name
913
+ if not expected_exe_path.exists():
914
+ logger.error(
915
+ f"Project executable '{project.exe_name}' not found in {dist_dir}. Cannot create NSIS installer."
916
+ )
917
+ return False
918
+
919
+ try:
920
+ logger.info(f"Creating NSIS installer: {output_file}")
921
+ output_file.parent.mkdir(parents=True, exist_ok=True)
922
+
923
+ # Create temp directory with filtered files
924
+ with tempfile.TemporaryDirectory(prefix=TEMP_DIR_PREFIX) as temp_dir:
925
+ temp_path = Path(temp_dir) / dist_dir.name
926
+ shutil.copytree(
927
+ dist_dir, temp_path, ignore=shutil.ignore_patterns(*ignore_patterns)
928
+ )
929
+
930
+ # Also copy LICENSE file from project root if it exists
931
+ license_file = self.root_dir / "LICENSE"
932
+ logger.debug(f"Looking for LICENSE file at: {license_file}")
933
+ logger.debug(f"LICENSE file exists: {license_file.exists()}")
934
+ if license_file.exists():
935
+ shutil.copy2(license_file, temp_path / "LICENSE")
936
+ logger.debug(
937
+ f"Copied LICENSE file to temporary directory: {temp_path / 'LICENSE'}"
938
+ )
939
+ logger.debug(
940
+ f"Temporary LICENSE file exists: {(temp_path / 'LICENSE').exists()}"
941
+ )
942
+
943
+ script_file = self._create_nsis_script(
944
+ project=project,
945
+ dist_dir=temp_path,
946
+ output_file=output_file,
947
+ )
948
+ if not script_file or not script_file.exists():
949
+ return False
950
+
951
+ cmd = [CMD_MAKENSIS, str(script_file)]
952
+ subprocess.run(cmd, check=True, shell=True)
953
+
954
+ logger.info(f"NSIS installer created successfully: {output_file}")
955
+ return True
956
+ except subprocess.CalledProcessError as e:
957
+ logger.error(f"Failed to create NSIS installer: {e}")
958
+ return False
959
+ except Exception as e:
960
+ logger.error(f"Unexpected error creating NSIS installer: {e}")
961
+ return False
962
+
963
+
964
+ def list_supported_formats() -> str:
965
+ """Generate a formatted list of supported formats with descriptions."""
966
+ format_lines = []
967
+ for fmt in ArchiveFormat:
968
+ format_lines.append(f" {fmt.value:<8} - {fmt.description}")
969
+ return "\n".join(format_lines)
372
970
 
373
971
 
374
972
  def create_parser() -> argparse.ArgumentParser:
375
973
  """Create parser for command line arguments."""
376
- parser = argparse.ArgumentParser(description="Archive projects in directory")
974
+ parser = argparse.ArgumentParser(
975
+ description="Archive projects in directory with various compression formats",
976
+ epilog=f"Supported formats:\n{list_supported_formats()}",
977
+ formatter_class=argparse.RawDescriptionHelpFormatter,
978
+ )
377
979
  parser.add_argument(
378
980
  "directory",
379
981
  type=Path,
@@ -382,13 +984,31 @@ def create_parser() -> argparse.ArgumentParser:
382
984
  help="Directory to archive for projects.",
383
985
  )
384
986
  parser.add_argument("--debug", "-d", action="store_true", help="Debug mode")
987
+ parser.add_argument(
988
+ "--verbose", "-v", action="store_true", help="Verbose output during archiving"
989
+ )
385
990
  parser.add_argument(
386
991
  "--format",
387
992
  "-f",
388
993
  type=str,
389
994
  default="zip",
390
- choices=ARCHIVE_FORMATS,
391
- help=f"Archive format ({', '.join(ARCHIVE_FORMATS)})",
995
+ choices=sorted(ARCHIVE_FORMATS),
996
+ help="Archive format (default: zip)",
997
+ )
998
+ parser.add_argument(
999
+ "--compression-level",
1000
+ "-c",
1001
+ type=int,
1002
+ default=6,
1003
+ choices=range(0, 10),
1004
+ metavar="0-9",
1005
+ help="Compression level (0-9, default: 6)",
1006
+ )
1007
+ parser.add_argument(
1008
+ "--preserve-permissions",
1009
+ "-P",
1010
+ action="store_true",
1011
+ help="Preserve file permissions (TAR formats only)",
392
1012
  )
393
1013
  parser.add_argument(
394
1014
  "--project",
@@ -410,8 +1030,18 @@ def main():
410
1030
  if args.debug:
411
1031
  logger.setLevel(logging.DEBUG)
412
1032
 
413
- archive_projects(
414
- directory=args.directory,
1033
+ # Create configuration from arguments
1034
+ config = PyArchiveConfig(
1035
+ compression_level=args.compression_level,
1036
+ verbose=args.verbose or args.debug,
1037
+ preserve_permissions=args.preserve_permissions,
1038
+ )
1039
+
1040
+ # Create archiver instance
1041
+ archiver = PyArchiver(root_dir=args.directory, config=config)
1042
+
1043
+ # Archive projects
1044
+ archiver.archive_projects(
415
1045
  format=args.format,
416
1046
  projects_to_archive=args.project,
417
1047
  ignore_patterns=set(args.ignore or []),