stouputils 1.1.1.tar.gz → 1.1.4.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: stouputils
-Version: 1.1.1
+Version: 1.1.4
 Summary: Stouputils is a collection of utility modules designed to simplify and enhance the development process. It includes a range of tools for tasks such as execution of doctests, display utilities, decorators, as well as context managers, and many more.
 Project-URL: Homepage, https://github.com/Stoupy51/stouputils
 Project-URL: Issues, https://github.com/Stoupy51/stouputils/issues
@@ -5,7 +5,7 @@ build-backend = "hatchling.build"
 
 [project]
 name = "stouputils"
-version = "1.1.1"
+version = "1.1.4"
 description = "Stouputils is a collection of utility modules designed to simplify and enhance the development process. It includes a range of tools for tasks such as execution of doctests, display utilities, decorators, as well as context managers, and many more."
 readme = "README.md"
 requires-python = ">=3.10"
@@ -32,5 +32,5 @@ Issues = "https://github.com/Stoupy51/stouputils/issues"
 typeCheckingMode = "strict"
 
 [tool.hatch.build]
-include = ["src/**"]
+include = ["src"]
 
@@ -1,15 +0,0 @@
-
-# Imports
-from .print import *
-from .archive import *
-from .io import *
-from .decorators import *
-from .ctx import *
-from .parallel import *
-from .all_doctests import *
-from .collections import *
-from .backup import *
-
-# Folders
-from .continuous_delivery import *
-
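
The deleted top-level __init__.py above re-exported every submodule through wildcard imports, so the package's utilities were reachable from the root namespace. A minimal sketch of consumer code written against that 1.1.1 surface; whether a given name such as info() is actually re-exported depends on each submodule's star-import behavior, so this is an assumption:

.. code-block:: python

    # Sketch against the 1.1.1 flat namespace; info() is assumed to be
    # re-exported from stouputils.print via the wildcard imports above.
    import stouputils

    stouputils.info("hello from the package root")
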
@@ -1,98 +0,0 @@
-"""
-This module is used to run all the doctests for all the modules in a given directory.
-
-.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/all_doctests_module.gif
-    :alt: stouputils all_doctests examples
-"""
-
-# Imports
-import os
-import sys
-from .print import info, error, progress
-from .decorators import measure_time, handle_error, LogLevels
-from . import decorators
-from doctest import TestResults, testmod
-from types import ModuleType
-import importlib
-import pkgutil
-
-def test_module_with_progress(module: ModuleType, separator: str) -> TestResults:
-    @measure_time(progress, message=f"Testing module '{module.__name__}' {separator}took")
-    def internal() -> TestResults:
-        return testmod(m=module)
-    return internal()
-
-# Main program
-def launch_tests(root_dir: str, importing_errors: LogLevels = LogLevels.WARNING_TRACEBACK, strict: bool = True) -> None:
-    """ Main function to launch tests for all modules in the given directory.
-
-    Args:
-        root_dir (str): Root directory to search for modules
-        importing_errors (LogLevels): Log level for the errors when importing modules
-        strict (bool): Modify the force_raise_exception variable to True in the decorators module
-
-    Examples:
-        >>> launch_tests("unknown_dir")
-        Traceback (most recent call last):
-            ...
-        ValueError: No modules found in 'unknown_dir'
-
-    .. code-block:: python
-
-        > launch_tests("/path/to/source")
-        [PROGRESS HH:MM:SS] Importing module 'module1' took 0.001s
-        [PROGRESS HH:MM:SS] Importing module 'module2' took 0.002s
-        [PROGRESS HH:MM:SS] Importing module 'module3' took 0.003s
-        [PROGRESS HH:MM:SS] Importing module 'module4' took 0.004s
-        [INFO HH:MM:SS] Testing 4 modules...
-        [PROGRESS HH:MM:SS] Testing module 'module1' took 0.005s
-        [PROGRESS HH:MM:SS] Testing module 'module2' took 0.006s
-        [PROGRESS HH:MM:SS] Testing module 'module3' took 0.007s
-        [PROGRESS HH:MM:SS] Testing module 'module4' took 0.008s
-    """
-    if strict:
-        old_value: bool = strict
-        decorators.force_raise_exception = True
-        strict = old_value
-
-
-    # Get all modules from folder
-    sys.path.insert(0, root_dir)
-    modules_file_paths: list[str] = []
-    for directory_path, _, _ in os.walk(root_dir):
-        for module_info in pkgutil.walk_packages([directory_path]):
-            absolute_module_path: str = os.path.join(directory_path, module_info.name)
-            path: str = absolute_module_path.split(root_dir, 1)[1].replace(os.sep, ".")[1:]
-            if path not in modules_file_paths:
-                modules_file_paths.append(path)
-    if not modules_file_paths:
-        raise ValueError(f"No modules found in '{root_dir}'")
-
-    # Find longest module path for alignment
-    max_length: int = max(len(path) for path in modules_file_paths)
-
-    # Dynamically import all modules from iacob package recursively using pkgutil and importlib
-    modules: list[ModuleType] = []
-    separators: list[str] = []
-    for module_path in modules_file_paths:
-        separator: str = " " * (max_length - len(module_path))
-        @handle_error(error_log=importing_errors)
-        @measure_time(progress, message=f"Importing module '{module_path}' {separator}took")
-        def internal() -> None:
-            modules.append(importlib.import_module(module_path))
-            separators.append(separator)
-        internal()
-
-    # Run tests for each module
-    info(f"Testing {len(modules)} modules...")
-    separators = [s + " "*(len("Importing") - len("Testing")) for s in separators]
-    results: list[TestResults] = [test_module_with_progress(module, separator) for module, separator in zip(modules, separators)]
-
-    # Display any error lines for each module at the end of the script
-    for module, result in zip(modules, results):
-        if result.failed > 0:
-            error(f"Errors in module {module.__name__}", exit=False)
-
-    # Reset force_raise_exception back
-    decorators.force_raise_exception = strict
-
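
The launch_tests() function removed above walks a directory with pkgutil, imports every module it finds, and runs testmod() on each. A minimal sketch of how a project might have invoked it against the module as it shipped in 1.1.1; the "src" directory is a hypothetical layout:

.. code-block:: python

    # Sketch: run all doctests under a hypothetical "src" directory.
    # launch_tests raises ValueError if no modules are found there.
    from stouputils.all_doctests import launch_tests

    if __name__ == "__main__":
        launch_tests("src")
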
@@ -1,119 +0,0 @@
-"""
-This module provides functions for creating and managing archives.
-
-- make_archive: Create a zip archive from a source directory with consistent file timestamps.
-- repair_zip_file: Try to repair a corrupted zip file by ignoring some of the errors
-
-.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/archive_module.gif
-    :alt: stouputils archive examples
-"""
-
-# Imports
-import os
-from zipfile import ZipFile, ZipInfo, ZIP_DEFLATED
-from .io import clean_path, super_copy
-from .decorators import handle_error, LogLevels
-from .dont_look.zip_file_override import ZipFileOverride
-
-# Function that makes an archive with consistency (same zip file each time)
-@handle_error()
-def make_archive(
-    source: str,
-    destinations: list[str]|str = [],
-    override_time: None | tuple[int, int, int, int, int, int] = None,
-    create_dir: bool = False
-) -> bool:
-    """ Create a zip archive from a source directory with consistent file timestamps.
-    (Meaning deterministic zip file each time)
-
-    Creates a zip archive from the source directory and copies it to one or more destinations.
-    The archive will have consistent file timestamps across runs if override_time is specified.
-    Uses maximum compression level (9) with ZIP_DEFLATED algorithm.
-
-    Args:
-        source (str): The source folder to archive
-        destinations (list[str]|str): The destination folder(s) or file(s) to copy the archive to
-        override_time (None | tuple[int, ...]): The constant time to use for the archive (e.g. (2024, 1, 1, 0, 0, 0) for 2024-01-01 00:00:00)
-        create_dir (bool): Whether to create the destination directory if it doesn't exist (default: False)
-    Returns:
-        bool: Always returns True unless any strong error
-    Examples:
-
-    .. code-block:: python
-
-        > make_archive("/path/to/source", "/path/to/destination.zip")
-        > make_archive("/path/to/source", ["/path/to/destination.zip", "/path/to/destination2.zip"])
-        > make_archive("src", "hello_from_year_2085.zip", override_time=(2085,1,1,0,0,0))
-    """
-    # Fix copy_destinations type if needed
-    if destinations and isinstance(destinations, str):
-        destinations = [destinations]
-    if not destinations:
-        raise ValueError("destinations must be a list of at least one destination")
-
-    # Create the archive
-    destination: str = clean_path(destinations[0])
-    destination = destination if ".zip" in destination else destination + ".zip"
-    with ZipFile(destination, "w", compression=ZIP_DEFLATED, compresslevel=9) as zip:
-        for root, _, files in os.walk(source):
-            for file in files:
-                file_path: str = clean_path(os.path.join(root, file))
-                info: ZipInfo = ZipInfo(file_path)
-                info.compress_type = ZIP_DEFLATED
-                if override_time:
-                    info.date_time = override_time
-                with open(file_path, "rb") as f:
-                    zip.writestr(info, f.read())
-
-    # Copy the archive to the destination(s)
-    for dest_folder in destinations[1:]:
-        @handle_error(Exception, message=f"Unable to copy '{destination}' to '{dest_folder}'", error_log=LogLevels.WARNING)
-        def internal(src: str, dest: str) -> None:
-            super_copy(src, dest, create_dir=create_dir)
-        internal(destination, clean_path(dest_folder))
-
-    return True
-
-
-
-# Function that repair a corrupted zip file (ignoring some of the errors)
-@handle_error()
-def repair_zip_file(file_path: str, destination: str) -> bool:
-    """ Try to repair a corrupted zip file by ignoring some of the errors
-
-    Args:
-        file_path (str): Path of the zip file to repair
-        destination (str): Destination of the new file
-    Returns:
-        bool: Always returns True unless any strong error
-
-    Examples:
-
-    .. code-block:: python
-
-        > repair_zip_file("/path/to/source.zip", "/path/to/destination.zip")
-    """
-    # Check
-    if not os.path.exists(file_path):
-        raise FileNotFoundError(f"File '{file_path}' not found")
-    dirname: str = os.path.dirname(destination)
-    if dirname and not os.path.exists(dirname):
-        raise FileNotFoundError(f"Directory '{dirname}' not found")
-
-    # Read it
-    with ZipFileOverride(file_path, "r") as zip_file:
-
-        # Get a list of all the files in the ZIP file
-        file_list: list[str] = zip_file.namelist()
-
-        # Create a new ZIP file at the destination
-        with ZipFileOverride(destination, "w", compression=ZIP_DEFLATED) as new_zip_file:
-            for file_name in file_list:
-                try:
-                    new_zip_file.writestr(file_name, zip_file.read(file_name))
-                except KeyboardInterrupt:
-                    continue
-
-    return True
-
-
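
make_archive() above makes archives reproducible by writing every entry through a ZipInfo whose date_time is pinned to override_time, so two runs over identical inputs should yield byte-identical zips. A minimal usage sketch under that assumption; "build/output" and "release.zip" are hypothetical paths:

.. code-block:: python

    # Sketch: pin all entry timestamps so repeated builds produce the
    # same bytes; paths here are hypothetical.
    from stouputils.archive import make_archive

    make_archive("build/output", ["release.zip"], override_time=(2024, 1, 1, 0, 0, 0))
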
@@ -1,314 +0,0 @@
-"""
-This module provides utilities for backup management.
-
-- get_file_hash: Computes the SHA-256 hash of a file
-- create_delta_backup: Creates a ZIP delta backup, saving only modified or new files while tracking deleted files
-- consolidate_backups: Consolidates the files from the given backup and all previous ones into a new ZIP file
-- backup_cli: Main entry point for command line usage
-
-.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/backup_module.gif
-    :alt: stouputils backup examples
-"""
-
-# Standard library imports
-import os
-import hashlib
-import zipfile
-import datetime
-import fnmatch
-
-# Local imports
-from .decorators import measure_time, handle_error
-from .print import info, warning, progress
-from .io import clean_path
-
-# Function to compute the SHA-256 hash of a file
-def get_file_hash(file_path: str) -> str | None:
-    """ Computes the SHA-256 hash of a file.
-
-    Args:
-        file_path (str): Path to the file
-    Returns:
-        str | None: SHA-256 hash as a hexadecimal string or None if an error occurs
-    """
-    try:
-        sha256_hash = hashlib.sha256()
-        with open(file_path, "rb") as f:
-            for chunk in iter(lambda: f.read(4096), b""):
-                sha256_hash.update(chunk)
-        return sha256_hash.hexdigest()
-    except Exception as e:
-        warning(f"Error computing hash for file {file_path}: {e}")
-        return None
-
-# Function to extract the stored hash from a ZipInfo object's comment
-def extract_hash_from_zipinfo(zip_info: zipfile.ZipInfo) -> str | None:
-    """ Extracts the stored hash from a ZipInfo object's comment.
-
-    Args:
-        zip_info (zipfile.ZipInfo): The ZipInfo object representing a file in the ZIP
-    Returns:
-        str | None: The stored hash if available, otherwise None
-    """
-    comment: bytes | None = zip_info.comment
-    comment_str: str | None = comment.decode() if comment else None
-    return comment_str if comment_str and len(comment_str) == 64 else None # Ensure it's a valid SHA-256 hash
-
-# Function to retrieve all previous backups in a folder
-@measure_time(message="Retrieving previous backups")
-def get_all_previous_backups(backup_folder: str, all_before: str | None = None) -> dict[str, dict[str, str]]:
-    """ Retrieves all previous backups in a folder and maps each backup to a dictionary of file paths and their hashes.
-
-    Args:
-        backup_folder (str): The folder containing previous backup zip files
-        all_before (str | None): Path to the latest backup ZIP file (If endswith "/latest.zip" or "/", the latest backup will be used)
-    Returns:
-        dict[str, dict[str, str]]: Dictionary mapping backup file paths to dictionaries of {file_path: file_hash}
-    """
-    backups: dict[str, dict[str, str]] = {}
-    list_dir: list[str] = sorted([clean_path(os.path.join(backup_folder, f)) for f in os.listdir(backup_folder)])
-
-    # If all_before is provided, don't include backups after it
-    if isinstance(all_before, str) and not (all_before.endswith("/latest.zip") or all_before.endswith("/") or os.path.isdir(all_before)):
-        list_dir = list_dir[:list_dir.index(all_before) + 1]
-
-    # Get all the backups
-    for filename in list_dir:
-        if filename.endswith(".zip"):
-            zip_path: str = clean_path(os.path.join(backup_folder, filename))
-            file_hashes: dict[str, str] = {}
-
-            try:
-                with zipfile.ZipFile(zip_path, "r") as zipf:
-                    for inf in zipf.infolist():
-                        if inf.filename != "__deleted_files__.txt":
-                            stored_hash: str | None = extract_hash_from_zipinfo(inf)
-                            if stored_hash is not None: # Only store if hash exists
-                                file_hashes[inf.filename] = stored_hash
-
-                backups[zip_path] = file_hashes
-            except Exception as e:
-                warning(f"Error reading backup {zip_path}: {e}")
-
-    return dict(reversed(backups.items()))
-
-# Function to check if a file exists in any previous backup
-def is_file_in_any_previous_backup(file_path: str, file_hash: str, previous_backups: dict[str, dict[str, str]]) -> bool:
-    """ Checks if a file with the same hash exists in any previous backup.
-
-    Args:
-        file_path (str): The relative path of the file
-        file_hash (str): The SHA-256 hash of the file
-        previous_backups (dict[str, dict[str, str]]): Dictionary mapping backup zip paths to their stored file hashes
-    Returns:
-        bool: True if the file exists unchanged in any previous backup, False otherwise
-    """
-    for file_hashes in previous_backups.values():
-        if file_hashes.get(file_path) == file_hash:
-            return True
-    return False
-
-
-# Main backup function that creates a delta backup (only changed files)
-@measure_time(message="Creating ZIP backup")
-@handle_error()
-def create_delta_backup(source_path: str, destination_folder: str, exclude_patterns: list[str] | None = None) -> None:
-    """ Creates a ZIP delta backup, saving only modified or new files while tracking deleted files.
-
-    Args:
-        source_path (str): Path to the source file or directory to back up
-        destination_folder (str): Path to the folder where the backup will be saved
-        exclude_patterns (list[str] | None): List of glob patterns to exclude from backup
-    Examples:
-
-    .. code-block:: python
-
-        > create_delta_backup("/path/to/source", "/path/to/backups", exclude_patterns=["libraries/*", "cache/*"])
-        [INFO HH:MM:SS] Creating ZIP backup
-        [INFO HH:MM:SS] Backup created: '/path/to/backups/backup_2025_02_18-10_00_00.zip'
-    """
-    source_path = clean_path(os.path.abspath(source_path))
-    destination_folder = clean_path(os.path.abspath(destination_folder))
-
-    # Setup backup paths and create destination folder
-    base_name: str = os.path.basename(source_path.rstrip(os.sep)) or "backup"
-    backup_folder: str = clean_path(os.path.join(destination_folder, base_name))
-    os.makedirs(backup_folder, exist_ok=True)
-
-    # Get previous backups and track all files
-    previous_backups: dict[str, dict[str, str]] = get_all_previous_backups(backup_folder)
-    previous_files: set[str] = {file for backup in previous_backups.values() for file in backup} # Collect all tracked files
-
-    # Create new backup filename with timestamp
-    timestamp: str = datetime.datetime.now().strftime("%Y_%m_%d-%H_%M_%S")
-    zip_filename: str = f"{timestamp}.zip"
-    destination_zip: str = clean_path(os.path.join(backup_folder, zip_filename))
-
-    # Create the ZIP file early to write files as we process them
-    with zipfile.ZipFile(destination_zip, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zipf:
-        deleted_files: set[str] = set()
-        has_changes: bool = False
-
-        # Process files one by one to avoid memory issues
-        if os.path.isdir(source_path):
-            for root, _, files in os.walk(source_path):
-                for file in files:
-                    full_path: str = clean_path(os.path.join(root, file))
-                    arcname: str = clean_path(os.path.relpath(full_path, start=os.path.dirname(source_path)))
-
-                    # Skip file if it matches any exclude pattern
-                    if exclude_patterns and any(fnmatch.fnmatch(arcname, pattern) for pattern in exclude_patterns):
-                        continue
-
-                    file_hash: str | None = get_file_hash(full_path)
-                    if file_hash is None:
-                        continue
-
-                    # Check if file needs to be backed up
-                    if not is_file_in_any_previous_backup(arcname, file_hash, previous_backups):
-                        try:
-                            zip_info: zipfile.ZipInfo = zipfile.ZipInfo(arcname)
-                            zip_info.compress_type = zipfile.ZIP_DEFLATED
-                            zip_info.comment = file_hash.encode() # Store hash in comment
-
-                            # Read and write file in chunks
-                            with open(full_path, "rb") as f:
-                                with zipf.open(zip_info, "w", force_zip64=True) as zf:
-                                    for chunk in iter(lambda: f.read(4096), b""):
-                                        zf.write(chunk)
-                            has_changes = True
-                        except Exception as e:
-                            warning(f"Error writing file {full_path} to backup: {e}")
-
-                    # Track current files for deletion detection
-                    if arcname in previous_files:
-                        previous_files.remove(arcname)
-        else:
-            arcname: str = clean_path(os.path.basename(source_path))
-            file_hash: str | None = get_file_hash(source_path)
-
-            if file_hash is not None and not is_file_in_any_previous_backup(arcname, file_hash, previous_backups):
-                try:
-                    zip_info: zipfile.ZipInfo = zipfile.ZipInfo(arcname)
-                    zip_info.compress_type = zipfile.ZIP_DEFLATED
-                    zip_info.comment = file_hash.encode()
-
-                    with open(source_path, "rb") as f:
-                        with zipf.open(zip_info, "w", force_zip64=True) as zf:
-                            for chunk in iter(lambda: f.read(4096), b""):
-                                zf.write(chunk)
-                    has_changes = True
-                except Exception as e:
-                    warning(f"Error writing file {source_path} to backup: {e}")
-
-        # Any remaining files in previous_files were deleted
-        deleted_files = previous_files
-        if deleted_files:
-            zipf.writestr("__deleted_files__.txt", "\n".join(deleted_files), compress_type=zipfile.ZIP_DEFLATED)
-            has_changes = True
-
-    # Remove empty backup if no changes
-    if not has_changes:
-        os.remove(destination_zip)
-        info(f"No files to backup, skipping creation of backup '{destination_zip}'")
-    else:
-        info(f"Backup created: '{destination_zip}'")
-
-
-# Function to consolidate multiple backups into one comprehensive backup
-@measure_time(message="Consolidating backups")
-def consolidate_backups(zip_path: str, destination_zip: str) -> None:
-    """ Consolidates the files from the given backup and all previous ones into a new ZIP file,
-    ensuring that the most recent version of each file is kept and deleted files are not restored.
-
-    Args:
-        zip_path (str): Path to the latest backup ZIP file (If endswith "/latest.zip" or "/", the latest backup will be used)
-        destination_zip (str): Path to the destination ZIP file where the consolidated backup will be saved
-    Examples:
-
-    .. code-block:: python
-
-        > consolidate_backups("/path/to/backups/latest.zip", "/path/to/consolidated.zip")
-        [INFO HH:MM:SS] Consolidating backups
-        [INFO HH:MM:SS] Consolidated backup created: '/path/to/consolidated.zip'
-    """
-    zip_path = clean_path(os.path.abspath(zip_path))
-    destination_zip = clean_path(os.path.abspath(destination_zip))
-    zip_folder: str = clean_path(os.path.dirname(zip_path))
-
-    # Get all previous backups up to the specified one
-    previous_backups: dict[str, dict[str, str]] = get_all_previous_backups(zip_folder, all_before=zip_path)
-
-    deleted_files: set[str] = set()
-    final_files: set[str] = set()
-
-    # Create destination ZIP file
-    with zipfile.ZipFile(destination_zip, "w", compression=zipfile.ZIP_DEFLATED, compresslevel=9) as zipf_out:
-        # Process each backup, tracking deleted files and consolidating files
-        for backup_path in previous_backups:
-            with zipfile.ZipFile(backup_path, "r") as zipf_in:
-                # Process deleted files
-                if "__deleted_files__.txt" in zipf_in.namelist():
-                    backup_deleted_files: list[str] = zipf_in.read("__deleted_files__.txt").decode().splitlines()
-                    deleted_files.update(backup_deleted_files)
-
-                # Process files
-                for inf in zipf_in.infolist():
-                    filename: str = inf.filename
-                    if filename \
-                        and filename != "__deleted_files__.txt" \
-                        and filename not in final_files \
-                        and filename not in deleted_files:
-                        final_files.add(filename)
-
-                        # Copy file in chunks
-                        with zipf_in.open(inf, "r") as source:
-                            with zipf_out.open(inf, "w", force_zip64=True) as target:
-                                for chunk in iter(lambda: source.read(4096), b""):
-                                    target.write(chunk)
-
-    info(f"Consolidated backup created: {destination_zip}")
-
-# Main entry point for command line usage
-@measure_time(progress)
-def backup_cli():
-    """ Main entry point for command line usage.
-
-    Examples:
-
-    .. code-block:: bash
-
-        # Create a delta backup, excluding libraries and cache folders
-        python -m stouputils.backup delta /path/to/source /path/to/backups -x "libraries/*" "cache/*"
-
-        # Consolidate backups into a single file
-        python -m stouputils.backup consolidate /path/to/backups/latest.zip /path/to/consolidated.zip
-    """
-    import argparse
-
-    # Setup command line argument parser
-    parser: argparse.ArgumentParser = argparse.ArgumentParser(description="Backup and consolidate files using delta compression.")
-    subparsers = parser.add_subparsers(dest="command", required=True)
-
-    # Create delta command and its arguments
-    delta_parser = subparsers.add_parser("delta", help="Create a new delta backup")
-    delta_parser.add_argument("source", type=str, help="Path to the source directory or file")
-    delta_parser.add_argument("destination", type=str, help="Path to the destination folder for backups")
-    delta_parser.add_argument("-x", "--exclude", type=str, nargs="+", help="Glob patterns to exclude from backup", default=[])
-
-    # Create consolidate command and its arguments
-    consolidate_parser = subparsers.add_parser("consolidate", help="Consolidate existing backups into one")
-    consolidate_parser.add_argument("backup_zip", type=str, help="Path to the latest backup ZIP file")
-    consolidate_parser.add_argument("destination_zip", type=str, help="Path to the destination consolidated ZIP file")
-
-    # Parse arguments and execute appropriate command
-    args: argparse.Namespace = parser.parse_args()
-
-    if args.command == "delta":
-        create_delta_backup(args.source, args.destination, args.exclude)
-    elif args.command == "consolidate":
-        consolidate_backups(args.backup_zip, args.destination_zip)
-
-if __name__ == "__main__":
-    backup_cli()
-
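
The delta mechanism removed above stores each file's SHA-256 hash in its ZIP entry comment, which is what lets later runs skip unchanged files. A minimal sketch that reads those comments back using only the standard library; the backup path is a hypothetical file produced by create_delta_backup():

.. code-block:: python

    # Sketch: list each entry of a delta backup alongside the SHA-256
    # hash stored in its ZIP comment; the path is hypothetical.
    import zipfile

    with zipfile.ZipFile("backups/2025_02_18-10_00_00.zip", "r") as zf:
        for entry in zf.infolist():
            if entry.filename != "__deleted_files__.txt":
                print(entry.filename, entry.comment.decode() or "<no stored hash>")
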
@@ -1,61 +0,0 @@
-"""
-This module provides utilities for collection manipulation:
-
-- unique_list: Remove duplicates from a list while preserving order using object id, hash or str
-
-.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/collections_module.gif
-    :alt: stouputils collections examples
-"""
-
-# Imports
-from typing import Any, Literal
-
-# Functions
-def unique_list(list_to_clean: list[Any], method: Literal["id", "hash", "str"] = "str") -> list[Any]:
-    """ Remove duplicates from the list while keeping the order using ids (default) or hash or str
-
-    Args:
-        list_to_clean (list[Any]): The list to clean
-        method (Literal["id", "hash", "str"]): The method to use to identify duplicates
-    Returns:
-        list[Any]: The cleaned list
-
-    Examples:
-        >>> unique_list([1, 2, 3, 2, 1], method="id")
-        [1, 2, 3]
-
-        >>> s1 = {1, 2, 3}
-        >>> s2 = {2, 3, 4}
-        >>> s3 = {1, 2, 3}
-        >>> unique_list([s1, s2, s1, s1, s3, s2, s3], method="id")
-        [{1, 2, 3}, {2, 3, 4}, {1, 2, 3}]
-
-        >>> s1 = {1, 2, 3}
-        >>> s2 = {2, 3, 4}
-        >>> s3 = {1, 2, 3}
-        >>> unique_list([s1, s2, s1, s1, s3, s2, s3], method="str")
-        [{1, 2, 3}, {2, 3, 4}]
-    """
-    # Initialize the seen ids set and the result list
-    seen: set[Any] = set()
-    result: list[Any] = []
-
-    # Iterate over each item in the list
-    for item in list_to_clean:
-        if method == "id":
-            item_identifier = id(item)
-        elif method == "hash":
-            item_identifier = hash(item)
-        elif method == "str":
-            item_identifier = str(item)
-        else:
-            raise ValueError(f"Invalid method: {method}")
-
-        # If the item id is not in the seen ids set, add it to the seen ids set and append the item to the result list
-        if item_identifier not in seen:
-            seen.add(item_identifier)
-            result.append(item)
-
-    # Return the cleaned list
-    return result
-
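
unique_list() above deduplicates by a configurable identity: id() keeps distinct-but-equal objects apart, while str() (the default) collapses anything that prints the same. The "hash" method sits in between and requires hashable items; a minimal doctest-style sketch of that mode:

.. code-block:: python

    # Sketch: "hash" deduplication treats equal hashable values as one;
    # it would raise TypeError for unhashable items such as sets.
    from stouputils.collections import unique_list

    print(unique_list([(1, 2), (1, 2), (3,)], method="hash"))
    # [(1, 2), (3,)]
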
@@ -1,7 +0,0 @@
-
-# Imports
-from .cd_utils import *
-from .github import *
-from .pypi import *
-from .pyproject import *
-