stouputils-1.12.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- stouputils/__init__.py +40 -0
- stouputils/__init__.pyi +14 -0
- stouputils/__main__.py +81 -0
- stouputils/_deprecated.py +37 -0
- stouputils/_deprecated.pyi +12 -0
- stouputils/all_doctests.py +160 -0
- stouputils/all_doctests.pyi +46 -0
- stouputils/applications/__init__.py +22 -0
- stouputils/applications/__init__.pyi +2 -0
- stouputils/applications/automatic_docs.py +634 -0
- stouputils/applications/automatic_docs.pyi +106 -0
- stouputils/applications/upscaler/__init__.py +39 -0
- stouputils/applications/upscaler/__init__.pyi +3 -0
- stouputils/applications/upscaler/config.py +128 -0
- stouputils/applications/upscaler/config.pyi +18 -0
- stouputils/applications/upscaler/image.py +247 -0
- stouputils/applications/upscaler/image.pyi +109 -0
- stouputils/applications/upscaler/video.py +287 -0
- stouputils/applications/upscaler/video.pyi +60 -0
- stouputils/archive.py +344 -0
- stouputils/archive.pyi +67 -0
- stouputils/backup.py +488 -0
- stouputils/backup.pyi +109 -0
- stouputils/collections.py +244 -0
- stouputils/collections.pyi +86 -0
- stouputils/continuous_delivery/__init__.py +27 -0
- stouputils/continuous_delivery/__init__.pyi +5 -0
- stouputils/continuous_delivery/cd_utils.py +243 -0
- stouputils/continuous_delivery/cd_utils.pyi +129 -0
- stouputils/continuous_delivery/github.py +522 -0
- stouputils/continuous_delivery/github.pyi +162 -0
- stouputils/continuous_delivery/pypi.py +91 -0
- stouputils/continuous_delivery/pypi.pyi +43 -0
- stouputils/continuous_delivery/pyproject.py +147 -0
- stouputils/continuous_delivery/pyproject.pyi +67 -0
- stouputils/continuous_delivery/stubs.py +86 -0
- stouputils/continuous_delivery/stubs.pyi +39 -0
- stouputils/ctx.py +408 -0
- stouputils/ctx.pyi +211 -0
- stouputils/data_science/config/get.py +51 -0
- stouputils/data_science/config/set.py +125 -0
- stouputils/data_science/data_processing/image/__init__.py +66 -0
- stouputils/data_science/data_processing/image/auto_contrast.py +79 -0
- stouputils/data_science/data_processing/image/axis_flip.py +58 -0
- stouputils/data_science/data_processing/image/bias_field_correction.py +74 -0
- stouputils/data_science/data_processing/image/binary_threshold.py +73 -0
- stouputils/data_science/data_processing/image/blur.py +59 -0
- stouputils/data_science/data_processing/image/brightness.py +54 -0
- stouputils/data_science/data_processing/image/canny.py +110 -0
- stouputils/data_science/data_processing/image/clahe.py +92 -0
- stouputils/data_science/data_processing/image/common.py +30 -0
- stouputils/data_science/data_processing/image/contrast.py +53 -0
- stouputils/data_science/data_processing/image/curvature_flow_filter.py +74 -0
- stouputils/data_science/data_processing/image/denoise.py +378 -0
- stouputils/data_science/data_processing/image/histogram_equalization.py +123 -0
- stouputils/data_science/data_processing/image/invert.py +64 -0
- stouputils/data_science/data_processing/image/laplacian.py +60 -0
- stouputils/data_science/data_processing/image/median_blur.py +52 -0
- stouputils/data_science/data_processing/image/noise.py +59 -0
- stouputils/data_science/data_processing/image/normalize.py +65 -0
- stouputils/data_science/data_processing/image/random_erase.py +66 -0
- stouputils/data_science/data_processing/image/resize.py +69 -0
- stouputils/data_science/data_processing/image/rotation.py +80 -0
- stouputils/data_science/data_processing/image/salt_pepper.py +68 -0
- stouputils/data_science/data_processing/image/sharpening.py +55 -0
- stouputils/data_science/data_processing/image/shearing.py +64 -0
- stouputils/data_science/data_processing/image/threshold.py +64 -0
- stouputils/data_science/data_processing/image/translation.py +71 -0
- stouputils/data_science/data_processing/image/zoom.py +83 -0
- stouputils/data_science/data_processing/image_augmentation.py +118 -0
- stouputils/data_science/data_processing/image_preprocess.py +183 -0
- stouputils/data_science/data_processing/prosthesis_detection.py +359 -0
- stouputils/data_science/data_processing/technique.py +481 -0
- stouputils/data_science/dataset/__init__.py +45 -0
- stouputils/data_science/dataset/dataset.py +292 -0
- stouputils/data_science/dataset/dataset_loader.py +135 -0
- stouputils/data_science/dataset/grouping_strategy.py +296 -0
- stouputils/data_science/dataset/image_loader.py +100 -0
- stouputils/data_science/dataset/xy_tuple.py +696 -0
- stouputils/data_science/metric_dictionnary.py +106 -0
- stouputils/data_science/metric_utils.py +847 -0
- stouputils/data_science/mlflow_utils.py +206 -0
- stouputils/data_science/models/abstract_model.py +149 -0
- stouputils/data_science/models/all.py +85 -0
- stouputils/data_science/models/base_keras.py +765 -0
- stouputils/data_science/models/keras/all.py +38 -0
- stouputils/data_science/models/keras/convnext.py +62 -0
- stouputils/data_science/models/keras/densenet.py +50 -0
- stouputils/data_science/models/keras/efficientnet.py +60 -0
- stouputils/data_science/models/keras/mobilenet.py +56 -0
- stouputils/data_science/models/keras/resnet.py +52 -0
- stouputils/data_science/models/keras/squeezenet.py +233 -0
- stouputils/data_science/models/keras/vgg.py +42 -0
- stouputils/data_science/models/keras/xception.py +38 -0
- stouputils/data_science/models/keras_utils/callbacks/__init__.py +20 -0
- stouputils/data_science/models/keras_utils/callbacks/colored_progress_bar.py +219 -0
- stouputils/data_science/models/keras_utils/callbacks/learning_rate_finder.py +148 -0
- stouputils/data_science/models/keras_utils/callbacks/model_checkpoint_v2.py +31 -0
- stouputils/data_science/models/keras_utils/callbacks/progressive_unfreezing.py +249 -0
- stouputils/data_science/models/keras_utils/callbacks/warmup_scheduler.py +66 -0
- stouputils/data_science/models/keras_utils/losses/__init__.py +12 -0
- stouputils/data_science/models/keras_utils/losses/next_generation_loss.py +56 -0
- stouputils/data_science/models/keras_utils/visualizations.py +416 -0
- stouputils/data_science/models/model_interface.py +939 -0
- stouputils/data_science/models/sandbox.py +116 -0
- stouputils/data_science/range_tuple.py +234 -0
- stouputils/data_science/scripts/augment_dataset.py +77 -0
- stouputils/data_science/scripts/exhaustive_process.py +133 -0
- stouputils/data_science/scripts/preprocess_dataset.py +70 -0
- stouputils/data_science/scripts/routine.py +168 -0
- stouputils/data_science/utils.py +285 -0
- stouputils/decorators.py +595 -0
- stouputils/decorators.pyi +242 -0
- stouputils/image.py +441 -0
- stouputils/image.pyi +172 -0
- stouputils/installer/__init__.py +18 -0
- stouputils/installer/__init__.pyi +5 -0
- stouputils/installer/common.py +67 -0
- stouputils/installer/common.pyi +39 -0
- stouputils/installer/downloader.py +101 -0
- stouputils/installer/downloader.pyi +24 -0
- stouputils/installer/linux.py +144 -0
- stouputils/installer/linux.pyi +39 -0
- stouputils/installer/main.py +223 -0
- stouputils/installer/main.pyi +57 -0
- stouputils/installer/windows.py +136 -0
- stouputils/installer/windows.pyi +31 -0
- stouputils/io.py +486 -0
- stouputils/io.pyi +213 -0
- stouputils/parallel.py +453 -0
- stouputils/parallel.pyi +211 -0
- stouputils/print.py +527 -0
- stouputils/print.pyi +146 -0
- stouputils/py.typed +1 -0
- stouputils-1.12.1.dist-info/METADATA +179 -0
- stouputils-1.12.1.dist-info/RECORD +138 -0
- stouputils-1.12.1.dist-info/WHEEL +4 -0
- stouputils-1.12.1.dist-info/entry_points.txt +3 -0
stouputils/applications/upscaler/video.py
ADDED

@@ -0,0 +1,287 @@
"""
This module provides utility functions for upscaling videos using waifu2x-ncnn-vulkan.

It extracts frames from videos, upscales them individually, and then recombines them
into a high-quality output video. The process preserves audio from the original video
and handles configuration of video encoding parameters like bitrate and framerate.

Main functionalities:

- Extracting frames from videos using FFmpeg
- Upscaling frames using waifu2x-ncnn-vulkan
- Recombining frames into videos with optimized bitrates
- Handling partially processed videos to resume interrupted operations
- Calculating recommended bitrates based on resolution and framerate

The module includes YouTube's recommended bitrate settings for different resolutions,
framerates, and HDR/SDR content types, ensuring optimal quality for various outputs.

Example usage:

.. code-block:: python

	# Imports
	import stouputils.applications.upscaler as app
	from stouputils.io import get_root_path

	# Constants
	ROOT: str = get_root_path(__file__) + "/upscaler"
	INPUT_FOLDER: str = f"{ROOT}/input"
	PROGRESS_FOLDER: str = f"{ROOT}/progress"
	OUTPUT_FOLDER: str = f"{ROOT}/output"

	# Main
	if __name__ == "__main__":
		app.video_upscaler_cli(INPUT_FOLDER, PROGRESS_FOLDER, OUTPUT_FOLDER)
"""

# Imports
import os
import shutil
import subprocess
import sys
from typing import Literal

from PIL import Image

from ...installer import check_executable
from ...io import clean_path
from ...parallel import multithreading
from ...print import colored_for_loop, debug, error, info, warning
from .config import FFMPEG_RELEASES, YOUTUBE_BITRATE_RECOMMENDATIONS, Config
from .image import convert_frame, get_all_files, upscale_folder


# Functions
def get_recommended_bitrate(
	resolution: tuple[int, int], frame_rate: int = 60, upload_type: Literal["SDR","HDR"] = "SDR"
) -> int:
	""" Get the recommended bitrate (in kbps) for the output video based on the video resolution.

	Args:
		resolution (tuple[int, int]): Video resolution (width, height).
		frame_rate (int): Frame rate of the video, default is 60.
		upload_type (Literal["SDR","HDR"]): Upload type from which the recommendation is made, default is "SDR".

	Returns:
		int: The recommended bitrate for the output video (in kbps)

	Source: https://support.google.com/youtube/answer/1722171?hl=en#zippy=%2Cbitrate

	Examples:
		>>> # Valid examples
		>>> get_recommended_bitrate((3840, 2160), 60, "SDR")
		68000
		>>> get_recommended_bitrate((1920, 1080), 60, "HDR")
		15000
		>>> get_recommended_bitrate((1920, 1080), 60, "SDR")
		12000
		>>> get_recommended_bitrate((1920, 1080), 30, "SDR")
		8000

		>>> # Invalid examples
		>>> get_recommended_bitrate((1920, 1080), 60, "Ratio")
		Traceback (most recent call last):
			...
		AssertionError: Invalid upload type: 'Ratio'
		>>> get_recommended_bitrate("1920x1080", 60, "SDR")
		Traceback (most recent call last):
			...
		AssertionError: Invalid resolution: 1920x1080, must be a tuple of two integers
		>>> get_recommended_bitrate((1920, 1080), -10, "SDR")
		Traceback (most recent call last):
			...
		AssertionError: Invalid frame rate: -10, must be a positive integer
	"""
	# Assertions
	assert isinstance(resolution, tuple) and len(resolution) == 2, \
		f"Invalid resolution: {resolution}, must be a tuple of two integers"
	assert isinstance(frame_rate, int) and frame_rate > 0, \
		f"Invalid frame rate: {frame_rate}, must be a positive integer"
	assert upload_type in YOUTUBE_BITRATE_RECOMMENDATIONS, \
		f"Invalid upload type: '{upload_type}'"

	# Determine frame rate category
	frame_rate_category: str = "high" if frame_rate >= 48 else "standard"

	# Get the appropriate bitrate dictionary
	resolution_bitrates: dict[int, int] = YOUTUBE_BITRATE_RECOMMENDATIONS[upload_type][frame_rate_category]

	# Find the appropriate bitrate based on resolution
	max_dimension: int = min(*resolution)
	for min_resolution, bitrate in sorted(resolution_bitrates.items(), reverse=True):
		if max_dimension >= min_resolution:
			return bitrate

	# Fallback (should never reach here due to the '0' key in dictionaries)
	return 1000


def check_ffmpeg_executable() -> None:
	if not Config.ffmpeg_executable_checked:
		check_executable(Config.FFMPEG_EXECUTABLE, Config.FFMPEG_CHECK_HELP_TEXT, FFMPEG_RELEASES, append_to_path="bin")
		Config.ffmpeg_executable_checked = True

# Routine to handle a video file
def upscale_video(video_file: str, input_folder: str, progress_folder: str, output_folder: str) -> None:
	""" Handles a video file. """
	# Prepare paths
	input_path: str = f"{input_folder}/{video_file}"
	progress_path: str = f"{progress_folder}/{video_file}"
	p_extracted_path: str = f"{progress_path}/extracted"
	p_upscaled_path: str = f"{progress_path}/upscaled"
	output_path: str = f"{output_folder}/{video_file}"
	os.makedirs(p_extracted_path, exist_ok = True)
	os.makedirs(p_upscaled_path, exist_ok = True)

	# Check if executable is installed
	check_ffmpeg_executable()

	## Step 1: Check if the video file is already upscaled or partially processed, if not, extract frames
	# If the video file is already upscaled, skip it
	if os.path.exists(output_path):
		warning(f"'{video_file}' has already been processed, remove it from the output folder to reprocess it.")
		return

	# If the video is already in the list of videos that have been partially processed, ask to restart or skip
	is_partially_processed: bool = len(os.listdir(p_extracted_path)) > 0 and len(os.listdir(p_upscaled_path)) > 0
	if is_partially_processed:
		info(f"'{video_file}' has already been partially processed, do you want to resume the process? (Y/n)")
		if input().lower() == "n":
			shutil.rmtree(p_upscaled_path, ignore_errors = True)
			os.makedirs(p_upscaled_path, exist_ok = True)
			is_partially_processed = False

	# If the video is not partially processed, extract frames
	if not is_partially_processed:
		debug(f"Extracting frames from '{video_file}'...")

		# Extract frames using ffmpeg
		command: list[str] = [Config.FFMPEG_EXECUTABLE, "-i", input_path, f"{p_extracted_path}/%09d.png"]
		subprocess.run(command, capture_output = True)

		# Convert all frames to JPG format
		frames_to_convert: list[str] = get_all_files(p_extracted_path, ".png")
		if frames_to_convert:
			multithreading(convert_frame, frames_to_convert, desc="Converting frames to JPG format")


	## Step 2: Upscale the frames
	# Get all the frames in the progress folder
	all_frames: list[str] = get_all_files(p_extracted_path, ".jpg")
	upscaled_frames: list[str] = get_all_files(p_upscaled_path, ".jpg")

	# If there are frames to upscale,
	if len(all_frames) > len(upscaled_frames):

		# Try to get upscaling ratio if any
		upscale_ratio: int = 2
		if upscaled_frames:
			with Image.open(upscaled_frames[0]) as img:
				upscaled_size: tuple[int, int] = img.size
			with Image.open(all_frames[0]) as img:
				extracted_size: tuple[int, int] = img.size
			upscale_ratio = upscaled_size[0] // extracted_size[0]
			info(f"Detected upscaling ratio: {upscale_ratio}")
		else:
			if "--upscale" in sys.argv:
				upscale_index: int = sys.argv.index("--upscale")
				if upscale_index + 1 < len(sys.argv):
					upscale_ratio = int(sys.argv[upscale_index + 1])
				else:
					error(
						"No upscaling ratio provided with --upscale flag. "
						"Please provide a ratio after the flag. (1/2/4/8/16/32)",
						exit=True
					)
			else:
				info("No upscaling ratio provided, please enter one (1/2/4/8/16/32, default=2):")
				upscale_ratio = int(input() or "2")

		# For each frame that hasn't been upscaled yet, upscale it
		upscale_folder(p_extracted_path, p_upscaled_path, upscale_ratio, slightly_faster_mode=Config.SLIGHTLY_FASTER_MODE)

	## Step 3: Convert the upscaled frames to a video
	# Get the video bitrate
	if Config.VIDEO_FINAL_BITRATE == -1:
		upscaled_frame: str = get_all_files(p_upscaled_path, ".jpg")[0]
		with Image.open(upscaled_frame) as img:
			upscaled_size: tuple[int, int] = img.size
		video_bitrate: int = get_recommended_bitrate(upscaled_size)
	else:
		video_bitrate: int = Config.VIDEO_FINAL_BITRATE

	# Get the framerate of the original video
	original_framerate: str = "60"
	ffprobe_command: list[str] = [
		Config.FFPROBE_EXECUTABLE, # Path to the ffprobe executable
		"-v", "error", # Set verbosity level to error (only show errors)
		"-select_streams", "v:0", # Select the first video stream
		"-show_entries", "stream=r_frame_rate", # Show only the frame rate information
		"-of", "default=noprint_wrappers=1:nokey=1", # Format output without wrappers and keys
		input_path, # Path to the input video file
	]
	try:
		result = subprocess.run(ffprobe_command, capture_output=True, text=True, check=True)
		framerate: str = result.stdout.strip()
		if not framerate or '/' not in framerate: # Basic validation
			warning(f"Could not reliably determine framerate for '{video_file}'. Falling back to 60.")
			original_framerate = "60"
		else:
			debug(f"Detected original framerate: {framerate}")
			original_framerate = framerate
	except (subprocess.CalledProcessError, FileNotFoundError) as e:
		warning(f"Failed to get framerate using ffprobe for '{video_file}': {e}. Falling back to 60.")


	# Prepare the command to convert the upscaled frames to a video
	subprocess.run([
		Config.FFMPEG_EXECUTABLE,
		"-framerate", original_framerate, # Use the original video's framerate for input frames
		"-i", f"{p_upscaled_path}/%09d.jpg", # Use p_upscaled_path, not upscaled_path
		"-i", input_path, # Input video for sound and metadata
		"-b:v", f"{video_bitrate}k", # Set the video bitrate (in kbps)
		*Config.FFMPEG_ARGS, # Additional arguments from the config
		"-r", original_framerate, # Set the *output* video framerate
		output_path, # Output video
	])


def video_upscaler_cli(input_folder: str, progress_folder: str, output_folder: str) -> None:
	""" Upscales videos from an input folder and saves them to an output folder.

	Uses intermediate folders for extracted and upscaled frames within the progress folder.
	**Handles resuming partially processed videos.**

	Args:
		input_folder (str): Path to the folder containing input videos.
		progress_folder (str): Path to the folder for storing intermediate files (frames).
		output_folder (str): Path to the folder where upscaled videos will be saved.
	"""
	# Clean paths
	input_folder = clean_path(input_folder)
	progress_folder = clean_path(progress_folder)
	output_folder = clean_path(output_folder)
	os.makedirs(input_folder, exist_ok = True)
	os.makedirs(progress_folder, exist_ok = True)
	os.makedirs(output_folder, exist_ok = True)

	# Ask if we should shutdown the computer after the script is finished
	info("Do you want to shutdown the computer after the script is finished? (y/N)")
	shutdown_after_script: bool = input().lower() == "y"

	# Collect all video files in the input folder
	videos: list[str] = [file for file in os.listdir(input_folder) if not file.endswith(".md")]

	# Handle each video file
	for video in colored_for_loop(videos, desc="Upscaling videos"):
		upscale_video(video, input_folder, progress_folder, output_folder)

	# Shutdown the computer after the script is finished
	if shutdown_after_script:
		info("Shutting down the computer...")
		if os.name == "nt":
			subprocess.run(["shutdown", "/s", "/t", "0", "/f"], capture_output = False)
		else:
			subprocess.run(["shutdown", "now"], capture_output = False)
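For context on the lookup above: get_recommended_bitrate picks the "high" table at 48 fps and above, then walks the resolution keys from largest to smallest and returns the first bitrate whose key the smaller frame dimension reaches. Below is a minimal standalone sketch of that behaviour; the table here is a hypothetical stand-in, not the package's YOUTUBE_BITRATE_RECOMMENDATIONS.

	# Standalone sketch of the bitrate lookup, using a hypothetical stand-in table;
	# the real YOUTUBE_BITRATE_RECOMMENDATIONS in .config is the authoritative source
	# and also covers HDR.
	FAKE_SDR_TABLE: dict[str, dict[int, int]] = {
		"standard": {2160: 44000, 1440: 16000, 1080: 8000, 720: 5000, 0: 1000},
		"high": {2160: 68000, 1440: 24000, 1080: 12000, 720: 7500, 0: 1000},
	}

	def sketch_bitrate(resolution: tuple[int, int], frame_rate: int = 60) -> int:
		""" Same lookup logic as get_recommended_bitrate above, restricted to SDR. """
		category: str = "high" if frame_rate >= 48 else "standard"
		table: dict[int, int] = FAKE_SDR_TABLE[category]
		smaller_dimension: int = min(*resolution)  # e.g. 1080 for a 1920x1080 frame
		for min_resolution, bitrate in sorted(table.items(), reverse=True):
			if smaller_dimension >= min_resolution:
				return bitrate
		return 1000  # fallback, normally unreachable thanks to the 0 key

	print(sketch_bitrate((3840, 2160), 60))  # 68000, matching the doctest above
	print(sketch_bitrate((1920, 1080), 30))  # 8000

Running the sketch reproduces the doctest values in the diff above for SDR uploads.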
stouputils/applications/upscaler/video.pyi
ADDED

@@ -0,0 +1,60 @@
from ...installer import check_executable as check_executable
from ...io import clean_path as clean_path
from ...parallel import multithreading as multithreading
from ...print import colored_for_loop as colored_for_loop, debug as debug, error as error, info as info, warning as warning
from .config import Config as Config, FFMPEG_RELEASES as FFMPEG_RELEASES, YOUTUBE_BITRATE_RECOMMENDATIONS as YOUTUBE_BITRATE_RECOMMENDATIONS
from .image import convert_frame as convert_frame, get_all_files as get_all_files, upscale_folder as upscale_folder
from typing import Literal

def get_recommended_bitrate(resolution: tuple[int, int], frame_rate: int = 60, upload_type: Literal['SDR', 'HDR'] = 'SDR') -> int:
	''' Get the recommended bitrate (in kbps) for the output video based on the video resolution.

\tArgs:
\t\tresolution (tuple[int, int]): Video resolution (width, height).
\t\tframe_rate (int): Frame rate of the video, default is 60.
\t\tupload_type (Literal["SDR","HDR"]): Upload type from which the recommendation is made, default is "SDR".

\tReturns:
\t\tint: The recommended bitrate for the output video (in kbps)

\tSource: https://support.google.com/youtube/answer/1722171?hl=en#zippy=%2Cbitrate

\tExamples:
\t\t>>> # Valid examples
\t\t>>> get_recommended_bitrate((3840, 2160), 60, "SDR")
\t\t68000
\t\t>>> get_recommended_bitrate((1920, 1080), 60, "HDR")
\t\t15000
\t\t>>> get_recommended_bitrate((1920, 1080), 60, "SDR")
\t\t12000
\t\t>>> get_recommended_bitrate((1920, 1080), 30, "SDR")
\t\t8000

\t\t>>> # Invalid examples
\t\t>>> get_recommended_bitrate((1920, 1080), 60, "Ratio")
\t\tTraceback (most recent call last):
\t\t\t...
\t\tAssertionError: Invalid upload type: \'Ratio\'
\t\t>>> get_recommended_bitrate("1920x1080", 60, "SDR")
\t\tTraceback (most recent call last):
\t\t\t...
\t\tAssertionError: Invalid resolution: 1920x1080, must be a tuple of two integers
\t\t>>> get_recommended_bitrate((1920, 1080), -10, "SDR")
\t\tTraceback (most recent call last):
\t\t\t...
\t\tAssertionError: Invalid frame rate: -10, must be a positive integer
\t'''
def check_ffmpeg_executable() -> None: ...
def upscale_video(video_file: str, input_folder: str, progress_folder: str, output_folder: str) -> None:
	""" Handles a video file. """
def video_upscaler_cli(input_folder: str, progress_folder: str, output_folder: str) -> None:
	""" Upscales videos from an input folder and saves them to an output folder.

\tUses intermediate folders for extracted and upscaled frames within the progress folder.
\t**Handles resuming partially processed videos.**

\tArgs:
\t\tinput_folder (str): Path to the folder containing input videos.
\t\tprogress_folder (str): Path to the folder for storing intermediate files (frames).
\t\toutput_folder (str): Path to the folder where upscaled videos will be saved.
\t"""
stouputils/archive.py
ADDED
@@ -0,0 +1,344 @@
"""
This module provides functions for creating and managing archives.

- repair_zip_file: Try to repair a corrupted zip file by ignoring some of the errors
- make_archive: Create a zip archive from a source directory with consistent file timestamps.
- archive_cli: Main entry point for command line usage

.. image:: https://raw.githubusercontent.com/Stoupy51/stouputils/refs/heads/main/assets/archive_module.gif
  :alt: stouputils archive examples
"""

# pyright: reportUnusedVariable=false
# Imports
import fnmatch
import os
from zipfile import ZIP_DEFLATED, ZipFile, ZipInfo

from .decorators import LogLevels, handle_error
from .io import clean_path, super_copy
from .print import CYAN, GREEN, RESET, debug, error, info


# Function that repair a corrupted zip file (ignoring some of the errors)
@handle_error()
def repair_zip_file(file_path: str, destination: str) -> bool:
	""" Try to repair a corrupted zip file by ignoring some of the errors

	This function manually parses the ZIP file structure to extract files
	even when the ZIP file is corrupted. It reads the central directory
	entries and attempts to decompress each file individually.

	Args:
		file_path (str): Path of the zip file to repair
		destination (str): Destination of the new file
	Returns:
		bool: Always returns True unless any strong error

	Examples:

	.. code-block:: python

		> repair_zip_file("/path/to/source.zip", "/path/to/destination.zip")
	"""
	# Check
	if not os.path.exists(file_path):
		raise FileNotFoundError(f"File '{file_path}' not found")
	dirname: str = os.path.dirname(destination)
	if dirname and not os.path.exists(dirname):
		raise FileNotFoundError(f"Directory '{dirname}' not found")

	import struct
	import zlib

	# Read the entire ZIP file into memory
	with open(file_path, 'rb') as f:
		data = f.read()

	# Find central directory entries
	CENTRAL_SIG = b'PK\x01\x02'
	entries: list[dict[str, int | str]] = []
	idx = 0

	while True:
		idx = data.find(CENTRAL_SIG, idx)
		if idx == -1:
			break
		# Ensure enough length for central directory header
		if idx + 46 > len(data):
			break

		header = data[idx:idx+46]
		try:
			(
				sig,
				ver_made, ver_needed, flags, comp_method, mtime, mdate,
				crc, csize, usize,
				name_len, extra_len, comm_len,
				disk_start, int_attr,
				ext_attr, local_off
			) = struct.unpack('<4s6H3L3H2H2L', header)

			name_start = idx + 46
			if name_start + name_len > len(data):
				idx += 4
				continue

			name = data[name_start:name_start+name_len].decode('utf-8', errors='replace')
			entries.append({
				'name': name,
				'comp_method': comp_method,
				'csize': csize,
				'usize': usize,
				'local_offset': local_off,
				'crc': crc
			})
		except (struct.error, UnicodeDecodeError):
			# Skip corrupted entries
			pass

		idx += 4

	# Create a new ZIP file with recovered entries
	with ZipFile(destination, "w", compression=ZIP_DEFLATED) as new_zip_file:
		for entry in entries:
			try:
				# Get the local header to find data start
				lo: int = int(entry['local_offset'])
				if lo + 30 > len(data):
					continue

				lh = data[lo:lo+30]
				try:
					_, _, _, _, _, _, _, _, _, name_len, extra_len = struct.unpack('<4sHHHHHLLLHH', lh)
				except struct.error:
					continue

				data_start: int = lo + 30 + name_len + extra_len
				if data_start + int(entry['csize']) > len(data):
					continue

				comp_data = data[data_start:data_start+int(entry['csize'])]

				# Decompress the data
				try:
					if int(entry['comp_method']) == 0: # No compression
						content = comp_data[:int(entry['usize'])]
					elif int(entry['comp_method']) == 8: # Deflate compression
						content = zlib.decompress(comp_data, -zlib.MAX_WBITS)
					else:
						# Unsupported compression method, skip
						continue

					# Write to new ZIP file
					new_zip_file.writestr(str(entry['name']), content)

				except (zlib.error, Exception):
					# If decompression fails, try to write raw data as a fallback
					try:
						new_zip_file.writestr(f"{entry['name']!s}.corrupted", comp_data)
					except Exception:
						# Skip completely corrupted entries
						continue

			except Exception:
				# Skip any entries that cause errors
				continue

	return True

# Function that makes an archive with consistency (same zip file each time)
@handle_error()
def make_archive(
	source: str,
	destinations: list[str] | str | None = None,
	override_time: None | tuple[int, int, int, int, int, int] = None,
	create_dir: bool = False,
	ignore_patterns: str | None = None,
) -> bool:
	""" Create a zip archive from a source directory with consistent file timestamps.
	(Meaning deterministic zip file each time)

	Creates a zip archive from the source directory and copies it to one or more destinations.
	The archive will have consistent file timestamps across runs if override_time is specified.
	Uses maximum compression level (9) with ZIP_DEFLATED algorithm.

	Args:
		source (str): The source folder to archive
		destinations (list[str]|str): The destination folder(s) or file(s) to copy the archive to
		override_time (None | tuple[int, ...]): The constant time to use for the archive
			(e.g. (2024, 1, 1, 0, 0, 0) for 2024-01-01 00:00:00)
		create_dir (bool): Whether to create the destination directory if it doesn't exist
		ignore_patterns (str | None): Glob pattern(s) to ignore files. Can be a single pattern or comma-separated patterns (e.g. "*.pyc" or "*.pyc,__pycache__,*.log")
	Returns:
		bool: Always returns True unless any strong error
	Examples:

	.. code-block:: python

		> make_archive("/path/to/source", "/path/to/destination.zip")
		> make_archive("/path/to/source", ["/path/to/destination.zip", "/path/to/destination2.zip"])
		> make_archive("src", "hello_from_year_2085.zip", override_time=(2085,1,1,0,0,0))
		> make_archive("src", "output.zip", ignore_patterns="*.pyc")
		> make_archive("src", "output.zip", ignore_patterns="__pycache__")
		> make_archive("src", "output.zip", ignore_patterns="*.pyc,__pycache__,*.log")
	"""
	# Fix copy_destinations type if needed
	if destinations is None:
		destinations = []
	if destinations and isinstance(destinations, str):
		destinations = [destinations]
	if not destinations:
		raise ValueError("destinations must be a list of at least one destination")

	# Create directories if needed
	if create_dir:
		for dest_file in destinations:
			dest_file = clean_path(dest_file)
			parent_dir = os.path.dirname(dest_file)
			if parent_dir and not os.path.exists(parent_dir):
				os.makedirs(parent_dir, exist_ok=True)

	# Create the archive
	destination: str = clean_path(destinations[0])
	destination = destination if ".zip" in destination else destination + ".zip"

	# Parse ignore patterns (can be a single pattern or comma-separated patterns)
	ignore_pattern_list: list[str] = []
	if ignore_patterns:
		ignore_pattern_list = [pattern.strip() for pattern in ignore_patterns.split(',')]

	def should_ignore(path: str) -> bool:
		"""Check if a file or directory path should be ignored based on patterns."""
		if not ignore_pattern_list:
			return False
		for pattern in ignore_pattern_list:
			if fnmatch.fnmatch(os.path.basename(path), pattern) or fnmatch.fnmatch(path, pattern):
				return True
		return False

	with ZipFile(destination, "w", compression=ZIP_DEFLATED, compresslevel=9) as zip:
		for root, dirs, files in os.walk(source):
			# Filter out ignored directories in-place to prevent walking into them
			dirs[:] = [d for d in dirs if not should_ignore(d)]

			for file in files:
				file_path: str = clean_path(os.path.join(root, file))
				rel_path = os.path.relpath(file_path, source)

				# Skip files that match any ignore pattern
				if should_ignore(file) or should_ignore(rel_path):
					continue

				info: ZipInfo = ZipInfo(rel_path)
				info.compress_type = ZIP_DEFLATED
				if override_time:
					info.date_time = override_time
				with open(file_path, "rb") as f:
					zip.writestr(info, f.read())

	# Copy the archive to the destination(s)
	for dest_file in destinations[1:]:
		@handle_error(Exception, message=f"Unable to copy '{destination}' to '{dest_file}'", error_log=LogLevels.WARNING)
		def internal(src: str, dest: str) -> None:
			super_copy(src, dest, create_dir=create_dir)
		internal(destination, clean_path(dest_file))

	return True


# Main entry point for command line usage
def archive_cli() -> None:
	""" Main entry point for command line usage.

	Examples:

	.. code-block:: bash

		# Repair a corrupted zip file
		python -m stouputils.archive repair /path/to/corrupted.zip /path/to/repaired.zip

		# Create a zip archive
		python -m stouputils.archive make /path/to/source /path/to/destination.zip

		# Create a zip archive with ignore patterns
		python -m stouputils.archive make /path/to/source /path/to/destination.zip --ignore "*.pyc,__pycache__"
	"""
	import argparse
	import sys

	# Check for help or no command
	if len(sys.argv) == 1 or (len(sys.argv) == 2 and sys.argv[1] in ("--help", "-h", "help")):
		separator: str = "─" * 60
		print(f"{CYAN}{separator}{RESET}")
		print(f"{CYAN}stouputils {GREEN}archive {CYAN}utilities{RESET}")
		print(f"{CYAN}{separator}{RESET}")
		print(f"\n{CYAN}Usage:{RESET} stouputils archive <command> [options]")
		print(f"\n{CYAN}Available commands:{RESET}")
		print(f"  {GREEN}make{RESET} <source> <destination> [--ignore PATTERNS] [--create-dir]")
		print("      Create a zip archive from source directory")
		print(f"      {CYAN}--ignore{RESET}      Glob patterns to ignore (comma-separated)")
		print(f"      {CYAN}--create-dir{RESET}  Create destination directory if needed")
		print(f"\n  {GREEN}repair{RESET} <input_file> [output_file]")
		print("      Repair a corrupted zip file")
		print("      If output_file is omitted, adds '_repaired' suffix")
		print(f"{CYAN}{separator}{RESET}")
		return

	parser = argparse.ArgumentParser(description="Archive utilities")
	subparsers = parser.add_subparsers(dest="command", help="Available commands")

	# Repair command
	repair_parser = subparsers.add_parser("repair", help="Repair a corrupted zip file")
	repair_parser.add_argument("input_file", help="Path to the corrupted zip file")
	repair_parser.add_argument("output_file", nargs="?", help="Path to the repaired zip file (optional, defaults to input_file with '_repaired' suffix)")

	# Make archive command
	archive_parser = subparsers.add_parser("make", help="Create a zip archive")
	archive_parser.add_argument("source", help="Source directory to archive")
	archive_parser.add_argument("destination", help="Destination zip file")
	archive_parser.add_argument("--ignore", help="Glob patterns to ignore (comma-separated)")
	archive_parser.add_argument("--create-dir", action="store_true", help="Create destination directory if it doesn't exist")

	args = parser.parse_args()

	if args.command == "repair":
		input_file = args.input_file
		if args.output_file:
			output_file = args.output_file
		else:
			# Generate default output filename
			base, ext = os.path.splitext(input_file)
			output_file = f"{base}_repaired{ext}"

		debug(f"Repairing '{input_file}' to '{output_file}'...")
		try:
			repair_zip_file(input_file, output_file)
			info(f"Successfully repaired zip file: {output_file}")
		except Exception as e:
			error(f"Error repairing zip file: {e}", exit=False)
			sys.exit(1)

	elif args.command == "make":
		debug(f"Creating archive from '{args.source}' to '{args.destination}'...")
		try:
			make_archive(
				source=args.source,
				destinations=args.destination,
				create_dir=args.create_dir,
				ignore_patterns=args.ignore
			)
			info(f"Successfully created archive: {args.destination}")
		except Exception as e:
			error(f"Error creating archive: {e}", exit=False)
			sys.exit(1)

	else:
		parser.print_help()
		sys.exit(1)


if __name__ == "__main__":
	archive_cli()
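The override_time parameter is what makes make_archive deterministic: every ZipInfo in the archive gets the same date_time, so archiving an unchanged tree twice yields byte-identical zips. A minimal sketch of how that property could be checked, assuming stouputils is installed; the scratch paths below are illustrative only, not part of the package:

	# Sketch: verify that make_archive produces byte-identical output across runs
	# when override_time pins the timestamps. Uses a throwaway directory.
	import hashlib
	import os

	from stouputils.archive import make_archive

	# Prepare a tiny source tree (hypothetical scratch paths)
	os.makedirs("scratch_src", exist_ok=True)
	with open("scratch_src/hello.txt", "w") as f:
		f.write("hello")

	# Build the same tree twice with a pinned timestamp and hash both archives
	digests: list[str] = []
	for destination in ("first.zip", "second.zip"):
		make_archive("scratch_src", destination, override_time=(2024, 1, 1, 0, 0, 0))
		with open(destination, "rb") as f:
			digests.append(hashlib.sha256(f.read()).hexdigest())

	print(digests[0] == digests[1])  # expected: True while the source tree is unchanged

Pinning timestamps this way is the usual trick for reproducible zips, since per-entry modification times are the main thing that varies between otherwise identical builds.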