TonieToolbox 0.5.1__py3-none-any.whl → 0.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- TonieToolbox/__init__.py +2 -1
- TonieToolbox/__main__.py +240 -98
- TonieToolbox/artwork.py +59 -10
- TonieToolbox/audio_conversion.py +33 -29
- TonieToolbox/constants.py +133 -10
- TonieToolbox/dependency_manager.py +679 -184
- TonieToolbox/filename_generator.py +57 -10
- TonieToolbox/integration.py +73 -0
- TonieToolbox/integration_macos.py +613 -0
- TonieToolbox/integration_ubuntu.py +2 -0
- TonieToolbox/integration_windows.py +445 -0
- TonieToolbox/logger.py +9 -10
- TonieToolbox/media_tags.py +19 -100
- TonieToolbox/ogg_page.py +41 -41
- TonieToolbox/opus_packet.py +15 -15
- TonieToolbox/recursive_processor.py +24 -23
- TonieToolbox/tags.py +4 -5
- TonieToolbox/teddycloud.py +164 -51
- TonieToolbox/tonie_analysis.py +26 -24
- TonieToolbox/tonie_file.py +73 -45
- TonieToolbox/tonies_json.py +71 -67
- TonieToolbox/version_handler.py +14 -20
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/METADATA +129 -92
- tonietoolbox-0.6.0.dist-info/RECORD +30 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/WHEEL +1 -1
- tonietoolbox-0.5.1.dist-info/RECORD +0 -26
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/entry_points.txt +0 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/licenses/LICENSE.md +0 -0
- {tonietoolbox-0.5.1.dist-info → tonietoolbox-0.6.0.dist-info}/top_level.txt +0 -0
@@ -1,3 +1,4 @@
|
|
1
|
+
#!/usr/bin/python3
|
1
2
|
"""
|
2
3
|
Dependency management for the TonieToolbox package.
|
3
4
|
|
@@ -9,15 +10,20 @@ import os
|
|
9
10
|
import sys
|
10
11
|
import platform
|
11
12
|
import subprocess
|
13
|
+
import requests
|
14
|
+
from requests.adapters import HTTPAdapter
|
15
|
+
from urllib3.util.retry import Retry
|
12
16
|
import shutil
|
13
17
|
import zipfile
|
14
18
|
import tarfile
|
15
|
-
import urllib.request
|
16
19
|
import time
|
17
|
-
|
20
|
+
import hashlib
|
21
|
+
import tempfile
|
22
|
+
import concurrent.futures
|
23
|
+
from tqdm.auto import tqdm
|
18
24
|
|
19
25
|
from .logger import get_logger
|
20
|
-
logger = get_logger(
|
26
|
+
logger = get_logger(__name__)
|
21
27
|
|
22
28
|
CACHE_DIR = os.path.join(os.path.expanduser("~"), ".tonietoolbox")
|
23
29
|
LIBS_DIR = os.path.join(CACHE_DIR, "libs")
|
@@ -27,15 +33,21 @@ DEPENDENCIES = {
|
|
27
33
|
'windows': {
|
28
34
|
'url': 'https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip',
|
29
35
|
'bin_path': 'bin/ffmpeg.exe',
|
30
|
-
'extract_dir': 'ffmpeg'
|
36
|
+
'extract_dir': 'ffmpeg',
|
37
|
+
'mirrors': [
|
38
|
+
''
|
39
|
+
]
|
31
40
|
},
|
32
41
|
'linux': {
|
33
42
|
'url': 'https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz',
|
34
43
|
'bin_path': 'ffmpeg',
|
35
|
-
'extract_dir': 'ffmpeg'
|
44
|
+
'extract_dir': 'ffmpeg',
|
45
|
+
'mirrors': [
|
46
|
+
''
|
47
|
+
]
|
36
48
|
},
|
37
49
|
'darwin': {
|
38
|
-
'url': 'https://evermeet.cx/ffmpeg/
|
50
|
+
'url': 'https://evermeet.cx/ffmpeg/get/zip',
|
39
51
|
'bin_path': 'ffmpeg',
|
40
52
|
'extract_dir': 'ffmpeg'
|
41
53
|
}
|
@@ -44,7 +56,10 @@ DEPENDENCIES = {
|
|
44
56
|
'windows': {
|
45
57
|
'url': 'https://archive.mozilla.org/pub/opus/win32/opus-tools-0.2-opus-1.3.zip',
|
46
58
|
'bin_path': 'opusenc.exe',
|
47
|
-
'extract_dir': 'opusenc'
|
59
|
+
'extract_dir': 'opusenc',
|
60
|
+
'mirrors': [
|
61
|
+
''
|
62
|
+
]
|
48
63
|
},
|
49
64
|
'linux': {
|
50
65
|
'package': 'opus-tools'
|
@@ -67,56 +82,324 @@ def get_system():
|
|
67
82
|
|
68
83
|
def get_user_data_dir():
|
69
84
|
"""Get the user data directory for storing downloaded dependencies."""
|
70
|
-
app_dir =
|
85
|
+
app_dir = CACHE_DIR
|
71
86
|
logger.debug("Using application data directory: %s", app_dir)
|
72
87
|
|
73
88
|
os.makedirs(app_dir, exist_ok=True)
|
74
89
|
return app_dir
|
75
90
|
|
76
|
-
def
|
91
|
+
def create_session():
|
92
|
+
"""
|
93
|
+
Create a requests session with retry capabilities.
|
94
|
+
|
95
|
+
Returns:
|
96
|
+
requests.Session: Configured session with retries
|
97
|
+
"""
|
98
|
+
session = requests.Session()
|
99
|
+
retry_strategy = Retry(
|
100
|
+
total=3,
|
101
|
+
backoff_factor=1,
|
102
|
+
status_forcelist=[429, 500, 502, 503, 504],
|
103
|
+
allowed_methods=["HEAD", "GET", "OPTIONS"]
|
104
|
+
)
|
105
|
+
adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)
|
106
|
+
session.mount("http://", adapter)
|
107
|
+
session.mount("https://", adapter)
|
108
|
+
return session
|
109
|
+
|
110
|
+
def configure_tqdm():
|
111
|
+
"""
|
112
|
+
Configure tqdm to ensure it displays properly in various environments.
|
113
|
+
"""
|
114
|
+
# Check if we're in a notebook environment or standard terminal
|
115
|
+
is_notebook = 'ipykernel' in sys.modules
|
116
|
+
|
117
|
+
# Set global defaults for tqdm
|
118
|
+
tqdm.monitor_interval = 0 # Prevent monitor thread issues
|
119
|
+
|
120
|
+
# Return common kwargs for consistency
|
121
|
+
return {
|
122
|
+
'file': sys.stdout,
|
123
|
+
'leave': True,
|
124
|
+
'dynamic_ncols': True,
|
125
|
+
'mininterval': 0.5,
|
126
|
+
'smoothing': 0.2,
|
127
|
+
'ncols': 100 if not is_notebook else None,
|
128
|
+
'disable': False
|
129
|
+
}
|
130
|
+
|
131
|
+
def download_file(url, destination, chunk_size=1024*1024, timeout=30, use_tqdm=True):
|
77
132
|
"""
|
78
|
-
Download a file from a URL to the specified destination.
|
133
|
+
Download a file from a URL to the specified destination using optimized methods.
|
79
134
|
|
80
135
|
Args:
|
81
136
|
url (str): The URL of the file to download
|
82
137
|
destination (str): The path to save the file to
|
138
|
+
chunk_size (int): Size of chunks to download (default: 1MB)
|
139
|
+
timeout (int): Connection timeout in seconds (default: 30s)
|
140
|
+
use_tqdm (bool): Whether to display a progress bar (default: True)
|
83
141
|
|
84
142
|
Returns:
|
85
143
|
bool: True if download was successful, False otherwise
|
86
144
|
"""
|
87
145
|
try:
|
88
146
|
logger.info("Downloading %s to %s", url, destination)
|
89
|
-
headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.
|
90
|
-
req = urllib.request.Request(url, headers=headers)
|
147
|
+
headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.1'}
|
91
148
|
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
149
|
+
# Create a directory for the destination file if it doesn't exist
|
150
|
+
os.makedirs(os.path.dirname(os.path.abspath(destination)), exist_ok=True)
|
151
|
+
|
152
|
+
# Use a session for connection pooling and retries
|
153
|
+
session = create_session()
|
154
|
+
|
155
|
+
# Start with a HEAD request to get the file size before downloading
|
156
|
+
head_response = session.head(url, headers=headers, timeout=timeout)
|
157
|
+
head_response.raise_for_status()
|
158
|
+
file_size = int(head_response.headers.get('Content-Length', 0))
|
159
|
+
logger.debug("File size: %d bytes", file_size)
|
160
|
+
|
161
|
+
# Now start the download
|
162
|
+
response = session.get(url, headers=headers, stream=True, timeout=timeout)
|
163
|
+
response.raise_for_status() # Raise exception for 4XX/5XX status codes
|
164
|
+
# Set up the progress bar
|
165
|
+
desc = os.path.basename(destination)
|
166
|
+
if len(desc) > 25:
|
167
|
+
desc = desc[:22] + "..."
|
168
|
+
|
169
|
+
with open(destination, 'wb') as out_file:
|
170
|
+
if use_tqdm and file_size > 0:
|
171
|
+
# Force tqdm to output to console
|
172
|
+
pbar = tqdm(
|
173
|
+
total=file_size,
|
174
|
+
unit='B',
|
175
|
+
unit_scale=True,
|
176
|
+
desc=desc,
|
177
|
+
**configure_tqdm()
|
178
|
+
)
|
106
179
|
|
107
|
-
|
108
|
-
|
109
|
-
|
180
|
+
for chunk in response.iter_content(chunk_size=chunk_size):
|
181
|
+
if not chunk:
|
182
|
+
continue
|
183
|
+
out_file.write(chunk)
|
184
|
+
pbar.update(len(chunk))
|
185
|
+
pbar.close()
|
186
|
+
# Print an empty line after progress is done
|
187
|
+
print("")
|
188
|
+
else:
|
189
|
+
# Fallback if no file size or tqdm is disabled
|
190
|
+
downloaded = 0
|
191
|
+
for chunk in response.iter_content(chunk_size=chunk_size):
|
192
|
+
if not chunk:
|
193
|
+
continue
|
194
|
+
downloaded += len(chunk)
|
195
|
+
out_file.write(chunk)
|
196
|
+
if file_size > 0:
|
197
|
+
percent = downloaded * 100 / file_size
|
198
|
+
logger.debug("Download progress: %.1f%%", percent)
|
110
199
|
|
111
200
|
logger.info("Download completed successfully")
|
112
201
|
return True
|
113
|
-
except
|
202
|
+
except requests.exceptions.SSLError as e:
|
203
|
+
logger.error("Failed to download %s: SSL Error: %s", url, e)
|
204
|
+
# On macOS, provide more helpful error message for SSL certificate issues
|
205
|
+
if platform.system() == 'Darwin':
|
206
|
+
logger.error("SSL certificate verification failed on macOS. This is a known issue.")
|
207
|
+
logger.error("You can solve this by running: /Applications/Python 3.x/Install Certificates.command")
|
208
|
+
logger.error("Or by using the --auto-download flag which will bypass certificate verification.")
|
209
|
+
return False
|
210
|
+
except requests.exceptions.RequestException as e:
|
114
211
|
logger.error("Failed to download %s: %s", url, e)
|
115
212
|
return False
|
213
|
+
except Exception as e:
|
214
|
+
logger.error("Unexpected error downloading %s: %s", url, e)
|
215
|
+
return False
|
216
|
+
|
217
|
+
def download_file_multipart(url, destination, num_parts=4, chunk_size=1024*1024, timeout=30):
|
218
|
+
"""
|
219
|
+
Download a file in multiple parts concurrently for better performance.
|
220
|
+
|
221
|
+
Args:
|
222
|
+
url (str): The URL of the file to download
|
223
|
+
destination (str): The path to save the file to
|
224
|
+
num_parts (int): Number of parts to download concurrently
|
225
|
+
chunk_size (int): Size of chunks to download (default: 1MB)
|
226
|
+
timeout (int): Connection timeout in seconds (default: 30s)
|
227
|
+
|
228
|
+
Returns:
|
229
|
+
bool: True if download was successful, False otherwise
|
230
|
+
"""
|
231
|
+
try:
|
232
|
+
logger.info("Starting multi-part download of %s with %d parts", url, num_parts)
|
233
|
+
headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.1'}
|
234
|
+
|
235
|
+
session = create_session()
|
236
|
+
response = session.head(url, headers=headers, timeout=timeout)
|
237
|
+
response.raise_for_status()
|
238
|
+
|
239
|
+
file_size = int(response.headers.get('Content-Length', 0))
|
240
|
+
if file_size <= 0:
|
241
|
+
logger.warning("Multi-part download requested but Content-Length not available, falling back to regular download")
|
242
|
+
return download_file(url, destination, chunk_size, timeout)
|
243
|
+
|
244
|
+
# If file size is too small for multipart, fallback to regular download
|
245
|
+
if file_size < num_parts * 1024 * 1024 * 5: # Less than 5MB per part
|
246
|
+
logger.debug("File size too small for efficient multi-part download, using regular download")
|
247
|
+
return download_file(url, destination, chunk_size, timeout)
|
248
|
+
|
249
|
+
# Calculate part sizes
|
250
|
+
part_size = file_size // num_parts
|
251
|
+
ranges = [(i * part_size, min((i + 1) * part_size - 1, file_size - 1))
|
252
|
+
for i in range(num_parts)]
|
253
|
+
if ranges[-1][1] < file_size - 1:
|
254
|
+
ranges[-1] = (ranges[-1][0], file_size - 1)
|
255
|
+
|
256
|
+
# Create temporary directory for parts
|
257
|
+
temp_dir = tempfile.mkdtemp(prefix="tonietoolbox_download_")
|
258
|
+
part_files = [os.path.join(temp_dir, f"part_{i}") for i in range(num_parts)]
|
259
|
+
|
260
|
+
# Define the download function for each part
|
261
|
+
def download_part(part_idx):
|
262
|
+
start, end = ranges[part_idx]
|
263
|
+
part_path = part_files[part_idx]
|
264
|
+
|
265
|
+
headers_with_range = headers.copy()
|
266
|
+
headers_with_range['Range'] = f'bytes={start}-{end}'
|
267
|
+
|
268
|
+
part_size = end - start + 1
|
269
|
+
|
270
|
+
try:
|
271
|
+
response = session.get(url, headers=headers_with_range, stream=True, timeout=timeout)
|
272
|
+
response.raise_for_status()
|
273
|
+
# Set up progress bar for this part
|
274
|
+
desc = f"Part {part_idx+1}/{num_parts}"
|
275
|
+
with tqdm(
|
276
|
+
total=part_size,
|
277
|
+
unit='B',
|
278
|
+
unit_scale=True,
|
279
|
+
desc=desc,
|
280
|
+
position=part_idx,
|
281
|
+
**configure_tqdm()
|
282
|
+
) as pbar:
|
283
|
+
with open(part_path, 'wb') as f:
|
284
|
+
for chunk in response.iter_content(chunk_size=chunk_size):
|
285
|
+
if not chunk:
|
286
|
+
continue
|
287
|
+
f.write(chunk)
|
288
|
+
pbar.update(len(chunk))
|
289
|
+
|
290
|
+
return True
|
291
|
+
except Exception as e:
|
292
|
+
logger.error("Error downloading part %d: %s", part_idx, str(e))
|
293
|
+
return False
|
294
|
+
|
295
|
+
# Download all parts in parallel
|
296
|
+
logger.info("Starting concurrent download of %d parts...", num_parts)
|
297
|
+
with concurrent.futures.ThreadPoolExecutor(max_workers=num_parts) as executor:
|
298
|
+
futures = [executor.submit(download_part, i) for i in range(num_parts)]
|
299
|
+
all_successful = all(future.result() for future in concurrent.futures.as_completed(futures))
|
300
|
+
|
301
|
+
if not all_successful:
|
302
|
+
logger.error("One or more parts failed to download")
|
303
|
+
|
304
|
+
# Clean up
|
305
|
+
for part_file in part_files:
|
306
|
+
if os.path.exists(part_file):
|
307
|
+
os.remove(part_file)
|
308
|
+
os.rmdir(temp_dir)
|
309
|
+
|
310
|
+
return False
|
311
|
+
|
312
|
+
# Combine all parts into the final file
|
313
|
+
logger.info("All parts downloaded successfully, combining into final file")
|
314
|
+
with open(destination, 'wb') as outfile:
|
315
|
+
for part_file in part_files:
|
316
|
+
with open(part_file, 'rb') as infile:
|
317
|
+
shutil.copyfileobj(infile, outfile)
|
318
|
+
os.remove(part_file)
|
319
|
+
|
320
|
+
# Clean up temp directory
|
321
|
+
os.rmdir(temp_dir)
|
322
|
+
|
323
|
+
logger.info("Multi-part download completed successfully")
|
324
|
+
return True
|
325
|
+
|
326
|
+
except Exception as e:
|
327
|
+
logger.error("Failed multi-part download: %s", str(e))
|
328
|
+
# Fall back to regular download
|
329
|
+
logger.info("Falling back to regular download method")
|
330
|
+
return download_file(url, destination, chunk_size, timeout)
|
331
|
+
|
332
|
+
def smart_download(url, destination, use_multipart=True, min_size_for_multipart=20*1024*1024, num_parts=4, use_tqdm=True):
|
333
|
+
"""
|
334
|
+
Smart download function that selects the best download method based on file size.
|
335
|
+
|
336
|
+
Args:
|
337
|
+
url (str): The URL of the file to download
|
338
|
+
destination (str): The path to save the file to
|
339
|
+
use_multipart (bool): Whether to allow multi-part downloads (default: True)
|
340
|
+
min_size_for_multipart (int): Minimum file size in bytes to use multi-part download (default: 20MB)
|
341
|
+
num_parts (int): Number of parts for multi-part download (default: 4)
|
342
|
+
use_tqdm (bool): Whether to display progress bars (default: True)
|
343
|
+
|
344
|
+
Returns:
|
345
|
+
bool: True if download was successful, False otherwise
|
346
|
+
"""
|
347
|
+
try:
|
348
|
+
# Check if multipart is enabled and get file size
|
349
|
+
if not use_multipart:
|
350
|
+
return download_file(url, destination, use_tqdm=use_tqdm)
|
351
|
+
|
352
|
+
# Create session and check file size
|
353
|
+
session = create_session()
|
354
|
+
response = session.head(url, timeout=30)
|
355
|
+
file_size = int(response.headers.get('Content-Length', 0))
|
356
|
+
|
357
|
+
if file_size >= min_size_for_multipart and use_multipart:
|
358
|
+
logger.info("File size (%d bytes) is suitable for multi-part download", file_size)
|
359
|
+
print(f"Starting multi-part download of {os.path.basename(destination)} ({file_size/1024/1024:.1f} MB)")
|
360
|
+
return download_file_multipart(url, destination, num_parts=num_parts)
|
361
|
+
else:
|
362
|
+
logger.debug("Using standard download method (file size: %d bytes)", file_size)
|
363
|
+
return download_file(url, destination, use_tqdm=use_tqdm)
|
364
|
+
except Exception as e:
|
365
|
+
logger.warning("Error determining download method: %s, falling back to standard download", e)
|
366
|
+
return download_file(url, destination, use_tqdm=use_tqdm)
|
367
|
+
|
368
|
+
def download_with_mirrors(url, destination, mirrors=None):
|
369
|
+
"""
|
370
|
+
Try downloading a file from the primary URL and fall back to mirrors if needed.
|
371
|
+
|
372
|
+
Args:
|
373
|
+
url (str): Primary URL to download from
|
374
|
+
destination (str): Path to save the file to
|
375
|
+
mirrors (list): List of alternative URLs to try if primary fails
|
376
|
+
|
377
|
+
Returns:
|
378
|
+
bool: True if download was successful from any source, False otherwise
|
379
|
+
"""
|
380
|
+
logger.debug("Starting download with primary URL and %s mirrors",
|
381
|
+
"0" if mirrors is None else len(mirrors))
|
382
|
+
|
383
|
+
# Try the primary URL first
|
384
|
+
if smart_download(url, destination):
|
385
|
+
logger.debug("Download successful from primary URL")
|
386
|
+
return True
|
387
|
+
|
388
|
+
# If primary URL fails and we have mirrors, try them
|
389
|
+
if mirrors:
|
390
|
+
for i, mirror_url in enumerate(mirrors, 1):
|
391
|
+
logger.info("Primary download failed, trying mirror %d of %d",
|
392
|
+
i, len(mirrors))
|
393
|
+
if smart_download(mirror_url, destination):
|
394
|
+
logger.info("Download successful from mirror %d", i)
|
395
|
+
return True
|
396
|
+
|
397
|
+
logger.error("All download attempts failed")
|
398
|
+
return False
|
116
399
|
|
117
400
|
def extract_archive(archive_path, extract_dir):
|
118
401
|
"""
|
119
|
-
Extract an archive file to the specified directory.
|
402
|
+
Extract an archive file to the specified directory using optimized methods.
|
120
403
|
|
121
404
|
Args:
|
122
405
|
archive_path (str): Path to the archive file
|
@@ -129,36 +412,66 @@ def extract_archive(archive_path, extract_dir):
|
|
129
412
|
logger.info("Extracting %s to %s", archive_path, extract_dir)
|
130
413
|
os.makedirs(extract_dir, exist_ok=True)
|
131
414
|
|
132
|
-
# Extract to a temporary
|
133
|
-
temp_extract_dir =
|
134
|
-
|
415
|
+
# Extract to a secure temporary directory
|
416
|
+
temp_extract_dir = tempfile.mkdtemp(prefix="tonietoolbox_extract_")
|
417
|
+
logger.debug("Using temporary extraction directory: %s", temp_extract_dir)
|
135
418
|
|
136
419
|
if archive_path.endswith('.zip'):
|
137
420
|
logger.debug("Extracting ZIP archive")
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
421
|
+
try:
|
422
|
+
# Use a with statement for proper cleanup
|
423
|
+
with zipfile.ZipFile(archive_path, 'r') as zip_ref:
|
424
|
+
# Get the list of files for informational purposes
|
425
|
+
files_extracted = zip_ref.namelist()
|
426
|
+
total_size = sum(info.file_size for info in zip_ref.infolist())
|
427
|
+
logger.debug("ZIP contains %d files, total size: %d bytes",
|
428
|
+
len(files_extracted), total_size)
|
429
|
+
|
430
|
+
# Extract with progress indication for large archives
|
431
|
+
if total_size > 50*1024*1024: # 50 MB
|
432
|
+
# Use configure_tqdm() for consistent parameters
|
433
|
+
tqdm_params = configure_tqdm()
|
434
|
+
with tqdm(
|
435
|
+
total=total_size,
|
436
|
+
unit='B',
|
437
|
+
unit_scale=True,
|
438
|
+
desc="Extracting ZIP",
|
439
|
+
**tqdm_params
|
440
|
+
) as pbar:
|
441
|
+
for file in zip_ref.infolist():
|
442
|
+
zip_ref.extract(file, temp_extract_dir)
|
443
|
+
pbar.update(file.file_size)
|
444
|
+
# Print empty line after progress completion
|
445
|
+
print("")
|
446
|
+
else:
|
447
|
+
zip_ref.extractall(temp_extract_dir)
|
448
|
+
except zipfile.BadZipFile as e:
|
449
|
+
logger.error("Bad ZIP file: %s", str(e))
|
450
|
+
return False
|
451
|
+
|
142
452
|
elif archive_path.endswith(('.tar.gz', '.tgz')):
|
143
453
|
logger.debug("Extracting TAR.GZ archive")
|
144
454
|
with tarfile.open(archive_path, 'r:gz') as tar_ref:
|
145
|
-
tar_ref.extractall(temp_extract_dir)
|
146
455
|
files_extracted = tar_ref.getnames()
|
147
|
-
logger.
|
456
|
+
logger.debug("TAR.GZ contains %d files", len(files_extracted))
|
457
|
+
tar_ref.extractall(path=temp_extract_dir)
|
458
|
+
|
148
459
|
elif archive_path.endswith(('.tar.xz', '.txz')):
|
149
460
|
logger.debug("Extracting TAR.XZ archive")
|
150
461
|
with tarfile.open(archive_path, 'r:xz') as tar_ref:
|
151
|
-
tar_ref.extractall(temp_extract_dir)
|
152
462
|
files_extracted = tar_ref.getnames()
|
153
|
-
logger.
|
463
|
+
logger.debug("TAR.XZ contains %d files", len(files_extracted))
|
464
|
+
tar_ref.extractall(path=temp_extract_dir)
|
465
|
+
|
154
466
|
elif archive_path.endswith('.tar'):
|
155
467
|
logger.debug("Extracting TAR archive")
|
156
468
|
with tarfile.open(archive_path, 'r') as tar_ref:
|
157
|
-
tar_ref.extractall(temp_extract_dir)
|
158
469
|
files_extracted = tar_ref.getnames()
|
159
|
-
logger.
|
470
|
+
logger.debug("TAR contains %d files", len(files_extracted))
|
471
|
+
tar_ref.extractall(path=temp_extract_dir)
|
160
472
|
else:
|
161
473
|
logger.error("Unsupported archive format: %s", archive_path)
|
474
|
+
shutil.rmtree(temp_extract_dir, ignore_errors=True)
|
162
475
|
return False
|
163
476
|
|
164
477
|
logger.info("Archive extracted successfully")
|
@@ -230,7 +543,7 @@ def extract_archive(archive_path, extract_dir):
|
|
230
543
|
|
231
544
|
# Clean up the temporary extraction directory
|
232
545
|
try:
|
233
|
-
shutil.rmtree(temp_extract_dir)
|
546
|
+
shutil.rmtree(temp_extract_dir, ignore_errors=True)
|
234
547
|
logger.debug("Removed temporary extraction directory")
|
235
548
|
except Exception as e:
|
236
549
|
logger.warning("Failed to remove temporary extraction directory: %s", e)
|
@@ -329,132 +642,6 @@ def check_binary_in_path(binary_name):
|
|
329
642
|
|
330
643
|
return None
|
331
644
|
|
332
|
-
def install_package(package_name):
|
333
|
-
"""
|
334
|
-
Attempt to install a package using the system's package manager.
|
335
|
-
|
336
|
-
Args:
|
337
|
-
package_name (str): Name of the package to install
|
338
|
-
|
339
|
-
Returns:
|
340
|
-
bool: True if installation was successful, False otherwise
|
341
|
-
"""
|
342
|
-
system = get_system()
|
343
|
-
logger.info("Attempting to install %s on %s", package_name, system)
|
344
|
-
|
345
|
-
try:
|
346
|
-
if system == 'linux':
|
347
|
-
# Try apt-get (Debian/Ubuntu)
|
348
|
-
if shutil.which('apt-get'):
|
349
|
-
logger.info("Installing %s using apt-get", package_name)
|
350
|
-
subprocess.run(['sudo', 'apt-get', 'update'], check=True)
|
351
|
-
subprocess.run(['sudo', 'apt-get', 'install', '-y', package_name], check=True)
|
352
|
-
return True
|
353
|
-
# Try yum (CentOS/RHEL)
|
354
|
-
elif shutil.which('yum'):
|
355
|
-
logger.info("Installing %s using yum", package_name)
|
356
|
-
subprocess.run(['sudo', 'yum', 'install', '-y', package_name], check=True)
|
357
|
-
return True
|
358
|
-
|
359
|
-
elif system == 'darwin':
|
360
|
-
# Try Homebrew
|
361
|
-
if shutil.which('brew'):
|
362
|
-
logger.info("Installing %s using homebrew", package_name)
|
363
|
-
subprocess.run(['brew', 'install', package_name], check=True)
|
364
|
-
return True
|
365
|
-
|
366
|
-
logger.warning("Could not automatically install %s. Please install it manually.", package_name)
|
367
|
-
return False
|
368
|
-
except subprocess.CalledProcessError as e:
|
369
|
-
logger.error("Failed to install %s: %s", package_name, e)
|
370
|
-
return False
|
371
|
-
|
372
|
-
def install_python_package(package_name):
|
373
|
-
"""
|
374
|
-
Attempt to install a Python package using pip.
|
375
|
-
|
376
|
-
Args:
|
377
|
-
package_name (str): Name of the package to install
|
378
|
-
|
379
|
-
Returns:
|
380
|
-
bool: True if installation was successful, False otherwise
|
381
|
-
"""
|
382
|
-
logger.info("Attempting to install Python package: %s", package_name)
|
383
|
-
try:
|
384
|
-
import subprocess
|
385
|
-
import sys
|
386
|
-
|
387
|
-
# Try to install the package using pip
|
388
|
-
subprocess.check_call([sys.executable, "-m", "pip", "install", package_name])
|
389
|
-
logger.info("Successfully installed Python package: %s", package_name)
|
390
|
-
return True
|
391
|
-
except Exception as e:
|
392
|
-
logger.error("Failed to install Python package %s: %s", package_name, str(e))
|
393
|
-
return False
|
394
|
-
|
395
|
-
def check_python_package(package_name):
|
396
|
-
"""
|
397
|
-
Check if a Python package is installed.
|
398
|
-
|
399
|
-
Args:
|
400
|
-
package_name (str): Name of the package to check
|
401
|
-
|
402
|
-
Returns:
|
403
|
-
bool: True if the package is installed, False otherwise
|
404
|
-
"""
|
405
|
-
logger.debug("Checking if Python package is installed: %s", package_name)
|
406
|
-
try:
|
407
|
-
__import__(package_name)
|
408
|
-
logger.debug("Python package %s is installed", package_name)
|
409
|
-
return True
|
410
|
-
except ImportError:
|
411
|
-
logger.debug("Python package %s is not installed", package_name)
|
412
|
-
return False
|
413
|
-
|
414
|
-
def ensure_mutagen(auto_install=True):
|
415
|
-
"""
|
416
|
-
Ensure that the Mutagen library is available, installing it if necessary and allowed.
|
417
|
-
|
418
|
-
Args:
|
419
|
-
auto_install (bool): Whether to automatically install Mutagen if not found (defaults to True)
|
420
|
-
|
421
|
-
Returns:
|
422
|
-
bool: True if Mutagen is available, False otherwise
|
423
|
-
"""
|
424
|
-
logger.debug("Checking if Mutagen is available")
|
425
|
-
|
426
|
-
try:
|
427
|
-
import mutagen
|
428
|
-
logger.debug("Mutagen is already installed")
|
429
|
-
return True
|
430
|
-
except ImportError:
|
431
|
-
logger.debug("Mutagen is not installed")
|
432
|
-
|
433
|
-
if auto_install:
|
434
|
-
logger.info("Auto-install enabled, attempting to install Mutagen")
|
435
|
-
if install_python_package('mutagen'):
|
436
|
-
try:
|
437
|
-
import mutagen
|
438
|
-
logger.info("Successfully installed and imported Mutagen")
|
439
|
-
return True
|
440
|
-
except ImportError:
|
441
|
-
logger.error("Mutagen was installed but could not be imported")
|
442
|
-
else:
|
443
|
-
logger.error("Failed to install Mutagen")
|
444
|
-
else:
|
445
|
-
logger.warning("Mutagen is not installed and --auto-download is not used.")
|
446
|
-
|
447
|
-
return False
|
448
|
-
|
449
|
-
def is_mutagen_available():
|
450
|
-
"""
|
451
|
-
Check if the Mutagen library is available.
|
452
|
-
|
453
|
-
Returns:
|
454
|
-
bool: True if Mutagen is available, False otherwise
|
455
|
-
"""
|
456
|
-
return check_python_package('mutagen')
|
457
|
-
|
458
645
|
def ensure_dependency(dependency_name, auto_download=False):
|
459
646
|
"""
|
460
647
|
Ensure that a dependency is available, downloading it if necessary.
|
@@ -486,7 +673,7 @@ def ensure_dependency(dependency_name, auto_download=False):
|
|
486
673
|
bin_name = dependency_name if dependency_name != 'opusenc' else 'opusenc'
|
487
674
|
|
488
675
|
# Create a specific folder for this dependency
|
489
|
-
dependency_dir = os.path.join(user_data_dir, dependency_name)
|
676
|
+
dependency_dir = os.path.join(user_data_dir, 'libs', dependency_name)
|
490
677
|
|
491
678
|
# First priority: Check if we already downloaded and extracted it previously
|
492
679
|
# When auto_download is True, we'll skip this check and download fresh versions
|
@@ -577,6 +764,7 @@ def ensure_dependency(dependency_name, auto_download=False):
|
|
577
764
|
|
578
765
|
# Set up download paths
|
579
766
|
download_url = dependency_info['url']
|
767
|
+
mirrors = dependency_info.get('mirrors', [])
|
580
768
|
|
581
769
|
# Create dependency-specific directory
|
582
770
|
os.makedirs(dependency_dir, exist_ok=True)
|
@@ -586,7 +774,10 @@ def ensure_dependency(dependency_name, auto_download=False):
|
|
586
774
|
archive_path = os.path.join(dependency_dir, f"{dependency_name}{archive_ext}")
|
587
775
|
logger.debug("Using archive path: %s", archive_path)
|
588
776
|
|
589
|
-
|
777
|
+
# Use our improved download function with mirrors and tqdm progress bar
|
778
|
+
print(f"Downloading {dependency_name}...")
|
779
|
+
if download_with_mirrors(download_url, archive_path, mirrors):
|
780
|
+
print(f"Extracting {dependency_name}...")
|
590
781
|
if extract_archive(archive_path, dependency_dir):
|
591
782
|
binary = find_binary_in_extracted_dir(dependency_dir, binary_path)
|
592
783
|
if binary:
|
@@ -600,29 +791,248 @@ def ensure_dependency(dependency_name, auto_download=False):
|
|
600
791
|
logger.error("Failed to set up %s", dependency_name)
|
601
792
|
return None
|
602
793
|
|
603
|
-
def
|
794
|
+
def install_package(package_name):
|
604
795
|
"""
|
605
|
-
|
796
|
+
Attempt to install a package using the system's package manager.
|
606
797
|
|
607
798
|
Args:
|
608
|
-
|
799
|
+
package_name (str): Name of the package to install
|
800
|
+
|
801
|
+
Returns:
|
802
|
+
bool: True if installation was successful, False otherwise
|
803
|
+
"""
|
804
|
+
system = get_system()
|
805
|
+
logger.info("Attempting to install %s on %s", package_name, system)
|
609
806
|
|
807
|
+
try:
|
808
|
+
if system == 'linux':
|
809
|
+
# Try apt-get (Debian/Ubuntu)
|
810
|
+
if shutil.which('apt-get'):
|
811
|
+
logger.info("Installing %s using apt-get", package_name)
|
812
|
+
subprocess.run(['sudo', 'apt-get', 'update'], check=True)
|
813
|
+
subprocess.run(['sudo', 'apt-get', 'install', '-y', package_name], check=True)
|
814
|
+
return True
|
815
|
+
# Try yum (CentOS/RHEL)
|
816
|
+
elif shutil.which('yum'):
|
817
|
+
logger.info("Installing %s using yum", package_name)
|
818
|
+
subprocess.run(['sudo', 'yum', 'install', '-y', package_name], check=True)
|
819
|
+
return True
|
820
|
+
|
821
|
+
elif system == 'darwin':
|
822
|
+
# Try Homebrew
|
823
|
+
if shutil.which('brew'):
|
824
|
+
logger.info("Installing %s using homebrew", package_name)
|
825
|
+
subprocess.run(['brew', 'install', package_name], check=True)
|
826
|
+
return True
|
827
|
+
|
828
|
+
logger.warning("Could not automatically install %s. Please install it manually.", package_name)
|
829
|
+
return False
|
830
|
+
except subprocess.CalledProcessError as e:
|
831
|
+
logger.error("Failed to install %s: %s", package_name, e)
|
832
|
+
return False
|
833
|
+
|
834
|
+
def get_ffmpeg_binary(auto_download=False):
    """
    Get the path to the FFmpeg binary, downloading it if necessary and allowed.

    Lookup order: a previously downloaded copy in the user data directory,
    a system-wide binary on PATH, then (only when auto_download is True) a
    platform-specific static build downloaded into the user data directory.

    Args:
        auto_download (bool): Whether to automatically download FFmpeg if not found (defaults to False)

    Returns:
        str: Path to the FFmpeg binary, or None if not available
    """
    logger.debug("Getting FFmpeg binary")

    # Expected location of a previously downloaded binary
    local_dir = os.path.join(get_user_data_dir(), 'libs', 'ffmpeg')
    exe_name = 'ffmpeg.exe' if sys.platform == 'win32' else 'ffmpeg'
    binary_path = os.path.join(local_dir, exe_name)

    if os.path.isfile(binary_path):
        logger.debug("FFmpeg binary found at %s", binary_path)
        return binary_path

    # Check for a system-wide FFmpeg.  shutil.which is portable, so there is
    # no need to shell out to the external 'which' command on POSIX.
    from shutil import which
    system_binary = which('ffmpeg')
    if system_binary:
        logger.debug("System-wide FFmpeg found at %s", system_binary)
        return system_binary
    logger.debug("No system-wide FFmpeg found")

    if not auto_download:
        logger.warning("FFmpeg is not available and --auto-download is not used.")
        return None

    logger.info("Auto-download enabled, forcing download/installation of ffmpeg")
    print("Downloading ffmpeg...")

    # Create directory if it doesn't exist
    os.makedirs(local_dir, exist_ok=True)

    # Download FFmpeg based on platform
    if sys.platform == 'win32':
        url = "https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip"
        archive_path = os.path.join(local_dir, "ffmpeg.zip")

        logger.info("Downloading %s to %s", url, archive_path)
        download_with_mirrors(url, archive_path)

        print("Extracting ffmpeg...")
        logger.info("Extracting %s to %s", archive_path, local_dir)
        extract_archive(archive_path, local_dir)

        # The archive unpacks into a nested directory; locate the executable
        for root, _dirs, files in os.walk(local_dir):
            if 'ffmpeg.exe' in files:
                binary_path = os.path.join(root, 'ffmpeg.exe')
                break

        if not os.path.exists(binary_path):
            logger.error("FFmpeg binary not found after extraction")
            return None

        logger.info("Successfully set up ffmpeg: %s", binary_path)
        return binary_path

    if sys.platform == 'darwin':  # macOS
        url = "https://evermeet.cx/ffmpeg/getrelease/ffmpeg/zip"
        archive_path = os.path.join(local_dir, "ffmpeg.zip")

        download_with_mirrors(url, archive_path)
        extract_archive(archive_path, local_dir)

        binary_path = os.path.join(local_dir, "ffmpeg")
        # Guard against unexpected archive layouts: chmod on a missing path
        # would otherwise raise an uncaught FileNotFoundError.
        if not os.path.exists(binary_path):
            logger.error("FFmpeg binary not found after extraction")
            return None

        # Make binary executable
        os.chmod(binary_path, 0o755)
        logger.info("Successfully set up ffmpeg: %s", binary_path)
        return binary_path

    # Linux and other POSIX platforms: static build
    url = "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz"
    archive_path = os.path.join(local_dir, "ffmpeg.tar.xz")

    download_with_mirrors(url, archive_path)
    extract_archive(archive_path, local_dir)

    # Find the binary in the extracted files
    for root, _dirs, files in os.walk(local_dir):
        if 'ffmpeg' in files:
            binary_path = os.path.join(root, 'ffmpeg')
            os.chmod(binary_path, 0o755)
            logger.info("Successfully set up ffmpeg: %s", binary_path)
            return binary_path

    logger.error("FFmpeg binary not found after extraction")
    return None
def get_opus_binary(auto_download=False):
    """
    Get the path to the Opus binary (opusenc), downloading it if necessary and allowed.

    Lookup order: a previously downloaded copy in the user data directory,
    a system-wide opusenc on PATH, then (only when auto_download is True and
    the platform is Windows) a download of the Mozilla opus-tools build.

    Args:
        auto_download (bool): Whether to automatically download Opus if not found (defaults to False)

    Returns:
        str: Path to the Opus binary, or None if not available
    """
    logger.debug("Getting Opus binary")

    # Expected location of a previously downloaded binary
    local_dir = os.path.join(get_user_data_dir(), 'libs', 'opusenc')
    exe_name = 'opusenc.exe' if sys.platform == 'win32' else 'opusenc'
    binary_path = os.path.join(local_dir, exe_name)

    if os.path.isfile(binary_path):
        logger.debug("Opus binary found at %s", binary_path)
        return binary_path

    # Check for a system-wide opusenc.  shutil.which is portable, so there is
    # no need to shell out to the external 'which' command on POSIX.
    from shutil import which
    system_binary = which('opusenc')
    if system_binary:
        logger.debug("System-wide Opus found at %s", system_binary)
        return system_binary
    logger.debug("No system-wide Opus found")

    if not auto_download:
        logger.warning("Opus is not available and --auto-download is not used.")
        return None

    logger.info("Auto-download enabled, forcing download/installation of opusenc")
    print("Downloading opusenc...")

    # Create directory if it doesn't exist
    os.makedirs(local_dir, exist_ok=True)

    if sys.platform != 'win32':
        # Precompiled opus-tools are only published for Windows; other
        # platforms would need a source build, which is not implemented.
        logger.error("Automatic download of Opus for non-Windows platforms is not supported yet")
        return None

    url = "https://archive.mozilla.org/pub/opus/win32/opus-tools-0.2-opus-1.3.zip"
    archive_path = os.path.join(local_dir, "opusenc.zip")

    # Download the file
    logger.info("Downloading %s to %s", url, archive_path)
    download_with_mirrors(url, archive_path)

    # Extract the archive
    print("Extracting opusenc...")
    logger.info("Extracting %s to %s", archive_path, local_dir)
    extract_archive(archive_path, local_dir)

    # The binary should now be in the directory; if the archive unpacked
    # into a subdirectory, search the tree for it.
    binary_path = os.path.join(local_dir, 'opusenc.exe')
    if not os.path.exists(binary_path):
        for root, _dirs, files in os.walk(local_dir):
            if 'opusenc.exe' in files:
                binary_path = os.path.join(root, 'opusenc.exe')
                break

    if not os.path.exists(binary_path):
        logger.error("Opus binary not found after extraction")
        return None

    logger.info("Successfully set up opusenc: %s", binary_path)
    return binary_path
|
627
1037
|
def get_opus_version(opus_binary=None):
|
628
1038
|
"""
|
@@ -667,4 +1077,89 @@ def get_opus_version(opus_binary=None):
|
|
667
1077
|
|
668
1078
|
except Exception as e:
|
669
1079
|
logger.debug(f"Error getting opusenc version: {str(e)}")
|
670
|
-
return "opusenc from opus-tools XXX" # Fallback
|
1080
|
+
return "opusenc from opus-tools XXX" # Fallback
|
1081
|
+
|
1082
|
+
def check_python_package(package_name):
    """
    Check whether a Python package can be imported in the current environment.

    Args:
        package_name (str): Name of the package to check

    Returns:
        bool: True if the package is installed, False otherwise
    """
    logger.debug("Checking if Python package is installed: %s", package_name)
    try:
        # Importing is the most reliable installed-check
        __import__(package_name)
    except ImportError:
        logger.debug("Python package %s is not installed", package_name)
        return False
    logger.debug("Python package %s is installed", package_name)
    return True
|
1101
|
+
def install_python_package(package_name):
    """
    Attempt to install a Python package using pip.

    Args:
        package_name (str): Name of the package to install

    Returns:
        bool: True if installation was successful, False otherwise
    """
    logger.info("Attempting to install Python package: %s", package_name)
    try:
        import subprocess

        # Invoke pip through the running interpreter so the package is
        # installed into the environment TonieToolbox is executing from.
        pip_command = [sys.executable, "-m", "pip", "install", package_name]
        subprocess.check_call(pip_command)
        logger.info("Successfully installed Python package: %s", package_name)
        return True
    except Exception as exc:
        logger.error("Failed to install Python package %s: %s", package_name, str(exc))
        return False
|
1123
|
+
def ensure_mutagen(auto_install=True):
    """
    Ensure that the Mutagen library is available, installing it if necessary and allowed.

    Args:
        auto_install (bool): Whether to automatically install Mutagen if not found (defaults to True)

    Returns:
        bool: True if Mutagen is available, False otherwise
    """
    logger.debug("Checking if Mutagen is available")

    try:
        import mutagen
        logger.debug("Mutagen is already installed")
        return True
    except ImportError:
        logger.debug("Mutagen is not installed")

    # Not importable -- bail out unless we are allowed to install it.
    if not auto_install:
        logger.warning("Mutagen is not installed and --auto-download is not used.")
        return False

    logger.info("Auto-install enabled, attempting to install Mutagen")
    if not install_python_package('mutagen'):
        logger.error("Failed to install Mutagen")
        return False

    # Verify the freshly installed package is actually importable.
    try:
        import mutagen
        logger.info("Successfully installed and imported Mutagen")
        return True
    except ImportError:
        logger.error("Mutagen was installed but could not be imported")
        return False
|
1158
|
+
def is_mutagen_available():
    """
    Check if the Mutagen library is available.

    Returns:
        bool: True if Mutagen is available, False otherwise
    """
    # Delegate to the generic package check; no install attempt is made here.
    available = check_python_package('mutagen')
    return available