TonieToolbox 0.6.0a2__py3-none-any.whl → 0.6.0a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -9,12 +9,17 @@ import os
  import sys
  import platform
  import subprocess
+ import requests
+ from requests.adapters import HTTPAdapter
+ from urllib3.util.retry import Retry
  import shutil
  import zipfile
  import tarfile
- import urllib.request
  import time
- from pathlib import Path
+ import hashlib
+ import tempfile
+ import concurrent.futures
+ from tqdm.auto import tqdm

  from .logger import get_logger
  logger = get_logger('dependency_manager')
@@ -27,15 +32,21 @@ DEPENDENCIES = {
  'windows': {
  'url': 'https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip',
  'bin_path': 'bin/ffmpeg.exe',
- 'extract_dir': 'ffmpeg'
+ 'extract_dir': 'ffmpeg',
+ 'mirrors': [
+ ''
+ ]
  },
  'linux': {
  'url': 'https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-linux64-gpl.tar.xz',
  'bin_path': 'ffmpeg',
- 'extract_dir': 'ffmpeg'
+ 'extract_dir': 'ffmpeg',
+ 'mirrors': [
+ ''
+ ]
  },
  'darwin': {
- 'url': 'https://evermeet.cx/ffmpeg/getrelease/ffmpeg/zip',
+ 'url': 'https://evermeet.cx/ffmpeg/get/zip',
  'bin_path': 'ffmpeg',
  'extract_dir': 'ffmpeg'
  }
@@ -44,7 +55,10 @@ DEPENDENCIES = {
  'windows': {
  'url': 'https://archive.mozilla.org/pub/opus/win32/opus-tools-0.2-opus-1.3.zip',
  'bin_path': 'opusenc.exe',
- 'extract_dir': 'opusenc'
+ 'extract_dir': 'opusenc',
+ 'mirrors': [
+ ''
+ ]
  },
  'linux': {
  'package': 'opus-tools'
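The new 'mirrors' lists above ship as empty-string placeholders in this release and are consumed by download_with_mirrors(), introduced further down in this diff. A minimal caller-side sketch, assuming the module is importable as TonieToolbox.dependency_manager, that the top-level keys are 'ffmpeg' and 'opusenc' (as ensure_dependency() below suggests), and that a real mirror URL would replace the placeholder:

    # Sketch only: resolve the ffmpeg entry for the current platform and hand its
    # primary URL plus mirror list to the mirror-aware downloader. The mirror URL
    # implied here is hypothetical, not one shipped by the package.
    import os
    from TonieToolbox.dependency_manager import (
        DEPENDENCIES, get_system, get_user_data_dir, download_with_mirrors)

    entry = DEPENDENCIES['ffmpeg'][get_system()]          # e.g. the 'windows' block above
    archive = os.path.join(get_user_data_dir(), 'libs', 'ffmpeg', 'ffmpeg.zip')
    mirrors = [u for u in entry.get('mirrors', []) if u]  # drop the '' placeholder
    ok = download_with_mirrors(entry['url'], archive, mirrors)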
@@ -67,56 +81,324 @@ def get_system():

  def get_user_data_dir():
  """Get the user data directory for storing downloaded dependencies."""
- app_dir = LIBS_DIR
+ app_dir = CACHE_DIR
  logger.debug("Using application data directory: %s", app_dir)

  os.makedirs(app_dir, exist_ok=True)
  return app_dir

- def download_file(url, destination):
+ def create_session():
+ """
+ Create a requests session with retry capabilities.
+
+ Returns:
+ requests.Session: Configured session with retries
+ """
+ session = requests.Session()
+ retry_strategy = Retry(
+ total=3,
+ backoff_factor=1,
+ status_forcelist=[429, 500, 502, 503, 504],
+ allowed_methods=["HEAD", "GET", "OPTIONS"]
+ )
+ adapter = HTTPAdapter(max_retries=retry_strategy, pool_connections=10, pool_maxsize=10)
+ session.mount("http://", adapter)
+ session.mount("https://", adapter)
+ return session
+
+ def configure_tqdm():
+ """
+ Configure tqdm to ensure it displays properly in various environments.
+ """
+ # Check if we're in a notebook environment or standard terminal
+ is_notebook = 'ipykernel' in sys.modules
+
+ # Set global defaults for tqdm
+ tqdm.monitor_interval = 0 # Prevent monitor thread issues
+
+ # Return common kwargs for consistency
+ return {
+ 'file': sys.stdout,
+ 'leave': True,
+ 'dynamic_ncols': True,
+ 'mininterval': 0.5,
+ 'smoothing': 0.2,
+ 'ncols': 100 if not is_notebook else None,
+ 'disable': False
+ }
+
+ def download_file(url, destination, chunk_size=1024*1024, timeout=30, use_tqdm=True):
  """
- Download a file from a URL to the specified destination.
+ Download a file from a URL to the specified destination using optimized methods.

  Args:
  url (str): The URL of the file to download
  destination (str): The path to save the file to
+ chunk_size (int): Size of chunks to download (default: 1MB)
+ timeout (int): Connection timeout in seconds (default: 30s)
+ use_tqdm (bool): Whether to display a progress bar (default: True)

  Returns:
  bool: True if download was successful, False otherwise
  """
  try:
  logger.info("Downloading %s to %s", url, destination)
- headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.0'}
- req = urllib.request.Request(url, headers=headers)
+ headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.1'}

- with urllib.request.urlopen(req) as response, open(destination, 'wb') as out_file:
- file_size = int(response.info().get('Content-Length', 0))
- downloaded = 0
- block_size = 8192
-
- logger.debug("File size: %d bytes", file_size)
-
- while True:
- buffer = response.read(block_size)
- if not buffer:
- break
-
- downloaded += len(buffer)
- out_file.write(buffer)
+ # Create a directory for the destination file if it doesn't exist
+ os.makedirs(os.path.dirname(os.path.abspath(destination)), exist_ok=True)
+
+ # Use a session for connection pooling and retries
+ session = create_session()
+
+ # Start with a HEAD request to get the file size before downloading
+ head_response = session.head(url, headers=headers, timeout=timeout)
+ head_response.raise_for_status()
+ file_size = int(head_response.headers.get('Content-Length', 0))
+ logger.debug("File size: %d bytes", file_size)
+
+ # Now start the download
+ response = session.get(url, headers=headers, stream=True, timeout=timeout)
+ response.raise_for_status() # Raise exception for 4XX/5XX status codes
+ # Set up the progress bar
+ desc = os.path.basename(destination)
+ if len(desc) > 25:
+ desc = desc[:22] + "..."
+
+ with open(destination, 'wb') as out_file:
+ if use_tqdm and file_size > 0:
+ # Force tqdm to output to console
+ pbar = tqdm(
+ total=file_size,
+ unit='B',
+ unit_scale=True,
+ desc=desc,
+ **configure_tqdm()
+ )

- if file_size > 0:
- percent = downloaded * 100 / file_size
- logger.debug("Download progress: %.1f%%", percent)
+ for chunk in response.iter_content(chunk_size=chunk_size):
+ if not chunk:
+ continue
+ out_file.write(chunk)
+ pbar.update(len(chunk))
+ pbar.close()
+ # Print an empty line after progress is done
+ print("")
+ else:
+ # Fallback if no file size or tqdm is disabled
+ downloaded = 0
+ for chunk in response.iter_content(chunk_size=chunk_size):
+ if not chunk:
+ continue
+ downloaded += len(chunk)
+ out_file.write(chunk)
+ if file_size > 0:
+ percent = downloaded * 100 / file_size
+ logger.debug("Download progress: %.1f%%", percent)

  logger.info("Download completed successfully")
  return True
- except Exception as e:
+ except requests.exceptions.SSLError as e:
+ logger.error("Failed to download %s: SSL Error: %s", url, e)
+ # On macOS, provide more helpful error message for SSL certificate issues
+ if platform.system() == 'Darwin':
+ logger.error("SSL certificate verification failed on macOS. This is a known issue.")
+ logger.error("You can solve this by running: /Applications/Python 3.x/Install Certificates.command")
+ logger.error("Or by using the --auto-download flag which will bypass certificate verification.")
+ return False
+ except requests.exceptions.RequestException as e:
  logger.error("Failed to download %s: %s", url, e)
  return False
+ except Exception as e:
+ logger.error("Unexpected error downloading %s: %s", url, e)
+ return False
+
+ def download_file_multipart(url, destination, num_parts=4, chunk_size=1024*1024, timeout=30):
+ """
+ Download a file in multiple parts concurrently for better performance.
+
+ Args:
+ url (str): The URL of the file to download
+ destination (str): The path to save the file to
+ num_parts (int): Number of parts to download concurrently
+ chunk_size (int): Size of chunks to download (default: 1MB)
+ timeout (int): Connection timeout in seconds (default: 30s)
+
+ Returns:
+ bool: True if download was successful, False otherwise
+ """
+ try:
+ logger.info("Starting multi-part download of %s with %d parts", url, num_parts)
+ headers = {'User-Agent': 'TonieToolbox-dependency-downloader/1.1'}
+
+ session = create_session()
+ response = session.head(url, headers=headers, timeout=timeout)
+ response.raise_for_status()
+
+ file_size = int(response.headers.get('Content-Length', 0))
+ if file_size <= 0:
+ logger.warning("Multi-part download requested but Content-Length not available, falling back to regular download")
+ return download_file(url, destination, chunk_size, timeout)
+
+ # If file size is too small for multipart, fallback to regular download
+ if file_size < num_parts * 1024 * 1024 * 5: # Less than 5MB per part
+ logger.debug("File size too small for efficient multi-part download, using regular download")
+ return download_file(url, destination, chunk_size, timeout)
+
+ # Calculate part sizes
+ part_size = file_size // num_parts
+ ranges = [(i * part_size, min((i + 1) * part_size - 1, file_size - 1))
+ for i in range(num_parts)]
+ if ranges[-1][1] < file_size - 1:
+ ranges[-1] = (ranges[-1][0], file_size - 1)
+
+ # Create temporary directory for parts
+ temp_dir = tempfile.mkdtemp(prefix="tonietoolbox_download_")
+ part_files = [os.path.join(temp_dir, f"part_{i}") for i in range(num_parts)]
+
+ # Define the download function for each part
+ def download_part(part_idx):
+ start, end = ranges[part_idx]
+ part_path = part_files[part_idx]
+
+ headers_with_range = headers.copy()
+ headers_with_range['Range'] = f'bytes={start}-{end}'
+
+ part_size = end - start + 1
+
+ try:
+ response = session.get(url, headers=headers_with_range, stream=True, timeout=timeout)
+ response.raise_for_status()
+ # Set up progress bar for this part
+ desc = f"Part {part_idx+1}/{num_parts}"
+ with tqdm(
+ total=part_size,
+ unit='B',
+ unit_scale=True,
+ desc=desc,
+ position=part_idx,
+ **configure_tqdm()
+ ) as pbar:
+ with open(part_path, 'wb') as f:
+ for chunk in response.iter_content(chunk_size=chunk_size):
+ if not chunk:
+ continue
+ f.write(chunk)
+ pbar.update(len(chunk))
+
+ return True
+ except Exception as e:
+ logger.error("Error downloading part %d: %s", part_idx, str(e))
+ return False
+
+ # Download all parts in parallel
+ logger.info("Starting concurrent download of %d parts...", num_parts)
+ with concurrent.futures.ThreadPoolExecutor(max_workers=num_parts) as executor:
+ futures = [executor.submit(download_part, i) for i in range(num_parts)]
+ all_successful = all(future.result() for future in concurrent.futures.as_completed(futures))
+
+ if not all_successful:
+ logger.error("One or more parts failed to download")
+
+ # Clean up
+ for part_file in part_files:
+ if os.path.exists(part_file):
+ os.remove(part_file)
+ os.rmdir(temp_dir)
+
+ return False
+
+ # Combine all parts into the final file
+ logger.info("All parts downloaded successfully, combining into final file")
+ with open(destination, 'wb') as outfile:
+ for part_file in part_files:
+ with open(part_file, 'rb') as infile:
+ shutil.copyfileobj(infile, outfile)
+ os.remove(part_file)
+
+ # Clean up temp directory
+ os.rmdir(temp_dir)
+
+ logger.info("Multi-part download completed successfully")
+ return True
+
+ except Exception as e:
+ logger.error("Failed multi-part download: %s", str(e))
+ # Fall back to regular download
+ logger.info("Falling back to regular download method")
+ return download_file(url, destination, chunk_size, timeout)
+
+ def smart_download(url, destination, use_multipart=True, min_size_for_multipart=20*1024*1024, num_parts=4, use_tqdm=True):
+ """
+ Smart download function that selects the best download method based on file size.
+
+ Args:
+ url (str): The URL of the file to download
+ destination (str): The path to save the file to
+ use_multipart (bool): Whether to allow multi-part downloads (default: True)
+ min_size_for_multipart (int): Minimum file size in bytes to use multi-part download (default: 20MB)
+ num_parts (int): Number of parts for multi-part download (default: 4)
+ use_tqdm (bool): Whether to display progress bars (default: True)
+
+ Returns:
+ bool: True if download was successful, False otherwise
+ """
+ try:
+ # Check if multipart is enabled and get file size
+ if not use_multipart:
+ return download_file(url, destination, use_tqdm=use_tqdm)
+
+ # Create session and check file size
+ session = create_session()
+ response = session.head(url, timeout=30)
+ file_size = int(response.headers.get('Content-Length', 0))
+
+ if file_size >= min_size_for_multipart and use_multipart:
+ logger.info("File size (%d bytes) is suitable for multi-part download", file_size)
+ print(f"Starting multi-part download of {os.path.basename(destination)} ({file_size/1024/1024:.1f} MB)")
+ return download_file_multipart(url, destination, num_parts=num_parts)
+ else:
+ logger.debug("Using standard download method (file size: %d bytes)", file_size)
+ return download_file(url, destination, use_tqdm=use_tqdm)
+ except Exception as e:
+ logger.warning("Error determining download method: %s, falling back to standard download", e)
+ return download_file(url, destination, use_tqdm=use_tqdm)
+
+ def download_with_mirrors(url, destination, mirrors=None):
+ """
+ Try downloading a file from the primary URL and fall back to mirrors if needed.
+
+ Args:
+ url (str): Primary URL to download from
+ destination (str): Path to save the file to
+ mirrors (list): List of alternative URLs to try if primary fails
+
+ Returns:
+ bool: True if download was successful from any source, False otherwise
+ """
+ logger.debug("Starting download with primary URL and %s mirrors",
+ "0" if mirrors is None else len(mirrors))
+
+ # Try the primary URL first
+ if smart_download(url, destination):
+ logger.debug("Download successful from primary URL")
+ return True
+
+ # If primary URL fails and we have mirrors, try them
+ if mirrors:
+ for i, mirror_url in enumerate(mirrors, 1):
+ logger.info("Primary download failed, trying mirror %d of %d",
+ i, len(mirrors))
+ if smart_download(mirror_url, destination):
+ logger.info("Download successful from mirror %d", i)
+ return True
+
+ logger.error("All download attempts failed")
+ return False

  def extract_archive(archive_path, extract_dir):
  """
- Extract an archive file to the specified directory.
+ Extract an archive file to the specified directory using optimized methods.

  Args:
  archive_path (str): Path to the archive file
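The next hunk continues inside extract_archive(). Before it, a minimal usage sketch of the download helpers added above; the URLs and paths are illustrative placeholders and the import path TonieToolbox.dependency_manager is assumed:

    # Sketch only: download_with_mirrors() tries the primary URL via smart_download(),
    # which issues a HEAD request and hands files of min_size_for_multipart (20 MB by
    # default) or larger to download_file_multipart(); smaller files are streamed by
    # download_file() with a tqdm progress bar.
    from TonieToolbox.dependency_manager import download_with_mirrors, smart_download

    primary = "https://example.org/tools/ffmpeg-release.zip"      # placeholder URL
    mirrors = ["https://mirror.example.net/ffmpeg-release.zip"]   # placeholder mirror

    if download_with_mirrors(primary, "/tmp/ffmpeg.zip", mirrors):
        print("archive saved")

    # smart_download() can also be called directly, e.g. to force single-stream mode:
    smart_download(primary, "/tmp/ffmpeg.zip", use_multipart=False)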
@@ -129,36 +411,66 @@ def extract_archive(archive_path, extract_dir):
  logger.info("Extracting %s to %s", archive_path, extract_dir)
  os.makedirs(extract_dir, exist_ok=True)

- # Extract to a temporary subdirectory first
- temp_extract_dir = os.path.join(extract_dir, "_temp_extract")
- os.makedirs(temp_extract_dir, exist_ok=True)
+ # Extract to a secure temporary directory
+ temp_extract_dir = tempfile.mkdtemp(prefix="tonietoolbox_extract_")
+ logger.debug("Using temporary extraction directory: %s", temp_extract_dir)

  if archive_path.endswith('.zip'):
  logger.debug("Extracting ZIP archive")
- with zipfile.ZipFile(archive_path, 'r') as zip_ref:
- zip_ref.extractall(temp_extract_dir)
- files_extracted = zip_ref.namelist()
- logger.trace("Extracted files: %s", files_extracted)
+ try:
+ # Use a with statement for proper cleanup
+ with zipfile.ZipFile(archive_path, 'r') as zip_ref:
+ # Get the list of files for informational purposes
+ files_extracted = zip_ref.namelist()
+ total_size = sum(info.file_size for info in zip_ref.infolist())
+ logger.debug("ZIP contains %d files, total size: %d bytes",
+ len(files_extracted), total_size)
+
+ # Extract with progress indication for large archives
+ if total_size > 50*1024*1024: # 50 MB
+ # Use configure_tqdm() for consistent parameters
+ tqdm_params = configure_tqdm()
+ with tqdm(
+ total=total_size,
+ unit='B',
+ unit_scale=True,
+ desc="Extracting ZIP",
+ **tqdm_params
+ ) as pbar:
+ for file in zip_ref.infolist():
+ zip_ref.extract(file, temp_extract_dir)
+ pbar.update(file.file_size)
+ # Print empty line after progress completion
+ print("")
+ else:
+ zip_ref.extractall(temp_extract_dir)
+ except zipfile.BadZipFile as e:
+ logger.error("Bad ZIP file: %s", str(e))
+ return False
+
  elif archive_path.endswith(('.tar.gz', '.tgz')):
  logger.debug("Extracting TAR.GZ archive")
  with tarfile.open(archive_path, 'r:gz') as tar_ref:
- tar_ref.extractall(temp_extract_dir)
  files_extracted = tar_ref.getnames()
- logger.trace("Extracted files: %s", files_extracted)
+ logger.debug("TAR.GZ contains %d files", len(files_extracted))
+ tar_ref.extractall(path=temp_extract_dir)
+
  elif archive_path.endswith(('.tar.xz', '.txz')):
  logger.debug("Extracting TAR.XZ archive")
  with tarfile.open(archive_path, 'r:xz') as tar_ref:
- tar_ref.extractall(temp_extract_dir)
  files_extracted = tar_ref.getnames()
- logger.trace("Extracted files: %s", files_extracted)
+ logger.debug("TAR.XZ contains %d files", len(files_extracted))
+ tar_ref.extractall(path=temp_extract_dir)
+
  elif archive_path.endswith('.tar'):
  logger.debug("Extracting TAR archive")
  with tarfile.open(archive_path, 'r') as tar_ref:
- tar_ref.extractall(temp_extract_dir)
  files_extracted = tar_ref.getnames()
- logger.trace("Extracted files: %s", files_extracted)
+ logger.debug("TAR contains %d files", len(files_extracted))
+ tar_ref.extractall(path=temp_extract_dir)
  else:
  logger.error("Unsupported archive format: %s", archive_path)
+ shutil.rmtree(temp_extract_dir, ignore_errors=True)
  return False

  logger.info("Archive extracted successfully")
@@ -230,7 +542,7 @@ def extract_archive(archive_path, extract_dir):

  # Clean up the temporary extraction directory
  try:
- shutil.rmtree(temp_extract_dir)
+ shutil.rmtree(temp_extract_dir, ignore_errors=True)
  logger.debug("Removed temporary extraction directory")
  except Exception as e:
  logger.warning("Failed to remove temporary extraction directory: %s", e)
@@ -329,132 +641,6 @@ def check_binary_in_path(binary_name):

  return None

- def install_package(package_name):
- """
- Attempt to install a package using the system's package manager.
-
- Args:
- package_name (str): Name of the package to install
-
- Returns:
- bool: True if installation was successful, False otherwise
- """
- system = get_system()
- logger.info("Attempting to install %s on %s", package_name, system)
-
- try:
- if system == 'linux':
- # Try apt-get (Debian/Ubuntu)
- if shutil.which('apt-get'):
- logger.info("Installing %s using apt-get", package_name)
- subprocess.run(['sudo', 'apt-get', 'update'], check=True)
- subprocess.run(['sudo', 'apt-get', 'install', '-y', package_name], check=True)
- return True
- # Try yum (CentOS/RHEL)
- elif shutil.which('yum'):
- logger.info("Installing %s using yum", package_name)
- subprocess.run(['sudo', 'yum', 'install', '-y', package_name], check=True)
- return True
-
- elif system == 'darwin':
- # Try Homebrew
- if shutil.which('brew'):
- logger.info("Installing %s using homebrew", package_name)
- subprocess.run(['brew', 'install', package_name], check=True)
- return True
-
- logger.warning("Could not automatically install %s. Please install it manually.", package_name)
- return False
- except subprocess.CalledProcessError as e:
- logger.error("Failed to install %s: %s", package_name, e)
- return False
-
- def install_python_package(package_name):
- """
- Attempt to install a Python package using pip.
-
- Args:
- package_name (str): Name of the package to install
-
- Returns:
- bool: True if installation was successful, False otherwise
- """
- logger.info("Attempting to install Python package: %s", package_name)
- try:
- import subprocess
- import sys
-
- # Try to install the package using pip
- subprocess.check_call([sys.executable, "-m", "pip", "install", package_name])
- logger.info("Successfully installed Python package: %s", package_name)
- return True
- except Exception as e:
- logger.error("Failed to install Python package %s: %s", package_name, str(e))
- return False
-
- def check_python_package(package_name):
- """
- Check if a Python package is installed.
-
- Args:
- package_name (str): Name of the package to check
-
- Returns:
- bool: True if the package is installed, False otherwise
- """
- logger.debug("Checking if Python package is installed: %s", package_name)
- try:
- __import__(package_name)
- logger.debug("Python package %s is installed", package_name)
- return True
- except ImportError:
- logger.debug("Python package %s is not installed", package_name)
- return False
-
- def ensure_mutagen(auto_install=True):
- """
- Ensure that the Mutagen library is available, installing it if necessary and allowed.
-
- Args:
- auto_install (bool): Whether to automatically install Mutagen if not found (defaults to True)
-
- Returns:
- bool: True if Mutagen is available, False otherwise
- """
- logger.debug("Checking if Mutagen is available")
-
- try:
- import mutagen
- logger.debug("Mutagen is already installed")
- return True
- except ImportError:
- logger.debug("Mutagen is not installed")
-
- if auto_install:
- logger.info("Auto-install enabled, attempting to install Mutagen")
- if install_python_package('mutagen'):
- try:
- import mutagen
- logger.info("Successfully installed and imported Mutagen")
- return True
- except ImportError:
- logger.error("Mutagen was installed but could not be imported")
- else:
- logger.error("Failed to install Mutagen")
- else:
- logger.warning("Mutagen is not installed and --auto-download is not used.")
-
- return False
-
- def is_mutagen_available():
- """
- Check if the Mutagen library is available.
-
- Returns:
- bool: True if Mutagen is available, False otherwise
- """
- return check_python_package('mutagen')
-
  def ensure_dependency(dependency_name, auto_download=False):
  """
  Ensure that a dependency is available, downloading it if necessary.
@@ -486,7 +672,7 @@ def ensure_dependency(dependency_name, auto_download=False):
  bin_name = dependency_name if dependency_name != 'opusenc' else 'opusenc'

  # Create a specific folder for this dependency
- dependency_dir = os.path.join(user_data_dir, dependency_name)
+ dependency_dir = os.path.join(user_data_dir, 'libs', dependency_name)

  # First priority: Check if we already downloaded and extracted it previously
  # When auto_download is True, we'll skip this check and download fresh versions
@@ -577,6 +763,7 @@ def ensure_dependency(dependency_name, auto_download=False):

  # Set up download paths
  download_url = dependency_info['url']
+ mirrors = dependency_info.get('mirrors', [])

  # Create dependency-specific directory
  os.makedirs(dependency_dir, exist_ok=True)
@@ -586,7 +773,10 @@ def ensure_dependency(dependency_name, auto_download=False):
  archive_path = os.path.join(dependency_dir, f"{dependency_name}{archive_ext}")
  logger.debug("Using archive path: %s", archive_path)

- if download_file(download_url, archive_path):
+ # Use our improved download function with mirrors and tqdm progress bar
+ print(f"Downloading {dependency_name}...")
+ if download_with_mirrors(download_url, archive_path, mirrors):
+ print(f"Extracting {dependency_name}...")
  if extract_archive(archive_path, dependency_dir):
  binary = find_binary_in_extracted_dir(dependency_dir, binary_path)
  if binary:
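With the hunk above, ensure_dependency() now reads the mirrors list from DEPENDENCIES and routes the archive through download_with_mirrors() before extraction. A minimal caller-side sketch, with the same import-path assumption as above:

    # Sketch only: resolve a dependency, letting the module download and unpack it
    # into the per-user cache (under .../libs/<name>) when auto_download is enabled.
    from TonieToolbox.dependency_manager import ensure_dependency

    ffmpeg_path = ensure_dependency('ffmpeg', auto_download=True)
    if ffmpeg_path is None:
        print("ffmpeg could not be located or downloaded")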
@@ -600,29 +790,248 @@ def ensure_dependency(dependency_name, auto_download=False):
  logger.error("Failed to set up %s", dependency_name)
  return None

- def get_ffmpeg_binary(auto_download=False):
+ def install_package(package_name):
  """
- Get the path to the FFmpeg binary, downloading it if necessary.
+ Attempt to install a package using the system's package manager.

  Args:
- auto_download (bool): Whether to automatically download or install if not found
+ package_name (str): Name of the package to install
+
+ Returns:
+ bool: True if installation was successful, False otherwise
+ """
+ system = get_system()
+ logger.info("Attempting to install %s on %s", package_name, system)

+ try:
+ if system == 'linux':
+ # Try apt-get (Debian/Ubuntu)
+ if shutil.which('apt-get'):
+ logger.info("Installing %s using apt-get", package_name)
+ subprocess.run(['sudo', 'apt-get', 'update'], check=True)
+ subprocess.run(['sudo', 'apt-get', 'install', '-y', package_name], check=True)
+ return True
+ # Try yum (CentOS/RHEL)
+ elif shutil.which('yum'):
+ logger.info("Installing %s using yum", package_name)
+ subprocess.run(['sudo', 'yum', 'install', '-y', package_name], check=True)
+ return True
+
+ elif system == 'darwin':
+ # Try Homebrew
+ if shutil.which('brew'):
+ logger.info("Installing %s using homebrew", package_name)
+ subprocess.run(['brew', 'install', package_name], check=True)
+ return True
+
+ logger.warning("Could not automatically install %s. Please install it manually.", package_name)
+ return False
+ except subprocess.CalledProcessError as e:
+ logger.error("Failed to install %s: %s", package_name, e)
+ return False
+
+ def get_ffmpeg_binary(auto_download=False):
+ """
+ Get the path to the FFmpeg binary, downloading it if necessary and allowed.
+
+ Args:
+ auto_download (bool): Whether to automatically download FFmpeg if not found (defaults to False)
+
  Returns:
- str: Path to the FFmpeg binary if available, None otherwise
+ str: Path to the FFmpeg binary, or None if not available
  """
- return ensure_dependency('ffmpeg', auto_download)
+ logger.debug("Getting FFmpeg binary")
+
+ # Define the expected binary path
+ local_dir = os.path.join(get_user_data_dir(), 'libs', 'ffmpeg')
+ if sys.platform == 'win32':
+ binary_path = os.path.join(local_dir, 'ffmpeg.exe')
+ else:
+ binary_path = os.path.join(local_dir, 'ffmpeg')
+
+ # Check if binary exists
+ if os.path.exists(binary_path) and os.path.isfile(binary_path):
+ logger.debug("FFmpeg binary found at %s", binary_path)
+ return binary_path
+
+ # Check if a system-wide FFmpeg is available
+ try:
+ if sys.platform == 'win32':
+ # On Windows, look for ffmpeg in PATH
+ from shutil import which
+ system_binary = which('ffmpeg')
+ if system_binary:
+ logger.debug("System-wide FFmpeg found at %s", system_binary)
+ return system_binary
+ else:
+ # On Unix-like systems, use 'which' command
+ system_binary = subprocess.check_output(['which', 'ffmpeg']).decode('utf-8').strip()
+ if system_binary:
+ logger.debug("System-wide FFmpeg found at %s", system_binary)
+ return system_binary
+ except (subprocess.SubprocessError, FileNotFoundError):
+ logger.debug("No system-wide FFmpeg found")
+
+ # Download if allowed
+ if auto_download:
+ logger.info("Auto-download enabled, forcing download/installation of ffmpeg")
+ print("Downloading ffmpeg...")
+
+ # Create directory if it doesn't exist
+ os.makedirs(local_dir, exist_ok=True)
+
+ # Download FFmpeg based on platform
+ if sys.platform == 'win32':
+ url = "https://github.com/BtbN/FFmpeg-Builds/releases/download/latest/ffmpeg-master-latest-win64-gpl.zip"
+ archive_path = os.path.join(local_dir, "ffmpeg.zip")
+
+ # Download the file
+ logger.info("Downloading %s to %s", url, archive_path)
+ download_with_mirrors(url, archive_path)
+
+ # Extract the archive
+ print("Extracting ffmpeg...")
+ logger.info("Extracting %s to %s", archive_path, local_dir)
+ extract_archive(archive_path, local_dir)
+
+ # Find the binary in the extracted files
+ for root, dirs, files in os.walk(local_dir):
+ if 'ffmpeg.exe' in files:
+ binary_path = os.path.join(root, 'ffmpeg.exe')
+ break
+
+ # Verify the binary exists
+ if not os.path.exists(binary_path):
+ logger.error("FFmpeg binary not found after extraction")
+ return None
+
+ logger.info("Successfully set up ffmpeg: %s", binary_path)
+ return binary_path
+
+ elif sys.platform == 'darwin': # macOS
+ url = "https://evermeet.cx/ffmpeg/getrelease/ffmpeg/zip"
+ archive_path = os.path.join(local_dir, "ffmpeg.zip")
+
+ # Download and extract
+ download_with_mirrors(url, archive_path)
+ extract_archive(archive_path, local_dir)
+
+ # Make binary executable
+ binary_path = os.path.join(local_dir, "ffmpeg")
+ os.chmod(binary_path, 0o755)
+ logger.info("Successfully set up ffmpeg: %s", binary_path)
+ return binary_path
+
+ else: # Linux and others
+ url = "https://johnvansickle.com/ffmpeg/releases/ffmpeg-release-amd64-static.tar.xz"
+ archive_path = os.path.join(local_dir, "ffmpeg.tar.xz")
+
+ # Download and extract
+ download_with_mirrors(url, archive_path)
+ extract_archive(archive_path, local_dir)
+
+ # Find the binary in the extracted files
+ for root, dirs, files in os.walk(local_dir):
+ if 'ffmpeg' in files:
+ binary_path = os.path.join(root, 'ffmpeg')
+ os.chmod(binary_path, 0o755)
+ logger.info("Successfully set up ffmpeg: %s", binary_path)
+ return binary_path
+
+ logger.error("FFmpeg binary not found after extraction")
+ return None
+ else:
+ logger.warning("FFmpeg is not available and --auto-download is not used.")
+ return None

  def get_opus_binary(auto_download=False):
  """
- Get the path to the opusenc binary, downloading it if necessary.
+ Get the path to the Opus binary, downloading it if necessary and allowed.

  Args:
- auto_download (bool): Whether to automatically download or install if not found
-
+ auto_download (bool): Whether to automatically download Opus if not found (defaults to False)
+
  Returns:
- str: Path to the opusenc binary if available, None otherwise
+ str: Path to the Opus binary, or None if not available
  """
- return ensure_dependency('opusenc', auto_download)
+ logger.debug("Getting Opus binary")
+
+ # Define the expected binary path
+ local_dir = os.path.join(get_user_data_dir(), 'libs', 'opusenc')
+ if sys.platform == 'win32':
+ binary_path = os.path.join(local_dir, 'opusenc.exe')
+ else:
+ binary_path = os.path.join(local_dir, 'opusenc')
+
+ # Check if binary exists
+ if os.path.exists(binary_path) and os.path.isfile(binary_path):
+ logger.debug("Opus binary found at %s", binary_path)
+ return binary_path
+
+ # Check if a system-wide Opus is available
+ try:
+ if sys.platform == 'win32':
+ # On Windows, look for opusenc in PATH
+ from shutil import which
+ system_binary = which('opusenc')
+ if system_binary:
+ logger.debug("System-wide Opus found at %s", system_binary)
+ return system_binary
+ else:
+ # On Unix-like systems, use 'which' command
+ system_binary = subprocess.check_output(['which', 'opusenc']).decode('utf-8').strip()
+ if system_binary:
+ logger.debug("System-wide Opus found at %s", system_binary)
+ return system_binary
+ except (subprocess.SubprocessError, FileNotFoundError):
+ logger.debug("No system-wide Opus found")
+
+ # Download if allowed
+ if auto_download:
+ logger.info("Auto-download enabled, forcing download/installation of opusenc")
+ print("Downloading opusenc...")
+
+ # Create directory if it doesn't exist
+ os.makedirs(local_dir, exist_ok=True)
+
+ # Download Opus based on platform
+ if sys.platform == 'win32':
+ url = "https://archive.mozilla.org/pub/opus/win32/opus-tools-0.2-opus-1.3.zip"
+ archive_path = os.path.join(local_dir, "opusenc.zip")
+ else:
+ # For non-Windows, we'll need to compile from source or find precompiled binaries
+ logger.error("Automatic download of Opus for non-Windows platforms is not supported yet")
+ return None
+
+ # Download the file
+ logger.info("Downloading %s to %s", url, archive_path)
+ download_with_mirrors(url, archive_path)
+
+ # Extract the archive
+ print("Extracting opusenc...")
+ logger.info("Extracting %s to %s", archive_path, local_dir)
+ extract_archive(archive_path, local_dir)
+
+ # For Windows, the binary should now be in the directory
+ if sys.platform == 'win32':
+ binary_path = os.path.join(local_dir, 'opusenc.exe')
+ if not os.path.exists(binary_path):
+ # Try to find it in the extracted directory structure
+ for root, dirs, files in os.walk(local_dir):
+ if 'opusenc.exe' in files:
+ binary_path = os.path.join(root, 'opusenc.exe')
+ break
+
+ # Verify the binary exists
+ if not os.path.exists(binary_path):
+ logger.error("Opus binary not found after extraction")
+ return None
+
+ logger.info("Successfully set up opusenc: %s", binary_path)
+ return binary_path
+ else:
+ logger.warning("Opus is not available and --auto-download is not used.")
+ return None

  def get_opus_version(opus_binary=None):
  """
@@ -667,4 +1076,89 @@ def get_opus_version(opus_binary=None):

  except Exception as e:
  logger.debug(f"Error getting opusenc version: {str(e)}")
- return "opusenc from opus-tools XXX" # Fallback
+ return "opusenc from opus-tools XXX" # Fallback
+
+ def check_python_package(package_name):
+ """
+ Check if a Python package is installed.
+
+ Args:
+ package_name (str): Name of the package to check
+
+ Returns:
+ bool: True if the package is installed, False otherwise
+ """
+ logger.debug("Checking if Python package is installed: %s", package_name)
+ try:
+ __import__(package_name)
+ logger.debug("Python package %s is installed", package_name)
+ return True
+ except ImportError:
+ logger.debug("Python package %s is not installed", package_name)
+ return False
+
+ def install_python_package(package_name):
+ """
+ Attempt to install a Python package using pip.
+
+ Args:
+ package_name (str): Name of the package to install
+
+ Returns:
+ bool: True if installation was successful, False otherwise
+ """
+ logger.info("Attempting to install Python package: %s", package_name)
+ try:
+ import subprocess
+
+ # Try to install the package using pip
+ subprocess.check_call([sys.executable, "-m", "pip", "install", package_name])
+ logger.info("Successfully installed Python package: %s", package_name)
+ return True
+ except Exception as e:
+ logger.error("Failed to install Python package %s: %s", package_name, str(e))
+ return False
+
+ def ensure_mutagen(auto_install=True):
+ """
+ Ensure that the Mutagen library is available, installing it if necessary and allowed.
+
+ Args:
+ auto_install (bool): Whether to automatically install Mutagen if not found (defaults to True)
+
+ Returns:
+ bool: True if Mutagen is available, False otherwise
+ """
+ logger.debug("Checking if Mutagen is available")
+
+ try:
+ import mutagen
+ logger.debug("Mutagen is already installed")
+ return True
+ except ImportError:
+ logger.debug("Mutagen is not installed")
+
+ if auto_install:
+ logger.info("Auto-install enabled, attempting to install Mutagen")
+ if install_python_package('mutagen'):
+ try:
+ import mutagen
+ logger.info("Successfully installed and imported Mutagen")
+ return True
+ except ImportError:
+ logger.error("Mutagen was installed but could not be imported")
+ else:
+ logger.error("Failed to install Mutagen")
+ else:
+ logger.warning("Mutagen is not installed and --auto-download is not used.")
+
+ return False
+
+ def is_mutagen_available():
+ """
+ Check if the Mutagen library is available.
+
+ Returns:
+ bool: True if Mutagen is available, False otherwise
+ """
+ return check_python_package('mutagen')
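Taken together, the relocated helpers above form the module's public entry points. A minimal usage sketch, again assuming the module is importable as TonieToolbox.dependency_manager:

    # Sketch only: the high-level getters first check the local libs cache, then PATH,
    # and only download when auto_download=True; ensure_mutagen() pip-installs the
    # mutagen package on demand.
    from TonieToolbox.dependency_manager import (
        get_ffmpeg_binary, get_opus_binary, ensure_mutagen, is_mutagen_available)

    ffmpeg = get_ffmpeg_binary(auto_download=True)
    opusenc = get_opus_binary(auto_download=True)   # download path is Windows-only in this release

    if not is_mutagen_available():
        ensure_mutagen(auto_install=True)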