max-cli 0.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
max_cli/__init__.py ADDED
File without changes
@@ -0,0 +1,145 @@
1
import functools
import hashlib
import json
import time
from pathlib import Path
from typing import Any, Optional
6
+
7
+
8
class Cache:
    """Simple file-based cache with TTL (time-to-live) support.

    Each entry is a small JSON file named by the MD5 hex digest of its key,
    containing ``{"value": ..., "expires": <unix timestamp>}``. Values must
    therefore be JSON-serializable.
    """

    def __init__(self, cache_dir: Optional[Path] = None, ttl: int = 3600):
        """Initialize cache.

        Args:
            cache_dir: Directory for cache files. Defaults to ~/.max_cli/cache
            ttl: Time-to-live in seconds for cached items. Default 1 hour.
        """
        self.cache_dir = cache_dir or Path.home() / ".max_cli" / "cache"
        self.cache_dir.mkdir(parents=True, exist_ok=True)
        self.ttl = ttl

    def get(self, key: str) -> Optional[Any]:
        """Retrieve value from cache if not expired.

        Args:
            key: Cache key

        Returns:
            Cached value, or None if not found/expired/corrupt. Note that a
            cached value of None is indistinguishable from a miss.
        """
        cache_file = self._path(key)
        # EAFP: read directly instead of exists()-then-read, which avoided
        # neither a race with concurrent deletion nor an unreadable file.
        try:
            data = json.loads(cache_file.read_text())
        except (OSError, json.JSONDecodeError):
            # Missing, unreadable, or corrupt entry -> treat as a miss.
            return None

        if data.get("expires", 0) < time.time():
            # Expired: best-effort removal; another process may have
            # already deleted the file, so tolerate OSError here.
            try:
                cache_file.unlink()
            except OSError:
                pass
            return None
        return data.get("value")

    def set(self, key: str, value: Any, ttl: Optional[int] = None) -> None:
        """Store value in cache.

        Args:
            key: Cache key
            value: Value to cache (must be JSON-serializable)
            ttl: Optional TTL override in seconds
        """
        cache_file = self._path(key)
        # Absolute expiry timestamp; per-call ttl overrides the instance ttl.
        expires = time.time() + (ttl if ttl is not None else self.ttl)
        data = {"value": value, "expires": expires}
        cache_file.write_text(json.dumps(data))

    def delete(self, key: str) -> bool:
        """Delete a cached item.

        Args:
            key: Cache key

        Returns:
            True if deleted, False if not found
        """
        # EAFP instead of exists()-then-unlink: avoids a race where the file
        # disappears between the check and the removal.
        try:
            self._path(key).unlink()
            return True
        except FileNotFoundError:
            return False

    def clear(self) -> int:
        """Clear all cache entries.

        Returns:
            Number of entries cleared
        """
        removed = 0
        for f in self.cache_dir.glob("*.json"):
            try:
                f.unlink()
            except FileNotFoundError:
                continue  # deleted concurrently; nothing to count
            removed += 1
        return removed

    def clear_expired(self) -> int:
        """Clear only expired (or corrupt/unreadable) cache entries.

        Returns:
            Number of entries cleared
        """
        removed = 0
        now = time.time()
        for f in self.cache_dir.glob("*.json"):
            try:
                data = json.loads(f.read_text())
            except (OSError, json.JSONDecodeError):
                # Corrupt or unreadable entries are purged as well.
                try:
                    f.unlink()
                except FileNotFoundError:
                    continue
                removed += 1
                continue
            if data.get("expires", 0) < now:
                try:
                    f.unlink()
                except FileNotFoundError:
                    continue
                removed += 1
        return removed

    def _path(self, key: str) -> Path:
        """Return the cache file path for *key*."""
        return self.cache_dir / f"{self._hash(key)}.json"

    def _hash(self, key: str) -> str:
        """Generate hash for cache key (non-cryptographic use of MD5)."""
        return hashlib.md5(key.encode()).hexdigest()
106
+
107
+
108
# Lazily created process-wide cache; access it via get_default_cache().
_default_cache: Optional[Cache] = None
109
+
110
+
111
def get_default_cache() -> Cache:
    """Return the process-wide Cache, creating it on first use."""
    global _default_cache
    instance = _default_cache
    if instance is None:
        instance = Cache()
        _default_cache = instance
    return instance
117
+
118
+
119
def cached(key_prefix: str, ttl: Optional[int] = None):
    """Decorator for caching function results in the default cache.

    The cache key is built from ``key_prefix``, the positional arguments
    (``Path`` arguments are skipped, as they are machine-specific), and the
    sorted keyword arguments.

    Note: the underlying cache reports a miss as ``None``, so a function
    result of ``None`` is never served from cache and is recomputed.

    Args:
        key_prefix: Prefix for cache key
        ttl: Optional TTL override in seconds
    """

    def decorator(func):
        # functools.wraps preserves the wrapped function's name, docstring
        # and signature metadata; without it every cached function showed
        # up as "wrapper" in tracebacks and introspection.
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            cache = get_default_cache()
            key_parts = [key_prefix]
            key_parts.extend(str(arg) for arg in args if not isinstance(arg, Path))
            key_parts.extend(f"{k}={v}" for k, v in sorted(kwargs.items()))
            cache_key = ":".join(key_parts)

            result = cache.get(cache_key)
            if result is not None:
                return result

            result = func(*args, **kwargs)
            cache.set(cache_key, result, ttl)
            return result

        return wrapper

    return decorator
@@ -0,0 +1,83 @@
1
+ from concurrent.futures import ThreadPoolExecutor, as_completed
2
+ from typing import Any, Callable, Dict, List, Optional, TypeVar
3
+
4
+ from rich.progress import Progress, TaskID
5
+
6
# Generic input (T) and result (R) types for the batch helpers below.
T = TypeVar("T")
R = TypeVar("R")
8
+
9
+
10
def process_batch_parallel(
    items: List[T],
    processor: Callable[[T], R],
    max_workers: int = 4,
    progress: Optional[Progress] = None,
    task_id: Optional[TaskID] = None,
) -> List[Dict[str, Any]]:
    """Process items in parallel with optional progress tracking.

    Results are appended in completion order, not submission order.

    Args:
        items: List of items to process
        processor: Function to apply to each item
        max_workers: Maximum number of parallel workers
        progress: Optional Rich Progress instance
        task_id: Optional Rich TaskID for progress tracking

    Returns:
        List of results, including error entries for failed items
    """

    def _record(outcome: Any, source: T) -> Dict[str, Any]:
        # dict results pass through untouched; everything else is wrapped.
        if isinstance(outcome, dict):
            return outcome
        return {"result": outcome, "item": source}

    collected: List[Dict[str, Any]] = []

    with ThreadPoolExecutor(max_workers=max_workers) as pool:
        pending = {pool.submit(processor, item): item for item in items}

        for done in as_completed(pending):
            source = pending[done]
            try:
                collected.append(_record(done.result(), source))
            except Exception as e:
                collected.append({"error": str(e), "item": source, "success": False})

            if progress is not None and task_id is not None:
                progress.advance(task_id)

    return collected
49
+
50
+
51
def process_batch_sequential(
    items: List[T],
    processor: Callable[[T], R],
    progress: Optional[Progress] = None,
    task_id: Optional[TaskID] = None,
) -> List[Dict[str, Any]]:
    """Process items one at a time with optional progress tracking.

    Args:
        items: List of items to process
        processor: Function to apply to each item
        progress: Optional Rich Progress instance
        task_id: Optional Rich TaskID for progress tracking

    Returns:
        List of results, including error entries for failed items
    """
    outcomes: List[Dict[str, Any]] = []

    for current in items:
        try:
            produced = processor(current)
        except Exception as e:
            outcomes.append({"error": str(e), "item": current, "success": False})
        else:
            # dict results pass through untouched; everything else is wrapped.
            if isinstance(produced, dict):
                outcomes.append(produced)
            else:
                outcomes.append({"result": produced, "item": current})

        if progress is not None and task_id is not None:
            progress.advance(task_id)

    return outcomes
@@ -0,0 +1,40 @@
1
class MaxError(Exception):
    """Base class for expected errors in the CLI."""


class ResourceNotFoundError(MaxError):
    """Raised when a file or folder is missing."""


class ValidationError(MaxError):
    """Raised when input arguments are invalid."""


class ConfigurationError(MaxError):
    """Raised when configuration is invalid or missing."""


class ProcessingError(MaxError):
    """Raised when file processing fails."""


class NetworkError(MaxError):
    """Raised when network operations fail."""


class AIError(MaxError):
    """Raised when AI operations fail."""
@@ -0,0 +1,22 @@
1
+ from rich.console import Console
2
+ from rich.theme import Theme
3
+
4
# Define a custom theme for consistent coloring across all CLI output.
# Style names here are referenced in markup, e.g. "[error]...[/error]".
custom_theme = Theme(
    {
        "info": "cyan",
        "warning": "yellow",
        "error": "bold red",
        "success": "bold green",
    }
)

# Shared Console instance; import this rather than constructing new consoles.
console = Console(theme=custom_theme)
15
+
16
+
17
def log_error(message: str):
    """Print *message* to the shared console, styled as an error."""
    styled = f"[error]✖ Error:[/error] {message}"
    console.print(styled)
19
+
20
+
21
def log_success(message: str):
    """Print *message* to the shared console, styled as a success."""
    styled = f"[success]✔ Success:[/success] {message}"
    console.print(styled)
@@ -0,0 +1,24 @@
1
+ import logging
2
+ from pathlib import Path
3
+ from typing import Optional
4
+
5
+
6
def setup_logging(log_level: str = "INFO", log_file: Optional[Path] = None) -> None:
    """Configure root logging with a console handler and an optional file handler.

    Args:
        log_level: Level name such as "INFO" or "debug" (case-insensitive).
        log_file: If given, log records are also written to this file.
    """
    log_format = "%(asctime)s | %(name)-20s | %(levelname)-8s | %(message)s"

    active_handlers: list = [logging.StreamHandler()]
    if log_file:
        active_handlers.append(logging.FileHandler(log_file))

    # NOTE: basicConfig is a no-op if the root logger already has handlers.
    logging.basicConfig(
        level=getattr(logging, log_level.upper()),
        format=log_format,
        handlers=active_handlers,
    )
20
+
21
+
22
def get_logger(name: str) -> logging.Logger:
    """Return the named logger from the standard logging registry."""
    named_logger = logging.getLogger(name)
    return named_logger
@@ -0,0 +1,51 @@
1
+ import logging
2
+ import time
3
+ from functools import wraps
4
+ from typing import Any, Callable, TypeVar
5
+
6
logger = logging.getLogger(__name__)

T = TypeVar("T")


def retry(
    max_attempts: int = 3,
    delay: float = 1.0,
    backoff: float = 2.0,
    exceptions: tuple = (Exception,),
) -> Callable[[Callable[..., T]], Callable[..., T]]:
    """Decorator for retrying failed operations with exponential backoff.

    Args:
        max_attempts: Maximum number of attempts. Values below 1 are treated
            as 1, so the wrapped function is always called at least once.
        delay: Initial delay between retries in seconds
        backoff: Multiplier for delay after each retry
        exceptions: Tuple of exceptions to catch and retry

    Raises:
        The last caught exception once all attempts are exhausted.
    """

    def decorator(func: Callable[..., T]) -> Callable[..., T]:
        @wraps(func)
        def wrapper(*args: Any, **kwargs: Any) -> T:
            # Clamp to at least one attempt: previously max_attempts < 1
            # skipped the loop entirely and ended in ``raise None``, which
            # surfaced as a confusing TypeError.
            attempts = max(1, max_attempts)
            current_delay = delay

            for attempt in range(1, attempts + 1):
                try:
                    return func(*args, **kwargs)
                except exceptions as e:
                    if attempt == attempts:
                        # Lazy %-style args avoid formatting work when the
                        # record is filtered out.
                        logger.error("Failed after %d attempts: %s", attempts, e)
                        raise
                    logger.warning(
                        "Attempt %d/%d failed: %s. Retrying in %.1fs...",
                        attempt,
                        attempts,
                        e,
                        current_delay,
                    )
                    time.sleep(current_delay)
                    current_delay *= backoff

            raise AssertionError("unreachable")  # loop always returns or raises

        return wrapper

    return decorator
@@ -0,0 +1,40 @@
1
+ import re
2
+ import base64
3
+ from pathlib import Path
4
+
5
+
6
def natural_sort_key(s: str) -> list:
    """
    Split a string into alternating text and integer chunks for sorting.
    Used so ["1_doc", "10_doc", "2_doc"] orders as ["1_doc", "2_doc", "10_doc"].
    """
    key = []
    for chunk in re.split(r"(\d+)", s):
        # Digit runs compare numerically; text compares case-insensitively.
        key.append(int(chunk) if chunk.isdigit() else chunk.lower())
    return key
14
+
15
+
16
def format_size(size_in_bytes: float) -> str:
    """Return a human-readable file size string; negative input keeps a '-' sign."""
    sign = "-" if size_in_bytes < 0 else ""
    magnitude = abs(size_in_bytes)

    units = ("B", "KB", "MB", "GB", "TB")
    unit = units[-1]
    for unit in units:
        if magnitude < 1024.0:
            break
        # Anything past TB stays expressed in TB (matches the unit cap).
        magnitude /= 1024.0

    return f"{sign}{magnitude:.2f} {unit}"
30
+
31
+
32
def encode_image_to_base64(image_path: Path) -> str:
    """
    Read a file and return its contents as a base64 string for AI consumption.

    Raises:
        FileNotFoundError: if the path does not exist.
    """
    if not image_path.exists():
        raise FileNotFoundError(f"Image not found: {image_path}")

    raw = image_path.read_bytes()
    return base64.b64encode(raw).decode("utf-8")
max_cli/config.py ADDED
@@ -0,0 +1,43 @@
1
+ from pydantic import Field
2
+ from pydantic_settings import BaseSettings
3
+ from typing import Optional
4
+ from pathlib import Path
5
+
6
+
7
class Settings(BaseSettings):
    """Application configuration loaded via pydantic-settings.

    Values may come from the process environment, ~/.max_config.env, or a
    local .env file (see the inner Config class); unknown keys are ignored.
    """

    # General application identity and default image quality.
    APP_NAME: str = "Max CLI"
    DEFAULT_QUALITY: int = 85

    # Parallelism limits for batch work (validated: 1-16 workers, batch >= 1).
    MAX_WORKERS: int = Field(default=4, ge=1, le=16)
    BATCH_SIZE: int = Field(default=10, ge=1)

    # Network behaviour: download timeout in seconds (>= 30) and retry count.
    DOWNLOAD_TIMEOUT: int = Field(default=300, ge=30)
    MAX_RETRIES: int = Field(default=3, ge=0)

    # UX toggles for progress display, verbosity, and confirmation prompts.
    PROGRESS_BAR: bool = True
    VERBOSE: bool = False
    CONFIRM_DESTRUCTIVE: bool = True

    # AI Configuration
    # If using OpenAI, leave BASE_URL as None.
    # If using Gemini, set to: https://generativelanguage.googleapis.com/v1beta/openai/
    OPENAI_API_KEY: Optional[str] = None
    OPENAI_BASE_URL: Optional[str] = None
    # Models
    AI_MODEL: str = "gpt-5-nano"  # For 'ask', 'chat', 'analyze'
    AI_IMAGE_MODEL: str = "gemini-2.5-flash-image"  # For 'create', 'edit'

    # --- GRAB (DOWNLOADER) DEFAULTS ---
    # These save your preferences
    GRAB_QUALITY: str = "h"  # s, m, h, x
    GRAB_AUDIO_FORMAT: str = "mp3"  # mp3, m4a, wav
    GRAB_STRIP_PLAYLIST: bool = True  # If True, removes '&list=...' from video URLs
    GRAB_INCLUDE_METADATA: bool = True  # If True, embeds tags/thumbnails

    class Config:
        # NOTE(review): two env files are listed; precedence between them
        # depends on the installed pydantic-settings version — confirm.
        env_file = [str(Path.home() / ".max_config.env"), ".env"]
        env_file_encoding = "utf-8"
        extra = "ignore"  # silently drop unknown environment variables


# Module-level singleton; import `settings` rather than re-instantiating Settings.
settings = Settings()