pipu-cli 0.1.dev7__py3-none-any.whl → 0.2.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pipu_cli/__init__.py +2 -2
- pipu_cli/cache.py +275 -0
- pipu_cli/cli.py +866 -817
- pipu_cli/config.py +7 -58
- pipu_cli/config_file.py +79 -0
- pipu_cli/output.py +99 -0
- pipu_cli/package_management.py +1290 -0
- pipu_cli/pretty.py +286 -0
- pipu_cli/requirements.py +100 -0
- pipu_cli/rollback.py +110 -0
- pipu_cli-0.2.1.dist-info/METADATA +422 -0
- pipu_cli-0.2.1.dist-info/RECORD +16 -0
- pipu_cli/common.py +0 -4
- pipu_cli/internals.py +0 -815
- pipu_cli/package_constraints.py +0 -2296
- pipu_cli/thread_safe.py +0 -243
- pipu_cli/ui/__init__.py +0 -51
- pipu_cli/ui/apps.py +0 -1464
- pipu_cli/ui/constants.py +0 -33
- pipu_cli/ui/modal_dialogs.py +0 -1375
- pipu_cli/ui/table_widgets.py +0 -344
- pipu_cli/utils.py +0 -169
- pipu_cli-0.1.dev7.dist-info/METADATA +0 -517
- pipu_cli-0.1.dev7.dist-info/RECORD +0 -19
- {pipu_cli-0.1.dev7.dist-info → pipu_cli-0.2.1.dist-info}/WHEEL +0 -0
- {pipu_cli-0.1.dev7.dist-info → pipu_cli-0.2.1.dist-info}/entry_points.txt +0 -0
- {pipu_cli-0.1.dev7.dist-info → pipu_cli-0.2.1.dist-info}/licenses/LICENSE +0 -0
- {pipu_cli-0.1.dev7.dist-info → pipu_cli-0.2.1.dist-info}/top_level.txt +0 -0
pipu_cli/__init__.py
CHANGED
|
@@ -1,11 +1,11 @@
|
|
|
1
1
|
import logging
|
|
2
2
|
from .config import LOG_LEVEL
|
|
3
3
|
|
|
4
|
-
__version__ = '0.1.dev7'
|
|
4
|
+
__version__ = '0.2.1'
|
|
5
5
|
|
|
6
6
|
|
|
7
7
|
# Configure logging
|
|
8
|
-
log = logging.getLogger("pipu")
|
|
8
|
+
log = logging.getLogger("pipu-cli")
|
|
9
9
|
|
|
10
10
|
|
|
11
11
|
class LevelSpecificFormatter(logging.Formatter):
|
pipu_cli/cache.py
ADDED
|
@@ -0,0 +1,275 @@
|
|
|
1
|
+
"""Package version caching for pipu.
|
|
2
|
+
|
|
3
|
+
This module provides caching of latest package versions from PyPI to speed up
|
|
4
|
+
repeated runs of pipu. The cache is per-environment, identified by the
|
|
5
|
+
Python executable path, making it compatible with venv, conda, mise, etc.
|
|
6
|
+
|
|
7
|
+
The cache stores only the latest available versions - constraint resolution
|
|
8
|
+
is performed at upgrade time with the current installed package state.
|
|
9
|
+
"""
|
|
10
|
+
|
|
11
|
+
import hashlib
|
|
12
|
+
import json
|
|
13
|
+
import logging
|
|
14
|
+
import sys
|
|
15
|
+
from dataclasses import dataclass, asdict
|
|
16
|
+
from datetime import datetime, timezone
|
|
17
|
+
from pathlib import Path
|
|
18
|
+
from typing import Dict, List, Optional, Any
|
|
19
|
+
|
|
20
|
+
from packaging.version import Version
|
|
21
|
+
|
|
22
|
+
from pipu_cli.config import DEFAULT_CACHE_TTL, CACHE_BASE_DIR
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
logger = logging.getLogger(__name__)
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
@dataclass
class CacheData:
    """Cache data structure - stores latest versions from PyPI."""
    # Short hash of the interpreter path identifying the environment
    # this cache belongs to (see get_environment_id()).
    environment_id: str
    # Absolute path of the Python executable the cache was built for.
    python_executable: str
    updated_at: str  # ISO format timestamp
    # Whether prereleases were included when these versions were fetched.
    include_prereleases: bool
    # Maps package name (lowercase) to latest version string
    latest_versions: Dict[str, str]
|
|
37
|
+
|
|
38
|
+
|
|
39
|
+
def get_environment_id() -> str:
    """Return a short, stable identifier for the active Python environment.

    The identifier is a truncated SHA-256 hash of the interpreter's
    executable path, which distinguishes venv, conda, mise, and other
    environment managers from one another.

    :returns: Short hash identifying the environment
    """
    digest = hashlib.sha256(sys.executable.encode()).hexdigest()
    return digest[:12]
|
|
51
|
+
|
|
52
|
+
|
|
53
|
+
def get_cache_dir() -> Path:
    """Return the cache directory for the current environment.

    :returns: Path to environment-specific cache directory
    """
    # One subdirectory per environment, keyed by the interpreter hash.
    return CACHE_BASE_DIR / get_environment_id()
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
def get_cache_path() -> Path:
    """Return the location of this environment's cache file.

    :returns: Path to the cache JSON file
    """
    cache_file = get_cache_dir() / "versions.json"
    return cache_file
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def load_cache() -> Optional[CacheData]:
    """Load cache data from disk.

    :returns: CacheData object or None if cache doesn't exist, is invalid,
        or belongs to a different environment
    """
    cache_path = get_cache_path()

    if not cache_path.exists():
        logger.debug(f"Cache file does not exist: {cache_path}")
        return None

    try:
        with open(cache_path, 'r') as f:
            data = json.load(f)

        # Validate the cache is for the current environment; a cache file
        # copied between machines or environments must not be trusted.
        env_id = get_environment_id()
        if data.get("environment_id") != env_id:
            logger.debug("Cache environment mismatch, ignoring")
            return None

        return CacheData(
            environment_id=data["environment_id"],
            python_executable=data["python_executable"],
            updated_at=data["updated_at"],
            include_prereleases=data.get("include_prereleases", False),
            latest_versions=data.get("latest_versions", {})
        )
    # OSError added: an unreadable or concurrently-removed cache file
    # should degrade to a cache miss rather than crash the caller.
    except (OSError, json.JSONDecodeError, KeyError, TypeError) as e:
        logger.debug(f"Failed to load cache: {e}")
        return None
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def save_cache(latest_versions: Dict[str, str], include_prereleases: bool = False) -> Path:
    """Persist the latest-version mapping for the current environment.

    :param latest_versions: Dictionary mapping package names (lowercase) to latest version strings
    :param include_prereleases: Whether prereleases were included in version check
    :returns: Path to the saved cache file
    """
    directory = get_cache_dir()
    directory.mkdir(parents=True, exist_ok=True)

    target = get_cache_path()

    # Serialize through the dataclass so the on-disk schema matches
    # exactly what load_cache() reads back.
    payload = asdict(CacheData(
        environment_id=get_environment_id(),
        python_executable=sys.executable,
        updated_at=datetime.now(timezone.utc).isoformat(),
        include_prereleases=include_prereleases,
        latest_versions=latest_versions,
    ))

    with open(target, 'w') as f:
        json.dump(payload, f, indent=2)

    logger.debug(f"Cache saved to {target}")
    return target
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def is_cache_fresh(ttl_seconds: int = DEFAULT_CACHE_TTL) -> bool:
    """Check if the cache is fresh (within TTL).

    :param ttl_seconds: Time-to-live in seconds
    :returns: True if cache exists and is within TTL
    """
    # Delegate timestamp parsing to get_cache_age_seconds() so the
    # ISO-parsing and naive-timezone handling lives in exactly one place.
    age_seconds = get_cache_age_seconds()
    if age_seconds is None:
        # Missing cache or unparseable timestamp -> treat as stale.
        return False

    is_fresh = age_seconds < ttl_seconds
    logger.debug(f"Cache age: {age_seconds:.0f}s, TTL: {ttl_seconds}s, Fresh: {is_fresh}")
    return is_fresh
|
|
153
|
+
|
|
154
|
+
|
|
155
|
+
def get_cache_age_seconds() -> Optional[float]:
    """Get the age of the cache in seconds.

    :returns: Age in seconds or None if cache doesn't exist
    """
    cached = load_cache()
    if cached is None:
        return None

    try:
        stamp = datetime.fromisoformat(cached.updated_at)
    except (ValueError, TypeError):
        return None

    # Older caches may carry naive timestamps; interpret them as UTC.
    if stamp.tzinfo is None:
        stamp = stamp.replace(tzinfo=timezone.utc)

    delta = datetime.now(timezone.utc) - stamp
    return delta.total_seconds()
|
|
173
|
+
|
|
174
|
+
|
|
175
|
+
def format_cache_age(seconds: Optional[float]) -> str:
    """Format cache age as human-readable string.

    :param seconds: Age in seconds
    :returns: Formatted string like "5 minutes ago" or "2 hours ago"
    """
    if seconds is None:
        return "never"

    # Largest unit first; fall through to seconds for sub-minute ages.
    for size, unit in ((86400, "day"), (3600, "hour"), (60, "minute")):
        if seconds >= size:
            count = int(seconds / size)
            plural = "s" if count != 1 else ""
            return f"{count} {unit}{plural} ago"

    return f"{int(seconds)} seconds ago"
|
|
195
|
+
|
|
196
|
+
|
|
197
|
+
def clear_cache() -> bool:
    """Delete the cache file for the current environment.

    :returns: True if cache was deleted, False if it didn't exist
    """
    target = get_cache_path()
    if not target.exists():
        return False

    target.unlink()
    logger.debug(f"Cache cleared: {target}")
    return True
|
|
210
|
+
|
|
211
|
+
|
|
212
|
+
def clear_all_caches() -> int:
    """Delete all cache files for all environments.

    :returns: Number of cache directories deleted
    """
    if not CACHE_BASE_DIR.exists():
        return 0

    removed = 0
    for entry in CACHE_BASE_DIR.iterdir():
        if not entry.is_dir():
            continue
        versions_file = entry / "versions.json"
        if versions_file.exists():
            versions_file.unlink()
        try:
            entry.rmdir()
        except OSError:
            # Directory still holds unrelated files; leave it in place.
            continue
        removed += 1

    return removed
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def get_cache_info() -> Dict[str, Any]:
    """Get information about the current cache.

    :returns: Dictionary with cache metadata
    """
    cached = load_cache()

    info: Dict[str, Any] = {
        "exists": cached is not None,
        "path": str(get_cache_path()),
        "environment_id": get_environment_id(),
        "python_executable": sys.executable,
    }

    if cached:
        age = get_cache_age_seconds()
        # Extend with content metadata only when a valid cache was loaded.
        info.update(
            updated_at=cached.updated_at,
            package_count=len(cached.latest_versions),
            include_prereleases=cached.include_prereleases,
            age_seconds=age,
            age_human=format_cache_age(age),
        )

    return info
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
def build_version_cache(
    latest_versions: Dict[Any, Any]
) -> Dict[str, str]:
    """Build cache data from pipu's version check results.

    :param latest_versions: Dict mapping InstalledPackage to Package with latest version
    :returns: Dictionary mapping package names (lowercase) to latest version strings
    """
    # Normalize names to lowercase so lookups are case-insensitive.
    return {
        installed.name.lower(): str(latest.version)
        for installed, latest in latest_versions.items()
    }
|