vuer-cli 0.0.1__py3-none-any.whl → 0.0.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
vuer_cli/__init__.py ADDED
@@ -0,0 +1,20 @@
1
+ """Vuer CLI - Environment Manager for Vuer Hub."""
2
+
3
+ from .add import Add
4
+ from .envs_publish import EnvsPublish, Hub
5
+ from .envs_pull import EnvsPull
6
+ from .main import entrypoint
7
+ from .remove import Remove
8
+ from .sync import Sync
9
+ from .upgrade import Upgrade
10
+
11
+ __all__ = [
12
+ "entrypoint",
13
+ "Hub",
14
+ "Sync",
15
+ "Add",
16
+ "Remove",
17
+ "Upgrade",
18
+ "EnvsPublish",
19
+ "EnvsPull",
20
+ ]
vuer_cli/add.py ADDED
@@ -0,0 +1,93 @@
1
+ """Add command - add an environment spec to environment.json then sync."""
2
+
3
+ from dataclasses import dataclass
4
+ from pathlib import Path
5
+ from typing import Optional
6
+
7
+ import json
8
+
9
+ from .sync import Sync, read_environments_lock
10
+ from .utils import print_error, parse_env_spec, normalize_env_spec
11
+
12
+
13
+ # Use shared parser from utils; legacy '@' syntax is not supported anymore.
14
+
15
+
16
@dataclass
class Add:
    """Add an environment to environment.json and run `vuer sync`.

    Example:
        vuer add some-environment/v1.2.3
    """

    # Fields exist for params-proto compatibility; the positional spec bound
    # to `env` is the primary way this command is invoked.
    env: str = ""  # Environment spec to add, e.g. "some-environment/v1.2.3"
    name: Optional[str] = None  # Unused in current workflow
    version: str = "latest"  # Unused in current workflow

    def __call__(self) -> int:
        """Run the add workflow; return 0 on success, 1 on failure."""
        try:
            spec = self.env
            if not spec:
                # params-proto likely failed to bind the positional argument;
                # treat this as a usage error.
                raise ValueError(
                    "Missing environment spec. Usage: vuer add some-environment/v1.2.3"
                )

            env_name, env_version = parse_env_spec(spec)
            normalized = normalize_env_spec(f"{env_name}/{env_version}")

            project_dir = Path.cwd()

            # Nothing to do if the environment is already locked.
            lock_file = project_dir / "environments-lock.yaml"
            if lock_file.exists() and normalized in read_environments_lock(lock_file):
                print(f"[INFO] Environment {normalized} already present in {lock_file}")
                return 0

            # Load (or start) the manifest, record the dependency, then sync.
            manifest_path = project_dir / "environment.json"
            manifest = {}
            if manifest_path.exists():
                with manifest_path.open("r", encoding="utf-8") as fh:
                    try:
                        manifest = json.load(fh)
                    except json.JSONDecodeError as e:
                        raise ValueError(
                            f"Invalid environment.json: {e}"
                        ) from e

            dependencies = manifest.get("dependencies")
            if dependencies is None:
                dependencies = {}
            if not isinstance(dependencies, dict):
                raise ValueError(
                    "environment.json 'dependencies' field must be an object"
                )

            # Add or update the dependency entry.
            dependencies[env_name] = env_version
            manifest["dependencies"] = dependencies

            with manifest_path.open("w", encoding="utf-8") as fh:
                json.dump(manifest, fh, indent=2, ensure_ascii=False)
                fh.write("\n")

            print(
                f"[INFO] Added {normalized} to environment.json dependencies. Running sync..."
            )
            return Sync()()

        except (FileNotFoundError, ValueError, RuntimeError) as e:
            print_error(str(e))
            return 1
        except Exception as e:
            print_error(f"Unexpected error: {e}")
            return 1
vuer_cli/envs_publish.py ADDED
@@ -0,0 +1,371 @@
1
+ """EnvsPublish command - publish an environment version (npm-style workflow)."""
2
+
3
+ import json
4
+ import tarfile
5
+ import tempfile
6
+ import threading
7
+ import time
8
+ from dataclasses import dataclass
9
+ from pathlib import Path
10
+ from typing import Any, Dict, List
11
+
12
+ from params_proto import EnvVar, proto
13
+
14
+ from .utils import is_dry_run, print_error, spinner, normalize_env_spec
15
+
16
+
17
+ # -- Configuration with environment variable defaults --
18
+
19
@proto.prefix
class Hub:
    """Vuer Hub connection settings.

    Values default to the corresponding environment variables; per the CLI's
    own error messages they can also be supplied as --hub.url /
    --hub.auth-token flags via params-proto.
    """

    # Base URL of the Vuer Hub API; read from VUER_HUB_URL when set.
    url: str = EnvVar("VUER_HUB_URL", default="")
    # JWT token for authentication; read from VUER_AUTH_TOKEN when set.
    auth_token: str = EnvVar("VUER_AUTH_TOKEN", default="")
25
+
26
+
27
+ # -- Subcommand dataclass --
28
+
29
@dataclass
class EnvsPublish:
    """Publish environment to registry (npm-style).

    Reads environment.json, creates tgz archive, and uploads to the hub.
    """

    directory: str = "."  # Directory containing environment.json
    timeout: int = 300  # Request timeout in seconds
    tag: str = "latest"  # Version tag
    dry_run: bool = False  # Simulate without uploading

    def __call__(self) -> int:
        """Run the publish workflow; return 0 on success, 1 on failure."""
        try:
            effective_dry_run = self.dry_run or is_dry_run()

            # Real uploads require hub credentials; dry runs skip the check.
            if not effective_dry_run:
                if not Hub.url:
                    raise RuntimeError(
                        "Missing VUER_HUB_URL. Please set the VUER_HUB_URL environment variable "
                        "or pass --hub.url on the command line."
                    )
                if not Hub.auth_token:
                    raise RuntimeError(
                        "Missing VUER_AUTH_TOKEN. Please set the VUER_AUTH_TOKEN environment "
                        "variable or pass --hub.auth-token on the command line."
                    )

            print(f"[INFO] Reading environment.json from {self.directory}...")
            metadata, envs_metadata = parse_environments_json(self.directory)
            print(f"[INFO] Found package: {metadata['name']}/{metadata['version']}")

            # Dependencies are validated against the backend before packaging.
            dependencies = extract_dependencies(envs_metadata)
            if not dependencies:
                print("[INFO] No dependencies to validate.")
            else:
                print(f"[INFO] Validating {len(dependencies)} dependencies...")
                validate_dependencies(dependencies, effective_dry_run, Hub.url, Hub.auth_token)
                print("[INFO] All dependencies are valid.")

            print("[INFO] Creating tgz archive...")
            archive_path = create_tgz_archive(self.directory, metadata)
            print(f"[INFO] Archive created: {archive_path}")

            publish_to_registry(
                archive_path=archive_path,
                metadata=metadata,
                envs_metadata=envs_metadata,
                hub_url=Hub.url,
                auth_token=Hub.auth_token,
                timeout=self.timeout,
                dry_run=effective_dry_run,
            )
            return 0

        except (FileNotFoundError, ValueError) as e:
            print_error(str(e))
            return 1
        except RuntimeError as e:
            # validate_dependencies already printed its own error message;
            # avoid printing it twice.
            if "Dependency validation failed" not in str(e):
                print_error(str(e))
            return 1
        except Exception as e:
            print_error(f"Unexpected error: {e}")
            return 1
101
+
102
+
103
+ # -- Helper functions --
104
+
105
def parse_environments_json(directory: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
    """Load environment.json from *directory*.

    Returns:
        (metadata, full_data): metadata holds the publish-relevant fields;
        full_data is the raw parsed JSON document.

    Raises:
        FileNotFoundError: when environment.json is absent.
        ValueError: when the JSON is malformed or name/version is missing.
    """
    manifest = Path(directory) / "environment.json"
    if not manifest.exists():
        raise FileNotFoundError(f"environment.json not found in {directory}")

    try:
        data = json.loads(manifest.read_text(encoding="utf-8"))
    except json.JSONDecodeError as e:
        raise ValueError(f"Invalid environment.json: {e}") from e

    # Both "env-type" and "env_type" spellings are accepted, kebab-case first.
    metadata = {
        "name": data.get("name", ""),
        "version": data.get("version", ""),
        "description": data.get("description", ""),
        "visibility": data.get("visibility", "PUBLIC"),
        "env_type": data.get("env-type", "") or data.get("env_type", ""),
    }

    # name and version are mandatory for publishing.
    for required in ("name", "version"):
        if not metadata[required]:
            raise ValueError(f"environment.json must contain '{required}' field")

    return metadata, data
135
+
136
+
137
def extract_dependencies(envs_metadata: Dict[str, Any]) -> List[str]:
    """Convert the 'dependencies' mapping of environment.json to spec strings.

    Args:
        envs_metadata: Full environment.json content.

    Returns:
        Specs like ["some-dependency/^1.2.3", ...]; an empty list when the
        field is missing, empty, or not a mapping.
    """
    deps_dict = envs_metadata.get("dependencies", {})
    if not isinstance(deps_dict, dict) or not deps_dict:
        return []

    # Non-string version specs (e.g. numbers from JSON) are stringified.
    return [
        normalize_env_spec(f"{name}/{str(spec)}")
        for name, spec in deps_dict.items()
    ]
158
+
159
+
160
def validate_dependencies(
    dependencies: List[str],
    dry_run: bool,
    hub_url: str,
    auth_token: str,
    timeout: int = 300,
) -> None:
    """Validate dependencies with backend API.

    Args:
        dependencies: List of dependency specs like ["name/version", ...]
        dry_run: Whether to run in dry-run mode (skips the network call)
        hub_url: Vuer Hub base URL
        auth_token: Authentication token
        timeout: Request timeout in seconds. Previously hard-coded to 300;
            now a keyword parameter so callers can align it with their own
            timeout settings. The default preserves the old behavior.

    Raises:
        RuntimeError: If validation fails (non-200 status or error in response)
    """
    if dry_run or is_dry_run():
        print("[INFO] (dry-run) Validating dependencies (simulated)...")
        return

    if not hub_url:
        raise RuntimeError(
            "Missing VUER_HUB_URL. Cannot validate dependencies without hub URL."
        )

    # Imported lazily so dry-run/test environments never load requests/SSL.
    import requests

    url = f"{hub_url.rstrip('/')}/environments/dependencies"
    headers = {}
    if auth_token:
        headers["Authorization"] = f"Bearer {auth_token}"
    headers["Content-Type"] = "application/json"

    payload = {"name_versionId_list": dependencies}

    try:
        response = requests.post(url, json=payload, headers=headers, timeout=timeout)
    except requests.exceptions.RequestException as e:
        raise RuntimeError(f"Failed to validate dependencies: {e}") from e

    status = response.status_code

    # Non-200: extract the most informative message available, print it, and
    # raise so the caller can abort the publish.
    if status != 200:
        error_msg = ""
        try:
            data = response.json()
            if isinstance(data, dict):
                error_msg = data.get("error") or data.get("message", "")
                if not error_msg:
                    error_msg = json.dumps(data, ensure_ascii=False)
            else:
                error_msg = json.dumps(data, ensure_ascii=False)
        except Exception:
            # Body is not JSON; fall back to raw text.
            text = (response.text or "").strip()
            error_msg = text if text else "Unknown error"

        if error_msg:
            print_error(f"Dependency validation failed ({status}): {error_msg}")
        else:
            print_error(f"Dependency validation failed ({status})")
        raise RuntimeError(f"Dependency validation failed with status {status}")

    # Status 200: the backend may still report a logical failure in the body.
    try:
        data = response.json()
        if isinstance(data, dict) and "error" in data:
            error_msg = data["error"]
            print_error(f"Dependency validation failed: {error_msg}")
            raise RuntimeError(f"Dependency validation failed: {error_msg}")
    except (json.JSONDecodeError, ValueError):
        # Response is not JSON or doesn't have error field, assume success
        pass
234
+
235
+
236
def create_tgz_archive(directory: str, metadata: Dict[str, Any]) -> str:
    """Pack every regular file under *directory* into a gzipped tarball.

    The archive is written to the system temp directory as
    "<name>-<version>.tgz"; entry names are relative to *directory*.

    Returns:
        Absolute path of the created archive as a string.
    """
    source_root = Path(directory).resolve()
    out_path = Path(tempfile.gettempdir()) / f"{metadata['name']}-{metadata['version']}.tgz"

    with tarfile.open(str(out_path), "w:gz") as archive:
        # rglob("*") walks recursively; only regular files become entries, so
        # directories appear implicitly through their children.
        for member in source_root.rglob("*"):
            if member.is_file():
                archive.add(member, arcname=member.relative_to(source_root))

    return str(out_path)
251
+
252
+
253
def upload_with_progress(archive_path: str, metadata: Dict[str, Any], timeout: int) -> None:
    """Simulate an upload in dry-run mode.

    Sleeps proportionally to the archive size so a dry run feels like a real
    upload. *metadata* and *timeout* are unused but kept for signature
    compatibility with the real upload path.
    """
    archive = Path(archive_path)
    size = archive.stat().st_size
    print(f"[INFO] (dry-run) Uploading {archive.name} ({size} bytes)...")
    # Simulated ~10 MiB/s throughput, clamped between 0.1 and 2.0 seconds.
    simulated_seconds = size / (10 * 1024 * 1024)
    time.sleep(min(2.0, max(0.1, simulated_seconds)))
259
+
260
+
261
def publish_to_registry(
    archive_path: str,
    metadata: Dict[str, Any],
    envs_metadata: Dict[str, Any],
    hub_url: str,
    auth_token: str,
    timeout: int,
    dry_run: bool,
) -> None:
    """Publish package to registry via API.

    Args:
        archive_path: Path to the tgz archive produced by create_tgz_archive.
        metadata: Publish-relevant fields (name, version, description,
            visibility, env_type) from parse_environments_json.
        envs_metadata: Full environment.json content; sent verbatim as the
            multipart "metadata" field.
        hub_url: Vuer Hub base URL.
        auth_token: Bearer token; Authorization header is only set when truthy.
        timeout: Request timeout in seconds.
        dry_run: When true (or when is_dry_run() reports true), simulate the
            upload locally and skip all network access.

    Raises:
        RuntimeError: on any HTTP status >= 300, with the most informative
            message extractable from the response body.
    """
    print(f"[INFO] Publishing {metadata['name']}/{metadata['version']} to registry...")
    print(f"[INFO] Archive: {archive_path}")
    print(f"[INFO] Metadata: {json.dumps(metadata, indent=2)}")
    print(f"[INFO] environment.json: {json.dumps(envs_metadata, indent=2)}")
    print(f"[INFO] Hub URL: {hub_url}")
    print(f"[INFO] Timeout: {timeout}s")

    if dry_run or is_dry_run():
        upload_with_progress(archive_path, metadata, timeout)
        print(f"[SUCCESS] (dry-run) Published {metadata['name']}/{metadata['version']} (no network call).")
        return

    # Import requests lazily to avoid SSL/cert loading in restricted envs.
    import requests

    url = f"{hub_url.rstrip('/')}/environments/upload"
    file_path = Path(archive_path)

    # The archive file must remain open for the duration of the streamed
    # multipart POST, hence the request happens inside this `with` block.
    with file_path.open("rb") as f:
        files = {
            "package": (file_path.name, f, "application/octet-stream"),
        }
        data = {
            "name": str(metadata["name"]),
            "versionId": str(metadata["version"]),
            "description": str(metadata.get("description", "")),
            "type": str(metadata.get("env_type", "")),
            "visibility": str(metadata.get("visibility", "PUBLIC")),
        }
        # Send full environment.json content as metadata field.
        data["metadata"] = json.dumps(envs_metadata, ensure_ascii=False)

        headers = {}
        if auth_token:
            headers["Authorization"] = f"Bearer {auth_token}"

        # A daemon thread runs the spinner while the blocking POST is in
        # flight; the finally block guarantees it is stopped and joined even
        # when the request raises.
        stop_event = threading.Event()
        spinner_thread = threading.Thread(
            target=spinner,
            args=(f"[INFO] Uploading {file_path.name} ", stop_event),
            daemon=True,
        )
        spinner_thread.start()
        try:
            response = requests.post(
                url,
                data=data,
                files=files,
                headers=headers,
                timeout=timeout,
            )
        finally:
            stop_event.set()
            spinner_thread.join()

    status = response.status_code
    text = (response.text or "").strip()

    if status >= 300:
        # Prefer "message", then "error", then the whole JSON body, then the
        # raw response text, as the failure description.
        inline_msg = ""
        try:
            data = response.json()  # NOTE: rebinds `data` (the form dict above)
            if isinstance(data, dict):
                msg = data.get("message")
                err = data.get("error")
                if msg:
                    inline_msg = str(msg)
                elif err:
                    inline_msg = str(err)
                else:
                    inline_msg = json.dumps(data, ensure_ascii=False)
            else:
                inline_msg = json.dumps(data, ensure_ascii=False)
        except Exception:
            inline_msg = text

        inline_msg = (inline_msg or "").strip()
        if inline_msg:
            raise RuntimeError(f"Publish failed ({status}): {inline_msg}")
        raise RuntimeError(f"Publish failed ({status})")

    # Success: best-effort extraction of the created environment's identity
    # from the response; falls back to the local metadata on any parse issue.
    # NOTE(review): assumes the hub responds with either {"environment": {...}}
    # or the environment object itself — confirm against the hub API.
    env_id = None
    env_name = metadata.get("name")
    env_version = metadata.get("version")
    try:
        payload = response.json()
        env = payload.get("environment", payload) if isinstance(payload, dict) else {}
        env_id = env.get("environmentId") or env.get("id")
        env_name = env.get("name", env_name)
        env_version = env.get("versionId", env_version)
    except Exception:
        pass

    print("\n=== Publish Success ===")
    if env_id:
        print(f"ID : {env_id}")
    print(f"Name : {env_name}")
    print(f"Version : {env_version}")
    visibility = metadata.get("visibility", "PUBLIC")
    print(f"Visibility: {visibility}")
371
+
vuer_cli/envs_pull.py ADDED
@@ -0,0 +1,206 @@
1
+ """EnvsPull command - download an environment by ID."""
2
+
3
+ import tarfile
4
+ from dataclasses import dataclass
5
+ from pathlib import Path
6
+ from typing import Dict, Iterable, Optional
7
+
8
+ from tqdm import tqdm
9
+
10
+ from .envs_publish import Hub
11
+ from .utils import is_dry_run, print_error, parse_env_spec
12
+
13
+
14
+ # -- Subcommand dataclass --
15
+
16
@dataclass
class EnvsPull:
    """Download an environment from the registry by ID or name/version."""

    flag: str = ""  # Environment identifier (ID or name/version) to download
    output: str = "downloads"  # Destination directory
    filename: Optional[str] = None  # Override saved filename
    version: Optional[str] = None  # Specific version to download
    timeout: int = 300  # Request timeout in seconds
    skip_progress: bool = False  # Disable progress bar

    @staticmethod
    def _require_credentials() -> None:
        """Raise RuntimeError unless both hub URL and auth token are configured."""
        if not Hub.url:
            raise RuntimeError(
                "Missing VUER_HUB_URL. Please set the VUER_HUB_URL environment variable "
                "or pass --hub.url on the command line."
            )
        if not Hub.auth_token:
            raise RuntimeError(
                "Missing VUER_AUTH_TOKEN. Please set the VUER_AUTH_TOKEN environment "
                "variable or pass --hub.auth-token on the command line."
            )

    def __call__(self) -> int:
        """Run the pull workflow; return 0 on success, 1 on failure."""
        try:
            # Real downloads need hub credentials; dry runs skip the check.
            if not is_dry_run():
                self._require_credentials()

            print(f"[INFO] Pulling environment {self.flag} ...")
            pull_from_registry(
                env_flag=self.flag,
                output_dir=self.output,
                filename=self.filename,
                version=self.version,
                timeout=self.timeout,
                skip_progress=self.skip_progress,
            )
            return 0
        except Exception as e:
            print_error(str(e))
            return 1
55
+
56
+
57
+ # -- Helper functions --
58
+
59
def download_with_progress(
    destination: Path,
    total_size: int,
    stream: Iterable[bytes],
    skip_progress: bool,
) -> None:
    """Write streamed bytes to *destination*, optionally with a tqdm bar.

    Parent directories are created as needed. With skip_progress=True the
    chunks are written without any progress display.
    """
    destination.parent.mkdir(parents=True, exist_ok=True)

    if skip_progress:
        with destination.open("wb") as out:
            for part in stream:
                out.write(part)
        return

    with destination.open("wb") as out, tqdm(
        total=total_size or None,  # None lets tqdm show an unbounded bar
        unit="B",
        unit_scale=True,
        unit_divisor=1024,
        desc=f"Downloading {destination.name}",
        ncols=100,
    ) as bar:
        for part in stream:
            out.write(part)
            bar.update(len(part))
85
+
86
+
87
def extract_filename_from_headers(headers: Dict[str, str], default_name: str) -> str:
    """Extract a filename from a Content-Disposition header.

    Prefers the RFC 5987 extended form (filename*=utf-8''...) over the plain
    filename="..." form. Fixes two defects of the previous version: the plain
    form stripped quotes before splitting on ';', so 'filename="x.tgz"; size=1'
    yielded 'x.tgz"' with a trailing quote; and the lookup was case-sensitive
    although HTTP header names are case-insensitive (plain dicts, unlike
    requests' CaseInsensitiveDict, do not normalize casing).

    Args:
        headers: Response headers (name -> value).
        default_name: Fallback returned when no usable filename is present.

    Returns:
        The decoded filename, or *default_name*.
    """
    content_disposition = headers.get("Content-Disposition", "")
    if not content_disposition:
        # Fall back to a case-insensitive scan for non-canonical casings.
        for key, value in headers.items():
            if key.lower() == "content-disposition":
                content_disposition = value
                break
    if not content_disposition:
        return default_name

    # RFC 5987 extended form: filename*=utf-8''percent-encoded-name
    if "filename*=" in content_disposition:
        part = content_disposition.split("filename*=")[-1].strip()
        encoded = part[7:] if part.lower().startswith("utf-8''") else part
        encoded = encoded.split(";")[0].strip().strip('"')
        try:
            from urllib.parse import unquote

            candidate = unquote(encoded)
            if candidate:
                return candidate
        except Exception:
            pass

    # Plain form: filename="name.ext" — split off trailing parameters BEFORE
    # stripping the surrounding quotes.
    if "filename=" in content_disposition:
        candidate = content_disposition.split("filename=")[-1].split(";")[0]
        candidate = candidate.strip().strip('"')
        if candidate:
            return candidate

    return default_name
115
+
116
+
117
def pull_from_registry(
    env_flag: str,
    output_dir: str,
    filename: Optional[str],
    version: Optional[str],
    timeout: int,
    skip_progress: bool,
) -> Path:
    """Download environment by ID or name/version and extract archive into a directory.

    Args:
        env_flag: Environment ID, or a name/version spec parseable by
            parse_env_spec.
        output_dir: Destination root; created if missing.
        filename: Optional override for the saved archive name.
        version: Unused by this function (the spec in env_flag wins); the
            local name is also shadowed by parse_env_spec results below.
        timeout: Request timeout in seconds.
        skip_progress: Disable the download progress bar.

    Returns:
        The directory the archive was extracted into, or the archive path
        itself when the payload is not a tar file or extraction fails.
    """
    hub_url = Hub.url
    auth_token = Hub.auth_token
    headers = {"Authorization": f"Bearer {auth_token}"} if auth_token else {}

    # New API: /api/environments/download?environment_id=<id or name@version>
    from urllib.parse import urlencode

    base_url = f"{hub_url.rstrip('/')}/environments/download"
    query = urlencode({"environment_id": env_flag})
    url = f"{base_url}?{query}"

    output_dir_path = Path(output_dir).expanduser().resolve()
    output_dir_path.mkdir(parents=True, exist_ok=True)

    # Dry run: fabricate a placeholder environment directory, no network.
    if is_dry_run():
        # If env_flag is a name/version, create nested dirs name/version
        try:
            name, version = parse_env_spec(env_flag)
            env_dir = output_dir_path / name / version
        except Exception:
            env_dir = output_dir_path / str(env_flag)
        env_dir.mkdir(parents=True, exist_ok=True)
        (env_dir / "README.txt").write_text("Dry-run environment content\n")
        print(f"[SUCCESS] (dry-run) Downloaded to {env_dir}")
        return env_dir

    # Lazy import requests to avoid SSL/cert issues in dry-run/tests.
    import requests

    with requests.get(url, headers=headers, stream=True, timeout=timeout) as resp:
        resp.raise_for_status()
        total_size = int(resp.headers.get("Content-Length", 0))
        # Use filesystem-safe archive name when env_flag contains '/'
        # When env_flag is name/version, use name-version for filename
        try:
            name, version = parse_env_spec(env_flag)
            safe_name = f"{name}-{version}"
        except Exception:
            safe_name = str(env_flag)
        default_archive_name = f"{safe_name}.tgz"
        # Explicit --filename wins over the server's Content-Disposition.
        archive_name = filename or extract_filename_from_headers(resp.headers, default_archive_name)
        archive_path = output_dir_path / archive_name

        # Generator filters out keep-alive (empty) chunks before writing.
        stream = (chunk for chunk in resp.iter_content(chunk_size=1024 * 512) if chunk)
        download_with_progress(archive_path, total_size, stream, skip_progress)

    # Derive target directory name from archive filename
    suffixes = "".join(archive_path.suffixes)
    if suffixes.endswith(".tar.gz"):
        base_name = archive_path.name[: -len(".tar.gz")]
    elif suffixes.endswith(".tgz"):
        base_name = archive_path.name[: -len(".tgz")]
    elif suffixes.endswith(".tar"):
        base_name = archive_path.name[: -len(".tar")]
    else:
        base_name = archive_path.stem

    # Prefer nested directory when env_flag is parseable
    try:
        name, version = parse_env_spec(env_flag)
        env_dir = output_dir_path / name / version
    except Exception:
        env_dir = output_dir_path / base_name
    env_dir.mkdir(parents=True, exist_ok=True)

    try:
        if tarfile.is_tarfile(archive_path):
            with tarfile.open(archive_path, "r:*") as tar:
                # NOTE(review): extractall without a member filter trusts the
                # archive; a malicious hub could use path traversal — consider
                # the tarfile "data" filter (Python 3.12+).
                tar.extractall(env_dir)
            # Archive is removed only after successful extraction.
            archive_path.unlink(missing_ok=True)
            print(f"[SUCCESS] Downloaded and extracted to {env_dir}")
        else:
            print(f"[WARN] Downloaded file is not a tar archive, kept as {archive_path}")
            env_dir = archive_path
    except Exception as e:
        # Best-effort: keep the raw archive around when extraction fails.
        print_error(f"Failed to extract archive: {e}")
        env_dir = archive_path

    return env_dir
205
+
206
+