vuer-cli 0.0.4__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vuer_cli/add.py +66 -68
- vuer_cli/envs_publish.py +335 -309
- vuer_cli/envs_pull.py +177 -170
- vuer_cli/login.py +459 -0
- vuer_cli/main.py +7 -2
- vuer_cli/remove.py +84 -84
- vuer_cli/scripts/demcap.py +19 -15
- vuer_cli/scripts/mcap_playback.py +661 -0
- vuer_cli/scripts/minimap.py +113 -210
- vuer_cli/scripts/viz_ptc_cams.py +1 -1
- vuer_cli/scripts/viz_ptc_proxie.py +1 -1
- vuer_cli/sync.py +314 -308
- vuer_cli/upgrade.py +118 -126
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/METADATA +36 -6
- vuer_cli-0.0.5.dist-info/RECORD +22 -0
- vuer_cli/scripts/vuer_ros_bridge.py +0 -210
- vuer_cli-0.0.4.dist-info/RECORD +0 -21
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/WHEEL +0 -0
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/entry_points.txt +0 -0
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/licenses/LICENSE +0 -0
vuer_cli/envs_pull.py
CHANGED
```diff
@@ -8,199 +8,206 @@ from typing import Dict, Iterable, Optional
 from tqdm import tqdm
 
 from .envs_publish import Hub
-from .utils import is_dry_run,
-… (old line 12 not captured in this extract)
+from .utils import is_dry_run, parse_env_spec, print_error
 
 # -- Subcommand dataclass --
 
+
 @dataclass
 class EnvsPull:
-… (old class body, lines 18-54, not captured in this extract)
+    """Download an environment from the registry by ID or name/version."""
+
+    flag: str = ""  # Environment identifier (ID or name/version) to download
+    output: str = "downloads"  # Destination directory
+    filename: Optional[str] = None  # Override saved filename
+    version: Optional[str] = None  # Specific version to download
+    timeout: int = 300  # Request timeout in seconds
+    skip_progress: bool = False  # Disable progress bar
+
+    def __call__(self) -> int:
+        """Execute envs-pull command."""
+        try:
+            if not is_dry_run():
+                if not Hub.url:
+                    raise RuntimeError(
+                        "Missing VUER_HUB_URL. Please set the VUER_HUB_URL environment variable "
+                        "or pass --hub.url on the command line."
+                    )
+                # Try to get token from credentials file if not in environment
+                auth_token = Hub.get_auth_token()
+                if not auth_token:
+                    raise RuntimeError(
+                        "Missing VUER_AUTH_TOKEN. Please run 'vuer login' to authenticate, "
+                        "or set the VUER_AUTH_TOKEN environment variable."
+                    )
+
+            print(f"[INFO] Pulling environment {self.flag} ...")
+            pull_from_registry(
+                env_flag=self.flag,
+                output_dir=self.output,
+                filename=self.filename,
+                version=self.version,
+                timeout=self.timeout,
+                skip_progress=self.skip_progress,
+            )
+            return 0
+        except Exception as e:
+            print_error(str(e))
+            return 1
 
 
 # -- Helper functions --
 
+
 def download_with_progress(
-… (old parameter list, lines 60-63, not captured in this extract)
+    destination: Path,
+    total_size: int,
+    stream: Iterable[bytes],
+    skip_progress: bool,
 ) -> None:
-… (old function body, lines 65-84, not captured in this extract)
+    """Write streamed bytes to destination with an optional progress bar."""
+    destination.parent.mkdir(parents=True, exist_ok=True)
+
+    if skip_progress:
+        with destination.open("wb") as f:
+            for chunk in stream:
+                f.write(chunk)
+        return
+
+    with (
+        destination.open("wb") as f,
+        tqdm(
+            total=total_size or None,
+            unit="B",
+            unit_scale=True,
+            unit_divisor=1024,
+            desc=f"Downloading {destination.name}",
+            ncols=100,
+        ) as pbar,
+    ):
+        for chunk in stream:
+            f.write(chunk)
+            pbar.update(len(chunk))
 
 
 def extract_filename_from_headers(headers: Dict[str, str], default_name: str) -> str:
-… (old lines 88-90 not captured in this extract)
-        return default_name
-
-    if "filename*=" in content_disposition:
-        part = content_disposition.split("filename*=")[-1].strip()
-        if part.lower().startswith("utf-8''"):
-            encoded = part[7:]
-        else:
-            encoded = part
-        encoded = encoded.split(";")[0].strip().strip('"')
-        try:
-            from urllib.parse import unquote
-            candidate = unquote(encoded)
-            if candidate:
-                return candidate
-        except Exception:
-            pass
-
-    if "filename=" in content_disposition:
-        candidate = content_disposition.split("filename=")[-1].strip().strip('"')
-        candidate = candidate.split(";")[0].strip()
-        if candidate:
-            return candidate
-
+    """Extract filename from Content-Disposition header (RFC 5987 style)."""
+    content_disposition = headers.get("Content-Disposition", "")
+    if not content_disposition:
         return default_name
 
+    if "filename*=" in content_disposition:
+        part = content_disposition.split("filename*=")[-1].strip()
+        if part.lower().startswith("utf-8''"):
+            encoded = part[7:]
+        else:
+            encoded = part
+        encoded = encoded.split(";")[0].strip().strip('"')
+        try:
+            from urllib.parse import unquote
+
+            candidate = unquote(encoded)
+            if candidate:
+                return candidate
+        except Exception:
+            pass
+
+    if "filename=" in content_disposition:
+        candidate = content_disposition.split("filename=")[-1].strip().strip('"')
+        candidate = candidate.split(";")[0].strip()
+        if candidate:
+            return candidate
+
+    return default_name
+
 
 def pull_from_registry(
-… (old parameter list, lines 118-123, not captured in this extract)
+    env_flag: str,
+    output_dir: str,
+    filename: Optional[str],
+    version: Optional[str],
+    timeout: int,
+    skip_progress: bool,
 ) -> Path:
-… (old lines 125-136 not captured in this extract)
-    output_dir_path = Path(output_dir).expanduser().resolve()
-    output_dir_path.mkdir(parents=True, exist_ok=True)
-
-    if is_dry_run():
-        # If env_flag is a name/version, create nested dirs name/version
-        try:
-            name, version = parse_env_spec(env_flag)
-            env_dir = output_dir_path / name / version
-        except Exception:
-            env_dir = output_dir_path / str(env_flag)
-        env_dir.mkdir(parents=True, exist_ok=True)
-        (env_dir / "README.txt").write_text("Dry-run environment content\n")
-        print(f"[SUCCESS] (dry-run) Downloaded to {env_dir}")
-        return env_dir
-
-    # Lazy import requests to avoid SSL/cert issues in dry-run/tests.
-    import requests
-
-    with requests.get(url, headers=headers, stream=True, timeout=timeout) as resp:
-        resp.raise_for_status()
-        total_size = int(resp.headers.get("Content-Length", 0))
-        # Use filesystem-safe archive name when env_flag contains '/'
-        # When env_flag is name/version, use name-version for filename
-        try:
-            name, version = parse_env_spec(env_flag)
-            safe_name = f"{name}-{version}"
-        except Exception:
-            safe_name = str(env_flag)
-        default_archive_name = f"{safe_name}.tgz"
-        archive_name = filename or extract_filename_from_headers(resp.headers, default_archive_name)
-        archive_path = output_dir_path / archive_name
-
-        stream = (chunk for chunk in resp.iter_content(chunk_size=1024 * 512) if chunk)
-        download_with_progress(archive_path, total_size, stream, skip_progress)
-
-        # Derive target directory name from archive filename
-        suffixes = "".join(archive_path.suffixes)
-        if suffixes.endswith(".tar.gz"):
-            base_name = archive_path.name[: -len(".tar.gz")]
-        elif suffixes.endswith(".tgz"):
-            base_name = archive_path.name[: -len(".tgz")]
-        elif suffixes.endswith(".tar"):
-            base_name = archive_path.name[: -len(".tar")]
-        else:
-            base_name = archive_path.stem
+    """Download environment by ID or name/version and extract archive into a directory."""
+    hub_url = Hub.url
+    auth_token = Hub.get_auth_token()
+    headers = {"Authorization": f"Bearer {auth_token}"} if auth_token else {}
+
+    # New API: /api/environments/download?environment_id=<id or name@version>
+    from urllib.parse import urlencode
+
+    base_url = f"{hub_url.rstrip('/')}/environments/download"
+    query = urlencode({"environment_id": env_flag})
+    url = f"{base_url}?{query}"
 
-
+    output_dir_path = Path(output_dir).expanduser().resolve()
+    output_dir_path.mkdir(parents=True, exist_ok=True)
+
+    if is_dry_run():
+        # If env_flag is a name/version, create nested dirs name/version
         try:
-… (old lines 185-186 not captured in this extract)
+            name, version = parse_env_spec(env_flag)
+            env_dir = output_dir_path / name / version
         except Exception:
-… (old line 188 not captured in this extract)
+            env_dir = output_dir_path / str(env_flag)
         env_dir.mkdir(parents=True, exist_ok=True)
-… (old lines 190-191 not captured in this extract)
-            if tarfile.is_tarfile(archive_path):
-                with tarfile.open(archive_path, "r:*") as tar:
-                    tar.extractall(env_dir)
-                archive_path.unlink(missing_ok=True)
-                print(f"[SUCCESS] Downloaded and extracted to {env_dir}")
-            else:
-                print(f"[WARN] Downloaded file is not a tar archive, kept as {archive_path}")
-                env_dir = archive_path
-        except Exception as e:
-            print_error(f"Failed to extract archive: {e}")
-            env_dir = archive_path
-
+        (env_dir / "README.txt").write_text("Dry-run environment content\n")
+        print(f"[SUCCESS] (dry-run) Downloaded to {env_dir}")
         return env_dir
 
+    # Lazy import requests to avoid SSL/cert issues in dry-run/tests.
+    import requests
+
+    with requests.get(url, headers=headers, stream=True, timeout=timeout) as resp:
+        resp.raise_for_status()
+        total_size = int(resp.headers.get("Content-Length", 0))
+        # Use filesystem-safe archive name when env_flag contains '/'
+        # When env_flag is name/version, use name-version for filename
+        try:
+            name, version = parse_env_spec(env_flag)
+            safe_name = f"{name}-{version}"
+        except Exception:
+            safe_name = str(env_flag)
+        default_archive_name = f"{safe_name}.tgz"
+        archive_name = filename or extract_filename_from_headers(
+            resp.headers, default_archive_name
+        )
+        archive_path = output_dir_path / archive_name
+
+        stream = (chunk for chunk in resp.iter_content(chunk_size=1024 * 512) if chunk)
+        download_with_progress(archive_path, total_size, stream, skip_progress)
+
+    # Derive target directory name from archive filename
+    suffixes = "".join(archive_path.suffixes)
+    if suffixes.endswith(".tar.gz"):
+        base_name = archive_path.name[: -len(".tar.gz")]
+    elif suffixes.endswith(".tgz"):
+        base_name = archive_path.name[: -len(".tgz")]
+    elif suffixes.endswith(".tar"):
+        base_name = archive_path.name[: -len(".tar")]
+    else:
+        base_name = archive_path.stem
+
+    # Prefer nested directory when env_flag is parseable
+    try:
+        name, version = parse_env_spec(env_flag)
+        env_dir = output_dir_path / name / version
+    except Exception:
+        env_dir = output_dir_path / base_name
+    env_dir.mkdir(parents=True, exist_ok=True)
+
+    try:
+        if tarfile.is_tarfile(archive_path):
+            with tarfile.open(archive_path, "r:*") as tar:
+                tar.extractall(env_dir)
+            archive_path.unlink(missing_ok=True)
+            print(f"[SUCCESS] Downloaded and extracted to {env_dir}")
+        else:
+            print(f"[WARN] Downloaded file is not a tar archive, kept as {archive_path}")
+            env_dir = archive_path
+    except Exception as e:
+        print_error(f"Failed to extract archive: {e}")
+        env_dir = archive_path
 
+    return env_dir
```