vuer-cli 0.0.4-py3-none-any.whl → 0.0.5-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- vuer_cli/add.py +66 -68
- vuer_cli/envs_publish.py +335 -309
- vuer_cli/envs_pull.py +177 -170
- vuer_cli/login.py +459 -0
- vuer_cli/main.py +7 -2
- vuer_cli/remove.py +84 -84
- vuer_cli/scripts/demcap.py +19 -15
- vuer_cli/scripts/mcap_playback.py +661 -0
- vuer_cli/scripts/minimap.py +113 -210
- vuer_cli/scripts/viz_ptc_cams.py +1 -1
- vuer_cli/scripts/viz_ptc_proxie.py +1 -1
- vuer_cli/sync.py +314 -308
- vuer_cli/upgrade.py +118 -126
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/METADATA +36 -6
- vuer_cli-0.0.5.dist-info/RECORD +22 -0
- vuer_cli/scripts/vuer_ros_bridge.py +0 -210
- vuer_cli-0.0.4.dist-info/RECORD +0 -21
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/WHEEL +0 -0
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/entry_points.txt +0 -0
- {vuer_cli-0.0.4.dist-info → vuer_cli-0.0.5.dist-info}/licenses/LICENSE +0 -0
vuer_cli/envs_publish.py
CHANGED
@@ -11,361 +11,387 @@ from typing import Any, Dict, List
 
 from params_proto import EnvVar, proto
 
-from .utils import is_dry_run, print_error, spinner
-
+from .utils import is_dry_run, normalize_env_spec, print_error, spinner
 
 # -- Configuration with environment variable defaults --
 
+
 @proto.prefix
 class Hub:
-
+    """Vuer Hub connection settings."""
 
-
-
+    url: str = EnvVar("VUER_HUB_URL", default="")  # Base URL of the Vuer Hub API
+    auth_token: str = EnvVar(
+        "VUER_AUTH_TOKEN", default=""
+    )  # JWT token for authentication
 
+    @staticmethod
+    def get_auth_token() -> str:
+        """Get auth token from environment variable or credentials file.
 
-
+        Returns:
+            Auth token string, or empty string if not found
+        """
+        # First try environment variable
+        if Hub.auth_token:
+            return Hub.auth_token
 
-
-
-
+        # Fall back to credentials file
+        from .login import load_credentials
+        credentials = load_credentials()
+        return credentials.get("access_token", "")
 
-    Reads environment.json, creates tgz archive, and uploads to the hub.
-    """
 
-
-    timeout: int = 300  # Request timeout in seconds
-    tag: str = "latest"  # Version tag
-    dry_run: bool = False  # Simulate without uploading
-
-    def run(self) -> int:
-        """Execute envs-publish command."""
-        try:
-            dry_run = self.dry_run or is_dry_run()
-
-            if not dry_run:
-                if not Hub.url:
-                    raise RuntimeError(
-                        "Missing VUER_HUB_URL. Please set the VUER_HUB_URL environment variable "
-                        "or pass --hub.url on the command line."
-                    )
-                if not Hub.auth_token:
-                    raise RuntimeError(
-                        "Missing VUER_AUTH_TOKEN. Please set the VUER_AUTH_TOKEN environment "
-                        "variable or pass --hub.auth-token on the command line."
-                    )
-
-            print(f"[INFO] Reading environment.json from {self.directory}...")
-            metadata, envs_metadata = parse_environments_json(self.directory)
-            print(f"[INFO] Found package: {metadata['name']}/{metadata['version']}")
-
-            # Validate dependencies if present
-            dependencies = extract_dependencies(envs_metadata)
-            if dependencies:
-                print(f"[INFO] Validating {len(dependencies)} dependencies...")
-                validate_dependencies(dependencies, dry_run, Hub.url, Hub.auth_token)
-                print("[INFO] All dependencies are valid.")
-            else:
-                print("[INFO] No dependencies to validate.")
-
-            print("[INFO] Creating tgz archive...")
-            archive_path = create_tgz_archive(self.directory, metadata)
-            print(f"[INFO] Archive created: {archive_path}")
-
-            publish_to_registry(
-                archive_path=archive_path,
-                metadata=metadata,
-                envs_metadata=envs_metadata,
-                hub_url=Hub.url,
-                auth_token=Hub.auth_token,
-                timeout=self.timeout,
-                dry_run=dry_run,
-            )
-
-            return 0
-        except FileNotFoundError as e:
-            print_error(str(e))
-            return 1
-        except ValueError as e:
-            print_error(str(e))
-            return 1
-        except RuntimeError as e:
-            # RuntimeError from validate_dependencies already prints error message
-            # Only print if it wasn't already printed
-            if "Dependency validation failed" not in str(e):
-                print_error(str(e))
-            return 1
-        except Exception as e:
-            print_error(f"Unexpected error: {e}")
-            return 1
+# -- Subcommand dataclass --
 
 
-
+@dataclass
+class EnvsPublish:
+    """Publish environment to registry (npm-style).
 
-
-
+    Reads environment.json, creates tgz archive, and uploads to the hub.
+    """
 
-
-
-
-
-    if not envs_path.exists():
-        raise FileNotFoundError(f"environment.json not found in {directory}")
+    directory: str = "."  # Directory containing environment.json
+    timeout: int = 300  # Request timeout in seconds
+    tag: str = "latest"  # Version tag
+    dry_run: bool = False  # Simulate without uploading
 
+    def __call__(self) -> int:
+        """Execute envs-publish command."""
         try:
-
-
-
-
-
-
-
-
-
-
-
-
+            dry_run = self.dry_run or is_dry_run()
+
+            # Get auth token (from env or credentials file)
+            auth_token = Hub.get_auth_token() if not dry_run else ""
+
+            if not dry_run:
+                if not Hub.url:
+                    raise RuntimeError(
+                        "Missing VUER_HUB_URL. Please set the VUER_HUB_URL environment variable "
+                        "or pass --hub.url on the command line."
+                    )
+                if not auth_token:
+                    raise RuntimeError(
+                        "Missing VUER_AUTH_TOKEN. Please run 'vuer login' to authenticate, "
+                        "or set the VUER_AUTH_TOKEN environment variable."
+                    )
+
+            print(f"[INFO] Reading environment.json from {self.directory}...")
+            metadata, envs_metadata = parse_environments_json(self.directory)
+            print(f"[INFO] Found package: {metadata['name']}/{metadata['version']}")
+
+            # Validate dependencies if present
+            dependencies = extract_dependencies(envs_metadata)
+            if dependencies:
+                print(f"[INFO] Validating {len(dependencies)} dependencies...")
+                validate_dependencies(dependencies, dry_run, Hub.url, auth_token)
+                print("[INFO] All dependencies are valid.")
+            else:
+                print("[INFO] No dependencies to validate.")
+
+            print("[INFO] Creating tgz archive...")
+            archive_path = create_tgz_archive(self.directory, metadata)
+            print(f"[INFO] Archive created: {archive_path}")
+
+            publish_to_registry(
+                archive_path=archive_path,
+                metadata=metadata,
+                envs_metadata=envs_metadata,
+                hub_url=Hub.url,
+                auth_token=auth_token,
+                timeout=self.timeout,
+                dry_run=dry_run,
+            )
+
+            return 0
+        except FileNotFoundError as e:
+            print_error(str(e))
+            return 1
+        except ValueError as e:
+            print_error(str(e))
+            return 1
+        except RuntimeError as e:
+            # RuntimeError from validate_dependencies already prints error message
+            # Only print if it wasn't already printed
+            if "Dependency validation failed" not in str(e):
+                print_error(str(e))
+            return 1
+        except Exception as e:
+            print_error(f"Unexpected error: {e}")
+            return 1
 
-    if not metadata["name"]:
-        raise ValueError("environment.json must contain 'name' field")
-    if not metadata["version"]:
-        raise ValueError("environment.json must contain 'version' field")
 
-
+# -- Helper functions --
 
 
-def
-
+def parse_environments_json(directory: str) -> tuple[Dict[str, Any], Dict[str, Any]]:
+    """Parse environment.json and extract metadata plus full content.
 
-
-
+    Returns:
+        (metadata, full_data)
+    """
+    envs_path = Path(directory) / "environment.json"
+    if not envs_path.exists():
+        raise FileNotFoundError(f"environment.json not found in {directory}")
 
-
-
-
-
-
-    if not deps_dict or not isinstance(deps_dict, dict):
-        return []
+    try:
+        with envs_path.open("r", encoding="utf-8") as f:
+            data = json.load(f)
+    except json.JSONDecodeError as e:
+        raise ValueError(f"Invalid environment.json: {e}") from e
 
-
-
-
-
-
+    metadata = {
+        "name": data.get("name", ""),
+        "version": data.get("version", ""),
+        "description": data.get("description", ""),
+        "visibility": data.get("visibility", "PUBLIC"),
+        "env_type": data.get("env-type", "") or data.get("env_type", ""),
+    }
 
-
+    if not metadata["name"]:
+        raise ValueError("environment.json must contain 'name' field")
+    if not metadata["version"]:
+        raise ValueError("environment.json must contain 'version' field")
 
+    return metadata, data
 
-def validate_dependencies(
-    dependencies: List[str],
-    dry_run: bool,
-    hub_url: str,
-    auth_token: str,
-) -> None:
-    """Validate dependencies with backend API.
 
-
-
-        dry_run: Whether to run in dry-run mode
-        hub_url: Vuer Hub base URL
-        auth_token: Authentication token
+def extract_dependencies(envs_metadata: Dict[str, Any]) -> List[str]:
+    """Extract dependencies from environment.json and convert to list format.
 
-
-
-    """
-    if dry_run or is_dry_run():
-        print("[INFO] (dry-run) Validating dependencies (simulated)...")
-        return
+    Args:
+        envs_metadata: Full environment.json content
 
-
-
-
-
+    Returns:
+        List of dependency specs like ["some-dependency/^1.2.3", ...]
+        Returns empty list if no dependencies or dependencies is empty.
+    """
+    deps_dict = envs_metadata.get("dependencies", {})
+    if not deps_dict or not isinstance(deps_dict, dict):
+        return []
 
-
+    dependencies = []
+    for name, version_spec in deps_dict.items():
+        if not isinstance(version_spec, str):
+            version_spec = str(version_spec)
+        dependencies.append(normalize_env_spec(f"{name}/{version_spec}"))
 
-
-    headers = {}
-    if auth_token:
-        headers["Authorization"] = f"Bearer {auth_token}"
-    headers["Content-Type"] = "application/json"
+    return dependencies
 
-    payload = {"name_versionId_list": dependencies}
 
+def validate_dependencies(
+    dependencies: List[str],
+    dry_run: bool,
+    hub_url: str,
+    auth_token: str,
+) -> None:
+    """Validate dependencies with backend API.
+
+    Args:
+        dependencies: List of dependency specs like ["name/version", ...]
+        dry_run: Whether to run in dry-run mode
+        hub_url: Vuer Hub base URL
+        auth_token: Authentication token
+
+    Raises:
+        RuntimeError: If validation fails (non-200 status or error in response)
+    """
+    if dry_run or is_dry_run():
+        print("[INFO] (dry-run) Validating dependencies (simulated)...")
+        return
+
+    if not hub_url:
+        raise RuntimeError(
+            "Missing VUER_HUB_URL. Cannot validate dependencies without hub URL."
+        )
+
+    import requests
+
+    url = f"{hub_url.rstrip('/')}/environments/dependencies"
+    headers = {}
+    if auth_token:
+        headers["Authorization"] = f"Bearer {auth_token}"
+    headers["Content-Type"] = "application/json"
+
+    payload = {"name_versionId_list": dependencies}
+
+    try:
+        response = requests.post(url, json=payload, headers=headers, timeout=300)
+    except requests.exceptions.RequestException as e:
+        raise RuntimeError(f"Failed to validate dependencies: {e}") from e
+
+    status = response.status_code
+
+    # Handle non-200 status codes
+    if status != 200:
+        error_msg = ""
         try:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-    # Status 200: check for error field in response body
-    try:
-        data = response.json()
-        if isinstance(data, dict) and "error" in data:
-            error_msg = data["error"]
-            print_error(f"Dependency validation failed: {error_msg}")
-            raise RuntimeError(f"Dependency validation failed: {error_msg}")
-    except (json.JSONDecodeError, ValueError):
-        # Response is not JSON or doesn't have error field, assume success
-        pass
+            data = response.json()
+            if isinstance(data, dict):
+                error_msg = data.get("error") or data.get("message", "")
+                if not error_msg:
+                    error_msg = json.dumps(data, ensure_ascii=False)
+            else:
+                error_msg = json.dumps(data, ensure_ascii=False)
+        except Exception:
+            text = (response.text or "").strip()
+            error_msg = text if text else "Unknown error"
+
+        if error_msg:
+            print_error(f"Dependency validation failed ({status}): {error_msg}")
+        else:
+            print_error(f"Dependency validation failed ({status})")
+        raise RuntimeError(f"Dependency validation failed with status {status}")
+
+    # Status 200: check for error field in response body
+    try:
+        data = response.json()
+        if isinstance(data, dict) and "error" in data:
+            error_msg = data["error"]
+            print_error(f"Dependency validation failed: {error_msg}")
+            raise RuntimeError(f"Dependency validation failed: {error_msg}")
+    except (json.JSONDecodeError, ValueError):
+        # Response is not JSON or doesn't have error field, assume success
+        pass
 
 
 def create_tgz_archive(directory: str, metadata: Dict[str, Any]) -> str:
-
-
-
-
+    """Create a tgz archive from environment files."""
+    archive_name = f"{metadata['name']}-{metadata['version']}.tgz"
+    temp_dir = Path(tempfile.gettempdir())
+    archive_path = str(temp_dir / archive_name)
 
-
+    directory_path = Path(directory).resolve()
 
-
-
-
-
-
+    with tarfile.open(archive_path, "w:gz") as tar:
+        for file_path in directory_path.rglob("*"):
+            if file_path.is_file():
+                arcname = file_path.relative_to(directory_path)
+                tar.add(file_path, arcname=arcname)
 
-
+    return archive_path
 
 
-def upload_with_progress(
-
-
-
-
-
+def upload_with_progress(
+    archive_path: str, metadata: Dict[str, Any], timeout: int
+) -> None:
+    """Simulate an upload in dry-run mode."""
+    file_path = Path(archive_path)
+    total_size = file_path.stat().st_size
+    print(f"[INFO] (dry-run) Uploading {file_path.name} ({total_size} bytes)...")
+    time.sleep(min(2.0, max(0.1, total_size / (10 * 1024 * 1024))))
 
 
 def publish_to_registry(
-
-
-
-
-
-
-
+    archive_path: str,
+    metadata: Dict[str, Any],
+    envs_metadata: Dict[str, Any],
+    hub_url: str,
+    auth_token: str,
+    timeout: int,
+    dry_run: bool,
 ) -> None:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        if isinstance(data, dict):
-            msg = data.get("message")
-            err = data.get("error")
-            if msg:
-                inline_msg = str(msg)
-            elif err:
-                inline_msg = str(err)
-            else:
-                inline_msg = json.dumps(data, ensure_ascii=False)
-        else:
-            inline_msg = json.dumps(data, ensure_ascii=False)
-    except Exception:
-        inline_msg = text
-
-    inline_msg = (inline_msg or "").strip()
-    if inline_msg:
-        raise RuntimeError(f"Publish failed ({status}): {inline_msg}")
-    raise RuntimeError(f"Publish failed ({status})")
-
-    env_id = None
-    env_name = metadata.get("name")
-    env_version = metadata.get("version")
+    """Publish package to registry via API."""
+    print(f"[INFO] Publishing {metadata['name']}/{metadata['version']} to registry...")
+    print(f"[INFO] Archive: {archive_path}")
+    print(f"[INFO] Metadata: {json.dumps(metadata, indent=2)}")
+    print(f"[INFO] environment.json: {json.dumps(envs_metadata, indent=2)}")
+    print(f"[INFO] Hub URL: {hub_url}")
+    print(f"[INFO] Timeout: {timeout}s")
+
+    if dry_run or is_dry_run():
+        upload_with_progress(archive_path, metadata, timeout)
+        print(
+            f"[SUCCESS] (dry-run) Published {metadata['name']}/{metadata['version']} (no network call)."
+        )
+        return
+
+    # Import requests lazily to avoid SSL/cert loading in restricted envs.
+    import requests
+
+    url = f"{hub_url.rstrip('/')}/environments/upload"
+    file_path = Path(archive_path)
+
+    with file_path.open("rb") as f:
+        files = {
+            "package": (file_path.name, f, "application/octet-stream"),
+        }
+        data = {
+            "name": str(metadata["name"]),
+            "versionId": str(metadata["version"]),
+            "description": str(metadata.get("description", "")),
+            "type": str(metadata.get("env_type", "")),
+            "visibility": str(metadata.get("visibility", "PUBLIC")),
+        }
+        # Send full environment.json content as metadata field.
+        data["metadata"] = json.dumps(envs_metadata, ensure_ascii=False)
+
+        headers = {}
+        if auth_token:
+            headers["Authorization"] = f"Bearer {auth_token}"
+
+        stop_event = threading.Event()
+        spinner_thread = threading.Thread(
+            target=spinner,
+            args=(f"[INFO] Uploading {file_path.name} ", stop_event),
+            daemon=True,
+        )
+        spinner_thread.start()
+        try:
+            response = requests.post(
+                url,
+                data=data,
+                files=files,
+                headers=headers,
+                timeout=timeout,
+            )
+        finally:
+            stop_event.set()
+            spinner_thread.join()
+
+    status = response.status_code
+    text = (response.text or "").strip()
+
+    if status >= 300:
+        inline_msg = ""
         try:
-
-
-
-
-
+            data = response.json()
+            if isinstance(data, dict):
+                msg = data.get("message")
+                err = data.get("error")
+                if msg:
+                    inline_msg = str(msg)
+                elif err:
+                    inline_msg = str(err)
+                else:
+                    inline_msg = json.dumps(data, ensure_ascii=False)
+            else:
+                inline_msg = json.dumps(data, ensure_ascii=False)
         except Exception:
-
-
-
-if
-
-
-
-
-
-
+            inline_msg = text
+
+        inline_msg = (inline_msg or "").strip()
+        if inline_msg:
+            raise RuntimeError(f"Publish failed ({status}): {inline_msg}")
+        raise RuntimeError(f"Publish failed ({status})")
+
+    env_id = None
+    env_name = metadata.get("name")
+    env_version = metadata.get("version")
+    try:
+        payload = response.json()
+        env = payload.get("environment", payload) if isinstance(payload, dict) else {}
+        env_id = env.get("environmentId") or env.get("id")
+        env_name = env.get("name", env_name)
+        env_version = env.get("versionId", env_version)
+    except Exception:
+        pass
+
+    print("\n=== Publish Success ===")
+    if env_id:
+        print(f"ID : {env_id}")
+    print(f"Name : {env_name}")
+    print(f"Version : {env_version}")
+    visibility = metadata.get("visibility", "PUBLIC")
+    print(f"Visibility: {visibility}")
|