trossen-cloud-cli 0.1.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- trossen_cloud_cli/__init__.py +7 -0
- trossen_cloud_cli/__main__.py +6 -0
- trossen_cloud_cli/api_client.py +269 -0
- trossen_cloud_cli/auth.py +172 -0
- trossen_cloud_cli/cli.py +109 -0
- trossen_cloud_cli/commands/__init__.py +1 -0
- trossen_cloud_cli/commands/auth.py +41 -0
- trossen_cloud_cli/commands/config.py +88 -0
- trossen_cloud_cli/commands/datasets.py +505 -0
- trossen_cloud_cli/commands/models.py +380 -0
- trossen_cloud_cli/commands/training_jobs.py +349 -0
- trossen_cloud_cli/config.py +125 -0
- trossen_cloud_cli/download.py +178 -0
- trossen_cloud_cli/output.py +58 -0
- trossen_cloud_cli/progress.py +270 -0
- trossen_cloud_cli/types.py +159 -0
- trossen_cloud_cli/upload.py +696 -0
- trossen_cloud_cli-0.1.2.dist-info/METADATA +131 -0
- trossen_cloud_cli-0.1.2.dist-info/RECORD +22 -0
- trossen_cloud_cli-0.1.2.dist-info/WHEEL +4 -0
- trossen_cloud_cli-0.1.2.dist-info/entry_points.txt +2 -0
- trossen_cloud_cli-0.1.2.dist-info/licenses/LICENSE +29 -0
|
@@ -0,0 +1,505 @@
|
|
|
1
|
+
"""Dataset commands."""
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import json
|
|
5
|
+
import re
|
|
6
|
+
import shutil
|
|
7
|
+
import tempfile
|
|
8
|
+
from pathlib import Path
|
|
9
|
+
from typing import Annotated
|
|
10
|
+
|
|
11
|
+
import typer
|
|
12
|
+
from rich.table import Table
|
|
13
|
+
|
|
14
|
+
from ..api_client import ApiClient, ApiError
|
|
15
|
+
from ..auth import require_auth
|
|
16
|
+
from ..download import download_dataset
|
|
17
|
+
from ..output import console, print_error, print_info, print_success
|
|
18
|
+
from ..types import DatasetType, PrivacyLevel
|
|
19
|
+
from ..upload import UploadError, create_and_upload_dataset
|
|
20
|
+
|
|
21
|
+
# Typer sub-application that all `datasets` subcommands below register onto.
app = typer.Typer(help="Manage datasets")
|
|
22
|
+
|
|
23
|
+
|
|
24
|
+
def is_user_name_format(identifier: str) -> bool:
    """
    Check if identifier is in <user>/<name> format.

    A leading slash disqualifies the identifier; otherwise any string
    containing a slash is treated as a user/name path.
    """
    if identifier.startswith("/"):
        return False
    return "/" in identifier
|
|
29
|
+
|
|
30
|
+
|
|
31
|
+
async def resolve_dataset_identifier(client: "ApiClient", identifier: str) -> dict:
    """
    Resolve a dataset identifier (UUID or user/name) to dataset info.

    Args:
        client: Open API client used for the lookup.
        identifier: Either a dataset UUID or a ``<user>/<name>`` path.

    Returns:
        The dataset record returned by the API.
    """
    # Both identifier formats are served by the same endpoint, so no
    # branching is needed (the original if/else had two identical bodies).
    return await client.get(f"/datasets/{identifier}")
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
@app.command("upload")
def upload(
    path: Annotated[
        Path,
        typer.Argument(
            help="Path to the dataset directory or file to upload",
            exists=True,
            resolve_path=True,
        ),
    ],
    name: Annotated[
        str,
        typer.Option("--name", "-n", help="Dataset name"),
    ],
    dataset_type: Annotated[
        DatasetType,
        typer.Option("--type", "-t", help="Dataset type"),
    ] = DatasetType.RAW,
    privacy: Annotated[
        PrivacyLevel,
        typer.Option("--privacy", "-p", help="Privacy level"),
    ] = PrivacyLevel.PRIVATE,
    metadata: Annotated[
        str | None,
        typer.Option("--metadata", "-m", help="JSON metadata string"),
    ] = None,
) -> None:
    """
    Upload a dataset to Trossen Cloud.

    Creates the dataset record and uploads the contents of ``path``,
    then prints the new dataset's ID and name.  Exits with status 1 on
    invalid ``--metadata`` JSON, an UploadError, or Ctrl-C.
    """
    require_auth()

    # Parse metadata if provided
    metadata_dict = None
    if metadata:
        try:
            metadata_dict = json.loads(metadata)
        except json.JSONDecodeError:
            print_error("Invalid JSON metadata")
            raise typer.Exit(1)

    try:
        # create_and_upload_dataset is async; drive it to completion here.
        dataset = asyncio.run(
            create_and_upload_dataset(
                name=name,
                local_path=path,
                dataset_type=dataset_type.value,
                privacy=privacy.value,
                metadata=metadata_dict,
            )
        )
        console.print(f"[bold]ID:[/bold] {dataset['id']}")
        console.print(f"[bold]Name:[/bold] {name}")

    except KeyboardInterrupt:
        # Ctrl-C during upload is reported as a failed command.
        raise typer.Exit(1)
    except UploadError as e:
        print_error(str(e))
        raise typer.Exit(1)
|
|
102
|
+
|
|
103
|
+
|
|
104
|
+
def _parse_hf_repo_id(repo_id_or_url: str) -> str:
|
|
105
|
+
"""
|
|
106
|
+
Extract a HuggingFace repo ID from a URL or return as-is if already an ID.
|
|
107
|
+
|
|
108
|
+
Accepts:
|
|
109
|
+
- https://huggingface.co/datasets/org/name
|
|
110
|
+
- org/name
|
|
111
|
+
"""
|
|
112
|
+
match = re.match(r"https?://huggingface\.co/datasets/([^/]+/[^/]+?)(?:/.*)?$", repo_id_or_url)
|
|
113
|
+
if match:
|
|
114
|
+
return match.group(1)
|
|
115
|
+
if "/" in repo_id_or_url and not repo_id_or_url.startswith("http"):
|
|
116
|
+
return repo_id_or_url
|
|
117
|
+
raise typer.BadParameter(
|
|
118
|
+
f"Invalid HuggingFace dataset: '{repo_id_or_url}'. "
|
|
119
|
+
"Use a URL (https://huggingface.co/datasets/org/name) or repo ID (org/name)."
|
|
120
|
+
)
|
|
121
|
+
|
|
122
|
+
|
|
123
|
+
@app.command("import-hf")
def import_hf(
    repo: Annotated[
        str,
        typer.Argument(
            help="HuggingFace dataset URL or repo ID (e.g., org/dataset-name)",
        ),
    ],
    name: Annotated[
        str | None,
        typer.Option("--name", "-n", help="Dataset name (defaults to HF repo name)"),
    ] = None,
    dataset_type: Annotated[
        DatasetType,
        typer.Option("--type", "-t", help="Dataset type"),
    ] = DatasetType.LEROBOT,
    privacy: Annotated[
        PrivacyLevel,
        typer.Option("--privacy", "-p", help="Privacy level"),
    ] = PrivacyLevel.PRIVATE,
    metadata: Annotated[
        str | None,
        typer.Option("--metadata", "-m", help="JSON metadata string"),
    ] = None,
    revision: Annotated[
        str | None,
        typer.Option("--revision", "-r", help="Git revision (branch, tag, or commit)"),
    ] = None,
    keep_local: Annotated[
        bool,
        typer.Option("--keep-local", help="Keep the downloaded files after upload"),
    ] = False,
) -> None:
    """
    Import a public HuggingFace dataset into Trossen Cloud.

    Downloads the dataset from HuggingFace Hub, then uploads it.
    """
    # Imported lazily so the rest of the CLI works even when
    # huggingface_hub is not installed.
    from huggingface_hub import snapshot_download
    from huggingface_hub.utils import HfHubHTTPError, RepositoryNotFoundError

    require_auth()

    repo_id = _parse_hf_repo_id(repo)
    # Default the Trossen dataset name to the HF repo's short name.
    dataset_name = name or repo_id.split("/")[-1]

    # Parse metadata
    metadata_dict = None
    if metadata:
        try:
            metadata_dict = json.loads(metadata)
        except json.JSONDecodeError:
            print_error("Invalid JSON metadata")
            raise typer.Exit(1)

    # Add HF source info to metadata
    hf_meta = {"huggingface_repo": repo_id}
    if revision:
        hf_meta["huggingface_revision"] = revision
    if metadata_dict:
        # User-supplied keys override the generated HF provenance keys.
        metadata_dict = {**hf_meta, **metadata_dict}
    else:
        metadata_dict = hf_meta

    # Download from HuggingFace
    tmp_dir = None
    try:
        print_info(f"Downloading from HuggingFace: {repo_id}")

        tmp_dir = tempfile.mkdtemp(prefix="trc_hf_")
        local_path = Path(
            snapshot_download(
                repo_id=repo_id,
                repo_type="dataset",
                revision=revision,
                local_dir=str(Path(tmp_dir) / dataset_name),
            )
        )

        print_success(f"Downloaded to {local_path}")

        # Upload to Trossen Cloud
        dataset = asyncio.run(
            create_and_upload_dataset(
                name=dataset_name,
                local_path=local_path,
                dataset_type=dataset_type.value,
                privacy=privacy.value,
                metadata=metadata_dict,
            )
        )
        console.print(f"[bold]ID:[/bold] {dataset['id']}")
        console.print(f"[bold]Name:[/bold] {dataset_name}")

    except RepositoryNotFoundError:
        print_error(f"HuggingFace dataset '{repo_id}' not found")
        raise typer.Exit(1)
    except HfHubHTTPError as e:
        print_error(f"HuggingFace download failed: {e}")
        raise typer.Exit(1)
    except KeyboardInterrupt:
        raise typer.Exit(1)
    except UploadError as e:
        print_error(str(e))
        raise typer.Exit(1)
    finally:
        # Remove the temp download unless the user asked to keep it.
        if tmp_dir and not keep_local:
            shutil.rmtree(tmp_dir, ignore_errors=True)
        elif tmp_dir:
            print_info(f"Local copy kept at: {tmp_dir}")
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
@app.command("download")
def download(
    dataset_id: Annotated[
        str,
        typer.Argument(help="Dataset ID (UUID or <user>/<name>) to download"),
    ],
    output_dir: Annotated[
        Path,
        typer.Argument(help="Output directory"),
    ],
) -> None:
    """
    Download a dataset from Trossen Cloud.
    """
    require_auth()

    async def _run() -> None:
        # Translate a <user>/<name> path (or UUID) into the canonical
        # dataset ID before fetching its contents.
        async with ApiClient() as client:
            record = await resolve_dataset_identifier(client, dataset_id)
            await download_dataset(record["id"], output_dir)

    try:
        asyncio.run(_run())
        console.print(f"[bold]Path:[/bold] {output_dir}")

    except KeyboardInterrupt:
        print_error("Download interrupted")
        raise typer.Exit(1)
    except ApiError as exc:
        if exc.status_code == 404:
            print_error(f"Dataset '{dataset_id}' not found")
        else:
            print_error(f"Download failed: {exc.message}")
        raise typer.Exit(1)
    except Exception as exc:
        # CLI boundary: surface any other failure as a non-zero exit.
        print_error(f"Download failed: {exc}")
        raise typer.Exit(1)
|
|
275
|
+
|
|
276
|
+
|
|
277
|
+
@app.command("view")
def view(
    path: Annotated[
        str,
        typer.Argument(help="Dataset path in <user>/<name> format (e.g., trossen/aloha-demo)"),
    ],
) -> None:
    """
    View a dataset by user/name path.
    """
    require_auth()

    # This command only accepts the user/name form, not a raw UUID.
    if not is_user_name_format(path):
        print_error("Invalid format. Use <user>/<name> (e.g., trossen/aloha-demo)")
        raise typer.Exit(1)

    async def _lookup():
        async with ApiClient() as client:
            return await resolve_dataset_identifier(client, path)

    try:
        _display_dataset_info(asyncio.run(_lookup()))

    except ApiError as exc:
        if exc.status_code == 404:
            msg = f"Dataset '{path}' not found"
        else:
            msg = f"Failed to get dataset: {exc.message}"
        print_error(msg)
        raise typer.Exit(1)
|
|
307
|
+
|
|
308
|
+
|
|
309
|
+
@app.command("list")
def list_datasets(
    mine: Annotated[
        bool,
        typer.Option("--mine", help="Show only your datasets"),
    ] = False,
    limit: Annotated[
        int,
        typer.Option("--limit", "-l", help="Maximum number of datasets to show"),
    ] = 20,
) -> None:
    """
    List datasets.
    """
    require_auth()

    async def _fetch_page():
        # --mine restricts the listing to the caller's own datasets.
        endpoint = "/datasets/me" if mine else "/datasets/"
        async with ApiClient() as client:
            return await client.get(endpoint, params={"limit": limit})

    try:
        payload = asyncio.run(_fetch_page())
        # The endpoint may return either a bare list or a paginated object.
        rows = payload if isinstance(payload, list) else payload.get("items", [])

        if not rows:
            print_info("No datasets found")
            return

        table = Table(title="Datasets", show_edge=False)
        for header, options in (
            ("ID", {"style": "table.id", "no_wrap": True}),
            ("Name", {"style": "bold"}),
            ("Type", {}),
            ("Privacy", {}),
        ):
            table.add_column(header, **options)

        for entry in rows:
            table.add_row(
                entry["id"],
                entry["name"],
                entry.get("type", "-"),
                entry.get("privacy", "-"),
            )

        console.print(table)

    except ApiError as exc:
        print_error(f"Failed to list datasets: {exc.message}")
        raise typer.Exit(1)
|
|
359
|
+
|
|
360
|
+
|
|
361
|
+
def _display_dataset_info(dataset: dict) -> None:
    """
    Display dataset information.

    Prints a heading followed by labelled fields; optional fields fall
    back to "-", and metadata is rendered as pretty-printed JSON.
    """
    console.print(f"\n[heading]Dataset: {dataset['name']}[/heading]\n")

    fields = (
        ("ID", dataset["id"]),
        ("Type", dataset.get("type", "-")),
        ("Privacy", dataset.get("privacy", "-")),
        ("Owner", dataset.get("user_id", "-")),
        ("Created", dataset.get("created_at", "-")),
    )
    for label, value in fields:
        console.print(f"[label]{label}:[/label] {value}")

    if dataset.get("updated_at"):
        console.print(f"[label]Updated:[/label] {dataset['updated_at']}")

    if dataset.get("dataset_metadata"):
        console.print("\n[label]Metadata:[/label]")
        console.print_json(data=dataset["dataset_metadata"])
|
|
378
|
+
|
|
379
|
+
|
|
380
|
+
@app.command("info")
def info(
    dataset_id: Annotated[
        str,
        typer.Argument(help="Dataset ID (UUID or <user>/<name>)"),
    ],
) -> None:
    """
    Get detailed information about a dataset.
    """
    require_auth()

    async def _lookup():
        async with ApiClient() as client:
            return await resolve_dataset_identifier(client, dataset_id)

    try:
        _display_dataset_info(asyncio.run(_lookup()))

    except ApiError as exc:
        if exc.status_code == 404:
            print_error(f"Dataset '{dataset_id}' not found")
        else:
            print_error(f"Failed to get dataset info: {exc.message}")
        raise typer.Exit(1)
|
|
406
|
+
|
|
407
|
+
|
|
408
|
+
@app.command("update")
def update(
    dataset_id: Annotated[
        str,
        typer.Argument(help="Dataset ID (UUID or <user>/<name>) to update"),
    ],
    name: Annotated[
        str | None,
        typer.Option("--name", "-n", help="New dataset name"),
    ] = None,
    privacy: Annotated[
        PrivacyLevel | None,
        typer.Option("--privacy", "-p", help="New privacy level"),
    ] = None,
    metadata: Annotated[
        str | None,
        typer.Option("--metadata", "-m", help="JSON metadata string"),
    ] = None,
) -> None:
    """
    Update a dataset's metadata.

    At least one of --name, --privacy, or --metadata must be supplied;
    only the provided fields are included in the PATCH request.
    """
    require_auth()

    # Build the PATCH payload from only the options the user provided.
    updates: dict = {}
    if name is not None:
        updates["name"] = name
    if privacy is not None:
        updates["privacy"] = privacy.value
    if metadata is not None:
        try:
            updates["dataset_metadata"] = json.loads(metadata)
        except json.JSONDecodeError:
            print_error("Invalid JSON metadata")
            raise typer.Exit(1)

    if not updates:
        print_error("No updates specified. Use --name, --privacy, or --metadata.")
        raise typer.Exit(1)

    async def do_update():
        async with ApiClient() as client:
            # Resolve <user>/<name> (or UUID) to the canonical dataset ID
            # before issuing the PATCH.
            dataset = await resolve_dataset_identifier(client, dataset_id)
            actual_id = dataset["id"]
            return await client.patch(f"/datasets/{actual_id}", json=updates)

    try:
        dataset = asyncio.run(do_update())
        print_success("Dataset updated successfully")
        _display_dataset_info(dataset)

    except ApiError as e:
        if e.status_code == 404:
            print_error(f"Dataset '{dataset_id}' not found")
        else:
            print_error(f"Failed to update dataset: {e.message}")
        raise typer.Exit(1)
|
|
465
|
+
|
|
466
|
+
|
|
467
|
+
@app.command("delete")
def delete(
    dataset_id: Annotated[
        str,
        typer.Argument(help="Dataset ID (UUID or <user>/<name>) to delete"),
    ],
    force: Annotated[
        bool,
        typer.Option("--force", "-f", help="Skip confirmation"),
    ] = False,
) -> None:
    """
    Delete a dataset.
    """
    require_auth()

    # Ask for confirmation unless --force was given; short-circuit keeps
    # the prompt from ever appearing in the forced case.
    if not force and not typer.confirm(f"Are you sure you want to delete dataset {dataset_id}?"):
        print_info("Cancelled")
        return

    async def _remove() -> None:
        async with ApiClient() as client:
            record = await resolve_dataset_identifier(client, dataset_id)
            await client.delete(f"/datasets/{record['id']}")

    try:
        asyncio.run(_remove())
        print_success("Dataset deleted successfully")

    except ApiError as exc:
        if exc.status_code == 404:
            print_error(f"Dataset '{dataset_id}' not found")
        else:
            print_error(f"Failed to delete dataset: {exc.message}")
        raise typer.Exit(1)
|