anatools 5.1.27__py3-none-any.whl → 6.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anatools/__init__.py +1 -1
- anatools/anaclient/anaclient.py +16 -15
- anatools/anaclient/api/api.py +2 -1
- anatools/anaclient/api/datasets.py +4 -3
- anatools/anaclient/api/handlers.py +1 -1
- anatools/anaclient/channels.py +50 -25
- anatools/anaclient/datasets.py +37 -8
- anatools/anaclient/helpers.py +11 -10
- anatools/anaclient/services.py +46 -20
- anatools/anaclient/volumes.py +19 -18
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anadeploy +2 -0
- anatools-6.0.0.data/scripts/renderedai +3001 -0
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/METADATA +1 -1
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/RECORD +25 -24
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/WHEEL +1 -1
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/ana +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anamount +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anaprofile +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anarules +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anaserver +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anatransfer +0 -0
- {anatools-5.1.27.data → anatools-6.0.0.data}/scripts/anautils +0 -0
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/entry_points.txt +0 -0
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/licenses/LICENSE +0 -0
- {anatools-5.1.27.dist-info → anatools-6.0.0.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,3001 @@
|
|
|
1
|
+
#!python
|
|
2
|
+
# Copyright 2019-2026 DADoES, Inc.
|
|
3
|
+
#
|
|
4
|
+
# Licensed under the Apache License, Version 2.0 (the "License");
|
|
5
|
+
# you may not use this file except in compliance with the License.
|
|
6
|
+
# You may obtain a copy of the License in the root directory in the "LICENSE" file or at:
|
|
7
|
+
#
|
|
8
|
+
# http://www.apache.org/licenses/LICENSE-2.0
|
|
9
|
+
#
|
|
10
|
+
# Unless required by applicable law or agreed to in writing, software
|
|
11
|
+
# distributed under the License is distributed on an "AS IS" BASIS,
|
|
12
|
+
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
|
13
|
+
# See the License for the specific language governing permissions and
|
|
14
|
+
# limitations under the License.
|
|
15
|
+
"""
|
|
16
|
+
Rendered.ai CLI - A comprehensive command-line interface for the Rendered.ai Platform.
|
|
17
|
+
|
|
18
|
+
This CLI provides JSON output for all commands, making it suitable for automation
|
|
19
|
+
and integration with AI agents.
|
|
20
|
+
|
|
21
|
+
Environment Variables:
|
|
22
|
+
RENDEREDAI_API_KEY - API key for authentication
|
|
23
|
+
RENDEREDAI_ENVIRONMENT - Environment: prod, test, or dev (default: prod)
|
|
24
|
+
RENDEREDAI_ENDPOINT - Custom API endpoint URL
|
|
25
|
+
|
|
26
|
+
Usage:
|
|
27
|
+
renderedai <resource> <action> [options]
|
|
28
|
+
|
|
29
|
+
Examples:
|
|
30
|
+
renderedai workspaces get --orgid abc123
|
|
31
|
+
renderedai datasets get --workspaceid xyz789 --limit 10
|
|
32
|
+
renderedai volumes create --name "My Volume" --orgid abc123
|
|
33
|
+
renderedai graphs get --workspaceid xyz789 --graphid graph123
|
|
34
|
+
"""
|
|
35
|
+
|
|
36
|
+
import argparse
|
|
37
|
+
import json
|
|
38
|
+
import os
|
|
39
|
+
import sys
|
|
40
|
+
from typing import Any, List
|
|
41
|
+
|
|
42
|
+
|
|
43
|
+
def get_client():
    """Construct an authenticated anatools client from the environment.

    Reads RENDEREDAI_API_KEY, RENDEREDAI_ENVIRONMENT (default 'prod') and
    RENDEREDAI_ENDPOINT. Exits with a JSON error if no credentials are
    configured or the client cannot be created.
    """
    import anatools

    api_key = os.environ.get('RENDEREDAI_API_KEY')
    environment = os.environ.get('RENDEREDAI_ENVIRONMENT', 'prod')
    endpoint = os.environ.get('RENDEREDAI_ENDPOINT')

    # A custom endpoint is allowed to stand in for an API key.
    if not (api_key or endpoint):
        output_error("Authentication required. Set RENDEREDAI_API_KEY environment variable.", "AUTH_REQUIRED")
        sys.exit(1)

    try:
        # Non-interactive, quiet client: suitable for automation/agents.
        return anatools.client(
            APIKey=api_key,
            environment=environment,
            endpoint=endpoint,
            interactive=False,
            verbose=None,
        )
    except Exception as e:
        output_error(f"Authentication failed: {str(e)}")
        sys.exit(1)
|
|
67
|
+
|
|
68
|
+
|
|
69
|
+
def output_json(data: Any, pretty: bool = True):
    """Serialize *data* to JSON and print it to stdout.

    Non-serializable values are stringified via ``default=str``; *pretty*
    selects 2-space indentation versus a compact single line.
    """
    indent = 2 if pretty else None
    print(json.dumps(data, indent=indent, default=str))
|
|
75
|
+
|
|
76
|
+
|
|
77
|
+
def output_error(message: str, code: str = "ERROR"):
    """Print a structured JSON error object to stdout."""
    payload = {"error": code, "message": message}
    output_json(payload)
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
def require_arg(args, name: str, display_name: str) -> str:
    """Return the value of a required CLI argument.

    Prints a JSON error and exits the process when the argument is missing
    or empty.
    """
    value = getattr(args, name, None)
    if value:
        return value
    output_error(f"{display_name} is required. Use --{name}", f"MISSING_{name.upper()}")
    sys.exit(1)
|
|
89
|
+
|
|
90
|
+
|
|
91
|
+
def parse_json_arg(value: str) -> Any:
    """Decode *value* as JSON; exit with a JSON error if it is malformed."""
    try:
        parsed = json.loads(value)
    except json.JSONDecodeError as e:
        output_error(f"Invalid JSON: {str(e)}")
        sys.exit(1)
    return parsed
|
|
98
|
+
|
|
99
|
+
|
|
100
|
+
def parse_list_arg(value: str) -> List[str]:
    """Split a comma-separated CLI argument into a list of trimmed items.

    Empty segments (from doubled, leading, or trailing commas) are dropped,
    so "a,,b," yields ["a", "b"] instead of including empty strings that
    would otherwise be forwarded to the API as bogus IDs/tags.
    """
    if not value:
        return []
    return [item.strip() for item in value.split(',') if item.strip()]
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
# =============================================================================
|
|
108
|
+
# WORKSPACES
|
|
109
|
+
# =============================================================================
|
|
110
|
+
|
|
111
|
+
def cmd_workspaces_get(args):
    """Fetch workspaces and print them as JSON."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    workspaces = client.get_workspaces(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        limit=args.limit,
        fields=fields,
    )
    output_json(workspaces)
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
def cmd_workspaces_create(args):
    """Create a workspace and print the new workspace ID as JSON."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')
    # Optional list arguments default to empty lists, tags to None.
    channel_ids = parse_list_arg(args.channelids) if args.channelids else []
    volume_ids = parse_list_arg(args.volumeids) if args.volumeids else []
    tags = parse_list_arg(args.tags) if args.tags else None
    workspace_id = client.create_workspace(
        name=args.name,
        description=args.description or '',
        organizationId=org_id,
        channelIds=channel_ids,
        volumeIds=volume_ids,
        tags=tags,
    )
    output_json({"workspaceId": workspace_id})
|
|
138
|
+
|
|
139
|
+
|
|
140
|
+
def cmd_workspaces_edit(args):
    """Edit workspace attributes and print success as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    # Unspecified list options are passed as None so the API leaves them untouched.
    success = client.edit_workspace(
        workspaceId=workspace_id,
        name=args.name,
        description=args.description,
        channelIds=parse_list_arg(args.channelids) if args.channelids else None,
        volumeIds=parse_list_arg(args.volumeids) if args.volumeids else None,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": success})
|
|
154
|
+
|
|
155
|
+
|
|
156
|
+
def cmd_workspaces_delete(args):
    """Delete a workspace and print the outcome as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    # Ensure no confirmation prompt blocks the delete in automation.
    client.interactive = False
    success = client.ana_api.deleteWorkspace(workspaceId=workspace_id)
    output_json({"success": success})
|
|
164
|
+
|
|
165
|
+
|
|
166
|
+
# =============================================================================
|
|
167
|
+
# ORGANIZATIONS
|
|
168
|
+
# =============================================================================
|
|
169
|
+
|
|
170
|
+
def cmd_organizations_get(args):
    """Fetch organizations and print them as JSON."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    orgs = client.get_organizations(
        organizationId=args.orgid,
        limit=args.limit,
        fields=fields,
    )
    output_json(orgs)
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
# =============================================================================
|
|
183
|
+
# MEMBERS
|
|
184
|
+
# =============================================================================
|
|
185
|
+
|
|
186
|
+
def cmd_members_get(args):
    """List an organization's members as JSON."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    members = client.get_organization_members(
        organizationId=org_id,
        limit=args.limit,
        fields=fields,
    )
    output_json(members)
|
|
197
|
+
|
|
198
|
+
|
|
199
|
+
# =============================================================================
|
|
200
|
+
# DATASETS
|
|
201
|
+
# =============================================================================
|
|
202
|
+
|
|
203
|
+
def cmd_datasets_get(args):
    """Fetch datasets in a workspace and print them as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    datasets = client.get_datasets(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        limit=args.limit,
        fields=fields,
    )
    output_json(datasets)
|
|
215
|
+
|
|
216
|
+
|
|
217
|
+
def cmd_datasets_create(args):
    """Start a dataset job from a graph and print the new dataset ID as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    tags = parse_list_arg(args.tags) if args.tags else []
    dataset_id = client.create_dataset(
        workspaceId=workspace_id,
        name=args.name,
        graphId=args.graphid,
        description=args.description or '',
        runs=args.runs,
        seed=args.seed,
        priority=args.priority,
        tags=tags,
    )
    output_json({"datasetId": dataset_id})
|
|
233
|
+
|
|
234
|
+
|
|
235
|
+
def cmd_datasets_edit(args):
    """Edit dataset attributes and print success as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    tags = parse_list_arg(args.tags) if args.tags else None
    success = client.edit_dataset(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        name=args.name,
        description=args.description,
        tags=tags,
        pause=args.pause,
        priority=args.priority,
    )
    output_json({"success": success})
|
|
251
|
+
|
|
252
|
+
|
|
253
|
+
def cmd_datasets_delete(args):
    """Delete a dataset and print the outcome as JSON."""
    client = get_client()
    params = dict(
        workspaceId=require_arg(args, 'workspaceid', 'Workspace ID'),
        datasetId=require_arg(args, 'datasetid', 'Dataset ID'),
    )
    output_json({"success": client.delete_dataset(**params)})
|
|
264
|
+
|
|
265
|
+
|
|
266
|
+
def cmd_datasets_cancel(args):
    """Cancel a running dataset job and print the outcome as JSON."""
    client = get_client()
    params = dict(
        workspaceId=require_arg(args, 'workspaceid', 'Workspace ID'),
        datasetId=require_arg(args, 'datasetid', 'Dataset ID'),
    )
    output_json({"success": client.cancel_dataset(**params)})
|
|
277
|
+
|
|
278
|
+
|
|
279
|
+
def cmd_datasets_download(args):
    """Download a dataset archive and print the local path as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    download_path = client.download_dataset(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        localDir=args.outputdir,
    )
    output_json({"downloadPath": download_path})
|
|
291
|
+
|
|
292
|
+
|
|
293
|
+
def cmd_datasets_upload(args):
    """Upload a local dataset file and print the new dataset ID as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    tags = parse_list_arg(args.tags) if args.tags else None
    dataset_id = client.upload_dataset(
        workspaceId=workspace_id,
        filename=args.file,
        description=args.description,
        tags=tags,
    )
    output_json({"datasetId": dataset_id})
|
|
305
|
+
|
|
306
|
+
|
|
307
|
+
def cmd_datasets_runs(args):
    """List the runs of a dataset job as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    runs = client.get_dataset_runs(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        state=args.state,
        fields=fields,
    )
    output_json(runs)
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
def cmd_datasets_log(args):
    """Fetch the log of a single dataset run and print it as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    run_id = require_arg(args, 'runid', 'Run ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    # saveLogFile=False: return the log inline rather than writing to disk.
    log = client.get_dataset_log(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        runId=run_id,
        saveLogFile=False,
        fields=fields,
    )
    output_json(log)
|
|
337
|
+
|
|
338
|
+
|
|
339
|
+
def cmd_datasets_files(args):
    """List files contained in a dataset as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    files = client.get_dataset_files(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        path=args.path,
        limit=args.limit,
    )
    output_json(files)
|
|
352
|
+
|
|
353
|
+
|
|
354
|
+
def cmd_datasets_jobs(args):
    """List dataset jobs (optionally scoped by workspace/org/dataset) as JSON."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    jobs = client.get_dataset_jobs(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        datasetId=args.datasetid,
        limit=args.limit,
        fields=fields,
    )
    output_json(jobs)
|
|
366
|
+
|
|
367
|
+
|
|
368
|
+
def cmd_datasets_create_mixed(args):
    """Create a mixed dataset from multiple datasets; print the new ID as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    # --parameters is a JSON document describing the mix.
    mix_parameters = parse_json_arg(args.parameters)
    tags = parse_list_arg(args.tags) if args.tags else None
    dataset_id = client.create_mixed_dataset(
        workspaceId=workspace_id,
        name=args.name,
        parameters=mix_parameters,
        description=args.description or '',
        seed=args.seed,
        tags=tags,
    )
    output_json({"datasetId": dataset_id})
|
|
384
|
+
|
|
385
|
+
|
|
386
|
+
# =============================================================================
|
|
387
|
+
# VOLUMES
|
|
388
|
+
# =============================================================================
|
|
389
|
+
|
|
390
|
+
def cmd_volumes_get(args):
    """Fetch volumes and print them as JSON."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    volumes = client.get_volumes(
        volumeId=args.volumeid,
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        limit=args.limit,
        fields=fields,
    )
    output_json(volumes)
|
|
402
|
+
|
|
403
|
+
|
|
404
|
+
def cmd_volumes_create(args):
    """Create a volume and print the new volume ID as JSON."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')
    tags = parse_list_arg(args.tags) if args.tags else None
    volume_id = client.create_volume(
        name=args.name,
        description=args.description,
        organizationId=org_id,
        permission=args.permission,
        tags=tags,
    )
    output_json({"volumeId": volume_id})
|
|
417
|
+
|
|
418
|
+
|
|
419
|
+
def cmd_volumes_edit(args):
    """Edit volume attributes and print success as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    tags = parse_list_arg(args.tags) if args.tags else None
    success = client.edit_volume(
        volumeId=volume_id,
        name=args.name,
        description=args.description,
        permission=args.permission,
        tags=tags,
    )
    output_json({"success": success})
|
|
432
|
+
|
|
433
|
+
|
|
434
|
+
def cmd_volumes_delete(args):
    """Delete a volume and print the outcome as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    success = client.delete_volume(volumeId=volume_id)
    output_json({"success": success})
|
|
441
|
+
|
|
442
|
+
|
|
443
|
+
# =============================================================================
|
|
444
|
+
# VOLUME-DATA
|
|
445
|
+
# =============================================================================
|
|
446
|
+
|
|
447
|
+
def cmd_volume_data_get(args):
    """List data stored in a volume as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    files = parse_list_arg(args.files) if args.files else None
    listing = client.get_volume_data(
        volumeId=volume_id,
        dir=args.dir,
        files=files,
        recursive=args.recursive,
        limit=args.limit,
    )
    output_json(listing)
|
|
460
|
+
|
|
461
|
+
|
|
462
|
+
def cmd_volume_data_upload(args):
    """Upload local files to a volume; print a success marker as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    files = parse_list_arg(args.files) if args.files else None
    # upload_volume_data returns nothing useful; report success if it did not raise.
    client.upload_volume_data(
        volumeId=volume_id,
        localDir=args.localdir,
        files=files,
        destinationDir=args.destdir,
        sync=args.sync,
    )
    output_json({"success": True})
|
|
475
|
+
|
|
476
|
+
|
|
477
|
+
def cmd_volume_data_download(args):
    """Download files from a volume; print a success marker as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    files = parse_list_arg(args.files) if args.files else []
    # download_volume_data returns nothing useful; report success if it did not raise.
    client.download_volume_data(
        volumeId=volume_id,
        localDir=args.outputdir,
        files=files,
        recursive=args.recursive,
        sync=args.sync,
    )
    output_json({"success": True})
|
|
490
|
+
|
|
491
|
+
|
|
492
|
+
def cmd_volume_data_delete(args):
    """Delete files from a volume and print the outcome as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    files = parse_list_arg(args.files) if args.files else None
    success = client.delete_volume_data(
        volumeId=volume_id,
        files=files,
    )
    output_json({"success": success})
|
|
502
|
+
|
|
503
|
+
|
|
504
|
+
def cmd_volume_data_search(args):
    """Search files in a volume and print the matches as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    keywords = parse_list_arg(args.keywords) if args.keywords else None
    fileformats = parse_list_arg(args.formats) if args.formats else None
    filetypes = parse_list_arg(args.types) if args.types else None
    matches = client.search_volume(
        volumeId=volume_id,
        directory=args.dir,
        recursive=args.recursive,
        keywords=keywords,
        fileformats=fileformats,
        filetypes=filetypes,
        limit=args.limit,
    )
    output_json(matches)
|
|
519
|
+
|
|
520
|
+
|
|
521
|
+
def cmd_volumes_mount(args):
    """Mount a volume to local filesystem.

    Pipeline: pick an available FUSE S3 mounter, fetch temporary AWS
    credentials from the platform, write them into ~/.aws/credentials under
    a per-volume profile, mount the bucket under ~/.renderedai/volumes/<id>,
    symlink it into the target path, and record the mount in
    ~/.renderedai/.mounts.json. On success prints a JSON summary; on any
    precondition failure prints a JSON error and returns.
    """
    import subprocess
    import time

    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    path = args.path or os.getcwd()            # where the 'volumes/<id>' symlink is created
    mountexec = args.mountexec or 'goofys'     # preferred mounter; may be swapped below
    home = os.path.expanduser('~')

    # Verify mount executable is available.
    # Probes each known mounter with '--version'; if the user's preferred one
    # works it wins, otherwise the first working probe becomes the fallback.
    exec_available = False
    for exec_name in ['goofys', 's3fs', 'mount-s3']:
        try:
            subprocess.run([exec_name, '--version'], capture_output=True, check=True)
            if mountexec == exec_name:
                exec_available = True
                break
            elif not exec_available:
                mountexec = exec_name
                exec_available = True
        except:  # noqa: E722 — probe failure just means "this mounter is absent"
            pass

    if not exec_available:
        output_error("No mount executable found. Install goofys, s3fs, or mount-s3.")
        return

    # Get volume info (also acts as an existence/permission check).
    volume_data = client.get_volumes(volumeId=volume_id)
    if not volume_data:
        output_error("Volume not found or permission denied", "VOLUME_NOT_FOUND")
        return

    # 'view' permission is not enough to mount; require read or write.
    if volume_data[0].get('permission') not in ['read', 'write']:
        output_error("Insufficient permissions (view-only)", "PERMISSION_DENIED")
        return

    # Get mount credentials.
    # NOTE(review): mount_data is assumed to carry 'credentials' (accesskeyid/
    # accesskey/sessiontoken), 'keys' (bucket paths) and 'rw' flags — confirm
    # against anatools.mount_volumes.
    mount_data = client.mount_volumes(volumes=[volume_id])
    if not mount_data:
        output_error("Failed to get mount credentials", "MOUNT_FAILED")
        return

    # Write AWS credentials under a dedicated per-volume profile.
    aws_dir = os.path.join(home, '.aws')
    os.makedirs(aws_dir, exist_ok=True)
    profile_name = f'renderedai-volumes-{volume_id}'

    # Read existing credentials so other profiles are preserved on rewrite.
    creds_file = os.path.join(aws_dir, 'credentials')
    profiles = {}
    if os.path.exists(creds_file):
        with open(creds_file, 'r') as f:
            current_profile = None
            for line in f:
                line = line.rstrip()
                if line.startswith('[') and line.endswith(']'):
                    current_profile = line[1:-1]
                    profiles[current_profile] = []
                elif current_profile:
                    profiles[current_profile].append(line)

    # Add new profile (overwrites any stale entry with the same name).
    profiles[profile_name] = [
        f"aws_access_key_id={mount_data['credentials']['accesskeyid']}",
        f"aws_secret_access_key={mount_data['credentials']['accesskey']}",
        f"aws_session_token={mount_data['credentials']['sessiontoken']}"
    ]

    # Write credentials — full rewrite of the file, blank lines dropped.
    with open(creds_file, 'w') as f:
        for profile, lines in profiles.items():
            f.write(f'[{profile}]\n')
            for line in lines:
                if line:
                    f.write(f'{line}\n')

    # Create mount point under the user's home, keyed by volume ID.
    mountpoint = os.path.join(home, '.renderedai', 'volumes', volume_id)
    os.makedirs(mountpoint, exist_ok=True)

    # Build mount command from the first bucket key returned.
    bucket_key = mount_data['keys'][0] if mount_data.get('keys') else None
    if not bucket_key:
        output_error("No bucket key returned", "MOUNT_FAILED")
        return

    # rw flags: 'r' means read-only; rw_flag stays '' for writable mounts.
    rw_flag = '-o ro' if mount_data.get('rw', ['r'])[0] == 'r' else ''

    # bucket_key[:-1] drops a trailing character — presumably a trailing
    # slash/colon in the platform's key format; TODO confirm.
    if mountexec == 'goofys':
        command = f'goofys {rw_flag} --profile {profile_name} {bucket_key[:-1]} {mountpoint}'
    elif mountexec == 's3fs':
        command = f's3fs {bucket_key[:-1]} {mountpoint} -o profile={profile_name} -o endpoint=us-west-2 -o url="https://s3-us-west-2.amazonaws.com" {rw_flag}'
    else: # mount-s3
        readonly = '--read-only' if rw_flag else ''
        command = f'mount-s3 {readonly} --profile {profile_name} --prefix {bucket_key[1:]+"/"} {bucket_key[:-1]} {mountpoint}'

    # Execute mount. shell=True with an f-string command: values come from
    # the platform API, not arbitrary user input, but still worth review.
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    time.sleep(2)  # Wait for mount to complete (fixed delay; not verified)

    # Create symlink in target path: <path>/volumes/<volume_id> -> mountpoint.
    symlink_path = os.path.join(path, 'volumes', volume_id)
    os.makedirs(os.path.join(path, 'volumes'), exist_ok=True)
    if os.path.exists(symlink_path):
        try:
            os.unlink(symlink_path)
        except:  # noqa: E722 — best-effort replacement of a stale link
            pass
    try:
        os.symlink(mountpoint, symlink_path)
    except:  # noqa: E722 — e.g. unsupported on this filesystem; mount still usable
        pass

    # Save mount info so cmd_volumes_unmount can find and tear this down.
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')
    mounts = {"volumes": {}, "workspaces": {}}
    if os.path.exists(mountfile):
        with open(mountfile, 'r') as f:
            mounts = json.load(f)

    mounts['volumes'][volume_id] = {
        'status': 'mounted',
        'exec': mountexec,
        'name': volume_data[0].get('name', volume_id),
        'mountpath': mountpoint,
        'symlink': symlink_path,
        'profile': profile_name,
        'pid': proc.pid
    }

    with open(mountfile, 'w') as f:
        json.dump(mounts, indent=4, sort_keys=True, fp=f)

    output_json({
        "volumeId": volume_id,
        "name": volume_data[0].get('name'),
        "mountpath": mountpoint,
        "symlink": symlink_path,
        "readonly": mount_data.get('rw', ['r'])[0] == 'r'
    })
|
|
664
|
+
|
|
665
|
+
|
|
666
|
+
def cmd_volumes_unmount(args):
    """Unmount a volume from local filesystem.

    Looks the volume up in ~/.renderedai/.mounts.json (written by
    cmd_volumes_mount), kills processes holding the mount, removes the
    symlink, unmounts via fusermount/umount, optionally removes the empty
    mount directory, and rewrites the mounts file. Prints a JSON result.
    """
    import subprocess

    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    home = os.path.expanduser('~')
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')

    # No state file means nothing was ever mounted by this CLI.
    if not os.path.exists(mountfile):
        output_error("Volume not mounted", "NOT_MOUNTED")
        return

    with open(mountfile, 'r') as f:
        mounts = json.load(f)

    mount_info = mounts.get('volumes', {}).get(volume_id)
    if not mount_info or mount_info.get('status') != 'mounted':
        output_error("Volume not mounted", "NOT_MOUNTED")
        return

    try:
        # Kill processes using the mount (check=False: failure is tolerated,
        # e.g. fuser missing or nothing holding the mount).
        subprocess.run(["fuser", "-km", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Remove symlink created at mount time, if it still exists.
        if os.path.exists(mount_info.get('symlink', '')):
            os.unlink(mount_info['symlink'])

        # Unmount: try FUSE lazy unmount first, then a lazy/forced umount —
        # whichever applies to the mounter that was used.
        subprocess.run(["fusermount", "-uz", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        subprocess.run(["umount", "-lf", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Clean up mount directory only when it is (effectively) empty, so a
        # still-mounted or repopulated directory is never deleted.
        if os.path.isdir(mount_info['mountpath']):
            contents = os.listdir(mount_info['mountpath'])
            if not contents or (len(contents) == 1 and contents[0] == 'lost+found'):
                subprocess.run(["rm", "-rf", mount_info['mountpath']], check=False,
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        del mounts['volumes'][volume_id]

        # Save updated mounts so the state file reflects the removal.
        with open(mountfile, 'w') as f:
            json.dump(mounts, indent=4, sort_keys=True, fp=f)

        output_json({"volumeId": volume_id, "name": mount_info.get('name'), "success": True})

    except Exception as e:
        output_error(str(e), "UNMOUNT_FAILED")
|
|
718
|
+
|
|
719
|
+
|
|
720
|
+
def cmd_workspaces_mount(args):
    """Mount a workspace's remote storage onto the local filesystem.

    Steps: pick a FUSE/S3 mount tool (goofys, s3fs, or mount-s3), fetch
    temporary mount credentials for the workspace, write them to
    ~/.aws/credentials under a dedicated profile, mount the bucket under
    ~/.renderedai/workspaces/<workspaceId>, symlink it into the target
    path, and record the mount in ~/.renderedai/.mounts.json.
    Emits the mount details as JSON on success.
    """
    import subprocess
    import time

    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    path = args.path or os.getcwd()
    mountexec = args.mountexec or 'goofys'
    home = os.path.expanduser('~')

    # Verify mount executable is available by probing each candidate's --version.
    # NOTE(review): if an earlier candidate in this list is available, it replaces
    # the requested mountexec even when the requested tool (probed later in the
    # loop) is also available — confirm this precedence is intended.
    exec_available = False
    for exec_name in ['goofys', 's3fs', 'mount-s3']:
        try:
            subprocess.run([exec_name, '--version'], capture_output=True, check=True)
            if mountexec == exec_name:
                exec_available = True
                break
            elif not exec_available:
                # Fall back to the first working tool found.
                mountexec = exec_name
                exec_available = True
        except:
            # Tool missing or probe failed; try the next candidate.
            pass

    if not exec_available:
        output_error("No mount executable found. Install goofys, s3fs, or mount-s3.")
        return

    # Get workspace info (also validates the caller can see the workspace).
    workspace_data = client.get_workspaces(workspaceId=workspace_id)
    if not workspace_data:
        output_error("Workspace not found or permission denied", "WORKSPACE_NOT_FOUND")
        return

    # Get mount credentials (temporary AWS keys plus bucket/key info).
    mount_data = client.mount_workspaces(workspaces=[workspace_id])
    if not mount_data:
        output_error("Failed to get mount credentials", "MOUNT_FAILED")
        return

    # Write AWS credentials
    aws_dir = os.path.join(home, '.aws')
    os.makedirs(aws_dir, exist_ok=True)
    profile_name = f'renderedai-workspaces-{workspace_id}'

    # Read existing credentials into {profile: [lines]} so other profiles
    # survive the rewrite below.
    # NOTE(review): lines before the first [profile] header are dropped.
    creds_file = os.path.join(aws_dir, 'credentials')
    profiles = {}
    if os.path.exists(creds_file):
        with open(creds_file, 'r') as f:
            current_profile = None
            for line in f:
                line = line.rstrip()
                if line.startswith('[') and line.endswith(']'):
                    current_profile = line[1:-1]
                    profiles[current_profile] = []
                elif current_profile:
                    profiles[current_profile].append(line)

    # Add new profile (overwrites any stale entry with the same name).
    profiles[profile_name] = [
        f"aws_access_key_id={mount_data['credentials']['accesskeyid']}",
        f"aws_secret_access_key={mount_data['credentials']['accesskey']}",
        f"aws_session_token={mount_data['credentials']['sessiontoken']}"
    ]

    # Write credentials back, one [profile] section per entry.
    with open(creds_file, 'w') as f:
        for profile, lines in profiles.items():
            f.write(f'[{profile}]\n')
            for line in lines:
                if line:
                    f.write(f'{line}\n')

    # Create mount point
    mountpoint = os.path.join(home, '.renderedai', 'workspaces', workspace_id)
    os.makedirs(mountpoint, exist_ok=True)

    # Build mount command.
    # Keys presumably look like '<bucket>/' — the trailing character is
    # stripped below ([:-1]); TODO confirm against the API response shape.
    bucket_key = mount_data['keys'][0] if mount_data.get('keys') else None
    if not bucket_key:
        output_error("No bucket key returned", "MOUNT_FAILED")
        return

    # 'r' access maps to a read-only mount flag; anything else mounts read-write.
    rw_flag = '-o ro' if mount_data.get('rw', ['r'])[0] == 'r' else ''

    if mountexec == 'goofys':
        command = f'goofys {rw_flag} --profile {profile_name} {bucket_key[:-1]} {mountpoint}'
    elif mountexec == 's3fs':
        command = f's3fs {bucket_key[:-1]} {mountpoint} -o profile={profile_name} -o endpoint=us-west-2 -o url="https://s3-us-west-2.amazonaws.com" {rw_flag}'
    else: # mount-s3
        # mount-s3 uses --read-only instead of '-o ro'.
        readonly = '--read-only' if rw_flag else ''
        command = f'mount-s3 {readonly} --profile {profile_name} --prefix {bucket_key[1:]+"/"} {bucket_key[:-1]} {mountpoint}'

    # Execute mount in the background; output is discarded.
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    time.sleep(2) # Wait for mount to complete
    # NOTE(review): there is no verification that the mount actually succeeded
    # before the symlink and bookkeeping below.

    # Create symlink in target path (workspaces/<id> under the chosen path).
    symlink_path = os.path.join(path, 'workspaces', workspace_id)
    os.makedirs(os.path.join(path, 'workspaces'), exist_ok=True)
    if os.path.exists(symlink_path):
        try:
            os.unlink(symlink_path)
        except:
            pass
    try:
        os.symlink(mountpoint, symlink_path)
    except:
        # Best-effort: a dangling or un-removable entry leaves the old link in place.
        pass

    # Save mount info so the unmount command can find and clean this up later.
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')
    mounts = {"volumes": {}, "workspaces": {}}
    if os.path.exists(mountfile):
        with open(mountfile, 'r') as f:
            mounts = json.load(f)

    mounts['workspaces'][workspace_id] = {
        'status': 'mounted',
        'exec': mountexec,
        'name': workspace_data[0].get('name', workspace_id),
        'mountpath': mountpoint,
        'symlink': symlink_path,
        'profile': profile_name,
        'pid': proc.pid
    }

    with open(mountfile, 'w') as f:
        json.dump(mounts, indent=4, sort_keys=True, fp=f)

    output_json({
        "workspaceId": workspace_id,
        "name": workspace_data[0].get('name'),
        "mountpath": mountpoint,
        "symlink": symlink_path,
        "readonly": mount_data.get('rw', ['r'])[0] == 'r'
    })
|
|
859
|
+
|
|
860
|
+
|
|
861
|
+
def cmd_workspaces_unmount(args):
    """Unmount a workspace from the local filesystem.

    Looks the workspace up in ~/.renderedai/.mounts.json, tears the FUSE
    mount down (killing any processes holding it), removes the symlink
    and empty mount directory, and deletes the bookkeeping entry.
    Emits a JSON success record, or an error if the workspace is not
    recorded as mounted.
    """
    import subprocess

    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    home = os.path.expanduser('~')
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')

    # No bookkeeping file means nothing was ever mounted by this tool.
    if not os.path.exists(mountfile):
        output_error("Workspace not mounted", "NOT_MOUNTED")
        return

    with open(mountfile, 'r') as f:
        mounts = json.load(f)

    mount_info = mounts.get('workspaces', {}).get(workspace_id)
    if not mount_info or mount_info.get('status') != 'mounted':
        output_error("Workspace not mounted", "NOT_MOUNTED")
        return

    try:
        # Kill processes using the mount so the unmount below can't hang on
        # busy files (fuser -km sends SIGKILL to holders).
        subprocess.run(["fuser", "-km", mount_info['mountpath']], check=False,
                      stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Remove symlink
        if os.path.exists(mount_info.get('symlink', '')):
            os.unlink(mount_info['symlink'])

        # Unmount: try the FUSE lazy unmount first, then a lazy+forced
        # umount as a fallback; both are best-effort (check=False).
        subprocess.run(["fusermount", "-uz", mount_info['mountpath']], check=False,
                      stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        subprocess.run(["umount", "-lf", mount_info['mountpath']], check=False,
                      stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Clean up mount directory only when it is empty (or holds just the
        # filesystem's lost+found), so real user data is never deleted.
        if os.path.isdir(mount_info['mountpath']):
            contents = os.listdir(mount_info['mountpath'])
            if not contents or (len(contents) == 1 and contents[0] == 'lost+found'):
                subprocess.run(["rm", "-rf", mount_info['mountpath']], check=False,
                              stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Drop the bookkeeping entry now that the mount is gone.
        del mounts['workspaces'][workspace_id]

        # Save updated mounts
        with open(mountfile, 'w') as f:
            json.dump(mounts, indent=4, sort_keys=True, fp=f)

        output_json({"workspaceId": workspace_id, "name": mount_info.get('name'), "success": True})

    except Exception as e:
        output_error(str(e), "UNMOUNT_FAILED")
|
|
913
|
+
|
|
914
|
+
|
|
915
|
+
# =============================================================================
|
|
916
|
+
# GRAPHS
|
|
917
|
+
# =============================================================================
|
|
918
|
+
|
|
919
|
+
def cmd_graphs_get(args):
    """Look up graphs in a workspace and emit them as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    wanted_fields = parse_list_arg(args.fields) if args.fields else None
    output_json(api.get_graphs(
        workspaceId=ws,
        graphId=args.graphid,
        staged=args.staged,
        limit=args.limit,
        fields=wanted_fields,
    ))
|
|
932
|
+
|
|
933
|
+
|
|
934
|
+
def cmd_graphs_create(args):
    """Upload a new editable (non-staged) graph and print its ID."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    new_id = api.upload_graph(
        workspaceId=ws,
        graph=args.file,
        channelId=args.channelid,
        name=args.name,
        description=args.description,
        staged=False,
    )
    output_json({"graphId": new_id})
|
|
948
|
+
|
|
949
|
+
|
|
950
|
+
def cmd_graphs_edit(args):
    """Update a graph's name, description and/or tags."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    gid = require_arg(args, 'graphid', 'Graph ID')
    ok = api.edit_graph(
        workspaceId=ws,
        graphId=gid,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": ok})
|
|
964
|
+
|
|
965
|
+
|
|
966
|
+
def cmd_graphs_delete(args):
    """Remove a graph from a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    gid = require_arg(args, 'graphid', 'Graph ID')
    output_json({"success": api.delete_graph(workspaceId=ws, graphId=gid)})
|
|
977
|
+
|
|
978
|
+
|
|
979
|
+
def cmd_graphs_download(args):
    """Save a graph's YAML definition to disk and report the path."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    gid = require_arg(args, 'graphid', 'Graph ID')
    # Default destination is <graphId>.yaml in the current directory.
    destination = args.outputfile if args.outputfile else f"{gid}.yaml"
    saved = api.download_graph(workspaceId=ws, graphId=gid, filepath=destination)
    output_json({"filepath": saved})
|
|
992
|
+
|
|
993
|
+
|
|
994
|
+
def cmd_graphs_stage(args):
    """Stage an existing graph (creates a read-only copy).

    Downloads the source graph to a temporary file, then re-uploads it
    with ``staged=True``. Name and description default to the source
    graph's metadata when not supplied on the command line.
    """
    import tempfile
    import os

    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')

    # Get the graph metadata
    graphs = client.get_graphs(workspaceId=workspace_id, graphId=graph_id)
    if not graphs:
        output_error(f"Graph {graph_id} not found in workspace")
        return
    graph_info = graphs[0]

    # Guard against graphs with no channel, matching cmd_graph_editor_open;
    # previously a missing 'channelId' raised an unhandled KeyError.
    channel_id = graph_info.get('channelId')
    if not channel_id:
        output_error("Graph has no associated channel", "NO_CHANNEL")
        return

    # Download the graph to a temp file; the directory (and file) are
    # removed automatically when the with-block exits.
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, "graph.yaml")
        client.download_graph(workspaceId=workspace_id, graphId=graph_id, filepath=filepath)

        # Re-upload as staged
        name = args.name if args.name else f"{graph_info['name']}-staged"
        result = client.upload_graph(
            workspaceId=workspace_id,
            graph=filepath,
            channelId=channel_id,
            name=name,
            description=args.description if args.description else graph_info.get('description', ''),
            staged=True
        )
        output_json({"graphId": result})
|
|
1026
|
+
|
|
1027
|
+
|
|
1028
|
+
# =============================================================================
|
|
1029
|
+
# GRAPH-EDITOR
|
|
1030
|
+
# =============================================================================
|
|
1031
|
+
|
|
1032
|
+
def cmd_graph_editor_open(args):
    """Download a graph and its channel schema, then open in the graph editor."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    gid = require_arg(args, 'graphid', 'Graph ID')
    out_dir = args.outputdir or os.getcwd()

    # Resolve the graph so we can discover its channel.
    matches = api.get_graphs(workspaceId=ws, graphId=gid)
    if not matches:
        output_error(f"Graph {gid} not found", "GRAPH_NOT_FOUND")
        return
    meta = matches[0]

    chan = meta.get('channelId')
    if not chan:
        output_error("Graph has no associated channel", "NO_CHANNEL")
        return

    os.makedirs(out_dir, exist_ok=True)

    # Fetch the graph YAML, naming the file after the graph (spaces -> underscores).
    safe_name = meta.get('name', gid).replace(' ', '_')
    graph_file = os.path.join(out_dir, f"{safe_name}.yaml")
    api.download_graph(workspaceId=ws, graphId=gid, filepath=graph_file)

    # Fetch the channel's node schema alongside it.
    node_schema = api.get_channel_nodes(channelId=chan)
    if not node_schema:
        output_error("Failed to fetch channel schema", "SCHEMA_ERROR")
        return

    schema_file = os.path.join(out_dir, f"{chan}_schema.json")
    with open(schema_file, 'w') as fh:
        json.dump(node_schema, fh, indent=2)

    # Write trigger file under ~/.theia to open the pair in the graph editor.
    trigger_file = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-open')
    os.makedirs(os.path.dirname(trigger_file), exist_ok=True)
    with open(trigger_file, 'w') as fh:
        json.dump({
            "graphPath": os.path.abspath(graph_file),
            "schemaPath": os.path.abspath(schema_file),
            "autoLayout": True
        }, fh)

    output_json({
        "graphPath": os.path.abspath(graph_file),
        "schemaPath": os.path.abspath(schema_file),
        "triggerPath": trigger_file,
        "graphId": gid,
        "channelId": chan,
        "graphName": meta.get('name')
    })
|
|
1090
|
+
|
|
1091
|
+
|
|
1092
|
+
# =============================================================================
|
|
1093
|
+
# CHANNELS
|
|
1094
|
+
# =============================================================================
|
|
1095
|
+
|
|
1096
|
+
def cmd_channels_get(args):
    """List channels visible to the caller and print them as JSON."""
    api = get_client()
    selected_fields = parse_list_arg(args.fields) if args.fields else None
    output_json(api.get_channels(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        channelId=args.channelid,
        limit=args.limit,
        fields=selected_fields,
    ))
|
|
1108
|
+
|
|
1109
|
+
|
|
1110
|
+
def cmd_channels_schema(args):
    """Print the node schema for a channel."""
    api = get_client()
    chan = require_arg(args, 'channelid', 'Channel ID')
    selected = parse_list_arg(args.fields) if args.fields else None
    output_json(api.get_channel_nodes(channelId=chan, fields=selected))
|
|
1120
|
+
|
|
1121
|
+
|
|
1122
|
+
def cmd_channels_nodes(args):
    """Print documentation for a single node of a channel."""
    api = get_client()
    chan = require_arg(args, 'channelid', 'Channel ID')
    node_name = require_arg(args, 'node', 'Node name')
    docs = api.get_node_documentation(
        channelId=chan,
        node=node_name,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json({"documentation": docs})
|
|
1134
|
+
|
|
1135
|
+
|
|
1136
|
+
def cmd_channels_docs(args):
    """Print the documentation for a channel."""
    api = get_client()
    chan = require_arg(args, 'channelid', 'Channel ID')
    output_json({"documentation": api.get_channel_documentation(channelId=chan)})
|
|
1143
|
+
|
|
1144
|
+
|
|
1145
|
+
def cmd_channels_get_default_graph(args):
    """Download a channel's default graph and report where it was written."""
    api = get_client()
    chan = require_arg(args, 'channelid', 'Channel ID')
    written = api.get_default_graph(channelId=chan, filepath=args.outputfile)
    output_json({"filepath": written})
|
|
1155
|
+
|
|
1156
|
+
|
|
1157
|
+
def cmd_channels_set_default_graph(args):
    """Mark a graph as its channel's default."""
    api = get_client()
    gid = require_arg(args, 'graphid', 'Graph ID')
    output_json({"success": api.set_default_graph(graphId=gid, workspaceId=args.workspaceid)})
|
|
1167
|
+
|
|
1168
|
+
|
|
1169
|
+
# =============================================================================
|
|
1170
|
+
# SERVICES
|
|
1171
|
+
# =============================================================================
|
|
1172
|
+
|
|
1173
|
+
def cmd_services_get(args):
    """List services and print them as JSON."""
    api = get_client()
    wanted = parse_list_arg(args.fields) if args.fields else None
    services = api.get_services(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        serviceId=args.serviceid,
        limit=args.limit,
        fields=wanted,
    )
    output_json(services)
|
|
1185
|
+
|
|
1186
|
+
|
|
1187
|
+
def cmd_services_create(args):
    """Create a new service in an organization and print its ID."""
    api = get_client()
    org = require_arg(args, 'orgid', 'Organization ID')
    new_id = api.create_service(
        name=args.name,
        description=args.description,
        organizationId=org,
        serviceTypeId=args.type,
        volumes=parse_list_arg(args.volumes) if args.volumes else [],
        instance=args.instance,
        tags=parse_list_arg(args.tags) if args.tags else [],
    )
    output_json({"serviceId": new_id})
|
|
1202
|
+
|
|
1203
|
+
|
|
1204
|
+
def cmd_services_edit(args):
    """Modify an existing service's settings."""
    api = get_client()
    sid = require_arg(args, 'serviceid', 'Service ID')
    ok = api.edit_service(
        serviceId=sid,
        name=args.name,
        description=args.description,
        volumes=parse_list_arg(args.volumes) if args.volumes else None,
        instance=args.instance,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": ok})
|
|
1218
|
+
|
|
1219
|
+
|
|
1220
|
+
def cmd_services_delete(args):
    """Delete a service by ID."""
    api = get_client()
    sid = require_arg(args, 'serviceid', 'Service ID')
    output_json({"success": api.delete_service(serviceId=sid)})
|
|
1227
|
+
|
|
1228
|
+
|
|
1229
|
+
def cmd_services_jobs(args):
    """List service jobs in a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    jobs = api.get_service_jobs(
        workspaceId=ws,
        jobId=args.jobid,
        limit=args.limit,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(jobs)
|
|
1241
|
+
|
|
1242
|
+
|
|
1243
|
+
def cmd_services_delete_job(args):
    """Delete one service job from a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    jid = require_arg(args, 'jobid', 'Job ID')
    output_json({"success": api.delete_service_job(workspaceId=ws, jobId=jid)})
|
|
1254
|
+
|
|
1255
|
+
|
|
1256
|
+
# =============================================================================
|
|
1257
|
+
# API KEYS
|
|
1258
|
+
# =============================================================================
|
|
1259
|
+
|
|
1260
|
+
def cmd_api_keys_get(args):
    """List the caller's API keys."""
    output_json(get_client().get_api_keys())
|
|
1266
|
+
|
|
1267
|
+
|
|
1268
|
+
def cmd_api_keys_create(args):
    """Create an API key scoped to the user, an organization, or a workspace."""
    api = get_client()
    params = {
        'name': args.name,
        'scope': args.scope
    }

    # Organization- and workspace-scoped keys require the matching ID.
    if args.scope == 'organization':
        params['organizationId'] = require_arg(args, 'orgid', 'Organization ID')
    elif args.scope == 'workspace':
        params['workspaceId'] = require_arg(args, 'workspaceid', 'Workspace ID')

    if args.expires:
        params['expiresAt'] = args.expires

    output_json({"apiKey": api.create_api_key(**params)})
|
|
1289
|
+
|
|
1290
|
+
|
|
1291
|
+
def cmd_api_keys_delete(args):
    """Revoke an API key by ID."""
    api = get_client()
    key_id = require_arg(args, 'apikeyid', 'API Key ID')
    output_json({"success": api.delete_api_key(apiKeyId=key_id)})
|
|
1298
|
+
|
|
1299
|
+
|
|
1300
|
+
# =============================================================================
|
|
1301
|
+
# ANALYTICS
|
|
1302
|
+
# =============================================================================
|
|
1303
|
+
|
|
1304
|
+
def cmd_analytics_get(args):
    """List analytics jobs for a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    output_json(api.get_analytics(
        workspaceId=ws,
        datasetId=args.datasetid,
        analyticsId=args.analyticsid,
    ))
|
|
1315
|
+
|
|
1316
|
+
|
|
1317
|
+
def cmd_analytics_create(args):
    """Start an analytics job on a dataset and print the new ID."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    new_id = api.create_analytics(workspaceId=ws, datasetId=ds, analyticsType=args.type)
    output_json({"analyticsId": new_id})
|
|
1329
|
+
|
|
1330
|
+
|
|
1331
|
+
def cmd_analytics_delete(args):
    """Delete an analytics job."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    an_id = require_arg(args, 'analyticsid', 'Analytics ID')
    output_json({"success": api.delete_analytics(workspaceId=ws, analyticsId=an_id)})
|
|
1342
|
+
|
|
1343
|
+
|
|
1344
|
+
def cmd_analytics_types(args):
    """List the available analytics types."""
    output_json(get_client().get_analytics_types())
|
|
1350
|
+
|
|
1351
|
+
|
|
1352
|
+
def cmd_analytics_download(args):
    """Download an analytics job's results and print them."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    an_id = require_arg(args, 'analyticsid', 'Analytics ID')
    output_json(api.download_analytics(workspaceId=ws, analyticsId=an_id))
|
|
1363
|
+
|
|
1364
|
+
|
|
1365
|
+
def cmd_analytics_edit(args):
    """Replace the tags on an analytics job."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    an_id = require_arg(args, 'analyticsid', 'Analytics ID')
    new_tags = parse_list_arg(args.tags) if args.tags else []
    output_json({"success": api.edit_analytics(workspaceId=ws, analyticsId=an_id, tags=new_tags)})
|
|
1377
|
+
|
|
1378
|
+
|
|
1379
|
+
# =============================================================================
|
|
1380
|
+
# ANNOTATIONS
|
|
1381
|
+
# =============================================================================
|
|
1382
|
+
|
|
1383
|
+
def cmd_annotations_get(args):
    """List annotation jobs in a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    output_json(api.get_annotations(
        workspaceId=ws,
        datasetId=args.datasetid,
        annotationId=args.annotationid,
    ))
|
|
1394
|
+
|
|
1395
|
+
|
|
1396
|
+
def cmd_annotations_create(args):
    """Start an annotation job for a dataset and print its ID."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    new_id = api.create_annotation(
        workspaceId=ws,
        datasetId=ds,
        format=args.format,
        mapId=args.mapid,
    )
    output_json({"annotationId": new_id})
|
|
1409
|
+
|
|
1410
|
+
|
|
1411
|
+
def cmd_annotations_delete(args):
    """Delete an annotation job."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ann_id = require_arg(args, 'annotationid', 'Annotation ID')
    output_json({"success": api.delete_annotation(workspaceId=ws, annotationId=ann_id)})
|
|
1422
|
+
|
|
1423
|
+
|
|
1424
|
+
def cmd_annotations_formats(args):
    """List the supported annotation formats."""
    output_json(get_client().get_annotation_formats())
|
|
1430
|
+
|
|
1431
|
+
|
|
1432
|
+
def cmd_annotations_download(args):
    """Download an annotation and report where it was saved."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ann_id = require_arg(args, 'annotationid', 'Annotation ID')
    saved = api.download_annotation(workspaceId=ws, annotationId=ann_id)
    output_json({"downloadPath": saved})
|
|
1443
|
+
|
|
1444
|
+
|
|
1445
|
+
def cmd_annotations_edit(args):
    """Replace the tags on an annotation."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ann_id = require_arg(args, 'annotationid', 'Annotation ID')
    new_tags = parse_list_arg(args.tags) if args.tags else []
    output_json({"success": api.edit_annotation(workspaceId=ws, annotationId=ann_id, tags=new_tags)})
|
|
1457
|
+
|
|
1458
|
+
|
|
1459
|
+
# =============================================================================
|
|
1460
|
+
# ANNOTATION-MAPS
|
|
1461
|
+
# =============================================================================
|
|
1462
|
+
|
|
1463
|
+
def cmd_annotation_maps_get(args):
    """List an organization's annotation maps."""
    api = get_client()
    org = require_arg(args, 'orgid', 'Organization ID')
    output_json(api.get_annotation_maps(organizationId=org))
|
|
1470
|
+
|
|
1471
|
+
|
|
1472
|
+
def cmd_annotation_maps_upload(args):
    """Upload an annotation map file to an organization and print its ID."""
    api = get_client()
    org = require_arg(args, 'orgid', 'Organization ID')
    path = require_arg(args, 'mapfile', 'Map file path')
    new_id = api.upload_annotation_map(
        organizationId=org,
        mapfile=path,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"mapId": new_id})
|
|
1486
|
+
|
|
1487
|
+
|
|
1488
|
+
def cmd_annotation_maps_download(args):
    """Download an annotation map and report where it was saved."""
    api = get_client()
    mid = require_arg(args, 'mapid', 'Map ID')
    saved = api.download_annotation_map(mapId=mid, localDir=args.outputdir)
    output_json({"downloadPath": saved})
|
|
1498
|
+
|
|
1499
|
+
|
|
1500
|
+
def cmd_annotation_maps_delete(args):
    """Delete an annotation map by ID."""
    api = get_client()
    mid = require_arg(args, 'mapid', 'Map ID')
    output_json({"success": api.delete_annotation_map(mapId=mid)})
|
|
1507
|
+
|
|
1508
|
+
|
|
1509
|
+
def cmd_annotation_maps_edit(args):
    """Update an annotation map's name, description and/or tags."""
    api = get_client()
    mid = require_arg(args, 'mapid', 'Map ID')
    ok = api.edit_annotation_map(
        mapId=mid,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": ok})
|
|
1521
|
+
|
|
1522
|
+
|
|
1523
|
+
# =============================================================================
|
|
1524
|
+
# GAN
|
|
1525
|
+
# =============================================================================
|
|
1526
|
+
|
|
1527
|
+
def cmd_gan_datasets_get(args):
    """List GAN-generated datasets in a workspace."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    output_json(api.get_gan_datasets(
        workspaceId=ws,
        datasetId=args.datasetid,
        gandatasetId=args.gandatasetid,
        limit=args.limit,
        fields=parse_list_arg(args.fields) if args.fields else None,
    ))
|
|
1540
|
+
|
|
1541
|
+
|
|
1542
|
+
def cmd_gan_datasets_create(args):
    """Create a GAN dataset from a source dataset and model; prints the new dataset ID."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    source_dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    gan_model_id = require_arg(args, 'modelid', 'Model ID')
    # Defaults mirror the API contract: empty description and empty tag list.
    tag_list = parse_list_arg(args.tags) if args.tags else []
    new_dataset_id = client.create_gan_dataset(
        workspaceId=workspace_id,
        datasetId=source_dataset_id,
        modelId=gan_model_id,
        name=args.name,
        description=args.description or '',
        tags=tag_list
    )
    output_json({"datasetId": new_dataset_id})
def cmd_gan_datasets_delete(args):
    """Delete a GAN dataset from a workspace; prints success as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    gan_dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    succeeded = client.delete_gan_dataset(workspaceId=workspace_id, datasetId=gan_dataset_id)
    output_json({"success": succeeded})
def cmd_gan_models_get(args):
    """List GAN models, optionally scoped to an organization/workspace/model; prints JSON."""
    client = get_client()
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    gan_models = client.get_gan_models(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        modelId=args.modelid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(gan_models)
def cmd_gan_models_upload(args):
    """Upload a GAN model file to an organization; prints the new model ID as JSON."""
    client = get_client()
    organization_id = require_arg(args, 'orgid', 'Organization ID')
    model_path = require_arg(args, 'modelfile', 'Model file path')
    tag_list = parse_list_arg(args.tags) if args.tags else None
    new_model_id = client.upload_gan_model(
        organizationId=organization_id,
        modelfile=model_path,
        name=args.name,
        description=args.description,
        flags=args.flags,
        tags=tag_list
    )
    output_json({"modelId": new_model_id})
def cmd_gan_models_download(args):
    """Download a GAN model to a local directory; prints the resulting path as JSON."""
    client = get_client()
    gan_model_id = require_arg(args, 'modelid', 'Model ID')
    download_path = client.download_gan_model(modelId=gan_model_id, localDir=args.outputdir)
    output_json({"downloadPath": download_path})
# =============================================================================
|
|
1617
|
+
# UMAP
|
|
1618
|
+
# =============================================================================
|
|
1619
|
+
|
|
1620
|
+
def cmd_umap_get(args):
    """List UMAP jobs in a workspace, optionally filtered; prints them as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    umaps = client.get_umaps(
        workspaceId=workspace_id,
        umapId=args.umapid,
        datasetId=args.datasetid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(umaps)
def cmd_umap_create(args):
    """Create a UMAP over one or more datasets; prints the new UMAP ID as JSON.

    --datasetids and --samples are parallel comma-separated lists: one sample
    count (integer) per dataset.
    """
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id_list = parse_list_arg(require_arg(args, 'datasetids', 'Dataset IDs'))
    # Sample counts arrive as strings from the CLI; coerce each to int.
    sample_counts = [int(count) for count in parse_list_arg(require_arg(args, 'samples', 'Samples'))]
    tag_list = parse_list_arg(args.tags) if args.tags else None
    new_umap_id = client.create_umap(
        workspaceId=workspace_id,
        name=args.name,
        datasetIds=dataset_id_list,
        samples=sample_counts,
        description=args.description,
        seed=args.seed,
        tags=tag_list
    )
    output_json({"umapId": new_umap_id})
def cmd_umap_delete(args):
    """Delete a UMAP from a workspace; prints success as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    target_umap_id = require_arg(args, 'umapid', 'UMAP ID')
    succeeded = client.delete_umap(workspaceId=workspace_id, umapId=target_umap_id)
    output_json({"success": succeeded})
# =============================================================================
|
|
1669
|
+
# SERVERS (Editor)
|
|
1670
|
+
# =============================================================================
|
|
1671
|
+
|
|
1672
|
+
def cmd_servers_get(args):
    """List editor servers, optionally scoped by org/workspace/server; prints JSON."""
    client = get_client()
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    servers = client.get_servers(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        serverId=args.serverid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(servers)
def cmd_servers_create(args):
    """Create an editor server; prints the new server ID as JSON."""
    client = get_client()
    new_server_id = client.create_server(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        instance=args.instance,
        name=args.name
    )
    output_json({"serverId": new_server_id})
def cmd_servers_delete(args):
    """Delete an editor server; prints success as JSON."""
    client = get_client()
    target_server_id = require_arg(args, 'serverid', 'Server ID')
    succeeded = client.delete_server(serverId=target_server_id)
    output_json({"success": succeeded})
def cmd_servers_start(args):
    """Start a stopped editor server; prints success as JSON."""
    client = get_client()
    target_server_id = require_arg(args, 'serverid', 'Server ID')
    succeeded = client.start_server(serverId=target_server_id)
    output_json({"success": succeeded})
def cmd_servers_stop(args):
    """Stop a running editor server; prints success as JSON."""
    client = get_client()
    target_server_id = require_arg(args, 'serverid', 'Server ID')
    succeeded = client.stop_server(serverId=target_server_id)
    output_json({"success": succeeded})
# =============================================================================
|
|
1727
|
+
# ML
|
|
1728
|
+
# =============================================================================
|
|
1729
|
+
|
|
1730
|
+
def cmd_ml_architectures(args):
    """List available ML architectures; prints them as JSON."""
    client = get_client()
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    architectures = client.get_ml_architectures(fields=requested_fields)
    output_json(architectures)
def cmd_ml_models_get(args):
    """List ML models in a workspace, optionally filtered; prints them as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    ml_models = client.get_ml_models(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        modelId=args.modelid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(ml_models)
def cmd_ml_models_create(args):
    """Start an ML model training job; prints the new model ID as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    training_dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    architecture_id = require_arg(args, 'architectureid', 'Architecture ID')
    # NOTE(review): the parameters value is forwarded verbatim to the client —
    # presumably a JSON string; confirm against create_ml_model's contract.
    training_parameters = require_arg(args, 'parameters', 'Parameters JSON')
    tag_list = parse_list_arg(args.tags) if args.tags else None
    new_model_id = client.create_ml_model(
        workspaceId=workspace_id,
        datasetId=training_dataset_id,
        architectureId=architecture_id,
        name=args.name,
        parameters=training_parameters,
        description=args.description,
        tags=tag_list
    )
    output_json({"modelId": new_model_id})
def cmd_ml_models_download(args):
    """Download an ML model (optionally a specific checkpoint); prints the local path."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    target_model_id = require_arg(args, 'modelid', 'Model ID')
    download_path = client.download_ml_model(
        workspaceId=workspace_id,
        modelId=target_model_id,
        checkpoint=args.checkpoint,
        localDir=args.outputdir
    )
    output_json({"downloadPath": download_path})
def cmd_ml_inferences_get(args):
    """List ML inference jobs in a workspace, optionally filtered; prints JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    inferences = client.get_ml_inferences(
        workspaceId=workspace_id,
        inferenceId=args.inferenceid,
        datasetId=args.datasetid,
        modelId=args.modelid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(inferences)
def cmd_ml_inferences_create(args):
    """Start an ML inference job for a dataset/model pair; prints the inference ID."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    target_dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    target_model_id = require_arg(args, 'modelid', 'Model ID')
    tag_list = parse_list_arg(args.tags) if args.tags else None
    new_inference_id = client.create_ml_inference(
        workspaceId=workspace_id,
        datasetId=target_dataset_id,
        modelId=target_model_id,
        mapId=args.mapid,
        tags=tag_list
    )
    output_json({"inferenceId": new_inference_id})
# =============================================================================
|
|
1824
|
+
# INPAINT
|
|
1825
|
+
# =============================================================================
|
|
1826
|
+
|
|
1827
|
+
def cmd_inpaint_get(args):
    """List inpaint jobs on a volume, optionally filtered; prints them as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    inpaint_jobs = client.get_inpaints(
        volumeId=volume_id,
        inpaintId=args.inpaintid,
        limit=args.limit,
        fields=requested_fields
    )
    output_json(inpaint_jobs)
def cmd_inpaint_log(args):
    """Fetch the log for an inpaint job on a volume; prints it as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    target_inpaint_id = require_arg(args, 'inpaintid', 'Inpaint ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    log_entries = client.get_inpaint_log(
        volumeId=volume_id,
        inpaintId=target_inpaint_id,
        fields=requested_fields
    )
    output_json(log_entries)
def cmd_inpaint_create(args):
    """Create an inpaint job on a volume; prints the new inpaint ID as JSON.

    Required args: volumeid, location. Optional args: files (comma-separated
    list, default empty), destination, dilation (default 5), inputtype
    (default 'MASK'), outputtype (default 'PNG').
    """
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    location = require_arg(args, 'location', 'Location')

    # Bug fix: the original used `args.dilation or 5`, which silently turns an
    # explicit `--dilation 0` (falsy but meaningful) into the default 5.
    # Distinguish "not provided" (None) from "provided as 0" explicitly.
    dilation = args.dilation if args.dilation is not None else 5

    result = client.create_inpaint(
        volumeId=volume_id,
        location=location,
        files=parse_list_arg(args.files) if args.files else [],
        destination=args.destination,
        dilation=dilation,
        inputType=args.inputtype or 'MASK',
        outputType=args.outputtype or 'PNG'
    )
    output_json({"inpaintId": result})
def cmd_inpaint_delete(args):
    """Delete an inpaint job from a volume; prints success as JSON."""
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    target_inpaint_id = require_arg(args, 'inpaintid', 'Inpaint ID')
    succeeded = client.delete_inpaint(volumeId=volume_id, inpaintId=target_inpaint_id)
    output_json({"success": succeeded})
# =============================================================================
|
|
1887
|
+
# PREVIEW
|
|
1888
|
+
# =============================================================================
|
|
1889
|
+
|
|
1890
|
+
def cmd_preview_get(args):
    """Fetch a preview job from a workspace; prints it as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    target_preview_id = require_arg(args, 'previewid', 'Preview ID')
    requested_fields = parse_list_arg(args.fields) if args.fields else None
    preview = client.get_preview(
        workspaceId=workspace_id,
        previewId=target_preview_id,
        fields=requested_fields
    )
    output_json(preview)
def cmd_preview_create(args):
    """Create a preview job for a graph; prints the new preview ID as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    target_graph_id = require_arg(args, 'graphid', 'Graph ID')
    new_preview_id = client.create_preview(workspaceId=workspace_id, graphId=target_graph_id)
    output_json({"previewId": new_preview_id})
# =============================================================================
|
|
1918
|
+
# AGENTS
|
|
1919
|
+
# =============================================================================
|
|
1920
|
+
|
|
1921
|
+
def cmd_agents_types(args):
    """List the data types available to agents; prints them as JSON."""
    client = get_client()
    data_types = client.get_data_types()
    output_json(data_types)
def cmd_agents_fields(args):
    """List the queryable fields for a given data type; prints them as JSON."""
    client = get_client()
    requested_type = require_arg(args, 'type', 'Data type')
    type_fields = client.get_data_fields(type=requested_type)
    output_json(type_fields)
# =============================================================================
|
|
1939
|
+
# RULES
|
|
1940
|
+
# =============================================================================
|
|
1941
|
+
|
|
1942
|
+
def cmd_rules_organization(args):
    """Fetch the rules configured for an organization; prints {"rules": ...}."""
    client = get_client()
    org_rules = client.get_organization_rules(organizationId=args.orgid)
    output_json({"rules": org_rules})
def cmd_rules_workspace(args):
    """Fetch the rules configured for a workspace; prints {"rules": ...}."""
    client = get_client()
    workspace_rules = client.get_workspace_rules(workspaceId=args.workspaceid)
    output_json({"rules": workspace_rules})
def cmd_rules_service(args):
    """Fetch the rules configured for a service; prints {"rules": ...}."""
    client = get_client()
    target_service_id = require_arg(args, 'serviceid', 'Service ID')
    service_rules = client.get_service_rules(serviceId=target_service_id)
    output_json({"rules": service_rules})
def cmd_rules_user(args):
    """Fetch the rules for the authenticated user; prints {"rules": ...}."""
    client = get_client()
    user_rules = client.get_user_rules()
    output_json({"rules": user_rules})
def cmd_rules_edit_organization(args):
    """Replace an organization's rules; prints success as JSON."""
    client = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    succeeded = client.edit_organization_rules(organizationId=args.orgid, rules=new_rules)
    output_json({"success": succeeded})
def cmd_rules_edit_workspace(args):
    """Replace a workspace's rules; prints success as JSON."""
    client = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    succeeded = client.edit_workspace_rules(workspaceId=args.workspaceid, rules=new_rules)
    output_json({"success": succeeded})
def cmd_rules_edit_service(args):
    """Replace a service's rules; prints success as JSON."""
    client = get_client()
    target_service_id = require_arg(args, 'serviceid', 'Service ID')
    new_rules = require_arg(args, 'rules', 'Rules')
    succeeded = client.edit_service_rules(serviceId=target_service_id, rules=new_rules)
    output_json({"success": succeeded})
def cmd_rules_edit_user(args):
    """Replace the authenticated user's rules; prints success as JSON."""
    client = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    succeeded = client.edit_user_rules(rules=new_rules)
    output_json({"success": succeeded})
# =============================================================================
|
|
2022
|
+
# MAIN PARSER
|
|
2023
|
+
# =============================================================================
|
|
2024
|
+
|
|
2025
|
+
def create_parser():
|
|
2026
|
+
"""Create the argument parser with all subcommands."""
|
|
2027
|
+
parser = argparse.ArgumentParser(
|
|
2028
|
+
prog='renderedai',
|
|
2029
|
+
description='Rendered.ai Platform CLI - JSON output for automation and AI agents',
|
|
2030
|
+
formatter_class=argparse.RawDescriptionHelpFormatter,
|
|
2031
|
+
epilog="""
|
|
2032
|
+
Environment Variables:
|
|
2033
|
+
RENDEREDAI_API_KEY API key for authentication (required)
|
|
2034
|
+
RENDEREDAI_ENVIRONMENT Environment: prod, test, dev (default: prod)
|
|
2035
|
+
RENDEREDAI_ENDPOINT Custom API endpoint URL
|
|
2036
|
+
|
|
2037
|
+
Examples:
|
|
2038
|
+
renderedai workspaces get --orgid abc123
|
|
2039
|
+
renderedai datasets get --workspaceid xyz789 --limit 10
|
|
2040
|
+
renderedai volumes create --name "My Volume" --orgid abc123
|
|
2041
|
+
renderedai graphs get --workspaceid xyz789 --graphid graph123
|
|
2042
|
+
"""
|
|
2043
|
+
)
|
|
2044
|
+
|
|
2045
|
+
subparsers = parser.add_subparsers(dest='resource', help='Resource to manage')
|
|
2046
|
+
|
|
2047
|
+
# -------------------------------------------------------------------------
|
|
2048
|
+
# WORKSPACES
|
|
2049
|
+
# -------------------------------------------------------------------------
|
|
2050
|
+
workspaces = subparsers.add_parser('workspaces', help='Manage workspaces')
|
|
2051
|
+
workspaces_sub = workspaces.add_subparsers(dest='action', help='Action')
|
|
2052
|
+
|
|
2053
|
+
# workspaces get
|
|
2054
|
+
ws_get = workspaces_sub.add_parser('get', help='Get workspaces')
|
|
2055
|
+
ws_get.add_argument('--workspaceid', help='Filter by workspace ID')
|
|
2056
|
+
ws_get.add_argument('--orgid', help='Filter by organization ID')
|
|
2057
|
+
ws_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2058
|
+
ws_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2059
|
+
ws_get.set_defaults(func=cmd_workspaces_get)
|
|
2060
|
+
|
|
2061
|
+
# workspaces create
|
|
2062
|
+
ws_create = workspaces_sub.add_parser('create', help='Create a workspace')
|
|
2063
|
+
ws_create.add_argument('--name', required=True, help='Workspace name')
|
|
2064
|
+
ws_create.add_argument('--description', help='Description')
|
|
2065
|
+
ws_create.add_argument('--orgid', required=True, help='Organization ID')
|
|
2066
|
+
ws_create.add_argument('--channelids', help='Comma-separated channel IDs')
|
|
2067
|
+
ws_create.add_argument('--volumeids', help='Comma-separated volume IDs')
|
|
2068
|
+
ws_create.add_argument('--tags', help='Comma-separated tags')
|
|
2069
|
+
ws_create.set_defaults(func=cmd_workspaces_create)
|
|
2070
|
+
|
|
2071
|
+
# workspaces edit
|
|
2072
|
+
ws_edit = workspaces_sub.add_parser('edit', help='Edit a workspace')
|
|
2073
|
+
ws_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2074
|
+
ws_edit.add_argument('--name', help='New name')
|
|
2075
|
+
ws_edit.add_argument('--description', help='New description')
|
|
2076
|
+
ws_edit.add_argument('--channelids', help='Comma-separated channel IDs')
|
|
2077
|
+
ws_edit.add_argument('--volumeids', help='Comma-separated volume IDs')
|
|
2078
|
+
ws_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2079
|
+
ws_edit.set_defaults(func=cmd_workspaces_edit)
|
|
2080
|
+
|
|
2081
|
+
# workspaces delete
|
|
2082
|
+
ws_delete = workspaces_sub.add_parser('delete', help='Delete a workspace')
|
|
2083
|
+
ws_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2084
|
+
ws_delete.set_defaults(func=cmd_workspaces_delete)
|
|
2085
|
+
|
|
2086
|
+
# workspaces mount
|
|
2087
|
+
ws_mount = workspaces_sub.add_parser('mount', help='Mount a workspace to local filesystem')
|
|
2088
|
+
ws_mount.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2089
|
+
ws_mount.add_argument('--path', help='Local path to mount to (default: current directory)')
|
|
2090
|
+
ws_mount.add_argument('--mountexec', choices=['goofys', 's3fs', 'mount-s3'], help='Mount executable')
|
|
2091
|
+
ws_mount.set_defaults(func=cmd_workspaces_mount)
|
|
2092
|
+
|
|
2093
|
+
# workspaces unmount
|
|
2094
|
+
ws_unmount = workspaces_sub.add_parser('unmount', help='Unmount a workspace from local filesystem')
|
|
2095
|
+
ws_unmount.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2096
|
+
ws_unmount.set_defaults(func=cmd_workspaces_unmount)
|
|
2097
|
+
|
|
2098
|
+
# -------------------------------------------------------------------------
|
|
2099
|
+
# ORGANIZATIONS
|
|
2100
|
+
# -------------------------------------------------------------------------
|
|
2101
|
+
organizations = subparsers.add_parser('organizations', help='Manage organizations')
|
|
2102
|
+
organizations_sub = organizations.add_subparsers(dest='action', help='Action')
|
|
2103
|
+
|
|
2104
|
+
# organizations get
|
|
2105
|
+
org_get = organizations_sub.add_parser('get', help='Get organizations')
|
|
2106
|
+
org_get.add_argument('--orgid', help='Filter by organization ID')
|
|
2107
|
+
org_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2108
|
+
org_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2109
|
+
org_get.set_defaults(func=cmd_organizations_get)
|
|
2110
|
+
|
|
2111
|
+
# -------------------------------------------------------------------------
|
|
2112
|
+
# MEMBERS
|
|
2113
|
+
# -------------------------------------------------------------------------
|
|
2114
|
+
members = subparsers.add_parser('members', help='Manage organization members')
|
|
2115
|
+
members_sub = members.add_subparsers(dest='action', help='Action')
|
|
2116
|
+
|
|
2117
|
+
# members get
|
|
2118
|
+
members_get = members_sub.add_parser('get', help='Get organization members')
|
|
2119
|
+
members_get.add_argument('--orgid', required=True, help='Organization ID')
|
|
2120
|
+
members_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2121
|
+
members_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2122
|
+
members_get.set_defaults(func=cmd_members_get)
|
|
2123
|
+
|
|
2124
|
+
# -------------------------------------------------------------------------
|
|
2125
|
+
# DATASETS
|
|
2126
|
+
# -------------------------------------------------------------------------
|
|
2127
|
+
datasets = subparsers.add_parser('datasets', help='Manage datasets')
|
|
2128
|
+
datasets_sub = datasets.add_subparsers(dest='action', help='Action')
|
|
2129
|
+
|
|
2130
|
+
# datasets get
|
|
2131
|
+
ds_get = datasets_sub.add_parser('get', help='Get datasets')
|
|
2132
|
+
ds_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2133
|
+
ds_get.add_argument('--datasetid', help='Filter by dataset ID')
|
|
2134
|
+
ds_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2135
|
+
ds_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2136
|
+
ds_get.set_defaults(func=cmd_datasets_get)
|
|
2137
|
+
|
|
2138
|
+
# datasets create
|
|
2139
|
+
ds_create = datasets_sub.add_parser('create', help='Create a dataset')
|
|
2140
|
+
ds_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2141
|
+
ds_create.add_argument('--name', required=True, help='Dataset name')
|
|
2142
|
+
ds_create.add_argument('--graphid', required=True, help='Graph ID')
|
|
2143
|
+
ds_create.add_argument('--description', help='Description')
|
|
2144
|
+
ds_create.add_argument('--runs', type=int, default=1, help='Number of runs')
|
|
2145
|
+
ds_create.add_argument('--seed', type=int, default=1, help='Seed')
|
|
2146
|
+
ds_create.add_argument('--priority', type=int, default=1, help='Priority (1-3)')
|
|
2147
|
+
ds_create.add_argument('--tags', help='Comma-separated tags')
|
|
2148
|
+
ds_create.set_defaults(func=cmd_datasets_create)
|
|
2149
|
+
|
|
2150
|
+
# datasets edit
|
|
2151
|
+
ds_edit = datasets_sub.add_parser('edit', help='Edit a dataset')
|
|
2152
|
+
ds_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2153
|
+
ds_edit.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2154
|
+
ds_edit.add_argument('--name', help='New name')
|
|
2155
|
+
ds_edit.add_argument('--description', help='New description')
|
|
2156
|
+
ds_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2157
|
+
ds_edit.add_argument('--pause', action='store_true', help='Pause the job')
|
|
2158
|
+
ds_edit.add_argument('--priority', type=int, help='Priority (1-3)')
|
|
2159
|
+
ds_edit.set_defaults(func=cmd_datasets_edit)
|
|
2160
|
+
|
|
2161
|
+
# datasets delete
|
|
2162
|
+
ds_delete = datasets_sub.add_parser('delete', help='Delete a dataset')
|
|
2163
|
+
ds_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2164
|
+
ds_delete.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2165
|
+
ds_delete.set_defaults(func=cmd_datasets_delete)
|
|
2166
|
+
|
|
2167
|
+
# datasets cancel
|
|
2168
|
+
ds_cancel = datasets_sub.add_parser('cancel', help='Cancel a running job')
|
|
2169
|
+
ds_cancel.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2170
|
+
ds_cancel.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2171
|
+
ds_cancel.set_defaults(func=cmd_datasets_cancel)
|
|
2172
|
+
|
|
2173
|
+
# datasets download
|
|
2174
|
+
ds_download = datasets_sub.add_parser('download', help='Download a dataset')
|
|
2175
|
+
ds_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2176
|
+
ds_download.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2177
|
+
ds_download.add_argument('--outputdir', help='Output directory')
|
|
2178
|
+
ds_download.set_defaults(func=cmd_datasets_download)
|
|
2179
|
+
|
|
2180
|
+
# datasets upload
|
|
2181
|
+
ds_upload = datasets_sub.add_parser('upload', help='Upload a dataset')
|
|
2182
|
+
ds_upload.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2183
|
+
ds_upload.add_argument('--file', required=True, help='File to upload')
|
|
2184
|
+
ds_upload.add_argument('--description', help='Description')
|
|
2185
|
+
ds_upload.add_argument('--tags', help='Comma-separated tags')
|
|
2186
|
+
ds_upload.set_defaults(func=cmd_datasets_upload)
|
|
2187
|
+
|
|
2188
|
+
# datasets runs
|
|
2189
|
+
ds_runs = datasets_sub.add_parser('runs', help='Get dataset runs')
|
|
2190
|
+
ds_runs.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2191
|
+
ds_runs.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2192
|
+
ds_runs.add_argument('--state', help='Filter by state')
|
|
2193
|
+
ds_runs.add_argument('--fields', help='Comma-separated fields to return')
|
|
2194
|
+
ds_runs.set_defaults(func=cmd_datasets_runs)
|
|
2195
|
+
|
|
2196
|
+
# datasets log
|
|
2197
|
+
ds_log = datasets_sub.add_parser('log', help='Get dataset run log')
|
|
2198
|
+
ds_log.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2199
|
+
ds_log.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2200
|
+
ds_log.add_argument('--runid', required=True, help='Run ID')
|
|
2201
|
+
ds_log.add_argument('--fields', help='Comma-separated fields to return')
|
|
2202
|
+
ds_log.set_defaults(func=cmd_datasets_log)
|
|
2203
|
+
|
|
2204
|
+
# datasets files
|
|
2205
|
+
ds_files = datasets_sub.add_parser('files', help='Get dataset files')
|
|
2206
|
+
ds_files.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2207
|
+
ds_files.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2208
|
+
ds_files.add_argument('--path', help='Path within dataset')
|
|
2209
|
+
ds_files.add_argument('--limit', type=int, default=100, help='Maximum results')
|
|
2210
|
+
ds_files.set_defaults(func=cmd_datasets_files)
|
|
2211
|
+
|
|
2212
|
+
# datasets jobs
|
|
2213
|
+
ds_jobs = datasets_sub.add_parser('jobs', help='Get dataset jobs')
|
|
2214
|
+
ds_jobs.add_argument('--workspaceid', help='Workspace ID')
|
|
2215
|
+
ds_jobs.add_argument('--orgid', help='Organization ID')
|
|
2216
|
+
ds_jobs.add_argument('--datasetid', help='Filter by dataset ID')
|
|
2217
|
+
ds_jobs.add_argument('--limit', type=int, help='Maximum results')
|
|
2218
|
+
ds_jobs.add_argument('--fields', help='Comma-separated fields to return')
|
|
2219
|
+
ds_jobs.set_defaults(func=cmd_datasets_jobs)
|
|
2220
|
+
|
|
2221
|
+
# datasets create-mixed
|
|
2222
|
+
ds_create_mixed = datasets_sub.add_parser('create-mixed', help='Create a mixed dataset')
|
|
2223
|
+
ds_create_mixed.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2224
|
+
ds_create_mixed.add_argument('--name', required=True, help='Dataset name')
|
|
2225
|
+
ds_create_mixed.add_argument('--parameters', required=True, help='JSON parameters: {"datasetId1": {"samples": N, "classes": [...]}, ...}')
|
|
2226
|
+
ds_create_mixed.add_argument('--description', help='Description')
|
|
2227
|
+
ds_create_mixed.add_argument('--seed', type=int, help='Seed')
|
|
2228
|
+
ds_create_mixed.add_argument('--tags', help='Comma-separated tags')
|
|
2229
|
+
ds_create_mixed.set_defaults(func=cmd_datasets_create_mixed)
|
|
2230
|
+
|
|
2231
|
+
# -------------------------------------------------------------------------
|
|
2232
|
+
# VOLUMES
|
|
2233
|
+
# -------------------------------------------------------------------------
|
|
2234
|
+
volumes = subparsers.add_parser('volumes', help='Manage volumes')
|
|
2235
|
+
volumes_sub = volumes.add_subparsers(dest='action', help='Action')
|
|
2236
|
+
|
|
2237
|
+
# volumes get
|
|
2238
|
+
vol_get = volumes_sub.add_parser('get', help='Get volumes')
|
|
2239
|
+
vol_get.add_argument('--volumeid', help='Filter by volume ID')
|
|
2240
|
+
vol_get.add_argument('--workspaceid', help='Filter by workspace ID')
|
|
2241
|
+
vol_get.add_argument('--orgid', help='Filter by organization ID')
|
|
2242
|
+
vol_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2243
|
+
vol_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2244
|
+
vol_get.set_defaults(func=cmd_volumes_get)
|
|
2245
|
+
|
|
2246
|
+
# volumes create
|
|
2247
|
+
vol_create = volumes_sub.add_parser('create', help='Create a volume')
|
|
2248
|
+
vol_create.add_argument('--name', required=True, help='Volume name')
|
|
2249
|
+
vol_create.add_argument('--description', help='Description')
|
|
2250
|
+
vol_create.add_argument('--orgid', required=True, help='Organization ID')
|
|
2251
|
+
vol_create.add_argument('--permission', choices=['read', 'write', 'view'], help='Permission')
|
|
2252
|
+
vol_create.add_argument('--tags', help='Comma-separated tags')
|
|
2253
|
+
vol_create.set_defaults(func=cmd_volumes_create)
|
|
2254
|
+
|
|
2255
|
+
# volumes edit
|
|
2256
|
+
vol_edit = volumes_sub.add_parser('edit', help='Edit a volume')
|
|
2257
|
+
vol_edit.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2258
|
+
vol_edit.add_argument('--name', help='New name')
|
|
2259
|
+
vol_edit.add_argument('--description', help='New description')
|
|
2260
|
+
vol_edit.add_argument('--permission', choices=['read', 'write', 'view'], help='Permission')
|
|
2261
|
+
vol_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2262
|
+
vol_edit.set_defaults(func=cmd_volumes_edit)
|
|
2263
|
+
|
|
2264
|
+
# volumes delete
|
|
2265
|
+
vol_delete = volumes_sub.add_parser('delete', help='Delete a volume')
|
|
2266
|
+
vol_delete.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2267
|
+
vol_delete.set_defaults(func=cmd_volumes_delete)
|
|
2268
|
+
|
|
2269
|
+
# volumes mount
|
|
2270
|
+
vol_mount = volumes_sub.add_parser('mount', help='Mount a volume to local filesystem')
|
|
2271
|
+
vol_mount.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2272
|
+
vol_mount.add_argument('--path', help='Local path to mount to (default: current directory)')
|
|
2273
|
+
vol_mount.add_argument('--mountexec', choices=['goofys', 's3fs', 'mount-s3'], help='Mount executable')
|
|
2274
|
+
vol_mount.set_defaults(func=cmd_volumes_mount)
|
|
2275
|
+
|
|
2276
|
+
# volumes unmount
|
|
2277
|
+
vol_unmount = volumes_sub.add_parser('unmount', help='Unmount a volume from local filesystem')
|
|
2278
|
+
vol_unmount.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2279
|
+
vol_unmount.set_defaults(func=cmd_volumes_unmount)
|
|
2280
|
+
|
|
2281
|
+
# -------------------------------------------------------------------------
|
|
2282
|
+
# VOLUME-DATA
|
|
2283
|
+
# -------------------------------------------------------------------------
|
|
2284
|
+
volume_data = subparsers.add_parser('volume-data', help='Manage volume data')
|
|
2285
|
+
volume_data_sub = volume_data.add_subparsers(dest='action', help='Action')
|
|
2286
|
+
|
|
2287
|
+
# volume-data get
|
|
2288
|
+
vd_get = volume_data_sub.add_parser('get', help='Get volume data')
|
|
2289
|
+
vd_get.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2290
|
+
vd_get.add_argument('--dir', help='Directory path')
|
|
2291
|
+
vd_get.add_argument('--files', help='Comma-separated file paths')
|
|
2292
|
+
vd_get.add_argument('--recursive', action='store_true', help='Recursive listing')
|
|
2293
|
+
vd_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2294
|
+
vd_get.set_defaults(func=cmd_volume_data_get)
|
|
2295
|
+
|
|
2296
|
+
# volume-data upload
|
|
2297
|
+
vd_upload = volume_data_sub.add_parser('upload', help='Upload data to a volume')
|
|
2298
|
+
vd_upload.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2299
|
+
vd_upload.add_argument('--localdir', help='Local directory')
|
|
2300
|
+
vd_upload.add_argument('--files', help='Comma-separated files to upload')
|
|
2301
|
+
vd_upload.add_argument('--destdir', help='Destination directory in volume')
|
|
2302
|
+
vd_upload.add_argument('--sync', action='store_true', help='Sync mode')
|
|
2303
|
+
vd_upload.set_defaults(func=cmd_volume_data_upload)
|
|
2304
|
+
|
|
2305
|
+
# volume-data download
|
|
2306
|
+
vd_download = volume_data_sub.add_parser('download', help='Download data from a volume')
|
|
2307
|
+
vd_download.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2308
|
+
vd_download.add_argument('--outputdir', help='Output directory')
|
|
2309
|
+
vd_download.add_argument('--files', help='Comma-separated files to download')
|
|
2310
|
+
vd_download.add_argument('--recursive', action='store_true', default=True, help='Recursive download')
|
|
2311
|
+
vd_download.add_argument('--sync', action='store_true', help='Sync mode')
|
|
2312
|
+
vd_download.set_defaults(func=cmd_volume_data_download)
|
|
2313
|
+
|
|
2314
|
+
# volume-data delete
|
|
2315
|
+
vd_delete = volume_data_sub.add_parser('delete', help='Delete data from a volume')
|
|
2316
|
+
vd_delete.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2317
|
+
vd_delete.add_argument('--files', required=True, help='Comma-separated files to delete')
|
|
2318
|
+
vd_delete.set_defaults(func=cmd_volume_data_delete)
|
|
2319
|
+
|
|
2320
|
+
# volume-data search
|
|
2321
|
+
vd_search = volume_data_sub.add_parser('search', help='Search a volume')
|
|
2322
|
+
vd_search.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2323
|
+
vd_search.add_argument('--dir', help='Directory to search')
|
|
2324
|
+
vd_search.add_argument('--recursive', action='store_true', default=True, help='Recursive search')
|
|
2325
|
+
vd_search.add_argument('--keywords', help='Comma-separated keywords')
|
|
2326
|
+
vd_search.add_argument('--formats', help='Comma-separated file formats (e.g., png,jpg)')
|
|
2327
|
+
vd_search.add_argument('--types', help='Comma-separated file types (e.g., Image,3D)')
|
|
2328
|
+
vd_search.add_argument('--limit', type=int, help='Maximum results')
|
|
2329
|
+
vd_search.set_defaults(func=cmd_volume_data_search)
|
|
2330
|
+
|
|
2331
|
+
# -------------------------------------------------------------------------
|
|
2332
|
+
# GRAPHS
|
|
2333
|
+
# -------------------------------------------------------------------------
|
|
2334
|
+
graphs = subparsers.add_parser('graphs', help='Manage graphs')
|
|
2335
|
+
graphs_sub = graphs.add_subparsers(dest='action', help='Action')
|
|
2336
|
+
|
|
2337
|
+
# graphs get
|
|
2338
|
+
gr_get = graphs_sub.add_parser('get', help='Get graphs')
|
|
2339
|
+
gr_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2340
|
+
gr_get.add_argument('--graphid', help='Filter by graph ID')
|
|
2341
|
+
gr_get.add_argument('--staged', action='store_true', help='Only staged graphs')
|
|
2342
|
+
gr_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2343
|
+
gr_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2344
|
+
gr_get.set_defaults(func=cmd_graphs_get)
|
|
2345
|
+
|
|
2346
|
+
# graphs create
|
|
2347
|
+
gr_create = graphs_sub.add_parser('create', help='Create a graph (editable)')
|
|
2348
|
+
gr_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2349
|
+
gr_create.add_argument('--file', required=True, help='Graph file (JSON or YAML)')
|
|
2350
|
+
gr_create.add_argument('--channelid', required=True, help='Channel ID')
|
|
2351
|
+
gr_create.add_argument('--name', required=True, help='Graph name')
|
|
2352
|
+
gr_create.add_argument('--description', help='Description')
|
|
2353
|
+
gr_create.set_defaults(func=cmd_graphs_create)
|
|
2354
|
+
|
|
2355
|
+
# graphs edit
|
|
2356
|
+
gr_edit = graphs_sub.add_parser('edit', help='Edit a graph')
|
|
2357
|
+
gr_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2358
|
+
gr_edit.add_argument('--graphid', required=True, help='Graph ID')
|
|
2359
|
+
gr_edit.add_argument('--name', help='New name')
|
|
2360
|
+
gr_edit.add_argument('--description', help='New description')
|
|
2361
|
+
gr_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2362
|
+
gr_edit.set_defaults(func=cmd_graphs_edit)
|
|
2363
|
+
|
|
2364
|
+
# graphs delete
|
|
2365
|
+
gr_delete = graphs_sub.add_parser('delete', help='Delete a graph')
|
|
2366
|
+
gr_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2367
|
+
gr_delete.add_argument('--graphid', required=True, help='Graph ID')
|
|
2368
|
+
gr_delete.set_defaults(func=cmd_graphs_delete)
|
|
2369
|
+
|
|
2370
|
+
# graphs download
|
|
2371
|
+
gr_download = graphs_sub.add_parser('download', help='Download a graph to a file')
|
|
2372
|
+
gr_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2373
|
+
gr_download.add_argument('--graphid', required=True, help='Graph ID')
|
|
2374
|
+
gr_download.add_argument('--outputfile', help='Output file path (default: <graphid>.yaml)')
|
|
2375
|
+
gr_download.set_defaults(func=cmd_graphs_download)
|
|
2376
|
+
|
|
2377
|
+
# graphs stage
|
|
2378
|
+
gr_stage = graphs_sub.add_parser('stage', help='Stage an existing graph (creates read-only copy)')
|
|
2379
|
+
gr_stage.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2380
|
+
gr_stage.add_argument('--graphid', required=True, help='Graph ID to stage')
|
|
2381
|
+
gr_stage.add_argument('--name', help='Name for staged graph (default: original-name-staged)')
|
|
2382
|
+
gr_stage.add_argument('--description', help='Description for staged graph')
|
|
2383
|
+
gr_stage.set_defaults(func=cmd_graphs_stage)
|
|
2384
|
+
|
|
2385
|
+
# -------------------------------------------------------------------------
|
|
2386
|
+
# GRAPH-EDITOR
|
|
2387
|
+
# -------------------------------------------------------------------------
|
|
2388
|
+
graph_editor = subparsers.add_parser('graph-editor', help='Graph editor integration')
|
|
2389
|
+
graph_editor_sub = graph_editor.add_subparsers(dest='action', help='Action')
|
|
2390
|
+
|
|
2391
|
+
# graph-editor open
|
|
2392
|
+
ge_open = graph_editor_sub.add_parser('open', help='Download graph and schema, open in graph editor')
|
|
2393
|
+
ge_open.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2394
|
+
ge_open.add_argument('--graphid', required=True, help='Graph ID')
|
|
2395
|
+
ge_open.add_argument('--outputdir', help='Output directory (default: current directory)')
|
|
2396
|
+
ge_open.set_defaults(func=cmd_graph_editor_open)
|
|
2397
|
+
|
|
2398
|
+
# -------------------------------------------------------------------------
|
|
2399
|
+
# CHANNELS
|
|
2400
|
+
# -------------------------------------------------------------------------
|
|
2401
|
+
channels = subparsers.add_parser('channels', help='Manage channels')
|
|
2402
|
+
channels_sub = channels.add_subparsers(dest='action', help='Action')
|
|
2403
|
+
|
|
2404
|
+
# channels get
|
|
2405
|
+
ch_get = channels_sub.add_parser('get', help='Get channels')
|
|
2406
|
+
ch_get.add_argument('--workspaceid', help='Filter by workspace ID')
|
|
2407
|
+
ch_get.add_argument('--orgid', help='Filter by organization ID')
|
|
2408
|
+
ch_get.add_argument('--channelid', help='Filter by channel ID')
|
|
2409
|
+
ch_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2410
|
+
ch_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2411
|
+
ch_get.set_defaults(func=cmd_channels_get)
|
|
2412
|
+
|
|
2413
|
+
# channels schema
|
|
2414
|
+
ch_schema = channels_sub.add_parser('schema', help='Get channel schema')
|
|
2415
|
+
ch_schema.add_argument('--channelid', required=True, help='Channel ID')
|
|
2416
|
+
ch_schema.add_argument('--fields', help='Comma-separated fields to return')
|
|
2417
|
+
ch_schema.set_defaults(func=cmd_channels_schema)
|
|
2418
|
+
|
|
2419
|
+
# channels nodes
|
|
2420
|
+
ch_nodes = channels_sub.add_parser('nodes', help='Get node documentation')
|
|
2421
|
+
ch_nodes.add_argument('--channelid', required=True, help='Channel ID')
|
|
2422
|
+
ch_nodes.add_argument('--node', required=True, help='Node name')
|
|
2423
|
+
ch_nodes.add_argument('--fields', help='Comma-separated fields to return')
|
|
2424
|
+
ch_nodes.set_defaults(func=cmd_channels_nodes)
|
|
2425
|
+
|
|
2426
|
+
# channels docs
|
|
2427
|
+
ch_docs = channels_sub.add_parser('docs', help='Get channel documentation')
|
|
2428
|
+
ch_docs.add_argument('--channelid', required=True, help='Channel ID')
|
|
2429
|
+
ch_docs.set_defaults(func=cmd_channels_docs)
|
|
2430
|
+
|
|
2431
|
+
# channels get-default-graph
|
|
2432
|
+
ch_get_default = channels_sub.add_parser('get-default-graph', help='Get the default graph for a channel')
|
|
2433
|
+
ch_get_default.add_argument('--channelid', required=True, help='Channel ID')
|
|
2434
|
+
ch_get_default.add_argument('--outputfile', help='Output file path (default: default.yaml)')
|
|
2435
|
+
ch_get_default.set_defaults(func=cmd_channels_get_default_graph)
|
|
2436
|
+
|
|
2437
|
+
# channels set-default-graph
|
|
2438
|
+
ch_set_default = channels_sub.add_parser('set-default-graph', help='Set the default graph for a channel')
|
|
2439
|
+
ch_set_default.add_argument('--graphid', required=True, help='Graph ID')
|
|
2440
|
+
ch_set_default.add_argument('--workspaceid', help='Workspace ID')
|
|
2441
|
+
ch_set_default.set_defaults(func=cmd_channels_set_default_graph)
|
|
2442
|
+
|
|
2443
|
+
# -------------------------------------------------------------------------
|
|
2444
|
+
# SERVICES
|
|
2445
|
+
# -------------------------------------------------------------------------
|
|
2446
|
+
services = subparsers.add_parser('services', help='Manage services')
|
|
2447
|
+
services_sub = services.add_subparsers(dest='action', help='Action')
|
|
2448
|
+
|
|
2449
|
+
# services get
|
|
2450
|
+
svc_get = services_sub.add_parser('get', help='Get services')
|
|
2451
|
+
svc_get.add_argument('--workspaceid', help='Filter by workspace ID')
|
|
2452
|
+
svc_get.add_argument('--orgid', help='Filter by organization ID')
|
|
2453
|
+
svc_get.add_argument('--serviceid', help='Filter by service ID')
|
|
2454
|
+
svc_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2455
|
+
svc_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2456
|
+
svc_get.set_defaults(func=cmd_services_get)
|
|
2457
|
+
|
|
2458
|
+
# services create
|
|
2459
|
+
svc_create = services_sub.add_parser('create', help='Create a service')
|
|
2460
|
+
svc_create.add_argument('--name', required=True, help='Service name')
|
|
2461
|
+
svc_create.add_argument('--description', help='Description')
|
|
2462
|
+
svc_create.add_argument('--orgid', required=True, help='Organization ID')
|
|
2463
|
+
svc_create.add_argument('--type', default='custom', help='Service type ID')
|
|
2464
|
+
svc_create.add_argument('--volumes', help='Comma-separated volume IDs')
|
|
2465
|
+
svc_create.add_argument('--instance', help='AWS instance type')
|
|
2466
|
+
svc_create.add_argument('--tags', help='Comma-separated tags')
|
|
2467
|
+
svc_create.set_defaults(func=cmd_services_create)
|
|
2468
|
+
|
|
2469
|
+
# services edit
|
|
2470
|
+
svc_edit = services_sub.add_parser('edit', help='Edit a service')
|
|
2471
|
+
svc_edit.add_argument('--serviceid', required=True, help='Service ID')
|
|
2472
|
+
svc_edit.add_argument('--name', help='New name')
|
|
2473
|
+
svc_edit.add_argument('--description', help='New description')
|
|
2474
|
+
svc_edit.add_argument('--volumes', help='Comma-separated volume IDs')
|
|
2475
|
+
svc_edit.add_argument('--instance', help='AWS instance type')
|
|
2476
|
+
svc_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2477
|
+
svc_edit.set_defaults(func=cmd_services_edit)
|
|
2478
|
+
|
|
2479
|
+
# services delete
|
|
2480
|
+
svc_delete = services_sub.add_parser('delete', help='Delete a service')
|
|
2481
|
+
svc_delete.add_argument('--serviceid', required=True, help='Service ID')
|
|
2482
|
+
svc_delete.set_defaults(func=cmd_services_delete)
|
|
2483
|
+
|
|
2484
|
+
# -------------------------------------------------------------------------
|
|
2485
|
+
# SERVICE-JOBS
|
|
2486
|
+
# -------------------------------------------------------------------------
|
|
2487
|
+
service_jobs = subparsers.add_parser('service-jobs', help='Manage service jobs')
|
|
2488
|
+
service_jobs_sub = service_jobs.add_subparsers(dest='action', help='Action')
|
|
2489
|
+
|
|
2490
|
+
# service-jobs get
|
|
2491
|
+
svc_jobs_get = service_jobs_sub.add_parser('get', help='Get service jobs')
|
|
2492
|
+
svc_jobs_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2493
|
+
svc_jobs_get.add_argument('--jobid', help='Filter by job ID')
|
|
2494
|
+
svc_jobs_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2495
|
+
svc_jobs_get.add_argument('--fields', help='Comma-separated fields to return')
|
|
2496
|
+
svc_jobs_get.set_defaults(func=cmd_services_jobs)
|
|
2497
|
+
|
|
2498
|
+
# service-jobs delete
|
|
2499
|
+
svc_jobs_delete = service_jobs_sub.add_parser('delete', help='Delete a service job')
|
|
2500
|
+
svc_jobs_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2501
|
+
svc_jobs_delete.add_argument('--jobid', required=True, help='Job ID')
|
|
2502
|
+
svc_jobs_delete.set_defaults(func=cmd_services_delete_job)
|
|
2503
|
+
|
|
2504
|
+
# -------------------------------------------------------------------------
|
|
2505
|
+
# API-KEYS
|
|
2506
|
+
# -------------------------------------------------------------------------
|
|
2507
|
+
api_keys = subparsers.add_parser('api-keys', help='Manage API keys')
|
|
2508
|
+
api_keys_sub = api_keys.add_subparsers(dest='action', help='Action')
|
|
2509
|
+
|
|
2510
|
+
# api-keys get
|
|
2511
|
+
ak_get = api_keys_sub.add_parser('get', help='Get API keys')
|
|
2512
|
+
ak_get.set_defaults(func=cmd_api_keys_get)
|
|
2513
|
+
|
|
2514
|
+
# api-keys create
|
|
2515
|
+
ak_create = api_keys_sub.add_parser('create', help='Create an API key')
|
|
2516
|
+
ak_create.add_argument('--name', required=True, help='API key name')
|
|
2517
|
+
ak_create.add_argument('--scope', required=True, choices=['user', 'organization', 'workspace'], help='Scope')
|
|
2518
|
+
ak_create.add_argument('--orgid', help='Organization ID (for org scope)')
|
|
2519
|
+
ak_create.add_argument('--workspaceid', help='Workspace ID (for workspace scope)')
|
|
2520
|
+
ak_create.add_argument('--expires', help='Expiration date (ISO format)')
|
|
2521
|
+
ak_create.set_defaults(func=cmd_api_keys_create)
|
|
2522
|
+
|
|
2523
|
+
# api-keys delete
|
|
2524
|
+
ak_delete = api_keys_sub.add_parser('delete', help='Delete an API key')
|
|
2525
|
+
ak_delete.add_argument('--apikeyid', required=True, help='API key ID')
|
|
2526
|
+
ak_delete.set_defaults(func=cmd_api_keys_delete)
|
|
2527
|
+
|
|
2528
|
+
# -------------------------------------------------------------------------
|
|
2529
|
+
# ANALYTICS
|
|
2530
|
+
# -------------------------------------------------------------------------
|
|
2531
|
+
analytics = subparsers.add_parser('analytics', help='Manage analytics')
|
|
2532
|
+
analytics_sub = analytics.add_subparsers(dest='action', help='Action')
|
|
2533
|
+
|
|
2534
|
+
# analytics get
|
|
2535
|
+
an_get = analytics_sub.add_parser('get', help='Get analytics')
|
|
2536
|
+
an_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2537
|
+
an_get.add_argument('--datasetid', help='Dataset ID')
|
|
2538
|
+
an_get.add_argument('--analyticsid', help='Analytics ID')
|
|
2539
|
+
an_get.set_defaults(func=cmd_analytics_get)
|
|
2540
|
+
|
|
2541
|
+
# analytics create
|
|
2542
|
+
an_create = analytics_sub.add_parser('create', help='Create analytics')
|
|
2543
|
+
an_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2544
|
+
an_create.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2545
|
+
an_create.add_argument('--type', required=True, help='Analytics type')
|
|
2546
|
+
an_create.set_defaults(func=cmd_analytics_create)
|
|
2547
|
+
|
|
2548
|
+
# analytics delete
|
|
2549
|
+
an_delete = analytics_sub.add_parser('delete', help='Delete analytics')
|
|
2550
|
+
an_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2551
|
+
an_delete.add_argument('--analyticsid', required=True, help='Analytics ID')
|
|
2552
|
+
an_delete.set_defaults(func=cmd_analytics_delete)
|
|
2553
|
+
|
|
2554
|
+
# analytics types
|
|
2555
|
+
an_types = analytics_sub.add_parser('types', help='Get analytics types')
|
|
2556
|
+
an_types.set_defaults(func=cmd_analytics_types)
|
|
2557
|
+
|
|
2558
|
+
# analytics download
|
|
2559
|
+
an_download = analytics_sub.add_parser('download', help='Download analytics results')
|
|
2560
|
+
an_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2561
|
+
an_download.add_argument('--analyticsid', required=True, help='Analytics ID')
|
|
2562
|
+
an_download.set_defaults(func=cmd_analytics_download)
|
|
2563
|
+
|
|
2564
|
+
# analytics edit
|
|
2565
|
+
an_edit = analytics_sub.add_parser('edit', help='Edit analytics tags')
|
|
2566
|
+
an_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2567
|
+
an_edit.add_argument('--analyticsid', required=True, help='Analytics ID')
|
|
2568
|
+
an_edit.add_argument('--tags', required=True, help='Comma-separated tags')
|
|
2569
|
+
an_edit.set_defaults(func=cmd_analytics_edit)
|
|
2570
|
+
|
|
2571
|
+
# -------------------------------------------------------------------------
|
|
2572
|
+
# ANNOTATIONS
|
|
2573
|
+
# -------------------------------------------------------------------------
|
|
2574
|
+
annotations = subparsers.add_parser('annotations', help='Manage annotations')
|
|
2575
|
+
annotations_sub = annotations.add_subparsers(dest='action', help='Action')
|
|
2576
|
+
|
|
2577
|
+
# annotations get
|
|
2578
|
+
ann_get = annotations_sub.add_parser('get', help='Get annotations')
|
|
2579
|
+
ann_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2580
|
+
ann_get.add_argument('--datasetid', help='Dataset ID')
|
|
2581
|
+
ann_get.add_argument('--annotationid', help='Annotation ID')
|
|
2582
|
+
ann_get.set_defaults(func=cmd_annotations_get)
|
|
2583
|
+
|
|
2584
|
+
# annotations create
|
|
2585
|
+
ann_create = annotations_sub.add_parser('create', help='Create an annotation')
|
|
2586
|
+
ann_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2587
|
+
ann_create.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2588
|
+
ann_create.add_argument('--format', required=True, help='Annotation format')
|
|
2589
|
+
ann_create.add_argument('--mapid', help='Annotation map ID')
|
|
2590
|
+
ann_create.set_defaults(func=cmd_annotations_create)
|
|
2591
|
+
|
|
2592
|
+
# annotations download
|
|
2593
|
+
ann_download = annotations_sub.add_parser('download', help='Download an annotation')
|
|
2594
|
+
ann_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2595
|
+
ann_download.add_argument('--annotationid', required=True, help='Annotation ID')
|
|
2596
|
+
ann_download.set_defaults(func=cmd_annotations_download)
|
|
2597
|
+
|
|
2598
|
+
# annotations formats
|
|
2599
|
+
ann_formats = annotations_sub.add_parser('formats', help='Get annotation formats')
|
|
2600
|
+
ann_formats.set_defaults(func=cmd_annotations_formats)
|
|
2601
|
+
|
|
2602
|
+
# annotations delete
|
|
2603
|
+
ann_delete = annotations_sub.add_parser('delete', help='Delete an annotation')
|
|
2604
|
+
ann_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2605
|
+
ann_delete.add_argument('--annotationid', required=True, help='Annotation ID')
|
|
2606
|
+
ann_delete.set_defaults(func=cmd_annotations_delete)
|
|
2607
|
+
|
|
2608
|
+
# annotations edit
|
|
2609
|
+
ann_edit = annotations_sub.add_parser('edit', help='Edit annotation tags')
|
|
2610
|
+
ann_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2611
|
+
ann_edit.add_argument('--annotationid', required=True, help='Annotation ID')
|
|
2612
|
+
ann_edit.add_argument('--tags', required=True, help='Comma-separated tags')
|
|
2613
|
+
ann_edit.set_defaults(func=cmd_annotations_edit)
|
|
2614
|
+
|
|
2615
|
+
# -------------------------------------------------------------------------
|
|
2616
|
+
# ANNOTATION MAPS
|
|
2617
|
+
# -------------------------------------------------------------------------
|
|
2618
|
+
annotation_maps = subparsers.add_parser('annotation-maps', help='Manage annotation maps')
|
|
2619
|
+
annotation_maps_sub = annotation_maps.add_subparsers(dest='action', help='Action')
|
|
2620
|
+
|
|
2621
|
+
# annotation-maps get
|
|
2622
|
+
ann_maps_get = annotation_maps_sub.add_parser('get', help='Get annotation maps')
|
|
2623
|
+
ann_maps_get.add_argument('--orgid', required=True, help='Organization ID')
|
|
2624
|
+
ann_maps_get.set_defaults(func=cmd_annotation_maps_get)
|
|
2625
|
+
|
|
2626
|
+
# annotation-maps upload
|
|
2627
|
+
ann_maps_upload = annotation_maps_sub.add_parser('upload', help='Upload an annotation map')
|
|
2628
|
+
ann_maps_upload.add_argument('--orgid', required=True, help='Organization ID')
|
|
2629
|
+
ann_maps_upload.add_argument('--mapfile', required=True, help='Path to map file')
|
|
2630
|
+
ann_maps_upload.add_argument('--name', required=True, help='Map name')
|
|
2631
|
+
ann_maps_upload.add_argument('--description', help='Description')
|
|
2632
|
+
ann_maps_upload.add_argument('--tags', help='Comma-separated tags')
|
|
2633
|
+
ann_maps_upload.set_defaults(func=cmd_annotation_maps_upload)
|
|
2634
|
+
|
|
2635
|
+
# annotation-maps download
|
|
2636
|
+
ann_maps_download = annotation_maps_sub.add_parser('download', help='Download an annotation map')
|
|
2637
|
+
ann_maps_download.add_argument('--mapid', required=True, help='Map ID')
|
|
2638
|
+
ann_maps_download.add_argument('--outputdir', help='Output directory')
|
|
2639
|
+
ann_maps_download.set_defaults(func=cmd_annotation_maps_download)
|
|
2640
|
+
|
|
2641
|
+
# annotation-maps delete
|
|
2642
|
+
ann_maps_delete = annotation_maps_sub.add_parser('delete', help='Delete an annotation map')
|
|
2643
|
+
ann_maps_delete.add_argument('--mapid', required=True, help='Map ID')
|
|
2644
|
+
ann_maps_delete.set_defaults(func=cmd_annotation_maps_delete)
|
|
2645
|
+
|
|
2646
|
+
# annotation-maps edit
|
|
2647
|
+
ann_maps_edit = annotation_maps_sub.add_parser('edit', help='Edit an annotation map')
|
|
2648
|
+
ann_maps_edit.add_argument('--mapid', required=True, help='Map ID')
|
|
2649
|
+
ann_maps_edit.add_argument('--name', help='New name')
|
|
2650
|
+
ann_maps_edit.add_argument('--description', help='New description')
|
|
2651
|
+
ann_maps_edit.add_argument('--tags', help='Comma-separated tags')
|
|
2652
|
+
ann_maps_edit.set_defaults(func=cmd_annotation_maps_edit)
|
|
2653
|
+
|
|
2654
|
+
# -------------------------------------------------------------------------
|
|
2655
|
+
# GAN MODELS
|
|
2656
|
+
# -------------------------------------------------------------------------
|
|
2657
|
+
gan_models = subparsers.add_parser('gan-models', help='Manage GAN models')
|
|
2658
|
+
gan_models_sub = gan_models.add_subparsers(dest='action', help='Action')
|
|
2659
|
+
|
|
2660
|
+
# gan-models get
|
|
2661
|
+
gan_models_get = gan_models_sub.add_parser('get', help='Get GAN models')
|
|
2662
|
+
gan_models_get.add_argument('--orgid', help='Organization ID')
|
|
2663
|
+
gan_models_get.add_argument('--workspaceid', help='Workspace ID')
|
|
2664
|
+
gan_models_get.add_argument('--modelid', help='Model ID')
|
|
2665
|
+
gan_models_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2666
|
+
gan_models_get.add_argument('--fields', help='Comma-separated fields')
|
|
2667
|
+
gan_models_get.set_defaults(func=cmd_gan_models_get)
|
|
2668
|
+
|
|
2669
|
+
# gan-models upload
|
|
2670
|
+
gan_models_upload = gan_models_sub.add_parser('upload', help='Upload a GAN model')
|
|
2671
|
+
gan_models_upload.add_argument('--orgid', required=True, help='Organization ID')
|
|
2672
|
+
gan_models_upload.add_argument('--modelfile', required=True, help='Path to model file')
|
|
2673
|
+
gan_models_upload.add_argument('--name', required=True, help='Model name')
|
|
2674
|
+
gan_models_upload.add_argument('--description', help='Description')
|
|
2675
|
+
gan_models_upload.add_argument('--flags', help='Model flags')
|
|
2676
|
+
gan_models_upload.add_argument('--tags', help='Comma-separated tags')
|
|
2677
|
+
gan_models_upload.set_defaults(func=cmd_gan_models_upload)
|
|
2678
|
+
|
|
2679
|
+
# gan-models download
|
|
2680
|
+
gan_models_download = gan_models_sub.add_parser('download', help='Download a GAN model')
|
|
2681
|
+
gan_models_download.add_argument('--modelid', required=True, help='Model ID')
|
|
2682
|
+
gan_models_download.add_argument('--outputdir', help='Output directory')
|
|
2683
|
+
gan_models_download.set_defaults(func=cmd_gan_models_download)
|
|
2684
|
+
|
|
2685
|
+
# -------------------------------------------------------------------------
|
|
2686
|
+
# GAN DATASETS
|
|
2687
|
+
# -------------------------------------------------------------------------
|
|
2688
|
+
gan_datasets = subparsers.add_parser('gan-datasets', help='Manage GAN datasets')
|
|
2689
|
+
gan_datasets_sub = gan_datasets.add_subparsers(dest='action', help='Action')
|
|
2690
|
+
|
|
2691
|
+
# gan-datasets get
|
|
2692
|
+
gan_ds_get = gan_datasets_sub.add_parser('get', help='Get GAN datasets')
|
|
2693
|
+
gan_ds_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2694
|
+
gan_ds_get.add_argument('--datasetid', help='Dataset ID')
|
|
2695
|
+
gan_ds_get.add_argument('--gandatasetid', help='GAN dataset ID')
|
|
2696
|
+
gan_ds_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2697
|
+
gan_ds_get.add_argument('--fields', help='Comma-separated fields')
|
|
2698
|
+
gan_ds_get.set_defaults(func=cmd_gan_datasets_get)
|
|
2699
|
+
|
|
2700
|
+
# gan-datasets create
|
|
2701
|
+
gan_ds_create = gan_datasets_sub.add_parser('create', help='Create a GAN dataset')
|
|
2702
|
+
gan_ds_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2703
|
+
gan_ds_create.add_argument('--datasetid', required=True, help='Input dataset ID')
|
|
2704
|
+
gan_ds_create.add_argument('--modelid', required=True, help='GAN model ID')
|
|
2705
|
+
gan_ds_create.add_argument('--name', required=True, help='Dataset name')
|
|
2706
|
+
gan_ds_create.add_argument('--description', help='Description')
|
|
2707
|
+
gan_ds_create.add_argument('--tags', help='Comma-separated tags')
|
|
2708
|
+
gan_ds_create.set_defaults(func=cmd_gan_datasets_create)
|
|
2709
|
+
|
|
2710
|
+
# gan-datasets delete
|
|
2711
|
+
gan_ds_delete = gan_datasets_sub.add_parser('delete', help='Delete a GAN dataset')
|
|
2712
|
+
gan_ds_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2713
|
+
gan_ds_delete.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2714
|
+
gan_ds_delete.set_defaults(func=cmd_gan_datasets_delete)
|
|
2715
|
+
|
|
2716
|
+
# -------------------------------------------------------------------------
|
|
2717
|
+
# UMAP
|
|
2718
|
+
# -------------------------------------------------------------------------
|
|
2719
|
+
umap = subparsers.add_parser('umap', help='Manage UMAP visualizations')
|
|
2720
|
+
umap_sub = umap.add_subparsers(dest='action', help='Action')
|
|
2721
|
+
|
|
2722
|
+
# umap get
|
|
2723
|
+
umap_get = umap_sub.add_parser('get', help='Get UMAPs')
|
|
2724
|
+
umap_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2725
|
+
umap_get.add_argument('--umapid', help='UMAP ID')
|
|
2726
|
+
umap_get.add_argument('--datasetid', help='Dataset ID')
|
|
2727
|
+
umap_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2728
|
+
umap_get.add_argument('--fields', help='Comma-separated fields')
|
|
2729
|
+
umap_get.set_defaults(func=cmd_umap_get)
|
|
2730
|
+
|
|
2731
|
+
# umap create
|
|
2732
|
+
umap_create = umap_sub.add_parser('create', help='Create a UMAP')
|
|
2733
|
+
umap_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2734
|
+
umap_create.add_argument('--name', required=True, help='UMAP name')
|
|
2735
|
+
umap_create.add_argument('--datasetids', required=True, help='Comma-separated dataset IDs')
|
|
2736
|
+
umap_create.add_argument('--samples', required=True, help='Comma-separated sample counts')
|
|
2737
|
+
umap_create.add_argument('--description', help='Description')
|
|
2738
|
+
umap_create.add_argument('--seed', type=int, help='Seed')
|
|
2739
|
+
umap_create.add_argument('--tags', help='Comma-separated tags')
|
|
2740
|
+
umap_create.set_defaults(func=cmd_umap_create)
|
|
2741
|
+
|
|
2742
|
+
# umap delete
|
|
2743
|
+
umap_delete = umap_sub.add_parser('delete', help='Delete a UMAP')
|
|
2744
|
+
umap_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2745
|
+
umap_delete.add_argument('--umapid', required=True, help='UMAP ID')
|
|
2746
|
+
umap_delete.set_defaults(func=cmd_umap_delete)
|
|
2747
|
+
|
|
2748
|
+
# -------------------------------------------------------------------------
|
|
2749
|
+
# SERVERS
|
|
2750
|
+
# -------------------------------------------------------------------------
|
|
2751
|
+
servers = subparsers.add_parser('servers', help='Manage development servers')
|
|
2752
|
+
servers_sub = servers.add_subparsers(dest='action', help='Action')
|
|
2753
|
+
|
|
2754
|
+
# servers get
|
|
2755
|
+
srv_get = servers_sub.add_parser('get', help='Get servers')
|
|
2756
|
+
srv_get.add_argument('--orgid', help='Organization ID')
|
|
2757
|
+
srv_get.add_argument('--workspaceid', help='Workspace ID')
|
|
2758
|
+
srv_get.add_argument('--serverid', help='Server ID')
|
|
2759
|
+
srv_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2760
|
+
srv_get.add_argument('--fields', help='Comma-separated fields')
|
|
2761
|
+
srv_get.set_defaults(func=cmd_servers_get)
|
|
2762
|
+
|
|
2763
|
+
# servers create
|
|
2764
|
+
srv_create = servers_sub.add_parser('create', help='Create a server')
|
|
2765
|
+
srv_create.add_argument('--orgid', help='Organization ID')
|
|
2766
|
+
srv_create.add_argument('--workspaceid', help='Workspace ID')
|
|
2767
|
+
srv_create.add_argument('--instance', help='Instance type')
|
|
2768
|
+
srv_create.add_argument('--name', help='Server name')
|
|
2769
|
+
srv_create.set_defaults(func=cmd_servers_create)
|
|
2770
|
+
|
|
2771
|
+
# servers delete
|
|
2772
|
+
srv_delete = servers_sub.add_parser('delete', help='Delete a server')
|
|
2773
|
+
srv_delete.add_argument('--serverid', required=True, help='Server ID')
|
|
2774
|
+
srv_delete.set_defaults(func=cmd_servers_delete)
|
|
2775
|
+
|
|
2776
|
+
# servers start
|
|
2777
|
+
srv_start = servers_sub.add_parser('start', help='Start a server')
|
|
2778
|
+
srv_start.add_argument('--serverid', required=True, help='Server ID')
|
|
2779
|
+
srv_start.set_defaults(func=cmd_servers_start)
|
|
2780
|
+
|
|
2781
|
+
# servers stop
|
|
2782
|
+
srv_stop = servers_sub.add_parser('stop', help='Stop a server')
|
|
2783
|
+
srv_stop.add_argument('--serverid', required=True, help='Server ID')
|
|
2784
|
+
srv_stop.set_defaults(func=cmd_servers_stop)
|
|
2785
|
+
|
|
2786
|
+
# -------------------------------------------------------------------------
|
|
2787
|
+
# ML MODELS
|
|
2788
|
+
# -------------------------------------------------------------------------
|
|
2789
|
+
ml_models = subparsers.add_parser('ml-models', help='Manage ML models')
|
|
2790
|
+
ml_models_sub = ml_models.add_subparsers(dest='action', help='Action')
|
|
2791
|
+
|
|
2792
|
+
# ml-models architectures
|
|
2793
|
+
ml_arch = ml_models_sub.add_parser('architectures', help='Get ML architectures')
|
|
2794
|
+
ml_arch.add_argument('--fields', help='Comma-separated fields')
|
|
2795
|
+
ml_arch.set_defaults(func=cmd_ml_architectures)
|
|
2796
|
+
|
|
2797
|
+
# ml-models get
|
|
2798
|
+
ml_models_get = ml_models_sub.add_parser('get', help='Get ML models')
|
|
2799
|
+
ml_models_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2800
|
+
ml_models_get.add_argument('--datasetid', help='Dataset ID')
|
|
2801
|
+
ml_models_get.add_argument('--modelid', help='Model ID')
|
|
2802
|
+
ml_models_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2803
|
+
ml_models_get.add_argument('--fields', help='Comma-separated fields')
|
|
2804
|
+
ml_models_get.set_defaults(func=cmd_ml_models_get)
|
|
2805
|
+
|
|
2806
|
+
# ml-models create
|
|
2807
|
+
ml_models_create = ml_models_sub.add_parser('create', help='Create ML model training job')
|
|
2808
|
+
ml_models_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2809
|
+
ml_models_create.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2810
|
+
ml_models_create.add_argument('--architectureid', required=True, help='Architecture ID')
|
|
2811
|
+
ml_models_create.add_argument('--name', required=True, help='Model name')
|
|
2812
|
+
ml_models_create.add_argument('--parameters', required=True, help='JSON parameters')
|
|
2813
|
+
ml_models_create.add_argument('--description', help='Description')
|
|
2814
|
+
ml_models_create.add_argument('--tags', help='Comma-separated tags')
|
|
2815
|
+
ml_models_create.set_defaults(func=cmd_ml_models_create)
|
|
2816
|
+
|
|
2817
|
+
# ml-models download
|
|
2818
|
+
ml_models_download = ml_models_sub.add_parser('download', help='Download ML model')
|
|
2819
|
+
ml_models_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2820
|
+
ml_models_download.add_argument('--modelid', required=True, help='Model ID')
|
|
2821
|
+
ml_models_download.add_argument('--checkpoint', help='Checkpoint to download')
|
|
2822
|
+
ml_models_download.add_argument('--outputdir', help='Output directory')
|
|
2823
|
+
ml_models_download.set_defaults(func=cmd_ml_models_download)
|
|
2824
|
+
|
|
2825
|
+
# -------------------------------------------------------------------------
|
|
2826
|
+
# ML INFERENCES
|
|
2827
|
+
# -------------------------------------------------------------------------
|
|
2828
|
+
ml_inferences = subparsers.add_parser('ml-inferences', help='Manage ML inferences')
|
|
2829
|
+
ml_inferences_sub = ml_inferences.add_subparsers(dest='action', help='Action')
|
|
2830
|
+
|
|
2831
|
+
# ml-inferences get
|
|
2832
|
+
ml_inf_get = ml_inferences_sub.add_parser('get', help='Get ML inferences')
|
|
2833
|
+
ml_inf_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2834
|
+
ml_inf_get.add_argument('--inferenceid', help='Inference ID')
|
|
2835
|
+
ml_inf_get.add_argument('--datasetid', help='Dataset ID')
|
|
2836
|
+
ml_inf_get.add_argument('--modelid', help='Model ID')
|
|
2837
|
+
ml_inf_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2838
|
+
ml_inf_get.add_argument('--fields', help='Comma-separated fields')
|
|
2839
|
+
ml_inf_get.set_defaults(func=cmd_ml_inferences_get)
|
|
2840
|
+
|
|
2841
|
+
# ml-inferences create
|
|
2842
|
+
ml_inf_create = ml_inferences_sub.add_parser('create', help='Create ML inference job')
|
|
2843
|
+
ml_inf_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2844
|
+
ml_inf_create.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
2845
|
+
ml_inf_create.add_argument('--modelid', required=True, help='Model ID')
|
|
2846
|
+
ml_inf_create.add_argument('--mapid', help='Map ID')
|
|
2847
|
+
ml_inf_create.add_argument('--tags', help='Comma-separated tags')
|
|
2848
|
+
ml_inf_create.set_defaults(func=cmd_ml_inferences_create)
|
|
2849
|
+
|
|
2850
|
+
# -------------------------------------------------------------------------
|
|
2851
|
+
# INPAINT
|
|
2852
|
+
# -------------------------------------------------------------------------
|
|
2853
|
+
inpaint = subparsers.add_parser('inpaint', help='Manage inpaint jobs')
|
|
2854
|
+
inpaint_sub = inpaint.add_subparsers(dest='action', help='Action')
|
|
2855
|
+
|
|
2856
|
+
# inpaint get
|
|
2857
|
+
inp_get = inpaint_sub.add_parser('get', help='Get inpaint jobs')
|
|
2858
|
+
inp_get.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2859
|
+
inp_get.add_argument('--inpaintid', help='Inpaint ID')
|
|
2860
|
+
inp_get.add_argument('--limit', type=int, help='Maximum results')
|
|
2861
|
+
inp_get.add_argument('--fields', help='Comma-separated fields')
|
|
2862
|
+
inp_get.set_defaults(func=cmd_inpaint_get)
|
|
2863
|
+
|
|
2864
|
+
# inpaint log
|
|
2865
|
+
inp_log = inpaint_sub.add_parser('log', help='Get inpaint job log')
|
|
2866
|
+
inp_log.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2867
|
+
inp_log.add_argument('--inpaintid', required=True, help='Inpaint ID')
|
|
2868
|
+
inp_log.add_argument('--fields', help='Comma-separated fields')
|
|
2869
|
+
inp_log.set_defaults(func=cmd_inpaint_log)
|
|
2870
|
+
|
|
2871
|
+
# inpaint create
|
|
2872
|
+
inp_create = inpaint_sub.add_parser('create', help='Create an inpaint job')
|
|
2873
|
+
inp_create.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2874
|
+
inp_create.add_argument('--location', required=True, help='Input location')
|
|
2875
|
+
inp_create.add_argument('--files', help='Comma-separated files to inpaint')
|
|
2876
|
+
inp_create.add_argument('--destination', help='Output destination')
|
|
2877
|
+
inp_create.add_argument('--dilation', type=int, default=5, help='Dilation (default: 5)')
|
|
2878
|
+
inp_create.add_argument('--inputtype', default='MASK', choices=['MASK', 'GEOJSON', 'COCO', 'KITTI', 'PASCAL', 'YOLO'], help='Input type')
|
|
2879
|
+
inp_create.add_argument('--outputtype', default='PNG', choices=['SATRGB_BACKGROUND', 'PNG', 'JPG'], help='Output type')
|
|
2880
|
+
inp_create.set_defaults(func=cmd_inpaint_create)
|
|
2881
|
+
|
|
2882
|
+
# inpaint delete
|
|
2883
|
+
inp_delete = inpaint_sub.add_parser('delete', help='Delete an inpaint job')
|
|
2884
|
+
inp_delete.add_argument('--volumeid', required=True, help='Volume ID')
|
|
2885
|
+
inp_delete.add_argument('--inpaintid', required=True, help='Inpaint ID')
|
|
2886
|
+
inp_delete.set_defaults(func=cmd_inpaint_delete)
|
|
2887
|
+
|
|
2888
|
+
# -------------------------------------------------------------------------
|
|
2889
|
+
# PREVIEW
|
|
2890
|
+
# -------------------------------------------------------------------------
|
|
2891
|
+
preview = subparsers.add_parser('preview', help='Manage preview jobs')
|
|
2892
|
+
preview_sub = preview.add_subparsers(dest='action', help='Action')
|
|
2893
|
+
|
|
2894
|
+
# preview get
|
|
2895
|
+
prv_get = preview_sub.add_parser('get', help='Get a preview job')
|
|
2896
|
+
prv_get.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2897
|
+
prv_get.add_argument('--previewid', required=True, help='Preview ID')
|
|
2898
|
+
prv_get.add_argument('--fields', help='Comma-separated fields')
|
|
2899
|
+
prv_get.set_defaults(func=cmd_preview_get)
|
|
2900
|
+
|
|
2901
|
+
# preview create
|
|
2902
|
+
prv_create = preview_sub.add_parser('create', help='Create a preview job')
|
|
2903
|
+
prv_create.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2904
|
+
prv_create.add_argument('--graphid', required=True, help='Graph ID')
|
|
2905
|
+
prv_create.set_defaults(func=cmd_preview_create)
|
|
2906
|
+
|
|
2907
|
+
# -------------------------------------------------------------------------
|
|
2908
|
+
# AGENTS
|
|
2909
|
+
# -------------------------------------------------------------------------
|
|
2910
|
+
agents = subparsers.add_parser('agents', help='Agent helper functions')
|
|
2911
|
+
agents_sub = agents.add_subparsers(dest='action', help='Action')
|
|
2912
|
+
|
|
2913
|
+
# agents types
|
|
2914
|
+
ag_types = agents_sub.add_parser('types', help='Get available data types')
|
|
2915
|
+
ag_types.set_defaults(func=cmd_agents_types)
|
|
2916
|
+
|
|
2917
|
+
# agents fields
|
|
2918
|
+
ag_fields = agents_sub.add_parser('fields', help='Get fields for a data type')
|
|
2919
|
+
ag_fields.add_argument('--type', required=True, help='Data type')
|
|
2920
|
+
ag_fields.set_defaults(func=cmd_agents_fields)
|
|
2921
|
+
|
|
2922
|
+
# -------------------------------------------------------------------------
|
|
2923
|
+
# RULES
|
|
2924
|
+
# -------------------------------------------------------------------------
|
|
2925
|
+
rules = subparsers.add_parser('rules', help='Manage platform rules')
|
|
2926
|
+
rules_sub = rules.add_subparsers(dest='action', help='Action')
|
|
2927
|
+
|
|
2928
|
+
# rules get-organization
|
|
2929
|
+
rules_get_org = rules_sub.add_parser('get-organization', help='Get organization rules')
|
|
2930
|
+
rules_get_org.add_argument('--orgid', help='Organization ID')
|
|
2931
|
+
rules_get_org.set_defaults(func=cmd_rules_organization)
|
|
2932
|
+
|
|
2933
|
+
# rules edit-organization
|
|
2934
|
+
rules_edit_org = rules_sub.add_parser('edit-organization', help='Edit organization rules')
|
|
2935
|
+
rules_edit_org.add_argument('--orgid', help='Organization ID')
|
|
2936
|
+
rules_edit_org.add_argument('--rules', required=True, help='Rules string')
|
|
2937
|
+
rules_edit_org.set_defaults(func=cmd_rules_edit_organization)
|
|
2938
|
+
|
|
2939
|
+
# rules get-workspace
|
|
2940
|
+
rules_get_ws = rules_sub.add_parser('get-workspace', help='Get workspace rules')
|
|
2941
|
+
rules_get_ws.add_argument('--workspaceid', help='Workspace ID')
|
|
2942
|
+
rules_get_ws.set_defaults(func=cmd_rules_workspace)
|
|
2943
|
+
|
|
2944
|
+
# rules edit-workspace
|
|
2945
|
+
rules_edit_ws = rules_sub.add_parser('edit-workspace', help='Edit workspace rules')
|
|
2946
|
+
rules_edit_ws.add_argument('--workspaceid', help='Workspace ID')
|
|
2947
|
+
rules_edit_ws.add_argument('--rules', required=True, help='Rules string')
|
|
2948
|
+
rules_edit_ws.set_defaults(func=cmd_rules_edit_workspace)
|
|
2949
|
+
|
|
2950
|
+
# rules get-service
|
|
2951
|
+
rules_get_svc = rules_sub.add_parser('get-service', help='Get service rules')
|
|
2952
|
+
rules_get_svc.add_argument('--serviceid', required=True, help='Service ID')
|
|
2953
|
+
rules_get_svc.set_defaults(func=cmd_rules_service)
|
|
2954
|
+
|
|
2955
|
+
# rules edit-service
|
|
2956
|
+
rules_edit_svc = rules_sub.add_parser('edit-service', help='Edit service rules')
|
|
2957
|
+
rules_edit_svc.add_argument('--serviceid', required=True, help='Service ID')
|
|
2958
|
+
rules_edit_svc.add_argument('--rules', required=True, help='Rules string')
|
|
2959
|
+
rules_edit_svc.set_defaults(func=cmd_rules_edit_service)
|
|
2960
|
+
|
|
2961
|
+
# rules get-user
|
|
2962
|
+
rules_get_user = rules_sub.add_parser('get-user', help='Get user rules')
|
|
2963
|
+
rules_get_user.set_defaults(func=cmd_rules_user)
|
|
2964
|
+
|
|
2965
|
+
# rules edit-user
|
|
2966
|
+
rules_edit_user = rules_sub.add_parser('edit-user', help='Edit user rules')
|
|
2967
|
+
rules_edit_user.add_argument('--rules', required=True, help='Rules string')
|
|
2968
|
+
rules_edit_user.set_defaults(func=cmd_rules_edit_user)
|
|
2969
|
+
|
|
2970
|
+
return parser
|
|
2971
|
+
|
|
2972
|
+
|
|
2973
|
+
def main():
    """Main entry point for the CLI.

    Parses command-line arguments, validates that both a resource
    (e.g. ``servers``, ``umap``) and an action (e.g. ``get``, ``create``)
    were supplied, then dispatches to the handler function registered on
    the matched subparser via ``set_defaults(func=...)``.

    Exits with status 1 on missing resource/action, on an unhandled
    handler exception, or when no handler is registered.
    """
    parser = create_parser()
    args = parser.parse_args()

    # A resource subcommand is mandatory.
    if not args.resource:
        parser.print_help()
        sys.exit(1)

    # An action under the resource is mandatory as well.  Re-parsing with
    # '--help' prints that resource's own help text, but argparse raises
    # SystemExit(0) when it handles --help; trap it so we can exit 1 --
    # a missing action is an error, not a successful help request.
    # (Previously the sys.exit(1) below was unreachable dead code and the
    # process exited 0 here.)
    if not args.action:
        try:
            parser.parse_args([args.resource, '--help'])
        except SystemExit:
            pass
        sys.exit(1)

    # Dispatch to the registered handler.
    if hasattr(args, 'func'):
        try:
            args.func(args)
        except Exception as e:
            # Top-level CLI boundary: report the error in the tool's
            # standard error format and exit non-zero.
            output_error(str(e), "EXECUTION_ERROR")
            sys.exit(1)
    else:
        # Resource/action parsed but no handler was registered.
        parser.print_help()
        sys.exit(1)
|
|
2998
|
+
|
|
2999
|
+
|
|
3000
|
+
# Run the CLI only when executed as a script (not on import).
if __name__ == '__main__':
    main()
|