anatools 5.1.28__py3-none-any.whl → 6.0.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. anatools/__init__.py +1 -1
  2. anatools/anaclient/anaclient.py +16 -15
  3. anatools/anaclient/api/api.py +7 -2
  4. anatools/anaclient/api/datasets.py +21 -3
  5. anatools/anaclient/api/handlers.py +1 -1
  6. anatools/anaclient/channels.py +50 -25
  7. anatools/anaclient/datasets.py +94 -7
  8. anatools/anaclient/helpers.py +11 -10
  9. anatools/anaclient/services.py +46 -20
  10. anatools/anaclient/volumes.py +19 -18
  11. anatools/annotations/annotations.py +39 -18
  12. anatools/annotations/draw.py +34 -18
  13. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anadeploy +4 -0
  14. anatools-6.0.1.data/scripts/renderedai +4166 -0
  15. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/METADATA +1 -1
  16. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/RECORD +27 -26
  17. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/WHEEL +1 -1
  18. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/ana +0 -0
  19. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anamount +0 -0
  20. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anaprofile +0 -0
  21. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anarules +0 -0
  22. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anaserver +0 -0
  23. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anatransfer +0 -0
  24. {anatools-5.1.28.data → anatools-6.0.1.data}/scripts/anautils +0 -0
  25. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/entry_points.txt +0 -0
  26. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/licenses/LICENSE +0 -0
  27. {anatools-5.1.28.dist-info → anatools-6.0.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,4166 @@
1
+ #!python
2
+ # Copyright 2019-2026 DADoES, Inc.
3
+ #
4
+ # Licensed under the Apache License, Version 2.0 (the "License");
5
+ # you may not use this file except in compliance with the License.
6
+ # You may obtain a copy of the License in the root directory in the "LICENSE" file or at:
7
+ #
8
+ # http://www.apache.org/licenses/LICENSE-2.0
9
+ #
10
+ # Unless required by applicable law or agreed to in writing, software
11
+ # distributed under the License is distributed on an "AS IS" BASIS,
12
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
13
+ # See the License for the specific language governing permissions and
14
+ # limitations under the License.
15
+ """
16
+ Rendered.ai CLI - A comprehensive command-line interface for the Rendered.ai Platform.
17
+
18
+ This CLI provides JSON output for all commands, making it suitable for automation
19
+ and integration with AI agents.
20
+
21
+ Environment Variables:
22
+ RENDEREDAI_API_KEY - API key for authentication
23
+ RENDEREDAI_ENVIRONMENT - Environment: prod, test, or dev (default: prod)
24
+ RENDEREDAI_ENDPOINT - Custom API endpoint URL
25
+
26
+ Usage:
27
+ renderedai <resource> <action> [options]
28
+
29
+ Examples:
30
+ renderedai workspaces get --orgid abc123
31
+ renderedai datasets get --workspaceid xyz789 --limit 10
32
+ renderedai volumes create --name "My Volume" --orgid abc123
33
+ renderedai graphs get --workspaceid xyz789 --graphid graph123
34
+ """
35
+
36
+ import argparse
37
+ import json
38
+ import os
39
+ import sys
40
+ from typing import Any, Dict, List, Optional
41
+
42
+ import yaml
43
+
44
+
45
def get_client():
    """Build and return an authenticated anatools client.

    Reads RENDEREDAI_API_KEY, RENDEREDAI_ENVIRONMENT (default 'prod'), and
    RENDEREDAI_ENDPOINT from the environment. Exits the process with a JSON
    error when no credentials are available or client construction fails.
    """
    import anatools

    env = os.environ
    api_key = env.get('RENDEREDAI_API_KEY')
    endpoint = env.get('RENDEREDAI_ENDPOINT')

    # Either an API key or a custom endpoint must be configured.
    if not api_key and not endpoint:
        output_error("Authentication required. Set RENDEREDAI_API_KEY environment variable.", "AUTH_REQUIRED")
        sys.exit(1)

    try:
        return anatools.client(
            APIKey=api_key,
            environment=env.get('RENDEREDAI_ENVIRONMENT', 'prod'),
            endpoint=endpoint,
            interactive=False,
            verbose=None,
        )
    except Exception as e:
        output_error(f"Authentication failed: {str(e)}")
        sys.exit(1)
69
+
70
+
71
def output_json(data: Any, pretty: bool = True):
    """Print *data* to stdout as JSON (2-space indented when *pretty*)."""
    indent = 2 if pretty else None
    print(json.dumps(data, indent=indent, default=str))
77
+
78
+
79
def output_error(message: str, code: str = "ERROR"):
    """Emit a structured JSON error object with *code* and *message*."""
    payload = {"error": code, "message": message}
    output_json(payload)
82
+
83
+
84
def require_arg(args, name: str, display_name: str) -> str:
    """Return the value of args.<name>, exiting with a JSON error when absent.

    Falsy values (missing attribute, None, empty string) count as "not
    provided" and terminate the process.
    """
    value = getattr(args, name, None)
    if value:
        return value
    output_error(f"{display_name} is required. Use --{name}", f"MISSING_{name.upper()}")
    sys.exit(1)
91
+
92
+
93
def parse_json_arg(value: str) -> Any:
    """Decode a JSON string CLI argument, exiting with a JSON error on failure."""
    try:
        parsed = json.loads(value)
    except json.JSONDecodeError as e:
        output_error(f"Invalid JSON: {str(e)}")
        sys.exit(1)
    return parsed
100
+
101
+
102
def parse_list_arg(value: str) -> List[str]:
    """Parse a comma-separated CLI argument into a list of strings.

    Each item is stripped of surrounding whitespace. Empty segments — e.g.
    produced by a trailing comma ("a,b,") or doubled commas ("a,,b") — are
    dropped instead of yielding empty strings, which would otherwise be
    forwarded to the API as bogus ids/filenames. Returns [] for a falsy value.
    """
    if not value:
        return []
    return [item.strip() for item in value.split(',') if item.strip()]
107
+
108
+
109
def load_graph_file(filepath: str) -> Optional[Dict[str, Any]]:
    """Read a graph definition from a .json, .yaml, or .yml file.

    Args:
        filepath: Path to the graph file.

    Returns:
        The parsed graph dict, or None after emitting a JSON error when the
        file is missing, unparseable, or unreadable.
    """
    if not os.path.exists(filepath):
        output_error(f"File not found: {filepath}", "FILE_NOT_FOUND")
        return None

    try:
        with open(filepath, 'r') as fh:
            if filepath.endswith('.json'):
                return json.load(fh)
            # Any non-.json extension is treated as YAML (.yaml / .yml).
            return yaml.safe_load(fh)
    except (json.JSONDecodeError, yaml.YAMLError) as e:
        output_error(f"Failed to parse file: {str(e)}", "PARSE_ERROR")
    except Exception as e:
        output_error(f"Failed to read file: {str(e)}", "READ_ERROR")
    return None
134
+
135
+
136
def save_graph_file(filepath: str, data: Dict[str, Any]) -> bool:
    """Write *data* to *filepath* as JSON or YAML, keyed off the extension.

    Args:
        filepath: Destination path (.json writes JSON; anything else YAML).
        data: Graph data to serialize.

    Returns:
        True on success; False after emitting a JSON error on any failure.
    """
    try:
        with open(filepath, 'w') as fh:
            if filepath.endswith('.json'):
                json.dump(data, fh, indent=2)
            else:
                # .yaml / .yml (and unknown extensions) are written as YAML.
                yaml.dump(data, fh, default_flow_style=False, sort_keys=False, allow_unicode=True)
    except Exception as e:
        output_error(f"Failed to save file: {str(e)}", "WRITE_ERROR")
        return False
    return True
156
+
157
+
158
+ # =============================================================================
159
+ # WORKSPACES
160
+ # =============================================================================
161
+
162
def cmd_workspaces_get(args):
    """Fetch workspaces and print them as JSON."""
    api = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    response = api.get_workspaces(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        limit=args.limit,
        fields=fields,
    )
    output_json(response)
173
+
174
+
175
def cmd_workspaces_create(args):
    """Create a new workspace and print its id as JSON."""
    api = get_client()
    organization = require_arg(args, 'orgid', 'Organization ID')
    workspace_id = api.create_workspace(
        name=args.name,
        description=args.description or '',
        organizationId=organization,
        channelIds=parse_list_arg(args.channelids) if args.channelids else [],
        volumeIds=parse_list_arg(args.volumeids) if args.volumeids else [],
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"workspaceId": workspace_id})
189
+
190
+
191
def cmd_workspaces_edit(args):
    """Update a workspace's metadata and print success as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ok = api.edit_workspace(
        workspaceId=ws,
        name=args.name,
        description=args.description,
        channelIds=parse_list_arg(args.channelids) if args.channelids else None,
        volumeIds=parse_list_arg(args.volumeids) if args.volumeids else None,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": ok})
205
+
206
+
207
def cmd_workspaces_delete(args):
    """Delete a workspace and print success as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    # Force non-interactive mode so the delete call cannot prompt.
    api.interactive = False
    ok = api.ana_api.deleteWorkspace(workspaceId=ws)
    output_json({"success": ok})
215
+
216
+
217
+ # =============================================================================
218
+ # ORGANIZATIONS
219
+ # =============================================================================
220
+
221
def cmd_organizations_get(args):
    """Fetch organizations and print them as JSON."""
    api = get_client()
    kwargs = {
        'organizationId': args.orgid,
        'limit': args.limit,
        'fields': parse_list_arg(args.fields) if args.fields else None,
    }
    output_json(api.get_organizations(**kwargs))
231
+
232
+
233
+ # =============================================================================
234
+ # MEMBERS
235
+ # =============================================================================
236
+
237
def cmd_members_get(args):
    """Fetch members of an organization and print them as JSON."""
    api = get_client()
    organization = require_arg(args, 'orgid', 'Organization ID')
    members = api.get_organization_members(
        organizationId=organization,
        limit=args.limit,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(members)
248
+
249
+
250
+ # =============================================================================
251
+ # DATASETS
252
+ # =============================================================================
253
+
254
def cmd_datasets_get(args):
    """List datasets in a workspace and print them as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    kwargs = {
        'workspaceId': ws,
        'datasetId': args.datasetid,
        'limit': args.limit,
        'fields': parse_list_arg(args.fields) if args.fields else None,
    }
    output_json(api.get_datasets(**kwargs))
266
+
267
+
268
def cmd_datasets_create(args):
    """Start a dataset generation job and print the new dataset id as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = api.create_dataset(
        workspaceId=ws,
        name=args.name,
        graphId=args.graphid,
        description=args.description or '',
        runs=args.runs,
        seed=args.seed,
        priority=args.priority,
        tags=parse_list_arg(args.tags) if args.tags else [],
    )
    output_json({"datasetId": dataset_id})
284
+
285
+
286
def cmd_datasets_edit(args):
    """Update a dataset's metadata/state and print success as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    ok = api.edit_dataset(
        workspaceId=ws,
        datasetId=ds,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None,
        pause=args.pause,
        priority=args.priority,
    )
    output_json({"success": ok})
302
+
303
+
304
def cmd_datasets_delete(args):
    """Delete a dataset and print success as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    ok = api.delete_dataset(workspaceId=ws, datasetId=ds)
    output_json({"success": ok})
315
+
316
+
317
def cmd_datasets_cancel(args):
    """Cancel an in-progress dataset job and print success as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    ok = api.cancel_dataset(workspaceId=ws, datasetId=ds)
    output_json({"success": ok})
328
+
329
+
330
def cmd_datasets_download(args):
    """Download a whole dataset, or one file from it when --filepath is set."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')

    if args.filepath:
        # Single-file download.
        path = api.download_dataset_file(
            workspaceId=ws,
            datasetId=ds,
            filepath=args.filepath,
            localDir=args.outputdir,
        )
    else:
        # Whole-dataset download.
        path = api.download_dataset(
            workspaceId=ws,
            datasetId=ds,
            localDir=args.outputdir,
        )
    output_json({"downloadPath": path})
351
+
352
+
353
def cmd_datasets_upload(args):
    """Upload a local dataset archive and print the new dataset id as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = api.upload_dataset(
        workspaceId=ws,
        filename=args.file,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"datasetId": dataset_id})
365
+
366
+
367
def cmd_datasets_runs(args):
    """List the runs of a dataset and print them as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    runs = api.get_dataset_runs(
        workspaceId=ws,
        datasetId=ds,
        state=args.state,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(runs)
380
+
381
+
382
def cmd_datasets_log(args):
    """Fetch the log of one dataset run and print it as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    run = require_arg(args, 'runid', 'Run ID')
    log = api.get_dataset_log(
        workspaceId=ws,
        datasetId=ds,
        runId=run,
        saveLogFile=False,  # stream to stdout rather than writing a file
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(log)
397
+
398
+
399
def cmd_datasets_files(args):
    """List files contained in a dataset and print them as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    ds = require_arg(args, 'datasetid', 'Dataset ID')
    files = api.get_dataset_files(
        workspaceId=ws,
        datasetId=ds,
        path=args.path,
        limit=args.limit,
    )
    output_json(files)
412
+
413
+
414
def cmd_datasets_jobs(args):
    """List dataset jobs (optionally filtered) and print them as JSON."""
    api = get_client()
    kwargs = {
        'workspaceId': args.workspaceid,
        'organizationId': args.orgid,
        'datasetId': args.datasetid,
        'limit': args.limit,
        'fields': parse_list_arg(args.fields) if args.fields else None,
    }
    output_json(api.get_dataset_jobs(**kwargs))
426
+
427
+
428
def cmd_datasets_create_mixed(args):
    """Create a mixed dataset from several source datasets; print its id."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    # --parameters is a JSON document describing the mix of source datasets.
    mix_parameters = parse_json_arg(args.parameters)
    dataset_id = api.create_mixed_dataset(
        workspaceId=ws,
        name=args.name,
        parameters=mix_parameters,
        description=args.description or '',
        seed=args.seed,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"datasetId": dataset_id})
444
+
445
+
446
+ # =============================================================================
447
+ # VOLUMES
448
+ # =============================================================================
449
+
450
def cmd_volumes_get(args):
    """Fetch volumes (optionally filtered) and print them as JSON."""
    api = get_client()
    kwargs = {
        'volumeId': args.volumeid,
        'workspaceId': args.workspaceid,
        'organizationId': args.orgid,
        'limit': args.limit,
        'fields': parse_list_arg(args.fields) if args.fields else None,
    }
    output_json(api.get_volumes(**kwargs))
462
+
463
+
464
def cmd_volumes_create(args):
    """Create a volume in an organization and print its id as JSON."""
    api = get_client()
    organization = require_arg(args, 'orgid', 'Organization ID')
    volume_id = api.create_volume(
        name=args.name,
        description=args.description,
        organizationId=organization,
        permission=args.permission,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"volumeId": volume_id})
477
+
478
+
479
def cmd_volumes_edit(args):
    """Update a volume's metadata and print success as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    ok = api.edit_volume(
        volumeId=vol,
        name=args.name,
        description=args.description,
        permission=args.permission,
        tags=parse_list_arg(args.tags) if args.tags else None,
    )
    output_json({"success": ok})
492
+
493
+
494
def cmd_volumes_delete(args):
    """Delete a volume and print success as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    output_json({"success": api.delete_volume(volumeId=vol)})
501
+
502
+
503
+ # =============================================================================
504
+ # VOLUME-DATA
505
+ # =============================================================================
506
+
507
def cmd_volume_data_get(args):
    """List files stored in a volume and print them as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    listing = api.get_volume_data(
        volumeId=vol,
        dir=args.dir,
        files=parse_list_arg(args.files) if args.files else None,
        recursive=args.recursive,
        limit=args.limit,
    )
    output_json(listing)
520
+
521
+
522
def cmd_volume_data_upload(args):
    """Upload local files into a volume; prints {"success": true} when done."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    # upload_volume_data returns nothing useful; success is signalled by
    # the absence of an exception.
    api.upload_volume_data(
        volumeId=vol,
        localDir=args.localdir,
        files=parse_list_arg(args.files) if args.files else None,
        destinationDir=args.destdir,
        sync=args.sync,
    )
    output_json({"success": True})
535
+
536
+
537
def cmd_volume_data_download(args):
    """Download files from a volume; prints {"success": true} when done."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    # download_volume_data returns nothing useful; success is signalled by
    # the absence of an exception.
    api.download_volume_data(
        volumeId=vol,
        localDir=args.outputdir,
        files=parse_list_arg(args.files) if args.files else [],
        recursive=args.recursive,
        sync=args.sync,
    )
    output_json({"success": True})
550
+
551
+
552
def cmd_volume_data_delete(args):
    """Delete files from a volume and print success as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    ok = api.delete_volume_data(
        volumeId=vol,
        files=parse_list_arg(args.files) if args.files else None,
    )
    output_json({"success": ok})
562
+
563
+
564
def cmd_volume_data_search(args):
    """Search a volume's contents and print the matches as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    matches = api.search_volume(
        volumeId=vol,
        directory=args.dir,
        recursive=args.recursive,
        keywords=parse_list_arg(args.keywords) if args.keywords else None,
        fileformats=parse_list_arg(args.formats) if args.formats else None,
        filetypes=parse_list_arg(args.types) if args.types else None,
        limit=args.limit,
    )
    output_json(matches)
579
+
580
+
581
def cmd_volumes_mount(args):
    """Mount a volume to the local filesystem via an S3 FUSE tool.

    Flow: detect an available mount executable (goofys / s3fs / mount-s3),
    fetch volume metadata and temporary S3 credentials from the API, write
    the credentials into an AWS named profile, launch the mount under
    ~/.renderedai/volumes/<id>, symlink it into the target path, and record
    the mount in ~/.renderedai/.mounts.json.
    """
    import subprocess
    import time

    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    path = args.path or os.getcwd()
    mountexec = args.mountexec or 'goofys'
    home = os.path.expanduser('~')

    # Verify mount executable is available.
    # NOTE(review): if the requested executable is listed later than another
    # available one, the loop locks onto the first available tool and the
    # user's choice is never reached — confirm this preference order is
    # intentional.
    exec_available = False
    for exec_name in ['goofys', 's3fs', 'mount-s3']:
        try:
            subprocess.run([exec_name, '--version'], capture_output=True, check=True)
            if mountexec == exec_name:
                exec_available = True
                break
            elif not exec_available:
                mountexec = exec_name
                exec_available = True
        except:  # missing binary or non-zero exit — treat as unavailable
            pass

    if not exec_available:
        output_error("No mount executable found. Install goofys, s3fs, or mount-s3.")
        return

    # Get volume info (also serves as an access check).
    volume_data = client.get_volumes(volumeId=volume_id)
    if not volume_data:
        output_error("Volume not found or permission denied", "VOLUME_NOT_FOUND")
        return

    # Mounting requires at least read permission; 'view' is not enough.
    if volume_data[0].get('permission') not in ['read', 'write']:
        output_error("Insufficient permissions (view-only)", "PERMISSION_DENIED")
        return

    # Get temporary mount credentials from the API.
    mount_data = client.mount_volumes(volumes=[volume_id])
    if not mount_data:
        output_error("Failed to get mount credentials", "MOUNT_FAILED")
        return

    # Write AWS credentials under a dedicated named profile.
    aws_dir = os.path.join(home, '.aws')
    os.makedirs(aws_dir, exist_ok=True)
    profile_name = f'renderedai-volumes-{volume_id}'

    # Read existing credentials so other profiles are preserved on rewrite.
    creds_file = os.path.join(aws_dir, 'credentials')
    profiles = {}
    if os.path.exists(creds_file):
        with open(creds_file, 'r') as f:
            current_profile = None
            for line in f:
                line = line.rstrip()
                if line.startswith('[') and line.endswith(']'):
                    current_profile = line[1:-1]
                    profiles[current_profile] = []
                elif current_profile:
                    profiles[current_profile].append(line)

    # Add (or replace) the profile for this volume.
    profiles[profile_name] = [
        f"aws_access_key_id={mount_data['credentials']['accesskeyid']}",
        f"aws_secret_access_key={mount_data['credentials']['accesskey']}",
        f"aws_session_token={mount_data['credentials']['sessiontoken']}"
    ]

    # Rewrite the whole credentials file.
    with open(creds_file, 'w') as f:
        for profile, lines in profiles.items():
            f.write(f'[{profile}]\n')
            for line in lines:
                if line:
                    f.write(f'{line}\n')

    # Create mount point under the user's home.
    mountpoint = os.path.join(home, '.renderedai', 'volumes', volume_id)
    os.makedirs(mountpoint, exist_ok=True)

    # Build mount command.
    # NOTE(review): bucket_key[:-1] / bucket_key[1:] imply a key of the form
    # "<bucket><sep>" with a leading character stripped for the prefix —
    # confirm the exact key format returned by mount_volumes.
    bucket_key = mount_data['keys'][0] if mount_data.get('keys') else None
    if not bucket_key:
        output_error("No bucket key returned", "MOUNT_FAILED")
        return

    # First entry of 'rw' decides read-only ('r') vs read-write.
    rw_flag = '-o ro' if mount_data.get('rw', ['r'])[0] == 'r' else ''

    # NOTE(review): command is interpolated and run with shell=True below;
    # the pieces come from the API/local config, but quoting paths would be
    # safer if any can contain spaces.
    if mountexec == 'goofys':
        command = f'goofys {rw_flag} --profile {profile_name} {bucket_key[:-1]} {mountpoint}'
    elif mountexec == 's3fs':
        command = f's3fs {bucket_key[:-1]} {mountpoint} -o profile={profile_name} -o endpoint=us-west-2 -o url="https://s3-us-west-2.amazonaws.com" {rw_flag}'
    else:  # mount-s3
        readonly = '--read-only' if rw_flag else ''
        command = f'mount-s3 {readonly} --profile {profile_name} --prefix {bucket_key[1:]+"/"} {bucket_key[:-1]} {mountpoint}'

    # Execute mount in the background.
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    time.sleep(2)  # Wait for mount to complete

    # Create symlink in target path; failures are non-fatal since the real
    # mountpoint still works.
    symlink_path = os.path.join(path, 'volumes', volume_id)
    os.makedirs(os.path.join(path, 'volumes'), exist_ok=True)
    if os.path.exists(symlink_path):
        try:
            os.unlink(symlink_path)
        except:
            pass
    try:
        os.symlink(mountpoint, symlink_path)
    except:
        pass

    # Record the mount so cmd_volumes_unmount can find it later.
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')
    mounts = {"volumes": {}, "workspaces": {}}
    if os.path.exists(mountfile):
        with open(mountfile, 'r') as f:
            mounts = json.load(f)

    mounts['volumes'][volume_id] = {
        'status': 'mounted',
        'exec': mountexec,
        'name': volume_data[0].get('name', volume_id),
        'mountpath': mountpoint,
        'symlink': symlink_path,
        'profile': profile_name,
        'pid': proc.pid
    }

    with open(mountfile, 'w') as f:
        json.dump(mounts, indent=4, sort_keys=True, fp=f)

    output_json({
        "volumeId": volume_id,
        "name": volume_data[0].get('name'),
        "mountpath": mountpoint,
        "symlink": symlink_path,
        "readonly": mount_data.get('rw', ['r'])[0] == 'r'
    })
724
+
725
+
726
def cmd_volumes_unmount(args):
    """Unmount a previously mounted volume from the local filesystem.

    Looks the volume up in ~/.renderedai/.mounts.json (written by
    cmd_volumes_mount), kills users of the mount, removes the symlink,
    unmounts, cleans up the mount directory, and rewrites the mounts file.
    """
    import subprocess

    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    home = os.path.expanduser('~')
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')

    if not os.path.exists(mountfile):
        output_error("Volume not mounted", "NOT_MOUNTED")
        return

    with open(mountfile, 'r') as f:
        mounts = json.load(f)

    mount_info = mounts.get('volumes', {}).get(volume_id)
    if not mount_info or mount_info.get('status') != 'mounted':
        output_error("Volume not mounted", "NOT_MOUNTED")
        return

    try:
        # Kill processes using the mount (check=False: best-effort).
        subprocess.run(["fuser", "-km", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Remove the symlink created at mount time.
        if os.path.exists(mount_info.get('symlink', '')):
            os.unlink(mount_info['symlink'])

        # Unmount — both FUSE (fusermount) and generic (umount) variants are
        # tried lazily/forcefully; whichever applies will succeed.
        subprocess.run(["fusermount", "-uz", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        subprocess.run(["umount", "-lf", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Remove the mount directory only if it is empty (or holds just
        # lost+found) — guards against deleting real data if the unmount
        # silently failed.
        if os.path.isdir(mount_info['mountpath']):
            contents = os.listdir(mount_info['mountpath'])
            if not contents or (len(contents) == 1 and contents[0] == 'lost+found'):
                subprocess.run(["rm", "-rf", mount_info['mountpath']], check=False,
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        del mounts['volumes'][volume_id]

        # Save updated mounts registry.
        with open(mountfile, 'w') as f:
            json.dump(mounts, indent=4, sort_keys=True, fp=f)

        output_json({"volumeId": volume_id, "name": mount_info.get('name'), "success": True})

    except Exception as e:
        output_error(str(e), "UNMOUNT_FAILED")
778
+
779
+
780
def cmd_workspaces_mount(args):
    """Mount a workspace to the local filesystem via an S3 FUSE tool.

    Mirrors cmd_volumes_mount but for workspaces: detect a mount executable,
    fetch workspace metadata and temporary S3 credentials, write them into an
    AWS named profile, mount under ~/.renderedai/workspaces/<id>, symlink
    into the target path, and record the mount in ~/.renderedai/.mounts.json.
    """
    import subprocess
    import time

    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    path = args.path or os.getcwd()
    mountexec = args.mountexec or 'goofys'
    home = os.path.expanduser('~')

    # Verify mount executable is available.
    # NOTE(review): same preference quirk as cmd_volumes_mount — the first
    # available tool in list order can override the user's --mountexec choice.
    exec_available = False
    for exec_name in ['goofys', 's3fs', 'mount-s3']:
        try:
            subprocess.run([exec_name, '--version'], capture_output=True, check=True)
            if mountexec == exec_name:
                exec_available = True
                break
            elif not exec_available:
                mountexec = exec_name
                exec_available = True
        except:  # missing binary or non-zero exit — treat as unavailable
            pass

    if not exec_available:
        output_error("No mount executable found. Install goofys, s3fs, or mount-s3.")
        return

    # Get workspace info (also serves as an access check).
    workspace_data = client.get_workspaces(workspaceId=workspace_id)
    if not workspace_data:
        output_error("Workspace not found or permission denied", "WORKSPACE_NOT_FOUND")
        return

    # Get temporary mount credentials from the API.
    mount_data = client.mount_workspaces(workspaces=[workspace_id])
    if not mount_data:
        output_error("Failed to get mount credentials", "MOUNT_FAILED")
        return

    # Write AWS credentials under a dedicated named profile.
    aws_dir = os.path.join(home, '.aws')
    os.makedirs(aws_dir, exist_ok=True)
    profile_name = f'renderedai-workspaces-{workspace_id}'

    # Read existing credentials so other profiles survive the rewrite.
    creds_file = os.path.join(aws_dir, 'credentials')
    profiles = {}
    if os.path.exists(creds_file):
        with open(creds_file, 'r') as f:
            current_profile = None
            for line in f:
                line = line.rstrip()
                if line.startswith('[') and line.endswith(']'):
                    current_profile = line[1:-1]
                    profiles[current_profile] = []
                elif current_profile:
                    profiles[current_profile].append(line)

    # Add (or replace) the profile for this workspace.
    profiles[profile_name] = [
        f"aws_access_key_id={mount_data['credentials']['accesskeyid']}",
        f"aws_secret_access_key={mount_data['credentials']['accesskey']}",
        f"aws_session_token={mount_data['credentials']['sessiontoken']}"
    ]

    # Rewrite the whole credentials file.
    with open(creds_file, 'w') as f:
        for profile, lines in profiles.items():
            f.write(f'[{profile}]\n')
            for line in lines:
                if line:
                    f.write(f'{line}\n')

    # Create mount point under the user's home.
    mountpoint = os.path.join(home, '.renderedai', 'workspaces', workspace_id)
    os.makedirs(mountpoint, exist_ok=True)

    # Build mount command.
    # NOTE(review): bucket_key slicing assumes a specific key format from
    # mount_workspaces — confirm before changing.
    bucket_key = mount_data['keys'][0] if mount_data.get('keys') else None
    if not bucket_key:
        output_error("No bucket key returned", "MOUNT_FAILED")
        return

    # First entry of 'rw' decides read-only ('r') vs read-write.
    rw_flag = '-o ro' if mount_data.get('rw', ['r'])[0] == 'r' else ''

    # NOTE(review): interpolated command executed with shell=True below.
    if mountexec == 'goofys':
        command = f'goofys {rw_flag} --profile {profile_name} {bucket_key[:-1]} {mountpoint}'
    elif mountexec == 's3fs':
        command = f's3fs {bucket_key[:-1]} {mountpoint} -o profile={profile_name} -o endpoint=us-west-2 -o url="https://s3-us-west-2.amazonaws.com" {rw_flag}'
    else:  # mount-s3
        readonly = '--read-only' if rw_flag else ''
        command = f'mount-s3 {readonly} --profile {profile_name} --prefix {bucket_key[1:]+"/"} {bucket_key[:-1]} {mountpoint}'

    # Execute mount in the background.
    proc = subprocess.Popen(command, shell=True, stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
    time.sleep(2)  # Wait for mount to complete

    # Create symlink in target path; failures are non-fatal.
    symlink_path = os.path.join(path, 'workspaces', workspace_id)
    os.makedirs(os.path.join(path, 'workspaces'), exist_ok=True)
    if os.path.exists(symlink_path):
        try:
            os.unlink(symlink_path)
        except:
            pass
    try:
        os.symlink(mountpoint, symlink_path)
    except:
        pass

    # Record the mount so the unmount command can find it later.
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')
    mounts = {"volumes": {}, "workspaces": {}}
    if os.path.exists(mountfile):
        with open(mountfile, 'r') as f:
            mounts = json.load(f)

    mounts['workspaces'][workspace_id] = {
        'status': 'mounted',
        'exec': mountexec,
        'name': workspace_data[0].get('name', workspace_id),
        'mountpath': mountpoint,
        'symlink': symlink_path,
        'profile': profile_name,
        'pid': proc.pid
    }

    with open(mountfile, 'w') as f:
        json.dump(mounts, indent=4, sort_keys=True, fp=f)

    output_json({
        "workspaceId": workspace_id,
        "name": workspace_data[0].get('name'),
        "mountpath": mountpoint,
        "symlink": symlink_path,
        "readonly": mount_data.get('rw', ['r'])[0] == 'r'
    })
919
+
920
+
921
def cmd_workspaces_unmount(args):
    """Unmount a workspace from the local filesystem.

    Reads the mount registry at ~/.renderedai/.mounts.json, kills processes
    holding the mount, removes the symlink, unmounts the FUSE filesystem,
    cleans up the (now empty) mountpoint directory, and persists the updated
    registry. Errors are reported via output_error with code UNMOUNT_FAILED.
    """
    import subprocess

    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    home = os.path.expanduser('~')
    mountfile = os.path.join(home, '.renderedai', '.mounts.json')

    if not os.path.exists(mountfile):
        output_error("Workspace not mounted", "NOT_MOUNTED")
        return

    with open(mountfile, 'r') as f:
        mounts = json.load(f)

    mount_info = mounts.get('workspaces', {}).get(workspace_id)
    if not mount_info or mount_info.get('status') != 'mounted':
        output_error("Workspace not mounted", "NOT_MOUNTED")
        return

    try:
        # Kill processes using the mount so the unmount is not blocked.
        subprocess.run(["fuser", "-km", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Remove the symlink. Use lexists, not exists: if the FUSE mount has
        # already died, the symlink target is dangling and os.path.exists()
        # returns False, which would leave the stale link behind.
        symlink = mount_info.get('symlink', '')
        if symlink and os.path.lexists(symlink):
            os.unlink(symlink)

        # Unmount — try both the FUSE and generic unmount commands;
        # failures are intentionally ignored (check=False, output discarded).
        subprocess.run(["fusermount", "-uz", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)
        subprocess.run(["umount", "-lf", mount_info['mountpath']], check=False,
                       stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        # Clean up the mount directory only if it is effectively empty
        # (nothing, or just a leftover lost+found).
        if os.path.isdir(mount_info['mountpath']):
            contents = os.listdir(mount_info['mountpath'])
            if not contents or (len(contents) == 1 and contents[0] == 'lost+found'):
                subprocess.run(["rm", "-rf", mount_info['mountpath']], check=False,
                               stdout=subprocess.DEVNULL, stderr=subprocess.DEVNULL)

        del mounts['workspaces'][workspace_id]

        # Persist the updated mount registry.
        with open(mountfile, 'w') as f:
            json.dump(mounts, indent=4, sort_keys=True, fp=f)

        output_json({"workspaceId": workspace_id, "name": mount_info.get('name'), "success": True})

    except Exception as e:
        output_error(str(e), "UNMOUNT_FAILED")
973
+
974
+
975
+ # =============================================================================
976
+ # GRAPHS
977
+ # =============================================================================
978
+
979
def cmd_graphs_get(args):
    """Fetch graphs for a workspace and print them as JSON."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')

    # Optional comma-separated field selection.
    field_list = parse_list_arg(args.fields) if args.fields else None

    graphs = client.get_graphs(
        workspaceId=workspace_id,
        graphId=args.graphid,
        staged=args.staged,
        limit=args.limit,
        fields=field_list
    )
    output_json(graphs)
992
+
993
+
994
def cmd_graphs_create(args):
    """Upload a new editable (non-staged) graph and print its ID."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')

    graph_id = client.upload_graph(
        workspaceId=workspace_id,
        graph=args.file,
        channelId=args.channelid,
        name=args.name,
        description=args.description,
        staged=False  # editable copy; staging is handled by cmd_graphs_stage
    )
    output_json({"graphId": graph_id})
1008
+
1009
+
1010
def cmd_graphs_edit(args):
    """Update a graph's name, description and/or tags."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')

    # Tags arrive as a comma-separated string; None means "leave unchanged".
    tag_list = parse_list_arg(args.tags) if args.tags else None

    success = client.edit_graph(
        workspaceId=workspace_id,
        graphId=graph_id,
        name=args.name,
        description=args.description,
        tags=tag_list
    )
    output_json({"success": success})
1024
+
1025
+
1026
def cmd_graphs_delete(args):
    """Delete a graph from a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')

    success = client.delete_graph(workspaceId=workspace_id, graphId=graph_id)
    output_json({"success": success})
1037
+
1038
+
1039
def cmd_graphs_download(args):
    """Download a graph to a local file (defaults to <graphId>.yaml)."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')

    destination = args.outputfile or f"{graph_id}.yaml"
    saved_path = client.download_graph(
        workspaceId=workspace_id,
        graphId=graph_id,
        filepath=destination
    )
    output_json({"filepath": saved_path})
1052
+
1053
+
1054
def cmd_graphs_stage(args):
    """Stage an existing graph (creates a read-only copy).

    Downloads the source graph to a temporary file and re-uploads it with
    staged=True. Name defaults to "<source name>-staged" and description
    defaults to the source graph's description.
    """
    import tempfile
    import os

    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')

    # Get the graph metadata
    graphs = client.get_graphs(workspaceId=workspace_id, graphId=graph_id)
    if not graphs:
        # Error code added for consistency with other commands' output_error calls.
        output_error(f"Graph {graph_id} not found in workspace", "GRAPH_NOT_FOUND")
        return
    graph_info = graphs[0]

    # Guard against graphs without a channel — upload_graph needs a channelId,
    # and indexing graph_info['channelId'] directly would raise KeyError.
    channel_id = graph_info.get('channelId')
    if not channel_id:
        output_error("Graph has no associated channel", "NO_CHANNEL")
        return

    # Download the graph to a temp file, then re-upload it as staged.
    with tempfile.TemporaryDirectory() as tmpdir:
        filepath = os.path.join(tmpdir, "graph.yaml")
        client.download_graph(workspaceId=workspace_id, graphId=graph_id, filepath=filepath)

        name = args.name if args.name else f"{graph_info['name']}-staged"
        result = client.upload_graph(
            workspaceId=workspace_id,
            graph=filepath,
            channelId=channel_id,
            name=name,
            description=args.description if args.description else graph_info.get('description', ''),
            staged=True
        )
        output_json({"graphId": result})
1086
+
1087
+
1088
+ # =============================================================================
1089
+ # GRAPH-EDITOR
1090
+ # =============================================================================
1091
+
1092
def cmd_graph_editor_open(args):
    """Download a graph plus its channel schema, then trigger the graph editor.

    Writes the graph YAML and the channel's node schema JSON into the output
    directory, then drops a trigger file under ~/.theia that the editor
    watches to open the graph.
    """
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    graph_id = require_arg(args, 'graphid', 'Graph ID')
    directory = args.outputdir or os.getcwd()

    # Look up the graph to find which channel it belongs to.
    graphs = client.get_graphs(workspaceId=workspace_id, graphId=graph_id)
    if not graphs:
        output_error(f"Graph {graph_id} not found", "GRAPH_NOT_FOUND")
        return

    graph_info = graphs[0]
    channel_id = graph_info.get('channelId')
    if not channel_id:
        output_error("Graph has no associated channel", "NO_CHANNEL")
        return

    os.makedirs(directory, exist_ok=True)

    # Fetch the graph definition itself (spaces in the name become underscores).
    graph_name = graph_info.get('name', graph_id).replace(' ', '_')
    graph_path = os.path.join(directory, f"{graph_name}.yaml")
    client.download_graph(workspaceId=workspace_id, graphId=graph_id, filepath=graph_path)

    # Fetch the channel's node schema.
    schema = client.get_channel_nodes(channelId=channel_id)
    if not schema:
        output_error("Failed to fetch channel schema", "SCHEMA_ERROR")
        return

    schema_path = os.path.join(directory, f"{channel_id}_schema.json")
    with open(schema_path, 'w') as f:
        json.dump(schema, f, indent=2)

    # Drop the trigger file that tells the IDE to open the editor.
    trigger_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-open')
    os.makedirs(os.path.dirname(trigger_path), exist_ok=True)
    with open(trigger_path, 'w') as f:
        json.dump({
            "graphPath": os.path.abspath(graph_path),
            "schemaPath": os.path.abspath(schema_path),
            "autoLayout": True
        }, f)

    output_json({
        "graphPath": os.path.abspath(graph_path),
        "schemaPath": os.path.abspath(schema_path),
        "triggerPath": trigger_path,
        "graphId": graph_id,
        "channelId": channel_id,
        "graphName": graph_info.get('name')
    })
1150
+
1151
+
1152
def cmd_graph_editor_edit_node(args):
    """Edit a node's values in a local graph file.

    Merges the JSON object supplied via --values into the node's existing
    values; keys not mentioned in --values are left untouched.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')
    values_json = require_arg(args, 'values', 'Values JSON')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    # --values must parse to a JSON object.
    try:
        new_values = json.loads(values_json)
    except json.JSONDecodeError as e:
        output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
        return
    if not isinstance(new_values, dict):
        output_error("--values must be a JSON object", "INVALID_VALUES")
        return

    # Merge into the node's values, creating the dict when absent.
    node = nodes[node_name]
    node.setdefault('values', {})
    node['values'].update(new_values)

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "updatedValues": new_values,
        "allValues": node['values']
    })
1200
+
1201
+
1202
def cmd_graph_editor_add_node(args):
    """Add a new node to a local graph file.

    Creates a node of the given nodeClass. A custom name may be supplied
    with --name (otherwise nodeClass_N is generated, N auto-incremented).
    --values sets initial parameter values and --location sets x,y position.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_class = require_arg(args, 'nodeclass', 'Node class')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Pick a name: either the caller's, or the first free nodeClass_N.
    node_name = args.name
    if not node_name:
        suffix = 1
        while f"{node_class}_{suffix}" in nodes:
            suffix += 1
        node_name = f"{node_class}_{suffix}"

    if node_name in nodes:
        output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
        return

    # Optional initial values (must be a JSON object).
    values = {}
    if args.values:
        try:
            values = json.loads(args.values)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
            return
        if not isinstance(values, dict):
            output_error("--values must be a JSON object", "INVALID_VALUES")
            return

    # Optional location: {"x": N, "y": N} or [x, y]; defaults to the origin.
    location = {"x": 0, "y": 0}
    if args.location:
        try:
            loc = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
            location = {"x": loc['x'], "y": loc['y']}
        elif isinstance(loc, list) and len(loc) >= 2:
            location = {"x": loc[0], "y": loc[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return

    # Assemble the node record.
    new_node = {
        "name": node_name,
        "nodeClass": node_class,
        "color": args.color or "#808080",
        "links": {},
        "location": location,
        "ports": {
            "inputs": [],
            "outputs": []
        },
        "values": values
    }
    if args.tooltip:
        new_node["tooltip"] = args.tooltip

    nodes[node_name] = new_node
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": node_class,
        "location": location,
        "values": values
    })
1290
+
1291
+
1292
def cmd_graph_editor_add_link(args):
    """Add a link between two nodes in a local graph file.

    Connects a source node's output port to a target node's input port.
    The link is recorded on the target node under its 'links' property.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    source_node = require_arg(args, 'source', 'Source node name')
    output_port = require_arg(args, 'output', 'Output port name')
    target_node = require_arg(args, 'target', 'Target node name')
    input_port = require_arg(args, 'input', 'Input port name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Both endpoints must exist.
    if source_node not in nodes:
        output_error(f"Source node '{source_node}' not found in graph", "SOURCE_NOT_FOUND")
        return
    if target_node not in nodes:
        output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
        return

    # Ensure the links dict and the input port's link list exist.
    target = nodes[target_node]
    port_links = target.setdefault('links', {}).setdefault(input_port, [])

    # Refuse to add a duplicate of an existing connection.
    duplicate = any(
        entry.get('outputPort') == output_port and entry.get('sourceNode') == source_node
        for entry in port_links
    )
    if duplicate:
        output_error(
            f"Link already exists: {source_node}.{output_port} -> {target_node}.{input_port}",
            "LINK_EXISTS"
        )
        return

    port_links.append({
        "outputPort": output_port,
        "sourceNode": source_node
    })

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "link": {
            "source": source_node,
            "outputPort": output_port,
            "target": target_node,
            "inputPort": input_port
        }
    })
1360
+
1361
+
1362
def cmd_graph_editor_remove_node(args):
    """Remove a node from a local graph file.

    Deletes the node and prunes every link on other nodes that referenced
    it as a source; input ports whose link lists become empty are dropped.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    removed_node = nodes.pop(node_name)

    # Prune references to the removed node from every other node's links.
    removed_links = []
    for other_name, other_node in nodes.items():
        links = other_node.get('links', {})
        for port_name in list(links):
            kept = [entry for entry in links[port_name]
                    if entry.get('sourceNode') != node_name]
            if len(kept) != len(links[port_name]):
                removed_links.append({
                    "target": other_name,
                    "inputPort": port_name,
                    "source": node_name
                })
            if kept:
                links[port_name][:] = kept
            else:
                # Drop ports whose link lists are now (or were already) empty.
                del links[port_name]

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "removedNode": node_name,
        "nodeClass": removed_node.get('nodeClass'),
        "removedLinks": removed_links
    })
1414
+
1415
+
1416
def cmd_graph_editor_remove_link(args):
    """Remove a link between two nodes in a local graph file.

    Deletes the connection from the source node's output port to the
    target node's input port, if it exists.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    source_node = require_arg(args, 'source', 'Source node name')
    output_port = require_arg(args, 'output', 'Output port name')
    target_node = require_arg(args, 'target', 'Target node name')
    input_port = require_arg(args, 'input', 'Input port name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if target_node not in nodes:
        output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
        return

    links = nodes[target_node].get('links', {})
    if input_port not in links:
        output_error(
            f"No links found on input port '{input_port}' of node '{target_node}'",
            "LINK_NOT_FOUND"
        )
        return

    # Keep everything except the one requested link.
    port_links = links[input_port]
    survivors = [
        entry for entry in port_links
        if entry.get('outputPort') != output_port or entry.get('sourceNode') != source_node
    ]

    if len(survivors) == len(port_links):
        output_error(
            f"Link not found: {source_node}.{output_port} -> {target_node}.{input_port}",
            "LINK_NOT_FOUND"
        )
        return

    # Keep the pruned list, or drop the port entirely if nothing remains.
    if survivors:
        port_links[:] = survivors
    else:
        del links[input_port]

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "removedLink": {
            "source": source_node,
            "outputPort": output_port,
            "target": target_node,
            "inputPort": input_port
        }
    })
1483
+
1484
+
1485
def cmd_graph_editor_add_volume_file(args):
    """Add a VolumeFile node to a local graph file.

    The node references a file inside a Rendered.ai volume using the
    volumeId:/path/to/file format.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    volume_path = require_arg(args, 'path', 'File path in volume')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Auto-name the node (VolumeFile_N) unless a name was supplied.
    node_name = args.name
    if not node_name:
        suffix = 1
        while f"VolumeFile_{suffix}" in nodes:
            suffix += 1
        node_name = f"VolumeFile_{suffix}"

    if node_name in nodes:
        output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
        return

    # Optional location: {"x": N, "y": N} or [x, y]; defaults to the origin.
    location = {"x": 0, "y": 0}
    if args.location:
        try:
            loc = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
            location = {"x": loc['x'], "y": loc['y']}
        elif isinstance(loc, list) and len(loc) >= 2:
            location = {"x": loc[0], "y": loc[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return

    # Paths are always absolute within the volume.
    if not volume_path.startswith('/'):
        volume_path = '/' + volume_path

    volume_ref = f"{volume_id}:{volume_path}"

    # Tooltip prefers the human-readable volume name when one is given.
    volume_display = args.volumename or volume_id
    tooltip = f"{volume_display}:{volume_path}"

    # Assemble the VolumeFile node record.
    nodes[node_name] = {
        "name": node_name,
        "nodeClass": "VolumeFile",
        "color": "#246BB3",
        "hash": "8d56c9b8e4bae85fd61620e1d4d44a24",
        "links": {},
        "location": location,
        "ports": {
            "inputs": [
                {
                    "name": "File",
                    "description": "",
                    "default": volume_ref,
                    "hidden": True
                }
            ],
            "outputs": [
                {
                    "name": "File",
                    "description": ""
                }
            ]
        },
        "tooltip": tooltip,
        "values": {
            "File": volume_ref
        }
    }
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": "VolumeFile",
        "volumeId": volume_id,
        "path": volume_path,
        "volumeRef": volume_ref,
        "location": location
    })
1586
+
1587
+
1588
def cmd_graph_editor_add_volume_directory(args):
    """Add a VolumeDirectory node to a local graph file.

    The node references a directory inside a Rendered.ai volume using the
    volumeId:/path/to/directory format. The path defaults to the volume root.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    volume_path = args.path or '/'

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Auto-name the node (VolumeDirectory_N) unless a name was supplied.
    node_name = args.name
    if not node_name:
        suffix = 1
        while f"VolumeDirectory_{suffix}" in nodes:
            suffix += 1
        node_name = f"VolumeDirectory_{suffix}"

    if node_name in nodes:
        output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
        return

    # Optional location: {"x": N, "y": N} or [x, y]; defaults to the origin.
    location = {"x": 0, "y": 0}
    if args.location:
        try:
            loc = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
            location = {"x": loc['x'], "y": loc['y']}
        elif isinstance(loc, list) and len(loc) >= 2:
            location = {"x": loc[0], "y": loc[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return

    # Paths are always absolute within the volume.
    if not volume_path.startswith('/'):
        volume_path = '/' + volume_path

    volume_ref = f"{volume_id}:{volume_path}"

    # Tooltip prefers the human-readable volume name when one is given.
    volume_display = args.volumename or volume_id
    tooltip = f"{volume_display}:{volume_path}"

    # Assemble the VolumeDirectory node record.
    nodes[node_name] = {
        "name": node_name,
        "nodeClass": "VolumeDirectory",
        "color": "#246BB3",
        "hash": "a7c19eb160150ee04d82af60c9332d104f0a7f89",
        "links": {},
        "location": location,
        "ports": {
            "inputs": [
                {
                    "name": "Directory",
                    "description": "",
                    "default": volume_ref,
                    "hidden": True
                }
            ],
            "outputs": [
                {
                    "name": "Directory",
                    "description": ""
                }
            ]
        },
        "tooltip": tooltip,
        "values": {
            "Directory": volume_ref
        }
    }
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": "VolumeDirectory",
        "volumeId": volume_id,
        "path": volume_path,
        "volumeRef": volume_ref,
        "location": location
    })
1689
+
1690
+
1691
def cmd_graph_editor_list_nodes(args):
    """List all nodes in a local graph file.

    Prints a summary of every node: name, class, location, link counts,
    and whether it has values. With --verbose, also lists the concrete
    link sources and targets. Useful for understanding graph structure.
    """
    filepath = require_arg(args, 'file', 'Graph file path')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Build the outgoing-link index in ONE pass over all links instead of
    # re-scanning every node for every node (the original was O(n^2) in the
    # node count). outgoing[src] -> list of "targetNode.inputPort" strings.
    outgoing = {}
    for target_name, target_data in nodes.items():
        for port_name, port_links in target_data.get('links', {}).items():
            for link in port_links:
                src = link.get('sourceNode')
                # Self-links were excluded from the outgoing count originally
                # (the scan skipped other_name == node_name); preserve that.
                if src != target_name:
                    outgoing.setdefault(src, []).append(f"{target_name}.{port_name}")

    # Build summary for each node
    node_list = []
    for node_name, node_data in nodes.items():
        # Count incoming links (stored directly on this node).
        incoming_links = 0
        link_sources = []
        for port_name, port_links in node_data.get('links', {}).items():
            incoming_links += len(port_links)
            for link in port_links:
                link_sources.append(f"{link.get('sourceNode')}.{link.get('outputPort')}")

        link_targets = outgoing.get(node_name, [])

        node_summary = {
            "name": node_name,
            "nodeClass": node_data.get('nodeClass'),
            "location": node_data.get('location'),
            "incomingLinks": incoming_links,
            "outgoingLinks": len(link_targets),
            "hasValues": bool(node_data.get('values'))
        }

        if args.verbose:
            node_summary["linkSources"] = link_sources
            node_summary["linkTargets"] = link_targets

        node_list.append(node_summary)

    # Sort by name for consistent output
    node_list.sort(key=lambda x: x['name'])

    output_json({
        "file": os.path.abspath(filepath),
        "nodeCount": len(node_list),
        "nodes": node_list
    })
1751
+
1752
+
1753
def cmd_graph_editor_get_node(args):
    """Show full details for one node in a local graph file.

    Returns the node's class, color, location, tooltip, hash, values,
    ports, incoming links, and computed outgoing links.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    node_data = nodes[node_name]

    # Outgoing links are stored on the *other* nodes, so scan them all.
    outgoing_links = []
    for other_name, other_data in nodes.items():
        if other_name == node_name:
            continue
        for port_name, port_links in other_data.get('links', {}).items():
            outgoing_links.extend(
                {
                    "targetNode": other_name,
                    "targetPort": port_name,
                    "outputPort": link.get('outputPort')
                }
                for link in port_links
                if link.get('sourceNode') == node_name
            )

    output_json({
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": node_data.get('nodeClass'),
        "color": node_data.get('color'),
        "location": node_data.get('location'),
        "tooltip": node_data.get('tooltip'),
        "hash": node_data.get('hash'),
        "values": node_data.get('values', {}),
        "ports": node_data.get('ports', {}),
        "incomingLinks": node_data.get('links', {}),
        "outgoingLinks": outgoing_links
    })
1800
+
1801
+
1802
def cmd_graph_editor_move_node(args):
    """Move a node to a new x,y location in a local graph file."""
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')
    location_str = require_arg(args, 'location', 'Location')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    # Accept {"x": N, "y": N} or [x, y].
    try:
        loc = json.loads(location_str)
    except json.JSONDecodeError as e:
        output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
        return
    if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
        location = {"x": loc['x'], "y": loc['y']}
    elif isinstance(loc, list) and len(loc) >= 2:
        location = {"x": loc[0], "y": loc[1]}
    else:
        output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
        return

    previous_location = nodes[node_name].get('location', {})
    nodes[node_name]['location'] = location

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "oldLocation": previous_location,
        "newLocation": location
    })
1848
+
1849
+
1850
def cmd_graph_editor_clone_node(args):
    """Clone an existing node in a local graph file.

    The copy keeps the source's nodeClass, color, hash, ports, values and
    tooltip, but carries no links (those must be added separately). Unless
    --location is given, the clone is offset +50,+50 from the source.
    """
    import copy

    filepath = require_arg(args, 'file', 'Graph file path')
    source_node = require_arg(args, 'source', 'Source node name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if source_node not in nodes:
        output_error(f"Source node '{source_node}' not found in graph", "NODE_NOT_FOUND")
        return

    source_data = nodes[source_node]
    node_class = source_data.get('nodeClass', 'Unknown')

    # Auto-name the clone (nodeClass_N) unless a name was supplied.
    new_name = args.name
    if not new_name:
        suffix = 1
        while f"{node_class}_{suffix}" in nodes:
            suffix += 1
        new_name = f"{node_class}_{suffix}"

    if new_name in nodes:
        output_error(f"Node '{new_name}' already exists in graph", "NODE_EXISTS")
        return

    if args.location:
        # Explicit location: {"x": N, "y": N} or [x, y].
        try:
            loc = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
            location = {"x": loc['x'], "y": loc['y']}
        elif isinstance(loc, list) and len(loc) >= 2:
            location = {"x": loc[0], "y": loc[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return
    else:
        # Default: offset slightly from the source node's position.
        src_loc = source_data.get('location', {"x": 0, "y": 0})
        location = {
            "x": src_loc.get('x', 0) + 50,
            "y": src_loc.get('y', 0) + 50
        }

    # Deep-copy mutable sub-structures so the clone never shares state
    # with the original node; links are deliberately left empty.
    cloned_node = {
        "name": new_name,
        "nodeClass": node_class,
        "color": source_data.get('color', '#808080'),
        "hash": source_data.get('hash'),
        "links": {},
        "location": location,
        "ports": copy.deepcopy(source_data.get('ports', {"inputs": [], "outputs": []})),
        "values": copy.deepcopy(source_data.get('values', {}))
    }
    if source_data.get('tooltip'):
        cloned_node['tooltip'] = source_data['tooltip']

    nodes[new_name] = cloned_node
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "sourceNode": source_node,
        "clonedNode": new_name,
        "nodeClass": node_class,
        "location": location
    })
1937
+
1938
+
1939
def cmd_graph_editor_status(args):
    """Report graph-editor session status and validation errors.

    Reads ~/.theia/graph-editor-status.json, optionally narrows to the
    session editing --file, then summarizes error counts per session.
    """
    status_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-status.json')

    if not os.path.exists(status_path):
        output_json({
            "status": "no_status_file",
            "message": "No graph editor status file found. The graph editor may not have been opened yet.",
            "statusPath": status_path,
            "sessions": []
        })
        return

    try:
        with open(status_path, 'r') as f:
            status_data = json.load(f)
    except json.JSONDecodeError as e:
        output_error(f"Failed to parse status file: {str(e)}", "PARSE_ERROR")
        return
    except Exception as e:
        output_error(f"Failed to read status file: {str(e)}", "READ_ERROR")
        return

    sessions = status_data.get('sessions', [])

    # Narrow to the session whose graphPath matches the requested file.
    if args.file:
        wanted = os.path.abspath(args.file)
        sessions = [s for s in sessions if s.get('graphPath') == wanted]

    # Tally validation errors across the (possibly filtered) sessions.
    error_counts = [len(s.get('errors', [])) for s in sessions]
    sessions_with_errors = sum(1 for count in error_counts if count)
    total_errors = sum(error_counts)

    output_json({
        "status": "ok",
        "statusPath": status_path,
        "lastUpdated": status_data.get('lastUpdated'),
        "sessionCount": len(sessions),
        "sessionsWithErrors": sessions_with_errors,
        "totalErrors": total_errors,
        "sessions": sessions
    })
1991
+
1992
+
1993
+ # =============================================================================
1994
+ # CHANNELS
1995
+ # =============================================================================
1996
+
1997
def cmd_channels_get(args):
    """List channels, optionally scoped/filtered, and emit them as JSON."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_channels(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        channelId=args.channelid,
        limit=args.limit,
        fields=fields
    ))
2009
+
2010
+
2011
def cmd_channels_schema(args):
    """List a channel's node schema with optional filtering and search.

    Supports --category / --subcategory filters, a free-text --search across
    node metadata and port names, plus --list-categories and --names-only
    summary outputs.
    """
    client = get_client()
    channel_id = require_arg(args, 'channelid', 'Channel ID')

    result = client.get_channel_nodes(
        channelId=channel_id,
        fields=parse_list_arg(args.fields) if args.fields else None
    )

    # Narrow the node list by category/subcategory and free-text search.
    if args.category or args.subcategory or args.search:
        needle = args.search.lower() if args.search else None

        def _matches(node):
            # Category filters are exact, case-insensitive matches.
            if args.category and node.get('category', '').lower() != args.category.lower():
                return False
            if args.subcategory and node.get('subcategory', '').lower() != args.subcategory.lower():
                return False
            if needle:
                # Search spans node metadata plus input/output names and descriptions.
                haystack = [
                    node.get('name', ''),
                    node.get('category', ''),
                    node.get('subcategory', ''),
                    node.get('tooltip', ''),
                    node.get('description', ''),
                ]
                for port in node.get('inputs', []) + node.get('outputs', []):
                    haystack.append(port.get('name', ''))
                    haystack.append(port.get('description', ''))
                if needle not in ' '.join(str(h) for h in haystack).lower():
                    return False
            return True

        result = [node for node in result if _matches(node)]

    # --list-categories reports only the distinct (filtered) category names.
    if args.list_categories:
        output_json({
            "categories": sorted({n.get('category', '') for n in result if n.get('category')}),
            "subcategories": sorted({n.get('subcategory', '') for n in result if n.get('subcategory')})
        })
        return

    if args.names_only:
        output_json(sorted(n.get('name', '') for n in result))
    else:
        output_json(result)
2076
+
2077
+
2078
def cmd_channels_nodes(args):
    """Fetch documentation for a single node of a channel."""
    client = get_client()
    channel_id = require_arg(args, 'channelid', 'Channel ID')
    node = require_arg(args, 'node', 'Node name')

    docs = client.get_node_documentation(
        channelId=channel_id,
        node=node,
        fields=parse_list_arg(args.fields) if args.fields else None
    )
    output_json({"documentation": docs})
2090
+
2091
+
2092
def cmd_channels_docs(args):
    """Fetch the documentation for an entire channel."""
    channel_id = require_arg(args, 'channelid', 'Channel ID')
    docs = get_client().get_channel_documentation(channelId=channel_id)
    output_json({"documentation": docs})
2099
+
2100
+
2101
def cmd_channels_get_default_graph(args):
    """Download a channel's default graph, optionally to --outputfile."""
    channel_id = require_arg(args, 'channelid', 'Channel ID')
    path = get_client().get_default_graph(
        channelId=channel_id,
        filepath=args.outputfile
    )
    output_json({"filepath": path})
2111
+
2112
+
2113
def cmd_channels_set_default_graph(args):
    """Mark an existing graph as the channel's default graph."""
    graph_id = require_arg(args, 'graphid', 'Graph ID')
    ok = get_client().set_default_graph(
        graphId=graph_id,
        workspaceId=args.workspaceid
    )
    output_json({"success": ok})
2123
+
2124
+
2125
+ # =============================================================================
2126
+ # SERVICES
2127
+ # =============================================================================
2128
+
2129
def cmd_services_get(args):
    """List services, optionally filtered by workspace/org/service id."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_services(
        workspaceId=args.workspaceid,
        organizationId=args.orgid,
        serviceId=args.serviceid,
        limit=args.limit,
        fields=fields
    ))
2141
+
2142
+
2143
def cmd_services_create(args):
    """Create a new service in an organization."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')

    # Optional list args default to empty lists on creation.
    volumes = parse_list_arg(args.volumes) if args.volumes else []
    tags = parse_list_arg(args.tags) if args.tags else []

    service_id = client.create_service(
        name=args.name,
        description=args.description,
        organizationId=org_id,
        serviceTypeId=args.type,
        volumes=volumes,
        instance=args.instance,
        tags=tags
    )
    output_json({"serviceId": service_id})
2158
+
2159
+
2160
def cmd_services_edit(args):
    """Edit a service's name, description, volumes, instance, or tags."""
    client = get_client()
    service_id = require_arg(args, 'serviceid', 'Service ID')

    # None (not []) signals "leave unchanged" for list-valued fields.
    ok = client.edit_service(
        serviceId=service_id,
        name=args.name,
        description=args.description,
        volumes=parse_list_arg(args.volumes) if args.volumes else None,
        instance=args.instance,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"success": ok})
2174
+
2175
+
2176
def cmd_services_delete(args):
    """Delete a service by id."""
    service_id = require_arg(args, 'serviceid', 'Service ID')
    output_json({"success": get_client().delete_service(serviceId=service_id)})
2183
+
2184
+
2185
def cmd_services_jobs(args):
    """List service jobs in a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_service_jobs(
        workspaceId=workspace_id,
        jobId=args.jobid,
        limit=args.limit,
        fields=fields
    ))
2197
+
2198
+
2199
def cmd_services_delete_job(args):
    """Delete a single service job from a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    job_id = require_arg(args, 'jobid', 'Job ID')
    ok = get_client().delete_service_job(
        workspaceId=workspace_id,
        jobId=job_id
    )
    output_json({"success": ok})
2210
+
2211
+
2212
+ # =============================================================================
2213
+ # API KEYS
2214
+ # =============================================================================
2215
+
2216
def cmd_api_keys_get(args):
    """List the caller's API keys."""
    output_json(get_client().get_api_keys())
2222
+
2223
+
2224
def cmd_api_keys_create(args):
    """Create an API key scoped to the user, an organization, or a workspace."""
    client = get_client()

    kwargs = {'name': args.name, 'scope': args.scope}

    # Org- and workspace-scoped keys must identify their target resource.
    if args.scope == 'organization':
        kwargs['organizationId'] = require_arg(args, 'orgid', 'Organization ID')
    elif args.scope == 'workspace':
        kwargs['workspaceId'] = require_arg(args, 'workspaceid', 'Workspace ID')

    if args.expires:
        kwargs['expiresAt'] = args.expires

    output_json({"apiKey": client.create_api_key(**kwargs)})
2245
+
2246
+
2247
def cmd_api_keys_delete(args):
    """Delete an API key by id."""
    api_key_id = require_arg(args, 'apikeyid', 'API Key ID')
    output_json({"success": get_client().delete_api_key(apiKeyId=api_key_id)})
2254
+
2255
+
2256
+ # =============================================================================
2257
+ # ANALYTICS
2258
+ # =============================================================================
2259
+
2260
def cmd_analytics_get(args):
    """List analytics jobs in a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    output_json(get_client().get_analytics(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        analyticsId=args.analyticsid
    ))
2271
+
2272
+
2273
def cmd_analytics_create(args):
    """Kick off an analytics job for a dataset."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    analytics_id = get_client().create_analytics(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        analyticsType=args.type
    )
    output_json({"analyticsId": analytics_id})
2285
+
2286
+
2287
def cmd_analytics_delete(args):
    """Delete an analytics job from a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    analytics_id = require_arg(args, 'analyticsid', 'Analytics ID')
    ok = get_client().delete_analytics(
        workspaceId=workspace_id,
        analyticsId=analytics_id
    )
    output_json({"success": ok})
2298
+
2299
+
2300
def cmd_analytics_types(args):
    """List the available analytics types."""
    output_json(get_client().get_analytics_types())
2306
+
2307
+
2308
def cmd_analytics_download(args):
    """Download the results of an analytics job."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    analytics_id = require_arg(args, 'analyticsid', 'Analytics ID')
    output_json(get_client().download_analytics(
        workspaceId=workspace_id,
        analyticsId=analytics_id
    ))
2319
+
2320
+
2321
def cmd_analytics_edit(args):
    """Replace the tags on an analytics job."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    analytics_id = require_arg(args, 'analyticsid', 'Analytics ID')
    tags = parse_list_arg(args.tags) if args.tags else []
    ok = get_client().edit_analytics(
        workspaceId=workspace_id,
        analyticsId=analytics_id,
        tags=tags
    )
    output_json({"success": ok})
2333
+
2334
+
2335
+ # =============================================================================
2336
+ # ANNOTATIONS
2337
+ # =============================================================================
2338
+
2339
def cmd_annotations_get(args):
    """List annotation jobs in a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    output_json(get_client().get_annotations(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        annotationId=args.annotationid
    ))
2350
+
2351
+
2352
def cmd_annotations_create(args):
    """Create an annotation job for a dataset in a given format."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    annotation_id = get_client().create_annotation(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        format=args.format,
        mapId=args.mapid
    )
    output_json({"annotationId": annotation_id})
2365
+
2366
+
2367
def cmd_annotations_delete(args):
    """Delete an annotation job from a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    annotation_id = require_arg(args, 'annotationid', 'Annotation ID')
    ok = get_client().delete_annotation(
        workspaceId=workspace_id,
        annotationId=annotation_id
    )
    output_json({"success": ok})
2378
+
2379
+
2380
def cmd_annotations_formats(args):
    """List the supported annotation output formats."""
    output_json(get_client().get_annotation_formats())
2386
+
2387
+
2388
def cmd_annotations_download(args):
    """Download a completed annotation job."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    annotation_id = require_arg(args, 'annotationid', 'Annotation ID')
    path = get_client().download_annotation(
        workspaceId=workspace_id,
        annotationId=annotation_id
    )
    output_json({"downloadPath": path})
2399
+
2400
+
2401
def cmd_annotations_edit(args):
    """Replace the tags on an annotation job."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    annotation_id = require_arg(args, 'annotationid', 'Annotation ID')
    tags = parse_list_arg(args.tags) if args.tags else []
    ok = get_client().edit_annotation(
        workspaceId=workspace_id,
        annotationId=annotation_id,
        tags=tags
    )
    output_json({"success": ok})
2413
+
2414
+
2415
def cmd_annotations_view(args):
    """Render an image with its annotations drawn on top.

    Draws 2D boxes, 3D boxes and/or segmentation outlines for an image that
    lives inside an ANA-format dataset tree (annotations/ and metadata/
    folders alongside the images). Exits non-zero on any validation or
    drawing failure.
    """
    from anatools.annotations import annotations

    image_path = require_arg(args, 'imagepath', 'Image path')
    out_dir = require_arg(args, 'outdir', 'Output directory')

    # One or more comma-separated draw types; default to 2D boxes.
    draw_types = parse_list_arg(args.drawtype) if args.drawtype else ['box_2d']
    valid_types = ['box_2d', 'box_3d', 'segmentation']
    for dt in draw_types:
        if dt not in valid_types:
            output_error(f"Invalid draw type '{dt}'. Must be one of: {', '.join(valid_types)}", "INVALID_DRAW_TYPE")
            sys.exit(1)

    # Optional filters restricting which objects get drawn.
    object_ids = None
    if args.objectids:
        try:
            object_ids = [int(i) for i in parse_list_arg(args.objectids)]
        except ValueError:
            output_error("Object IDs must be integers", "INVALID_OBJECT_IDS")
            sys.exit(1)
    object_types = parse_list_arg(args.objecttypes) if args.objecttypes else None

    # Optional colors, given as JSON; list values become RGB tuples.
    colors = None
    if args.colors:
        colors = parse_json_arg(args.colors)
        for key, value in colors.items():
            if isinstance(value, list):
                colors[key] = tuple(value)

    line_thickness = args.thickness or 1

    ann = annotations()
    # Dispatch table replaces the original if/elif chain; draw_types is
    # validated above, so every lookup hits.
    drawers = {
        'box_2d': ann.bounding_box_2d,
        'box_3d': ann.bounding_box_3d,
        'segmentation': ann.segmentation,
    }
    output_paths = []

    try:
        for draw_type in draw_types:
            out_path = drawers[draw_type](
                image_path=image_path,
                out_dir=out_dir,
                object_ids=object_ids,
                object_types=object_types,
                line_thickness=line_thickness,
                colors=colors,
                quiet=True
            )
            if out_path is None:
                output_error(f"Failed to generate annotated image for draw type '{draw_type}'", "ANNOTATION_ERROR")
                sys.exit(1)
            output_paths.append(out_path)

        output_json({
            "success": True,
            "outputPaths": output_paths,
            "drawTypes": draw_types
        })
    except FileNotFoundError as e:
        output_error(f"File not found: {str(e)}", "FILE_NOT_FOUND")
        sys.exit(1)
    except Exception as e:
        output_error(f"Failed to generate annotated image: {str(e)}", "ANNOTATION_ERROR")
        sys.exit(1)
2511
+
2512
+
2513
+ # =============================================================================
2514
+ # ANNOTATION-MAPS
2515
+ # =============================================================================
2516
+
2517
def cmd_annotation_maps_get(args):
    """List the annotation maps in an organization."""
    org_id = require_arg(args, 'orgid', 'Organization ID')
    output_json(get_client().get_annotation_maps(organizationId=org_id))
2524
+
2525
+
2526
def cmd_annotation_maps_upload(args):
    """Upload an annotation map file to an organization."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')
    map_file = require_arg(args, 'mapfile', 'Map file path')

    map_id = client.upload_annotation_map(
        organizationId=org_id,
        mapfile=map_file,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"mapId": map_id})
2540
+
2541
+
2542
def cmd_annotation_maps_download(args):
    """Download an annotation map, optionally into --outputdir."""
    map_id = require_arg(args, 'mapid', 'Map ID')
    path = get_client().download_annotation_map(
        mapId=map_id,
        localDir=args.outputdir
    )
    output_json({"downloadPath": path})
2552
+
2553
+
2554
def cmd_annotation_maps_delete(args):
    """Delete an annotation map by id."""
    map_id = require_arg(args, 'mapid', 'Map ID')
    output_json({"success": get_client().delete_annotation_map(mapId=map_id)})
2561
+
2562
+
2563
def cmd_annotation_maps_edit(args):
    """Edit an annotation map's name, description, or tags."""
    map_id = require_arg(args, 'mapid', 'Map ID')
    ok = get_client().edit_annotation_map(
        mapId=map_id,
        name=args.name,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"success": ok})
2575
+
2576
+
2577
+ # =============================================================================
2578
+ # GAN
2579
+ # =============================================================================
2580
+
2581
def cmd_gan_datasets_get(args):
    """List GAN datasets in a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_gan_datasets(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        gandatasetId=args.gandatasetid,
        limit=args.limit,
        fields=fields
    ))
2594
+
2595
+
2596
def cmd_gan_datasets_create(args):
    """Create a GAN dataset by running a model over an existing dataset."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    model_id = require_arg(args, 'modelid', 'Model ID')

    new_dataset_id = client.create_gan_dataset(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        modelId=model_id,
        name=args.name,
        description=args.description or '',
        tags=parse_list_arg(args.tags) if args.tags else []
    )
    output_json({"datasetId": new_dataset_id})
2612
+
2613
+
2614
def cmd_gan_datasets_delete(args):
    """Delete a GAN dataset from a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    ok = get_client().delete_gan_dataset(
        workspaceId=workspace_id,
        datasetId=dataset_id
    )
    output_json({"success": ok})
2625
+
2626
+
2627
def cmd_gan_models_get(args):
    """List GAN models visible to the caller."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_gan_models(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        modelId=args.modelid,
        limit=args.limit,
        fields=fields
    ))
2639
+
2640
+
2641
def cmd_gan_models_upload(args):
    """Upload a GAN model file to an organization."""
    client = get_client()
    org_id = require_arg(args, 'orgid', 'Organization ID')
    model_file = require_arg(args, 'modelfile', 'Model file path')

    model_id = client.upload_gan_model(
        organizationId=org_id,
        modelfile=model_file,
        name=args.name,
        description=args.description,
        flags=args.flags,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"modelId": model_id})
2656
+
2657
+
2658
def cmd_gan_models_download(args):
    """Download a GAN model, optionally into --outputdir."""
    model_id = require_arg(args, 'modelid', 'Model ID')
    path = get_client().download_gan_model(
        modelId=model_id,
        localDir=args.outputdir
    )
    output_json({"downloadPath": path})
2668
+
2669
+
2670
+ # =============================================================================
2671
+ # UMAP
2672
+ # =============================================================================
2673
+
2674
def cmd_umap_get(args):
    """List UMAP jobs in a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_umaps(
        workspaceId=workspace_id,
        umapId=args.umapid,
        datasetId=args.datasetid,
        limit=args.limit,
        fields=fields
    ))
2687
+
2688
+
2689
def cmd_umap_create(args):
    """Create a UMAP over one or more datasets with per-dataset sample counts."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')

    # Both lists are required; samples are parsed as integers.
    dataset_ids = parse_list_arg(require_arg(args, 'datasetids', 'Dataset IDs'))
    samples = [int(s) for s in parse_list_arg(require_arg(args, 'samples', 'Samples'))]

    umap_id = client.create_umap(
        workspaceId=workspace_id,
        name=args.name,
        datasetIds=dataset_ids,
        samples=samples,
        description=args.description,
        seed=args.seed,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"umapId": umap_id})
2707
+
2708
+
2709
def cmd_umap_delete(args):
    """Delete a UMAP from a workspace."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    umap_id = require_arg(args, 'umapid', 'UMAP ID')
    ok = get_client().delete_umap(
        workspaceId=workspace_id,
        umapId=umap_id
    )
    output_json({"success": ok})
2720
+
2721
+
2722
+ # =============================================================================
2723
+ # SERVERS (Editor)
2724
+ # =============================================================================
2725
+
2726
def cmd_servers_get(args):
    """List editor servers visible to the caller."""
    client = get_client()
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_servers(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        serverId=args.serverid,
        limit=args.limit,
        fields=fields
    ))
2738
+
2739
+
2740
def cmd_servers_create(args):
    """Create an editor server."""
    server_id = get_client().create_server(
        organizationId=args.orgid,
        workspaceId=args.workspaceid,
        instance=args.instance,
        name=args.name
    )
    output_json({"serverId": server_id})
2751
+
2752
+
2753
def cmd_servers_delete(args):
    """Delete an editor server by id."""
    server_id = require_arg(args, 'serverid', 'Server ID')
    output_json({"success": get_client().delete_server(serverId=server_id)})
2760
+
2761
+
2762
def cmd_servers_start(args):
    """Start an editor server by id."""
    server_id = require_arg(args, 'serverid', 'Server ID')
    output_json({"success": get_client().start_server(serverId=server_id)})
2769
+
2770
+
2771
def cmd_servers_stop(args):
    """Stop an editor server by id."""
    server_id = require_arg(args, 'serverid', 'Server ID')
    output_json({"success": get_client().stop_server(serverId=server_id)})
2778
+
2779
+
2780
+ # =============================================================================
2781
+ # ML
2782
+ # =============================================================================
2783
+
2784
def cmd_ml_architectures(args):
    """List the available ML model architectures."""
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(get_client().get_ml_architectures(fields=fields))
2792
+
2793
+
2794
def cmd_ml_models_get(args):
    """List ML models in a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_ml_models(
        workspaceId=workspace_id,
        datasetId=args.datasetid,
        modelId=args.modelid,
        limit=args.limit,
        fields=fields
    ))
2807
+
2808
+
2809
def cmd_ml_models_create(args):
    """Start an ML model training job on a dataset."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    architecture_id = require_arg(args, 'architectureid', 'Architecture ID')
    # Parameters are forwarded as the raw JSON string supplied by the user.
    parameters = require_arg(args, 'parameters', 'Parameters JSON')

    model_id = client.create_ml_model(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        architectureId=architecture_id,
        name=args.name,
        parameters=parameters,
        description=args.description,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"modelId": model_id})
2827
+
2828
+
2829
def cmd_ml_models_download(args):
    """Download a trained ML model, optionally at a specific checkpoint."""
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    model_id = require_arg(args, 'modelid', 'Model ID')
    path = get_client().download_ml_model(
        workspaceId=workspace_id,
        modelId=model_id,
        checkpoint=args.checkpoint,
        localDir=args.outputdir
    )
    output_json({"downloadPath": path})
2842
+
2843
+
2844
def cmd_ml_inferences_get(args):
    """List ML inference jobs in a workspace."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    fields = parse_list_arg(args.fields) if args.fields else None
    output_json(client.get_ml_inferences(
        workspaceId=workspace_id,
        inferenceId=args.inferenceid,
        datasetId=args.datasetid,
        modelId=args.modelid,
        limit=args.limit,
        fields=fields
    ))
2858
+
2859
+
2860
def cmd_ml_inferences_create(args):
    """Start an ML inference job: run a model over a dataset."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
    model_id = require_arg(args, 'modelid', 'Model ID')

    inference_id = client.create_ml_inference(
        workspaceId=workspace_id,
        datasetId=dataset_id,
        modelId=model_id,
        mapId=args.mapid,
        tags=parse_list_arg(args.tags) if args.tags else None
    )
    output_json({"inferenceId": inference_id})
2875
+
2876
+
2877
+ # =============================================================================
2878
+ # INPAINT
2879
+ # =============================================================================
2880
+
2881
def cmd_inpaint_get(args):
    """List inpaint jobs for a volume and emit them as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    jobs = api.get_inpaints(
        volumeId=vol,
        inpaintId=args.inpaintid,
        limit=args.limit,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(jobs)
2893
+
2894
+
2895
def cmd_inpaint_log(args):
    """Fetch the log of a single inpaint job and emit it as JSON."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    job = require_arg(args, 'inpaintid', 'Inpaint ID')
    log = api.get_inpaint_log(
        volumeId=vol,
        inpaintId=job,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(log)
2907
+
2908
+
2909
def cmd_inpaint_create(args):
    """Create an inpaint job and emit its new ID as JSON.

    Defaults applied when the corresponding arguments are not supplied:
    dilation 5, input type 'MASK', output type 'PNG'.
    """
    client = get_client()
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    location = require_arg(args, 'location', 'Location')

    # Bug fix: the original used `args.dilation or 5`, which silently
    # replaced an explicit `--dilation 0` with 5. Compare against None so
    # zero is honored as a legitimate value.
    dilation = 5 if args.dilation is None else args.dilation

    result = client.create_inpaint(
        volumeId=volume_id,
        location=location,
        files=parse_list_arg(args.files) if args.files else [],
        destination=args.destination,
        dilation=dilation,
        inputType=args.inputtype or 'MASK',
        outputType=args.outputtype or 'PNG'
    )
    output_json({"inpaintId": result})
2925
+
2926
+
2927
def cmd_inpaint_delete(args):
    """Delete an inpaint job and emit a JSON success flag."""
    api = get_client()
    vol = require_arg(args, 'volumeid', 'Volume ID')
    job = require_arg(args, 'inpaintid', 'Inpaint ID')
    ok = api.delete_inpaint(volumeId=vol, inpaintId=job)
    output_json({"success": ok})
2938
+
2939
+
2940
+ # =============================================================================
2941
+ # PREVIEW
2942
+ # =============================================================================
2943
+
2944
def cmd_preview_get(args):
    """Fetch one preview job and emit it as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    pv = require_arg(args, 'previewid', 'Preview ID')
    preview = api.get_preview(
        workspaceId=ws,
        previewId=pv,
        fields=parse_list_arg(args.fields) if args.fields else None,
    )
    output_json(preview)
2956
+
2957
+
2958
def cmd_preview_create(args):
    """Start a preview job for a graph and emit its new ID as JSON."""
    api = get_client()
    ws = require_arg(args, 'workspaceid', 'Workspace ID')
    graph = require_arg(args, 'graphid', 'Graph ID')
    preview_id = api.create_preview(workspaceId=ws, graphId=graph)
    output_json({"previewId": preview_id})
2969
+
2970
+
2971
+ # =============================================================================
2972
+ # AGENTS
2973
+ # =============================================================================
2974
+
2975
def cmd_agents_types(args):
    """Emit the platform's available data types as JSON."""
    api = get_client()
    output_json(api.get_data_types())
2981
+
2982
+
2983
def cmd_agents_fields(args):
    """Emit the fields defined for one data type as JSON."""
    api = get_client()
    dtype = require_arg(args, 'type', 'Data type')
    output_json(api.get_data_fields(type=dtype))
2990
+
2991
+
2992
+ # =============================================================================
2993
+ # RULES
2994
+ # =============================================================================
2995
+
2996
def cmd_rules_organization(args):
    """Emit the rules configured for an organization as JSON."""
    api = get_client()
    rules = api.get_organization_rules(organizationId=args.orgid)
    output_json({"rules": rules})
3002
+
3003
+
3004
def cmd_rules_workspace(args):
    """Emit the rules configured for a workspace as JSON."""
    api = get_client()
    rules = api.get_workspace_rules(workspaceId=args.workspaceid)
    output_json({"rules": rules})
3010
+
3011
+
3012
def cmd_rules_service(args):
    """Emit the rules configured for a service as JSON."""
    api = get_client()
    svc = require_arg(args, 'serviceid', 'Service ID')
    output_json({"rules": api.get_service_rules(serviceId=svc)})
3019
+
3020
+
3021
def cmd_rules_user(args):
    """Emit the current user's rules as JSON."""
    api = get_client()
    output_json({"rules": api.get_user_rules()})
3027
+
3028
+
3029
def cmd_rules_edit_organization(args):
    """Update an organization's rules and emit a JSON success flag."""
    api = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    ok = api.edit_organization_rules(organizationId=args.orgid, rules=new_rules)
    output_json({"success": ok})
3039
+
3040
+
3041
def cmd_rules_edit_workspace(args):
    """Update a workspace's rules and emit a JSON success flag."""
    api = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    ok = api.edit_workspace_rules(workspaceId=args.workspaceid, rules=new_rules)
    output_json({"success": ok})
3051
+
3052
+
3053
def cmd_rules_edit_service(args):
    """Update a service's rules and emit a JSON success flag."""
    api = get_client()
    svc = require_arg(args, 'serviceid', 'Service ID')
    new_rules = require_arg(args, 'rules', 'Rules')
    ok = api.edit_service_rules(serviceId=svc, rules=new_rules)
    output_json({"success": ok})
3064
+
3065
+
3066
def cmd_rules_edit_user(args):
    """Update the current user's rules and emit a JSON success flag."""
    api = get_client()
    new_rules = require_arg(args, 'rules', 'Rules')
    output_json({"success": api.edit_user_rules(rules=new_rules)})
3073
+
3074
+
3075
+ # =============================================================================
3076
+ # MAIN PARSER
3077
+ # =============================================================================
3078
+
3079
+ def create_parser():
3080
+ """Create the argument parser with all subcommands."""
3081
+ parser = argparse.ArgumentParser(
3082
+ prog='renderedai',
3083
+ description='Rendered.ai Platform CLI - JSON output for automation and AI agents',
3084
+ formatter_class=argparse.RawDescriptionHelpFormatter,
3085
+ epilog="""
3086
+ Environment Variables:
3087
+ RENDEREDAI_API_KEY API key for authentication (required)
3088
+ RENDEREDAI_ENVIRONMENT Environment: prod, test, dev (default: prod)
3089
+ RENDEREDAI_ENDPOINT Custom API endpoint URL
3090
+
3091
+ Examples:
3092
+ renderedai workspaces get --orgid abc123
3093
+ renderedai datasets get --workspaceid xyz789 --limit 10
3094
+ renderedai volumes create --name "My Volume" --orgid abc123
3095
+ renderedai graphs get --workspaceid xyz789 --graphid graph123
3096
+ """
3097
+ )
3098
+
3099
+ subparsers = parser.add_subparsers(dest='resource', help='Resource to manage')
3100
+
3101
+ # -------------------------------------------------------------------------
3102
+ # WORKSPACES
3103
+ # -------------------------------------------------------------------------
3104
+ workspaces = subparsers.add_parser('workspaces', help='Manage workspaces')
3105
+ workspaces_sub = workspaces.add_subparsers(dest='action', help='Action')
3106
+
3107
+ # workspaces get
3108
+ ws_get = workspaces_sub.add_parser('get', help='Get workspaces')
3109
+ ws_get.add_argument('--workspaceid', help='Filter by workspace ID')
3110
+ ws_get.add_argument('--orgid', help='Filter by organization ID')
3111
+ ws_get.add_argument('--limit', type=int, help='Maximum results')
3112
+ ws_get.add_argument('--fields', help='Comma-separated fields to return')
3113
+ ws_get.set_defaults(func=cmd_workspaces_get)
3114
+
3115
+ # workspaces create
3116
+ ws_create = workspaces_sub.add_parser('create', help='Create a workspace')
3117
+ ws_create.add_argument('--name', required=True, help='Workspace name')
3118
+ ws_create.add_argument('--description', help='Description')
3119
+ ws_create.add_argument('--orgid', required=True, help='Organization ID')
3120
+ ws_create.add_argument('--channelids', help='Comma-separated channel IDs')
3121
+ ws_create.add_argument('--volumeids', help='Comma-separated volume IDs')
3122
+ ws_create.add_argument('--tags', help='Comma-separated tags')
3123
+ ws_create.set_defaults(func=cmd_workspaces_create)
3124
+
3125
+ # workspaces edit
3126
+ ws_edit = workspaces_sub.add_parser('edit', help='Edit a workspace')
3127
+ ws_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
3128
+ ws_edit.add_argument('--name', help='New name')
3129
+ ws_edit.add_argument('--description', help='New description')
3130
+ ws_edit.add_argument('--channelids', help='Comma-separated channel IDs')
3131
+ ws_edit.add_argument('--volumeids', help='Comma-separated volume IDs')
3132
+ ws_edit.add_argument('--tags', help='Comma-separated tags')
3133
+ ws_edit.set_defaults(func=cmd_workspaces_edit)
3134
+
3135
+ # workspaces delete
3136
+ ws_delete = workspaces_sub.add_parser('delete', help='Delete a workspace')
3137
+ ws_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3138
+ ws_delete.set_defaults(func=cmd_workspaces_delete)
3139
+
3140
+ # workspaces mount
3141
+ ws_mount = workspaces_sub.add_parser('mount', help='Mount a workspace to local filesystem')
3142
+ ws_mount.add_argument('--workspaceid', required=True, help='Workspace ID')
3143
+ ws_mount.add_argument('--path', help='Local path to mount to (default: current directory)')
3144
+ ws_mount.add_argument('--mountexec', choices=['goofys', 's3fs', 'mount-s3'], help='Mount executable')
3145
+ ws_mount.set_defaults(func=cmd_workspaces_mount)
3146
+
3147
+ # workspaces unmount
3148
+ ws_unmount = workspaces_sub.add_parser('unmount', help='Unmount a workspace from local filesystem')
3149
+ ws_unmount.add_argument('--workspaceid', required=True, help='Workspace ID')
3150
+ ws_unmount.set_defaults(func=cmd_workspaces_unmount)
3151
+
3152
+ # -------------------------------------------------------------------------
3153
+ # ORGANIZATIONS
3154
+ # -------------------------------------------------------------------------
3155
+ organizations = subparsers.add_parser('organizations', help='Manage organizations')
3156
+ organizations_sub = organizations.add_subparsers(dest='action', help='Action')
3157
+
3158
+ # organizations get
3159
+ org_get = organizations_sub.add_parser('get', help='Get organizations')
3160
+ org_get.add_argument('--orgid', help='Filter by organization ID')
3161
+ org_get.add_argument('--limit', type=int, help='Maximum results')
3162
+ org_get.add_argument('--fields', help='Comma-separated fields to return')
3163
+ org_get.set_defaults(func=cmd_organizations_get)
3164
+
3165
+ # -------------------------------------------------------------------------
3166
+ # MEMBERS
3167
+ # -------------------------------------------------------------------------
3168
+ members = subparsers.add_parser('members', help='Manage organization members')
3169
+ members_sub = members.add_subparsers(dest='action', help='Action')
3170
+
3171
+ # members get
3172
+ members_get = members_sub.add_parser('get', help='Get organization members')
3173
+ members_get.add_argument('--orgid', required=True, help='Organization ID')
3174
+ members_get.add_argument('--limit', type=int, help='Maximum results')
3175
+ members_get.add_argument('--fields', help='Comma-separated fields to return')
3176
+ members_get.set_defaults(func=cmd_members_get)
3177
+
3178
+ # -------------------------------------------------------------------------
3179
+ # DATASETS
3180
+ # -------------------------------------------------------------------------
3181
+ datasets = subparsers.add_parser('datasets', help='Manage datasets')
3182
+ datasets_sub = datasets.add_subparsers(dest='action', help='Action')
3183
+
3184
+ # datasets get
3185
+ ds_get = datasets_sub.add_parser('get', help='Get datasets')
3186
+ ds_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3187
+ ds_get.add_argument('--datasetid', help='Filter by dataset ID')
3188
+ ds_get.add_argument('--limit', type=int, help='Maximum results')
3189
+ ds_get.add_argument('--fields', help='Comma-separated fields to return')
3190
+ ds_get.set_defaults(func=cmd_datasets_get)
3191
+
3192
+ # datasets create
3193
+ ds_create = datasets_sub.add_parser('create', help='Create a dataset')
3194
+ ds_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3195
+ ds_create.add_argument('--name', required=True, help='Dataset name')
3196
+ ds_create.add_argument('--graphid', required=True, help='Graph ID')
3197
+ ds_create.add_argument('--description', help='Description')
3198
+ ds_create.add_argument('--runs', type=int, default=1, help='Number of runs')
3199
+ ds_create.add_argument('--seed', type=int, default=1, help='Seed')
3200
+ ds_create.add_argument('--priority', type=int, default=1, help='Priority (1-3)')
3201
+ ds_create.add_argument('--tags', help='Comma-separated tags')
3202
+ ds_create.set_defaults(func=cmd_datasets_create)
3203
+
3204
+ # datasets edit
3205
+ ds_edit = datasets_sub.add_parser('edit', help='Edit a dataset')
3206
+ ds_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
3207
+ ds_edit.add_argument('--datasetid', required=True, help='Dataset ID')
3208
+ ds_edit.add_argument('--name', help='New name')
3209
+ ds_edit.add_argument('--description', help='New description')
3210
+ ds_edit.add_argument('--tags', help='Comma-separated tags')
3211
+ ds_edit.add_argument('--pause', action='store_true', help='Pause the job')
3212
+ ds_edit.add_argument('--priority', type=int, help='Priority (1-3)')
3213
+ ds_edit.set_defaults(func=cmd_datasets_edit)
3214
+
3215
+ # datasets delete
3216
+ ds_delete = datasets_sub.add_parser('delete', help='Delete a dataset')
3217
+ ds_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3218
+ ds_delete.add_argument('--datasetid', required=True, help='Dataset ID')
3219
+ ds_delete.set_defaults(func=cmd_datasets_delete)
3220
+
3221
+ # datasets cancel
3222
+ ds_cancel = datasets_sub.add_parser('cancel', help='Cancel a running job')
3223
+ ds_cancel.add_argument('--workspaceid', required=True, help='Workspace ID')
3224
+ ds_cancel.add_argument('--datasetid', required=True, help='Dataset ID')
3225
+ ds_cancel.set_defaults(func=cmd_datasets_cancel)
3226
+
3227
+ # datasets download
3228
+ ds_download = datasets_sub.add_parser('download', help='Download a dataset or a single file from a dataset')
3229
+ ds_download.add_argument('--workspaceid', required=True, help='Workspace ID')
3230
+ ds_download.add_argument('--datasetid', required=True, help='Dataset ID')
3231
+ ds_download.add_argument('--filepath', help='Relative path to a specific file within the dataset (e.g., "images/000000-1-image.png"). If not provided, downloads the entire dataset.')
3232
+ ds_download.add_argument('--outputdir', help='Output directory')
3233
+ ds_download.set_defaults(func=cmd_datasets_download)
3234
+
3235
+ # datasets upload
3236
+ ds_upload = datasets_sub.add_parser('upload', help='Upload a dataset')
3237
+ ds_upload.add_argument('--workspaceid', required=True, help='Workspace ID')
3238
+ ds_upload.add_argument('--file', required=True, help='File to upload')
3239
+ ds_upload.add_argument('--description', help='Description')
3240
+ ds_upload.add_argument('--tags', help='Comma-separated tags')
3241
+ ds_upload.set_defaults(func=cmd_datasets_upload)
3242
+
3243
+ # datasets runs
3244
+ ds_runs = datasets_sub.add_parser('runs', help='Get dataset runs')
3245
+ ds_runs.add_argument('--workspaceid', required=True, help='Workspace ID')
3246
+ ds_runs.add_argument('--datasetid', required=True, help='Dataset ID')
3247
+ ds_runs.add_argument('--state', help='Filter by state')
3248
+ ds_runs.add_argument('--fields', help='Comma-separated fields to return')
3249
+ ds_runs.set_defaults(func=cmd_datasets_runs)
3250
+
3251
+ # datasets log
3252
+ ds_log = datasets_sub.add_parser('log', help='Get dataset run log')
3253
+ ds_log.add_argument('--workspaceid', required=True, help='Workspace ID')
3254
+ ds_log.add_argument('--datasetid', required=True, help='Dataset ID')
3255
+ ds_log.add_argument('--runid', required=True, help='Run ID')
3256
+ ds_log.add_argument('--fields', help='Comma-separated fields to return')
3257
+ ds_log.set_defaults(func=cmd_datasets_log)
3258
+
3259
+ # datasets files
3260
+ ds_files = datasets_sub.add_parser('files', help='Get dataset files')
3261
+ ds_files.add_argument('--workspaceid', required=True, help='Workspace ID')
3262
+ ds_files.add_argument('--datasetid', required=True, help='Dataset ID')
3263
+ ds_files.add_argument('--path', help='Path within dataset')
3264
+ ds_files.add_argument('--limit', type=int, default=100, help='Maximum results')
3265
+ ds_files.set_defaults(func=cmd_datasets_files)
3266
+
3267
+ # datasets jobs
3268
+ ds_jobs = datasets_sub.add_parser('jobs', help='Get dataset jobs')
3269
+ ds_jobs.add_argument('--workspaceid', help='Workspace ID')
3270
+ ds_jobs.add_argument('--orgid', help='Organization ID')
3271
+ ds_jobs.add_argument('--datasetid', help='Filter by dataset ID')
3272
+ ds_jobs.add_argument('--limit', type=int, help='Maximum results')
3273
+ ds_jobs.add_argument('--fields', help='Comma-separated fields to return')
3274
+ ds_jobs.set_defaults(func=cmd_datasets_jobs)
3275
+
3276
+ # datasets create-mixed
3277
+ ds_create_mixed = datasets_sub.add_parser('create-mixed', help='Create a mixed dataset')
3278
+ ds_create_mixed.add_argument('--workspaceid', required=True, help='Workspace ID')
3279
+ ds_create_mixed.add_argument('--name', required=True, help='Dataset name')
3280
+ ds_create_mixed.add_argument('--parameters', required=True, help='JSON parameters: {"datasetId1": {"samples": N, "classes": [...]}, ...}')
3281
+ ds_create_mixed.add_argument('--description', help='Description')
3282
+ ds_create_mixed.add_argument('--seed', type=int, help='Seed')
3283
+ ds_create_mixed.add_argument('--tags', help='Comma-separated tags')
3284
+ ds_create_mixed.set_defaults(func=cmd_datasets_create_mixed)
3285
+
3286
+ # -------------------------------------------------------------------------
3287
+ # VOLUMES
3288
+ # -------------------------------------------------------------------------
3289
+ volumes = subparsers.add_parser('volumes', help='Manage volumes')
3290
+ volumes_sub = volumes.add_subparsers(dest='action', help='Action')
3291
+
3292
+ # volumes get
3293
+ vol_get = volumes_sub.add_parser('get', help='Get volumes')
3294
+ vol_get.add_argument('--volumeid', help='Filter by volume ID')
3295
+ vol_get.add_argument('--workspaceid', help='Filter by workspace ID')
3296
+ vol_get.add_argument('--orgid', help='Filter by organization ID')
3297
+ vol_get.add_argument('--limit', type=int, help='Maximum results')
3298
+ vol_get.add_argument('--fields', help='Comma-separated fields to return')
3299
+ vol_get.set_defaults(func=cmd_volumes_get)
3300
+
3301
+ # volumes create
3302
+ vol_create = volumes_sub.add_parser('create', help='Create a volume')
3303
+ vol_create.add_argument('--name', required=True, help='Volume name')
3304
+ vol_create.add_argument('--description', help='Description')
3305
+ vol_create.add_argument('--orgid', required=True, help='Organization ID')
3306
+ vol_create.add_argument('--permission', choices=['read', 'write', 'view'], help='Permission')
3307
+ vol_create.add_argument('--tags', help='Comma-separated tags')
3308
+ vol_create.set_defaults(func=cmd_volumes_create)
3309
+
3310
+ # volumes edit
3311
+ vol_edit = volumes_sub.add_parser('edit', help='Edit a volume')
3312
+ vol_edit.add_argument('--volumeid', required=True, help='Volume ID')
3313
+ vol_edit.add_argument('--name', help='New name')
3314
+ vol_edit.add_argument('--description', help='New description')
3315
+ vol_edit.add_argument('--permission', choices=['read', 'write', 'view'], help='Permission')
3316
+ vol_edit.add_argument('--tags', help='Comma-separated tags')
3317
+ vol_edit.set_defaults(func=cmd_volumes_edit)
3318
+
3319
+ # volumes delete
3320
+ vol_delete = volumes_sub.add_parser('delete', help='Delete a volume')
3321
+ vol_delete.add_argument('--volumeid', required=True, help='Volume ID')
3322
+ vol_delete.set_defaults(func=cmd_volumes_delete)
3323
+
3324
+ # volumes mount
3325
+ vol_mount = volumes_sub.add_parser('mount', help='Mount a volume to local filesystem')
3326
+ vol_mount.add_argument('--volumeid', required=True, help='Volume ID')
3327
+ vol_mount.add_argument('--path', help='Local path to mount to (default: current directory)')
3328
+ vol_mount.add_argument('--mountexec', choices=['goofys', 's3fs', 'mount-s3'], help='Mount executable')
3329
+ vol_mount.set_defaults(func=cmd_volumes_mount)
3330
+
3331
+ # volumes unmount
3332
+ vol_unmount = volumes_sub.add_parser('unmount', help='Unmount a volume from local filesystem')
3333
+ vol_unmount.add_argument('--volumeid', required=True, help='Volume ID')
3334
+ vol_unmount.set_defaults(func=cmd_volumes_unmount)
3335
+
3336
+ # -------------------------------------------------------------------------
3337
+ # VOLUME-DATA
3338
+ # -------------------------------------------------------------------------
3339
+ volume_data = subparsers.add_parser('volume-data', help='Manage volume data')
3340
+ volume_data_sub = volume_data.add_subparsers(dest='action', help='Action')
3341
+
3342
+ # volume-data get
3343
+ vd_get = volume_data_sub.add_parser('get', help='Get volume data')
3344
+ vd_get.add_argument('--volumeid', required=True, help='Volume ID')
3345
+ vd_get.add_argument('--dir', help='Directory path')
3346
+ vd_get.add_argument('--files', help='Comma-separated file paths')
3347
+ vd_get.add_argument('--recursive', action='store_true', help='Recursive listing')
3348
+ vd_get.add_argument('--limit', type=int, help='Maximum results')
3349
+ vd_get.set_defaults(func=cmd_volume_data_get)
3350
+
3351
+ # volume-data upload
3352
+ vd_upload = volume_data_sub.add_parser('upload', help='Upload data to a volume')
3353
+ vd_upload.add_argument('--volumeid', required=True, help='Volume ID')
3354
+ vd_upload.add_argument('--localdir', help='Local directory')
3355
+ vd_upload.add_argument('--files', help='Comma-separated files to upload')
3356
+ vd_upload.add_argument('--destdir', help='Destination directory in volume')
3357
+ vd_upload.add_argument('--sync', action='store_true', help='Sync mode')
3358
+ vd_upload.set_defaults(func=cmd_volume_data_upload)
3359
+
3360
+ # volume-data download
3361
+ vd_download = volume_data_sub.add_parser('download', help='Download data from a volume')
3362
+ vd_download.add_argument('--volumeid', required=True, help='Volume ID')
3363
+ vd_download.add_argument('--outputdir', help='Output directory')
3364
+ vd_download.add_argument('--files', help='Comma-separated files to download')
3365
+ vd_download.add_argument('--recursive', action='store_true', default=True, help='Recursive download')
3366
+ vd_download.add_argument('--sync', action='store_true', help='Sync mode')
3367
+ vd_download.set_defaults(func=cmd_volume_data_download)
3368
+
3369
+ # volume-data delete
3370
+ vd_delete = volume_data_sub.add_parser('delete', help='Delete data from a volume')
3371
+ vd_delete.add_argument('--volumeid', required=True, help='Volume ID')
3372
+ vd_delete.add_argument('--files', required=True, help='Comma-separated files to delete')
3373
+ vd_delete.set_defaults(func=cmd_volume_data_delete)
3374
+
3375
+ # volume-data search
3376
+ vd_search = volume_data_sub.add_parser('search', help='Search a volume')
3377
+ vd_search.add_argument('--volumeid', required=True, help='Volume ID')
3378
+ vd_search.add_argument('--dir', help='Directory to search')
3379
+ vd_search.add_argument('--recursive', action='store_true', default=True, help='Recursive search')
3380
+ vd_search.add_argument('--keywords', help='Comma-separated keywords')
3381
+ vd_search.add_argument('--formats', help='Comma-separated file formats (e.g., png,jpg)')
3382
+ vd_search.add_argument('--types', help='Comma-separated file types (e.g., Image,3D)')
3383
+ vd_search.add_argument('--limit', type=int, help='Maximum results')
3384
+ vd_search.set_defaults(func=cmd_volume_data_search)
3385
+
3386
+ # -------------------------------------------------------------------------
3387
+ # GRAPHS
3388
+ # -------------------------------------------------------------------------
3389
+ graphs = subparsers.add_parser('graphs', help='Manage graphs')
3390
+ graphs_sub = graphs.add_subparsers(dest='action', help='Action')
3391
+
3392
+ # graphs get
3393
+ gr_get = graphs_sub.add_parser('get', help='Get graphs')
3394
+ gr_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3395
+ gr_get.add_argument('--graphid', help='Filter by graph ID')
3396
+ gr_get.add_argument('--staged', action='store_true', help='Only staged graphs')
3397
+ gr_get.add_argument('--limit', type=int, help='Maximum results')
3398
+ gr_get.add_argument('--fields', help='Comma-separated fields to return')
3399
+ gr_get.set_defaults(func=cmd_graphs_get)
3400
+
3401
+ # graphs create
3402
+ gr_create = graphs_sub.add_parser('create', help='Create a graph (editable)')
3403
+ gr_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3404
+ gr_create.add_argument('--file', required=True, help='Graph file (JSON or YAML)')
3405
+ gr_create.add_argument('--channelid', required=True, help='Channel ID')
3406
+ gr_create.add_argument('--name', required=True, help='Graph name')
3407
+ gr_create.add_argument('--description', help='Description')
3408
+ gr_create.set_defaults(func=cmd_graphs_create)
3409
+
3410
+ # graphs edit
3411
+ gr_edit = graphs_sub.add_parser('edit', help='Edit a graph')
3412
+ gr_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
3413
+ gr_edit.add_argument('--graphid', required=True, help='Graph ID')
3414
+ gr_edit.add_argument('--name', help='New name')
3415
+ gr_edit.add_argument('--description', help='New description')
3416
+ gr_edit.add_argument('--tags', help='Comma-separated tags')
3417
+ gr_edit.set_defaults(func=cmd_graphs_edit)
3418
+
3419
+ # graphs delete
3420
+ gr_delete = graphs_sub.add_parser('delete', help='Delete a graph')
3421
+ gr_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3422
+ gr_delete.add_argument('--graphid', required=True, help='Graph ID')
3423
+ gr_delete.set_defaults(func=cmd_graphs_delete)
3424
+
3425
+ # graphs download
3426
+ gr_download = graphs_sub.add_parser('download', help='Download a graph to a file')
3427
+ gr_download.add_argument('--workspaceid', required=True, help='Workspace ID')
3428
+ gr_download.add_argument('--graphid', required=True, help='Graph ID')
3429
+ gr_download.add_argument('--outputfile', help='Output file path (default: <graphid>.yaml)')
3430
+ gr_download.set_defaults(func=cmd_graphs_download)
3431
+
3432
+ # graphs stage
3433
+ gr_stage = graphs_sub.add_parser('stage', help='Stage an existing graph (creates read-only copy)')
3434
+ gr_stage.add_argument('--workspaceid', required=True, help='Workspace ID')
3435
+ gr_stage.add_argument('--graphid', required=True, help='Graph ID to stage')
3436
+ gr_stage.add_argument('--name', help='Name for staged graph (default: original-name-staged)')
3437
+ gr_stage.add_argument('--description', help='Description for staged graph')
3438
+ gr_stage.set_defaults(func=cmd_graphs_stage)
3439
+
3440
+ # -------------------------------------------------------------------------
3441
+ # GRAPH-EDITOR
3442
+ # -------------------------------------------------------------------------
3443
+ graph_editor = subparsers.add_parser('graph-editor', help='Graph editor integration')
3444
+ graph_editor_sub = graph_editor.add_subparsers(dest='action', help='Action')
3445
+
3446
+ # graph-editor open
3447
+ ge_open = graph_editor_sub.add_parser('open', help='Download graph and schema, open in graph editor')
3448
+ ge_open.add_argument('--workspaceid', required=True, help='Workspace ID')
3449
+ ge_open.add_argument('--graphid', required=True, help='Graph ID')
3450
+ ge_open.add_argument('--outputdir', help='Output directory (default: current directory)')
3451
+ ge_open.set_defaults(func=cmd_graph_editor_open)
3452
+
3453
+ # graph-editor edit-node
3454
+ ge_edit_node = graph_editor_sub.add_parser('edit-node', help='Edit a node\'s values in a local graph file')
3455
+ ge_edit_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3456
+ ge_edit_node.add_argument('--node', required=True, help='Name of the node to edit')
3457
+ ge_edit_node.add_argument('--values', required=True, help='JSON object with values to update (e.g., \'{"param": "value"}\')')
3458
+ ge_edit_node.set_defaults(func=cmd_graph_editor_edit_node)
3459
+
3460
+ # graph-editor add-node
3461
+ ge_add_node = graph_editor_sub.add_parser('add-node', help='Add a new node to a local graph file')
3462
+ ge_add_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3463
+ ge_add_node.add_argument('--nodeclass', required=True, help='Node class to instantiate')
3464
+ ge_add_node.add_argument('--name', help='Custom node name (default: nodeClass_N)')
3465
+ ge_add_node.add_argument('--values', help='JSON object with initial values')
3466
+ ge_add_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
3467
+ ge_add_node.add_argument('--color', help='Node color as hex (default: #808080)')
3468
+ ge_add_node.add_argument('--tooltip', help='Node tooltip/description')
3469
+ ge_add_node.set_defaults(func=cmd_graph_editor_add_node)
3470
+
3471
+ # graph-editor add-link
3472
+ ge_add_link = graph_editor_sub.add_parser('add-link', help='Add a link between two nodes')
3473
+ ge_add_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3474
+ ge_add_link.add_argument('--source', required=True, help='Source node name')
3475
+ ge_add_link.add_argument('--output', required=True, help='Output port name on source node')
3476
+ ge_add_link.add_argument('--target', required=True, help='Target node name')
3477
+ ge_add_link.add_argument('--input', required=True, help='Input port name on target node')
3478
+ ge_add_link.set_defaults(func=cmd_graph_editor_add_link)
3479
+
3480
+ # graph-editor remove-node
3481
+ ge_remove_node = graph_editor_sub.add_parser('remove-node', help='Remove a node and its links from a graph')
3482
+ ge_remove_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3483
+ ge_remove_node.add_argument('--node', required=True, help='Name of the node to remove')
3484
+ ge_remove_node.set_defaults(func=cmd_graph_editor_remove_node)
3485
+
3486
+ # graph-editor remove-link
3487
+ ge_remove_link = graph_editor_sub.add_parser('remove-link', help='Remove a link between two nodes')
3488
+ ge_remove_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3489
+ ge_remove_link.add_argument('--source', required=True, help='Source node name')
3490
+ ge_remove_link.add_argument('--output', required=True, help='Output port name on source node')
3491
+ ge_remove_link.add_argument('--target', required=True, help='Target node name')
3492
+ ge_remove_link.add_argument('--input', required=True, help='Input port name on target node')
3493
+ ge_remove_link.set_defaults(func=cmd_graph_editor_remove_link)
3494
+
3495
+ # graph-editor add-volume-file
3496
+ ge_add_vol_file = graph_editor_sub.add_parser('add-volume-file', help='Add a VolumeFile node referencing a file in a volume')
3497
+ ge_add_vol_file.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3498
+ ge_add_vol_file.add_argument('--volumeid', required=True, help='Volume UUID')
3499
+ ge_add_vol_file.add_argument('--path', required=True, help='File path within the volume (e.g., /models/model.blend)')
3500
+ ge_add_vol_file.add_argument('--name', help='Custom node name (default: VolumeFile_N)')
3501
+ ge_add_vol_file.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
3502
+ ge_add_vol_file.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
3503
+ ge_add_vol_file.set_defaults(func=cmd_graph_editor_add_volume_file)
3504
+
3505
+ # graph-editor add-volume-directory
3506
+ ge_add_vol_dir = graph_editor_sub.add_parser('add-volume-directory', help='Add a VolumeDirectory node referencing a directory in a volume')
3507
+ ge_add_vol_dir.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3508
+ ge_add_vol_dir.add_argument('--volumeid', required=True, help='Volume UUID')
3509
+ ge_add_vol_dir.add_argument('--path', help='Directory path within the volume (default: /)')
3510
+ ge_add_vol_dir.add_argument('--name', help='Custom node name (default: VolumeDirectory_N)')
3511
+ ge_add_vol_dir.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
3512
+ ge_add_vol_dir.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
3513
+ ge_add_vol_dir.set_defaults(func=cmd_graph_editor_add_volume_directory)
3514
+
3515
+ # graph-editor list-nodes
3516
+ ge_list_nodes = graph_editor_sub.add_parser('list-nodes', help='List all nodes in a local graph file')
3517
+ ge_list_nodes.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3518
+ ge_list_nodes.add_argument('--verbose', '-v', action='store_true', help='Include link details')
3519
+ ge_list_nodes.set_defaults(func=cmd_graph_editor_list_nodes)
3520
+
3521
+ # graph-editor get-node
3522
+ ge_get_node = graph_editor_sub.add_parser('get-node', help='Get detailed info about a specific node')
3523
+ ge_get_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3524
+ ge_get_node.add_argument('--node', required=True, help='Name of the node to inspect')
3525
+ ge_get_node.set_defaults(func=cmd_graph_editor_get_node)
3526
+
3527
+ # graph-editor move-node
3528
+ ge_move_node = graph_editor_sub.add_parser('move-node', help='Move a node to a new location')
3529
+ ge_move_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3530
+ ge_move_node.add_argument('--node', required=True, help='Name of the node to move')
3531
+ ge_move_node.add_argument('--location', required=True, help='New position as {"x": N, "y": N} or [x, y]')
3532
+ ge_move_node.set_defaults(func=cmd_graph_editor_move_node)
3533
+
3534
+ # graph-editor clone-node
3535
+ ge_clone_node = graph_editor_sub.add_parser('clone-node', help='Clone an existing node')
3536
+ ge_clone_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
3537
+ ge_clone_node.add_argument('--source', required=True, help='Name of the node to clone')
3538
+ ge_clone_node.add_argument('--name', help='Name for the cloned node (default: nodeClass_N)')
3539
+ ge_clone_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y] (default: offset from source)')
3540
+ ge_clone_node.set_defaults(func=cmd_graph_editor_clone_node)
3541
+
3542
+ # graph-editor status
3543
+ ge_status = graph_editor_sub.add_parser('status', help='Get graph editor status and validation errors')
3544
+ ge_status.add_argument('--file', help='Filter to a specific graph file path')
3545
+ ge_status.set_defaults(func=cmd_graph_editor_status)
3546
+
3547
+ # -------------------------------------------------------------------------
3548
+ # CHANNELS
3549
+ # -------------------------------------------------------------------------
3550
+ channels = subparsers.add_parser('channels', help='Manage channels')
3551
+ channels_sub = channels.add_subparsers(dest='action', help='Action')
3552
+
3553
+ # channels get
3554
+ ch_get = channels_sub.add_parser('get', help='Get channels')
3555
+ ch_get.add_argument('--workspaceid', help='Filter by workspace ID')
3556
+ ch_get.add_argument('--orgid', help='Filter by organization ID')
3557
+ ch_get.add_argument('--channelid', help='Filter by channel ID')
3558
+ ch_get.add_argument('--limit', type=int, help='Maximum results')
3559
+ ch_get.add_argument('--fields', help='Comma-separated fields to return')
3560
+ ch_get.set_defaults(func=cmd_channels_get)
3561
+
3562
+ # channels schema
3563
+ ch_schema = channels_sub.add_parser('schema', help='Get channel schema')
3564
+ ch_schema.add_argument('--channelid', required=True, help='Channel ID')
3565
+ ch_schema.add_argument('--fields', help='Comma-separated fields to return')
3566
+ ch_schema.add_argument('--category', help='Filter by category (e.g., Objects, Backgrounds, Sensors)')
3567
+ ch_schema.add_argument('--subcategory', help='Filter by subcategory (e.g., Aircraft, Vehicles, Ships)')
3568
+ ch_schema.add_argument('--search', help='Case-insensitive search across name, category, subcategory, tooltip, and descriptions')
3569
+ ch_schema.add_argument('--names-only', action='store_true', help='Output only node names (sorted)')
3570
+ ch_schema.add_argument('--list-categories', action='store_true', help='List all available categories and subcategories')
3571
+ ch_schema.set_defaults(func=cmd_channels_schema)
3572
+
3573
+ # channels nodes
3574
+ ch_nodes = channels_sub.add_parser('nodes', help='Get node documentation')
3575
+ ch_nodes.add_argument('--channelid', required=True, help='Channel ID')
3576
+ ch_nodes.add_argument('--node', required=True, help='Node name')
3577
+ ch_nodes.add_argument('--fields', help='Comma-separated fields to return')
3578
+ ch_nodes.set_defaults(func=cmd_channels_nodes)
3579
+
3580
+ # channels docs
3581
+ ch_docs = channels_sub.add_parser('docs', help='Get channel documentation')
3582
+ ch_docs.add_argument('--channelid', required=True, help='Channel ID')
3583
+ ch_docs.set_defaults(func=cmd_channels_docs)
3584
+
3585
+ # channels get-default-graph
3586
+ ch_get_default = channels_sub.add_parser('get-default-graph', help='Get the default graph for a channel')
3587
+ ch_get_default.add_argument('--channelid', required=True, help='Channel ID')
3588
+ ch_get_default.add_argument('--outputfile', help='Output file path (default: default.yaml)')
3589
+ ch_get_default.set_defaults(func=cmd_channels_get_default_graph)
3590
+
3591
+ # channels set-default-graph
3592
+ ch_set_default = channels_sub.add_parser('set-default-graph', help='Set the default graph for a channel')
3593
+ ch_set_default.add_argument('--graphid', required=True, help='Graph ID')
3594
+ ch_set_default.add_argument('--workspaceid', help='Workspace ID')
3595
+ ch_set_default.set_defaults(func=cmd_channels_set_default_graph)
3596
+
3597
+ # -------------------------------------------------------------------------
3598
+ # SERVICES
3599
+ # -------------------------------------------------------------------------
3600
+ services = subparsers.add_parser('services', help='Manage services')
3601
+ services_sub = services.add_subparsers(dest='action', help='Action')
3602
+
3603
+ # services get
3604
+ svc_get = services_sub.add_parser('get', help='Get services')
3605
+ svc_get.add_argument('--workspaceid', help='Filter by workspace ID')
3606
+ svc_get.add_argument('--orgid', help='Filter by organization ID')
3607
+ svc_get.add_argument('--serviceid', help='Filter by service ID')
3608
+ svc_get.add_argument('--limit', type=int, help='Maximum results')
3609
+ svc_get.add_argument('--fields', help='Comma-separated fields to return')
3610
+ svc_get.set_defaults(func=cmd_services_get)
3611
+
3612
+ # services create
3613
+ svc_create = services_sub.add_parser('create', help='Create a service')
3614
+ svc_create.add_argument('--name', required=True, help='Service name')
3615
+ svc_create.add_argument('--description', help='Description')
3616
+ svc_create.add_argument('--orgid', required=True, help='Organization ID')
3617
+ svc_create.add_argument('--type', default='custom', help='Service type ID')
3618
+ svc_create.add_argument('--volumes', help='Comma-separated volume IDs')
3619
+ svc_create.add_argument('--instance', help='AWS instance type')
3620
+ svc_create.add_argument('--tags', help='Comma-separated tags')
3621
+ svc_create.set_defaults(func=cmd_services_create)
3622
+
3623
+ # services edit
3624
+ svc_edit = services_sub.add_parser('edit', help='Edit a service')
3625
+ svc_edit.add_argument('--serviceid', required=True, help='Service ID')
3626
+ svc_edit.add_argument('--name', help='New name')
3627
+ svc_edit.add_argument('--description', help='New description')
3628
+ svc_edit.add_argument('--volumes', help='Comma-separated volume IDs')
3629
+ svc_edit.add_argument('--instance', help='AWS instance type')
3630
+ svc_edit.add_argument('--tags', help='Comma-separated tags')
3631
+ svc_edit.set_defaults(func=cmd_services_edit)
3632
+
3633
+ # services delete
3634
+ svc_delete = services_sub.add_parser('delete', help='Delete a service')
3635
+ svc_delete.add_argument('--serviceid', required=True, help='Service ID')
3636
+ svc_delete.set_defaults(func=cmd_services_delete)
3637
+
3638
+ # -------------------------------------------------------------------------
3639
+ # SERVICE-JOBS
3640
+ # -------------------------------------------------------------------------
3641
+ service_jobs = subparsers.add_parser('service-jobs', help='Manage service jobs')
3642
+ service_jobs_sub = service_jobs.add_subparsers(dest='action', help='Action')
3643
+
3644
+ # service-jobs get
3645
+ svc_jobs_get = service_jobs_sub.add_parser('get', help='Get service jobs')
3646
+ svc_jobs_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3647
+ svc_jobs_get.add_argument('--jobid', help='Filter by job ID')
3648
+ svc_jobs_get.add_argument('--limit', type=int, help='Maximum results')
3649
+ svc_jobs_get.add_argument('--fields', help='Comma-separated fields to return')
3650
+ svc_jobs_get.set_defaults(func=cmd_services_jobs)
3651
+
3652
+ # service-jobs delete
3653
+ svc_jobs_delete = service_jobs_sub.add_parser('delete', help='Delete a service job')
3654
+ svc_jobs_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3655
+ svc_jobs_delete.add_argument('--jobid', required=True, help='Job ID')
3656
+ svc_jobs_delete.set_defaults(func=cmd_services_delete_job)
3657
+
3658
+ # -------------------------------------------------------------------------
3659
+ # API-KEYS
3660
+ # -------------------------------------------------------------------------
3661
+ api_keys = subparsers.add_parser('api-keys', help='Manage API keys')
3662
+ api_keys_sub = api_keys.add_subparsers(dest='action', help='Action')
3663
+
3664
+ # api-keys get
3665
+ ak_get = api_keys_sub.add_parser('get', help='Get API keys')
3666
+ ak_get.set_defaults(func=cmd_api_keys_get)
3667
+
3668
+ # api-keys create
3669
+ ak_create = api_keys_sub.add_parser('create', help='Create an API key')
3670
+ ak_create.add_argument('--name', required=True, help='API key name')
3671
+ ak_create.add_argument('--scope', required=True, choices=['user', 'organization', 'workspace'], help='Scope')
3672
+ ak_create.add_argument('--orgid', help='Organization ID (for org scope)')
3673
+ ak_create.add_argument('--workspaceid', help='Workspace ID (for workspace scope)')
3674
+ ak_create.add_argument('--expires', help='Expiration date (ISO format)')
3675
+ ak_create.set_defaults(func=cmd_api_keys_create)
3676
+
3677
+ # api-keys delete
3678
+ ak_delete = api_keys_sub.add_parser('delete', help='Delete an API key')
3679
+ ak_delete.add_argument('--apikeyid', required=True, help='API key ID')
3680
+ ak_delete.set_defaults(func=cmd_api_keys_delete)
3681
+
3682
+ # -------------------------------------------------------------------------
3683
+ # ANALYTICS
3684
+ # -------------------------------------------------------------------------
3685
+ analytics = subparsers.add_parser('analytics', help='Manage analytics')
3686
+ analytics_sub = analytics.add_subparsers(dest='action', help='Action')
3687
+
3688
+ # analytics get
3689
+ an_get = analytics_sub.add_parser('get', help='Get analytics')
3690
+ an_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3691
+ an_get.add_argument('--datasetid', help='Dataset ID')
3692
+ an_get.add_argument('--analyticsid', help='Analytics ID')
3693
+ an_get.set_defaults(func=cmd_analytics_get)
3694
+
3695
+ # analytics create
3696
+ an_create = analytics_sub.add_parser('create', help='Create analytics')
3697
+ an_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3698
+ an_create.add_argument('--datasetid', required=True, help='Dataset ID')
3699
+ an_create.add_argument('--type', required=True, help='Analytics type')
3700
+ an_create.set_defaults(func=cmd_analytics_create)
3701
+
3702
+ # analytics delete
3703
+ an_delete = analytics_sub.add_parser('delete', help='Delete analytics')
3704
+ an_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3705
+ an_delete.add_argument('--analyticsid', required=True, help='Analytics ID')
3706
+ an_delete.set_defaults(func=cmd_analytics_delete)
3707
+
3708
+ # analytics types
3709
+ an_types = analytics_sub.add_parser('types', help='Get analytics types')
3710
+ an_types.set_defaults(func=cmd_analytics_types)
3711
+
3712
+ # analytics download
3713
+ an_download = analytics_sub.add_parser('download', help='Download analytics results')
3714
+ an_download.add_argument('--workspaceid', required=True, help='Workspace ID')
3715
+ an_download.add_argument('--analyticsid', required=True, help='Analytics ID')
3716
+ an_download.set_defaults(func=cmd_analytics_download)
3717
+
3718
+ # analytics edit
3719
+ an_edit = analytics_sub.add_parser('edit', help='Edit analytics tags')
3720
+ an_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
3721
+ an_edit.add_argument('--analyticsid', required=True, help='Analytics ID')
3722
+ an_edit.add_argument('--tags', required=True, help='Comma-separated tags')
3723
+ an_edit.set_defaults(func=cmd_analytics_edit)
3724
+
3725
+ # -------------------------------------------------------------------------
3726
+ # ANNOTATIONS
3727
+ # -------------------------------------------------------------------------
3728
+ annotations = subparsers.add_parser('annotations', help='Manage annotations')
3729
+ annotations_sub = annotations.add_subparsers(dest='action', help='Action')
3730
+
3731
+ # annotations get
3732
+ ann_get = annotations_sub.add_parser('get', help='Get annotations')
3733
+ ann_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3734
+ ann_get.add_argument('--datasetid', help='Dataset ID')
3735
+ ann_get.add_argument('--annotationid', help='Annotation ID')
3736
+ ann_get.set_defaults(func=cmd_annotations_get)
3737
+
3738
+ # annotations create
3739
+ ann_create = annotations_sub.add_parser('create', help='Create an annotation')
3740
+ ann_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3741
+ ann_create.add_argument('--datasetid', required=True, help='Dataset ID')
3742
+ ann_create.add_argument('--format', required=True, help='Annotation format')
3743
+ ann_create.add_argument('--mapid', help='Annotation map ID')
3744
+ ann_create.set_defaults(func=cmd_annotations_create)
3745
+
3746
+ # annotations download
3747
+ ann_download = annotations_sub.add_parser('download', help='Download an annotation')
3748
+ ann_download.add_argument('--workspaceid', required=True, help='Workspace ID')
3749
+ ann_download.add_argument('--annotationid', required=True, help='Annotation ID')
3750
+ ann_download.set_defaults(func=cmd_annotations_download)
3751
+
3752
+ # annotations formats
3753
+ ann_formats = annotations_sub.add_parser('formats', help='Get annotation formats')
3754
+ ann_formats.set_defaults(func=cmd_annotations_formats)
3755
+
3756
+ # annotations delete
3757
+ ann_delete = annotations_sub.add_parser('delete', help='Delete an annotation')
3758
+ ann_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3759
+ ann_delete.add_argument('--annotationid', required=True, help='Annotation ID')
3760
+ ann_delete.set_defaults(func=cmd_annotations_delete)
3761
+
3762
+ # annotations edit
3763
+ ann_edit = annotations_sub.add_parser('edit', help='Edit annotation tags')
3764
+ ann_edit.add_argument('--workspaceid', required=True, help='Workspace ID')
3765
+ ann_edit.add_argument('--annotationid', required=True, help='Annotation ID')
3766
+ ann_edit.add_argument('--tags', required=True, help='Comma-separated tags')
3767
+ ann_edit.set_defaults(func=cmd_annotations_edit)
3768
+
3769
+ # annotations view
3770
+ ann_view = annotations_sub.add_parser('view', help='Generate image with annotations overlayed')
3771
+ ann_view.add_argument('--imagepath', required=True, help='Path to the image file in the dataset directory')
3772
+ ann_view.add_argument('--outdir', required=True, help='Output directory for the annotated image')
3773
+ ann_view.add_argument('--drawtype', default='box_2d', help='Annotation type(s) to draw: box_2d, box_3d, segmentation (comma-separated for multiple)')
3774
+ ann_view.add_argument('--objectids', help='Comma-separated list of object IDs to annotate (filter)')
3775
+ ann_view.add_argument('--objecttypes', help='Comma-separated list of object types to annotate (filter)')
3776
+ ann_view.add_argument('--thickness', type=int, default=1, help='Line thickness for annotations (default: 1)')
3777
+ ann_view.add_argument('--colors', help='JSON dict of object type to RGB color, e.g. \'{"Car": [255, 0, 0]}\'')
3778
+ ann_view.set_defaults(func=cmd_annotations_view)
3779
+
3780
+ # -------------------------------------------------------------------------
3781
+ # ANNOTATION MAPS
3782
+ # -------------------------------------------------------------------------
3783
+ annotation_maps = subparsers.add_parser('annotation-maps', help='Manage annotation maps')
3784
+ annotation_maps_sub = annotation_maps.add_subparsers(dest='action', help='Action')
3785
+
3786
+ # annotation-maps get
3787
+ ann_maps_get = annotation_maps_sub.add_parser('get', help='Get annotation maps')
3788
+ ann_maps_get.add_argument('--orgid', required=True, help='Organization ID')
3789
+ ann_maps_get.set_defaults(func=cmd_annotation_maps_get)
3790
+
3791
+ # annotation-maps upload
3792
+ ann_maps_upload = annotation_maps_sub.add_parser('upload', help='Upload an annotation map')
3793
+ ann_maps_upload.add_argument('--orgid', required=True, help='Organization ID')
3794
+ ann_maps_upload.add_argument('--mapfile', required=True, help='Path to map file')
3795
+ ann_maps_upload.add_argument('--name', required=True, help='Map name')
3796
+ ann_maps_upload.add_argument('--description', help='Description')
3797
+ ann_maps_upload.add_argument('--tags', help='Comma-separated tags')
3798
+ ann_maps_upload.set_defaults(func=cmd_annotation_maps_upload)
3799
+
3800
+ # annotation-maps download
3801
+ ann_maps_download = annotation_maps_sub.add_parser('download', help='Download an annotation map')
3802
+ ann_maps_download.add_argument('--mapid', required=True, help='Map ID')
3803
+ ann_maps_download.add_argument('--outputdir', help='Output directory')
3804
+ ann_maps_download.set_defaults(func=cmd_annotation_maps_download)
3805
+
3806
+ # annotation-maps delete
3807
+ ann_maps_delete = annotation_maps_sub.add_parser('delete', help='Delete an annotation map')
3808
+ ann_maps_delete.add_argument('--mapid', required=True, help='Map ID')
3809
+ ann_maps_delete.set_defaults(func=cmd_annotation_maps_delete)
3810
+
3811
+ # annotation-maps edit
3812
+ ann_maps_edit = annotation_maps_sub.add_parser('edit', help='Edit an annotation map')
3813
+ ann_maps_edit.add_argument('--mapid', required=True, help='Map ID')
3814
+ ann_maps_edit.add_argument('--name', help='New name')
3815
+ ann_maps_edit.add_argument('--description', help='New description')
3816
+ ann_maps_edit.add_argument('--tags', help='Comma-separated tags')
3817
+ ann_maps_edit.set_defaults(func=cmd_annotation_maps_edit)
3818
+
3819
+ # -------------------------------------------------------------------------
3820
+ # GAN MODELS
3821
+ # -------------------------------------------------------------------------
3822
+ gan_models = subparsers.add_parser('gan-models', help='Manage GAN models')
3823
+ gan_models_sub = gan_models.add_subparsers(dest='action', help='Action')
3824
+
3825
+ # gan-models get
3826
+ gan_models_get = gan_models_sub.add_parser('get', help='Get GAN models')
3827
+ gan_models_get.add_argument('--orgid', help='Organization ID')
3828
+ gan_models_get.add_argument('--workspaceid', help='Workspace ID')
3829
+ gan_models_get.add_argument('--modelid', help='Model ID')
3830
+ gan_models_get.add_argument('--limit', type=int, help='Maximum results')
3831
+ gan_models_get.add_argument('--fields', help='Comma-separated fields')
3832
+ gan_models_get.set_defaults(func=cmd_gan_models_get)
3833
+
3834
+ # gan-models upload
3835
+ gan_models_upload = gan_models_sub.add_parser('upload', help='Upload a GAN model')
3836
+ gan_models_upload.add_argument('--orgid', required=True, help='Organization ID')
3837
+ gan_models_upload.add_argument('--modelfile', required=True, help='Path to model file')
3838
+ gan_models_upload.add_argument('--name', required=True, help='Model name')
3839
+ gan_models_upload.add_argument('--description', help='Description')
3840
+ gan_models_upload.add_argument('--flags', help='Model flags')
3841
+ gan_models_upload.add_argument('--tags', help='Comma-separated tags')
3842
+ gan_models_upload.set_defaults(func=cmd_gan_models_upload)
3843
+
3844
+ # gan-models download
3845
+ gan_models_download = gan_models_sub.add_parser('download', help='Download a GAN model')
3846
+ gan_models_download.add_argument('--modelid', required=True, help='Model ID')
3847
+ gan_models_download.add_argument('--outputdir', help='Output directory')
3848
+ gan_models_download.set_defaults(func=cmd_gan_models_download)
3849
+
3850
+ # -------------------------------------------------------------------------
3851
+ # GAN DATASETS
3852
+ # -------------------------------------------------------------------------
3853
+ gan_datasets = subparsers.add_parser('gan-datasets', help='Manage GAN datasets')
3854
+ gan_datasets_sub = gan_datasets.add_subparsers(dest='action', help='Action')
3855
+
3856
+ # gan-datasets get
3857
+ gan_ds_get = gan_datasets_sub.add_parser('get', help='Get GAN datasets')
3858
+ gan_ds_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3859
+ gan_ds_get.add_argument('--datasetid', help='Dataset ID')
3860
+ gan_ds_get.add_argument('--gandatasetid', help='GAN dataset ID')
3861
+ gan_ds_get.add_argument('--limit', type=int, help='Maximum results')
3862
+ gan_ds_get.add_argument('--fields', help='Comma-separated fields')
3863
+ gan_ds_get.set_defaults(func=cmd_gan_datasets_get)
3864
+
3865
+ # gan-datasets create
3866
+ gan_ds_create = gan_datasets_sub.add_parser('create', help='Create a GAN dataset')
3867
+ gan_ds_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3868
+ gan_ds_create.add_argument('--datasetid', required=True, help='Input dataset ID')
3869
+ gan_ds_create.add_argument('--modelid', required=True, help='GAN model ID')
3870
+ gan_ds_create.add_argument('--name', required=True, help='Dataset name')
3871
+ gan_ds_create.add_argument('--description', help='Description')
3872
+ gan_ds_create.add_argument('--tags', help='Comma-separated tags')
3873
+ gan_ds_create.set_defaults(func=cmd_gan_datasets_create)
3874
+
3875
+ # gan-datasets delete
3876
+ gan_ds_delete = gan_datasets_sub.add_parser('delete', help='Delete a GAN dataset')
3877
+ gan_ds_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3878
+ gan_ds_delete.add_argument('--datasetid', required=True, help='Dataset ID')
3879
+ gan_ds_delete.set_defaults(func=cmd_gan_datasets_delete)
3880
+
3881
+ # -------------------------------------------------------------------------
3882
+ # UMAP
3883
+ # -------------------------------------------------------------------------
3884
+ umap = subparsers.add_parser('umap', help='Manage UMAP visualizations')
3885
+ umap_sub = umap.add_subparsers(dest='action', help='Action')
3886
+
3887
+ # umap get
3888
+ umap_get = umap_sub.add_parser('get', help='Get UMAPs')
3889
+ umap_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3890
+ umap_get.add_argument('--umapid', help='UMAP ID')
3891
+ umap_get.add_argument('--datasetid', help='Dataset ID')
3892
+ umap_get.add_argument('--limit', type=int, help='Maximum results')
3893
+ umap_get.add_argument('--fields', help='Comma-separated fields')
3894
+ umap_get.set_defaults(func=cmd_umap_get)
3895
+
3896
+ # umap create
3897
+ umap_create = umap_sub.add_parser('create', help='Create a UMAP')
3898
+ umap_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3899
+ umap_create.add_argument('--name', required=True, help='UMAP name')
3900
+ umap_create.add_argument('--datasetids', required=True, help='Comma-separated dataset IDs')
3901
+ umap_create.add_argument('--samples', required=True, help='Comma-separated sample counts')
3902
+ umap_create.add_argument('--description', help='Description')
3903
+ umap_create.add_argument('--seed', type=int, help='Seed')
3904
+ umap_create.add_argument('--tags', help='Comma-separated tags')
3905
+ umap_create.set_defaults(func=cmd_umap_create)
3906
+
3907
+ # umap delete
3908
+ umap_delete = umap_sub.add_parser('delete', help='Delete a UMAP')
3909
+ umap_delete.add_argument('--workspaceid', required=True, help='Workspace ID')
3910
+ umap_delete.add_argument('--umapid', required=True, help='UMAP ID')
3911
+ umap_delete.set_defaults(func=cmd_umap_delete)
3912
+
3913
+ # -------------------------------------------------------------------------
3914
+ # SERVERS
3915
+ # -------------------------------------------------------------------------
3916
+ servers = subparsers.add_parser('servers', help='Manage development servers')
3917
+ servers_sub = servers.add_subparsers(dest='action', help='Action')
3918
+
3919
+ # servers get
3920
+ srv_get = servers_sub.add_parser('get', help='Get servers')
3921
+ srv_get.add_argument('--orgid', help='Organization ID')
3922
+ srv_get.add_argument('--workspaceid', help='Workspace ID')
3923
+ srv_get.add_argument('--serverid', help='Server ID')
3924
+ srv_get.add_argument('--limit', type=int, help='Maximum results')
3925
+ srv_get.add_argument('--fields', help='Comma-separated fields')
3926
+ srv_get.set_defaults(func=cmd_servers_get)
3927
+
3928
+ # servers create
3929
+ srv_create = servers_sub.add_parser('create', help='Create a server')
3930
+ srv_create.add_argument('--orgid', help='Organization ID')
3931
+ srv_create.add_argument('--workspaceid', help='Workspace ID')
3932
+ srv_create.add_argument('--instance', help='Instance type')
3933
+ srv_create.add_argument('--name', help='Server name')
3934
+ srv_create.set_defaults(func=cmd_servers_create)
3935
+
3936
+ # servers delete
3937
+ srv_delete = servers_sub.add_parser('delete', help='Delete a server')
3938
+ srv_delete.add_argument('--serverid', required=True, help='Server ID')
3939
+ srv_delete.set_defaults(func=cmd_servers_delete)
3940
+
3941
+ # servers start
3942
+ srv_start = servers_sub.add_parser('start', help='Start a server')
3943
+ srv_start.add_argument('--serverid', required=True, help='Server ID')
3944
+ srv_start.set_defaults(func=cmd_servers_start)
3945
+
3946
+ # servers stop
3947
+ srv_stop = servers_sub.add_parser('stop', help='Stop a server')
3948
+ srv_stop.add_argument('--serverid', required=True, help='Server ID')
3949
+ srv_stop.set_defaults(func=cmd_servers_stop)
3950
+
3951
+ # -------------------------------------------------------------------------
3952
+ # ML MODELS
3953
+ # -------------------------------------------------------------------------
3954
+ ml_models = subparsers.add_parser('ml-models', help='Manage ML models')
3955
+ ml_models_sub = ml_models.add_subparsers(dest='action', help='Action')
3956
+
3957
+ # ml-models architectures
3958
+ ml_arch = ml_models_sub.add_parser('architectures', help='Get ML architectures')
3959
+ ml_arch.add_argument('--fields', help='Comma-separated fields')
3960
+ ml_arch.set_defaults(func=cmd_ml_architectures)
3961
+
3962
+ # ml-models get
3963
+ ml_models_get = ml_models_sub.add_parser('get', help='Get ML models')
3964
+ ml_models_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3965
+ ml_models_get.add_argument('--datasetid', help='Dataset ID')
3966
+ ml_models_get.add_argument('--modelid', help='Model ID')
3967
+ ml_models_get.add_argument('--limit', type=int, help='Maximum results')
3968
+ ml_models_get.add_argument('--fields', help='Comma-separated fields')
3969
+ ml_models_get.set_defaults(func=cmd_ml_models_get)
3970
+
3971
+ # ml-models create
3972
+ ml_models_create = ml_models_sub.add_parser('create', help='Create ML model training job')
3973
+ ml_models_create.add_argument('--workspaceid', required=True, help='Workspace ID')
3974
+ ml_models_create.add_argument('--datasetid', required=True, help='Dataset ID')
3975
+ ml_models_create.add_argument('--architectureid', required=True, help='Architecture ID')
3976
+ ml_models_create.add_argument('--name', required=True, help='Model name')
3977
+ ml_models_create.add_argument('--parameters', required=True, help='JSON parameters')
3978
+ ml_models_create.add_argument('--description', help='Description')
3979
+ ml_models_create.add_argument('--tags', help='Comma-separated tags')
3980
+ ml_models_create.set_defaults(func=cmd_ml_models_create)
3981
+
3982
+ # ml-models download
3983
+ ml_models_download = ml_models_sub.add_parser('download', help='Download ML model')
3984
+ ml_models_download.add_argument('--workspaceid', required=True, help='Workspace ID')
3985
+ ml_models_download.add_argument('--modelid', required=True, help='Model ID')
3986
+ ml_models_download.add_argument('--checkpoint', help='Checkpoint to download')
3987
+ ml_models_download.add_argument('--outputdir', help='Output directory')
3988
+ ml_models_download.set_defaults(func=cmd_ml_models_download)
3989
+
3990
+ # -------------------------------------------------------------------------
3991
+ # ML INFERENCES
3992
+ # -------------------------------------------------------------------------
3993
+ ml_inferences = subparsers.add_parser('ml-inferences', help='Manage ML inferences')
3994
+ ml_inferences_sub = ml_inferences.add_subparsers(dest='action', help='Action')
3995
+
3996
+ # ml-inferences get
3997
+ ml_inf_get = ml_inferences_sub.add_parser('get', help='Get ML inferences')
3998
+ ml_inf_get.add_argument('--workspaceid', required=True, help='Workspace ID')
3999
+ ml_inf_get.add_argument('--inferenceid', help='Inference ID')
4000
+ ml_inf_get.add_argument('--datasetid', help='Dataset ID')
4001
+ ml_inf_get.add_argument('--modelid', help='Model ID')
4002
+ ml_inf_get.add_argument('--limit', type=int, help='Maximum results')
4003
+ ml_inf_get.add_argument('--fields', help='Comma-separated fields')
4004
+ ml_inf_get.set_defaults(func=cmd_ml_inferences_get)
4005
+
4006
+ # ml-inferences create
4007
+ ml_inf_create = ml_inferences_sub.add_parser('create', help='Create ML inference job')
4008
+ ml_inf_create.add_argument('--workspaceid', required=True, help='Workspace ID')
4009
+ ml_inf_create.add_argument('--datasetid', required=True, help='Dataset ID')
4010
+ ml_inf_create.add_argument('--modelid', required=True, help='Model ID')
4011
+ ml_inf_create.add_argument('--mapid', help='Map ID')
4012
+ ml_inf_create.add_argument('--tags', help='Comma-separated tags')
4013
+ ml_inf_create.set_defaults(func=cmd_ml_inferences_create)
4014
+
4015
+ # -------------------------------------------------------------------------
4016
+ # INPAINT
4017
+ # -------------------------------------------------------------------------
4018
+ inpaint = subparsers.add_parser('inpaint', help='Manage inpaint jobs')
4019
+ inpaint_sub = inpaint.add_subparsers(dest='action', help='Action')
4020
+
4021
+ # inpaint get
4022
+ inp_get = inpaint_sub.add_parser('get', help='Get inpaint jobs')
4023
+ inp_get.add_argument('--volumeid', required=True, help='Volume ID')
4024
+ inp_get.add_argument('--inpaintid', help='Inpaint ID')
4025
+ inp_get.add_argument('--limit', type=int, help='Maximum results')
4026
+ inp_get.add_argument('--fields', help='Comma-separated fields')
4027
+ inp_get.set_defaults(func=cmd_inpaint_get)
4028
+
4029
+ # inpaint log
4030
+ inp_log = inpaint_sub.add_parser('log', help='Get inpaint job log')
4031
+ inp_log.add_argument('--volumeid', required=True, help='Volume ID')
4032
+ inp_log.add_argument('--inpaintid', required=True, help='Inpaint ID')
4033
+ inp_log.add_argument('--fields', help='Comma-separated fields')
4034
+ inp_log.set_defaults(func=cmd_inpaint_log)
4035
+
4036
+ # inpaint create
4037
+ inp_create = inpaint_sub.add_parser('create', help='Create an inpaint job')
4038
+ inp_create.add_argument('--volumeid', required=True, help='Volume ID')
4039
+ inp_create.add_argument('--location', required=True, help='Input location')
4040
+ inp_create.add_argument('--files', help='Comma-separated files to inpaint')
4041
+ inp_create.add_argument('--destination', help='Output destination')
4042
+ inp_create.add_argument('--dilation', type=int, default=5, help='Dilation (default: 5)')
4043
+ inp_create.add_argument('--inputtype', default='MASK', choices=['MASK', 'GEOJSON', 'COCO', 'KITTI', 'PASCAL', 'YOLO'], help='Input type')
4044
+ inp_create.add_argument('--outputtype', default='PNG', choices=['SATRGB_BACKGROUND', 'PNG', 'JPG'], help='Output type')
4045
+ inp_create.set_defaults(func=cmd_inpaint_create)
4046
+
4047
+ # inpaint delete
4048
+ inp_delete = inpaint_sub.add_parser('delete', help='Delete an inpaint job')
4049
+ inp_delete.add_argument('--volumeid', required=True, help='Volume ID')
4050
+ inp_delete.add_argument('--inpaintid', required=True, help='Inpaint ID')
4051
+ inp_delete.set_defaults(func=cmd_inpaint_delete)
4052
+
4053
+ # -------------------------------------------------------------------------
4054
+ # PREVIEW
4055
+ # -------------------------------------------------------------------------
4056
+ preview = subparsers.add_parser('preview', help='Manage preview jobs')
4057
+ preview_sub = preview.add_subparsers(dest='action', help='Action')
4058
+
4059
+ # preview get
4060
+ prv_get = preview_sub.add_parser('get', help='Get a preview job')
4061
+ prv_get.add_argument('--workspaceid', required=True, help='Workspace ID')
4062
+ prv_get.add_argument('--previewid', required=True, help='Preview ID')
4063
+ prv_get.add_argument('--fields', help='Comma-separated fields')
4064
+ prv_get.set_defaults(func=cmd_preview_get)
4065
+
4066
+ # preview create
4067
+ prv_create = preview_sub.add_parser('create', help='Create a preview job')
4068
+ prv_create.add_argument('--workspaceid', required=True, help='Workspace ID')
4069
+ prv_create.add_argument('--graphid', required=True, help='Graph ID')
4070
+ prv_create.set_defaults(func=cmd_preview_create)
4071
+
4072
+ # -------------------------------------------------------------------------
4073
+ # AGENTS
4074
+ # -------------------------------------------------------------------------
4075
+ agents = subparsers.add_parser('agents', help='Agent helper functions')
4076
+ agents_sub = agents.add_subparsers(dest='action', help='Action')
4077
+
4078
+ # agents types
4079
+ ag_types = agents_sub.add_parser('types', help='Get available data types')
4080
+ ag_types.set_defaults(func=cmd_agents_types)
4081
+
4082
+ # agents fields
4083
+ ag_fields = agents_sub.add_parser('fields', help='Get fields for a data type')
4084
+ ag_fields.add_argument('--type', required=True, help='Data type')
4085
+ ag_fields.set_defaults(func=cmd_agents_fields)
4086
+
4087
+ # -------------------------------------------------------------------------
4088
+ # RULES
4089
+ # -------------------------------------------------------------------------
4090
+ rules = subparsers.add_parser('rules', help='Manage platform rules')
4091
+ rules_sub = rules.add_subparsers(dest='action', help='Action')
4092
+
4093
+ # rules get-organization
4094
+ rules_get_org = rules_sub.add_parser('get-organization', help='Get organization rules')
4095
+ rules_get_org.add_argument('--orgid', help='Organization ID')
4096
+ rules_get_org.set_defaults(func=cmd_rules_organization)
4097
+
4098
+ # rules edit-organization
4099
+ rules_edit_org = rules_sub.add_parser('edit-organization', help='Edit organization rules')
4100
+ rules_edit_org.add_argument('--orgid', help='Organization ID')
4101
+ rules_edit_org.add_argument('--rules', required=True, help='Rules string')
4102
+ rules_edit_org.set_defaults(func=cmd_rules_edit_organization)
4103
+
4104
+ # rules get-workspace
4105
+ rules_get_ws = rules_sub.add_parser('get-workspace', help='Get workspace rules')
4106
+ rules_get_ws.add_argument('--workspaceid', help='Workspace ID')
4107
+ rules_get_ws.set_defaults(func=cmd_rules_workspace)
4108
+
4109
+ # rules edit-workspace
4110
+ rules_edit_ws = rules_sub.add_parser('edit-workspace', help='Edit workspace rules')
4111
+ rules_edit_ws.add_argument('--workspaceid', help='Workspace ID')
4112
+ rules_edit_ws.add_argument('--rules', required=True, help='Rules string')
4113
+ rules_edit_ws.set_defaults(func=cmd_rules_edit_workspace)
4114
+
4115
+ # rules get-service
4116
+ rules_get_svc = rules_sub.add_parser('get-service', help='Get service rules')
4117
+ rules_get_svc.add_argument('--serviceid', required=True, help='Service ID')
4118
+ rules_get_svc.set_defaults(func=cmd_rules_service)
4119
+
4120
+ # rules edit-service
4121
+ rules_edit_svc = rules_sub.add_parser('edit-service', help='Edit service rules')
4122
+ rules_edit_svc.add_argument('--serviceid', required=True, help='Service ID')
4123
+ rules_edit_svc.add_argument('--rules', required=True, help='Rules string')
4124
+ rules_edit_svc.set_defaults(func=cmd_rules_edit_service)
4125
+
4126
+ # rules get-user
4127
+ rules_get_user = rules_sub.add_parser('get-user', help='Get user rules')
4128
+ rules_get_user.set_defaults(func=cmd_rules_user)
4129
+
4130
+ # rules edit-user
4131
+ rules_edit_user = rules_sub.add_parser('edit-user', help='Edit user rules')
4132
+ rules_edit_user.add_argument('--rules', required=True, help='Rules string')
4133
+ rules_edit_user.set_defaults(func=cmd_rules_edit_user)
4134
+
4135
+ return parser
4136
+
4137
+
4138
def main():
    """Main entry point for the CLI.

    Parses command-line arguments, prints contextual help and exits with
    status 1 when no resource or action is supplied, and otherwise
    dispatches to the handler registered on the matched subparser via
    ``set_defaults(func=...)``. Any exception raised by a handler is
    reported through ``output_error`` and converted to a non-zero exit.
    """
    parser = create_parser()
    args = parser.parse_args()

    # No resource given: show the top-level help. getattr guards against
    # a namespace missing the 'resource' dest entirely.
    if not getattr(args, 'resource', None):
        parser.print_help()
        sys.exit(1)

    # Resource given but no action: show that resource's help screen.
    # getattr avoids an AttributeError for any resource parser that was
    # registered without its own subparsers (dest='action' unset).
    if not getattr(args, 'action', None):
        # argparse handles '--help' itself and raises SystemExit(0), so
        # the sys.exit(1) below is only a fallback.
        parser.parse_args([args.resource, '--help'])
        sys.exit(1)

    # Dispatch to the command handler attached by set_defaults(func=...).
    if hasattr(args, 'func'):
        try:
            args.func(args)
        except Exception as e:
            # Top-level boundary: surface the error through the CLI's
            # structured error channel instead of a raw traceback.
            output_error(str(e), "EXECUTION_ERROR")
            sys.exit(1)
    else:
        parser.print_help()
        sys.exit(1)
4163
+
4164
+
4165
# Standard script guard: run the CLI only when executed directly,
# not when this module is imported.
if __name__ == '__main__':
    main()