anatools 6.0.0__py3-none-any.whl → 6.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anatools/__init__.py +1 -1
- anatools/anaclient/api/api.py +5 -1
- anatools/anaclient/api/datasets.py +21 -3
- anatools/anaclient/api/preview.py +24 -3
- anatools/anaclient/datasets.py +61 -1
- anatools/anaclient/preview.py +26 -4
- anatools/annotations/annotations.py +39 -18
- anatools/annotations/draw.py +34 -18
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anadeploy +3 -1
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anarules +6 -4
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/renderedai +1513 -46
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/METADATA +2 -2
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/RECORD +23 -23
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/WHEEL +1 -1
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/ana +0 -0
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anamount +0 -0
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anaprofile +0 -0
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anaserver +0 -0
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anatransfer +0 -0
- {anatools-6.0.0.data → anatools-6.0.2.data}/scripts/anautils +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/entry_points.txt +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/licenses/LICENSE +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.2.dist-info}/top_level.txt +0 -0
|
@@ -37,7 +37,9 @@ import argparse
|
|
|
37
37
|
import json
|
|
38
38
|
import os
|
|
39
39
|
import sys
|
|
40
|
-
from typing import Any, List
|
|
40
|
+
from typing import Any, Dict, List, Optional
|
|
41
|
+
|
|
42
|
+
import yaml
|
|
41
43
|
|
|
42
44
|
|
|
43
45
|
def get_client():
|
|
@@ -104,6 +106,55 @@ def parse_list_arg(value: str) -> List[str]:
|
|
|
104
106
|
return [item.strip() for item in value.split(',')]
|
|
105
107
|
|
|
106
108
|
|
|
109
|
+
def load_graph_file(filepath: str) -> Optional[Dict[str, Any]]:
|
|
110
|
+
"""Load a graph from a YAML or JSON file.
|
|
111
|
+
|
|
112
|
+
Args:
|
|
113
|
+
filepath: Path to the graph file (.yaml, .yml, or .json)
|
|
114
|
+
|
|
115
|
+
Returns:
|
|
116
|
+
Dict containing the graph data, or None if loading failed
|
|
117
|
+
"""
|
|
118
|
+
if not os.path.exists(filepath):
|
|
119
|
+
output_error(f"File not found: {filepath}", "FILE_NOT_FOUND")
|
|
120
|
+
return None
|
|
121
|
+
|
|
122
|
+
try:
|
|
123
|
+
with open(filepath, 'r') as f:
|
|
124
|
+
if filepath.endswith('.json'):
|
|
125
|
+
return json.load(f)
|
|
126
|
+
else: # .yaml or .yml
|
|
127
|
+
return yaml.safe_load(f)
|
|
128
|
+
except (json.JSONDecodeError, yaml.YAMLError) as e:
|
|
129
|
+
output_error(f"Failed to parse file: {str(e)}", "PARSE_ERROR")
|
|
130
|
+
return None
|
|
131
|
+
except Exception as e:
|
|
132
|
+
output_error(f"Failed to read file: {str(e)}", "READ_ERROR")
|
|
133
|
+
return None
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def save_graph_file(filepath: str, data: Dict[str, Any]) -> bool:
|
|
137
|
+
"""Save a graph to a YAML or JSON file.
|
|
138
|
+
|
|
139
|
+
Args:
|
|
140
|
+
filepath: Path to save the graph file (.yaml, .yml, or .json)
|
|
141
|
+
data: Graph data to save
|
|
142
|
+
|
|
143
|
+
Returns:
|
|
144
|
+
True if save succeeded, False otherwise
|
|
145
|
+
"""
|
|
146
|
+
try:
|
|
147
|
+
with open(filepath, 'w') as f:
|
|
148
|
+
if filepath.endswith('.json'):
|
|
149
|
+
json.dump(data, f, indent=2)
|
|
150
|
+
else: # .yaml or .yml
|
|
151
|
+
yaml.dump(data, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
|
|
152
|
+
return True
|
|
153
|
+
except Exception as e:
|
|
154
|
+
output_error(f"Failed to save file: {str(e)}", "WRITE_ERROR")
|
|
155
|
+
return False
|
|
156
|
+
|
|
157
|
+
|
|
107
158
|
# =============================================================================
|
|
108
159
|
# WORKSPACES
|
|
109
160
|
# =============================================================================
|
|
@@ -277,16 +328,25 @@ def cmd_datasets_cancel(args):
|
|
|
277
328
|
|
|
278
329
|
|
|
279
330
|
def cmd_datasets_download(args):
|
|
280
|
-
"""Download a dataset."""
|
|
331
|
+
"""Download a dataset or a single file from a dataset."""
|
|
281
332
|
client = get_client()
|
|
282
333
|
workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
|
|
283
334
|
dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
|
|
284
335
|
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
336
|
+
# If filepath is provided, download a single file; otherwise download the entire dataset
|
|
337
|
+
if args.filepath:
|
|
338
|
+
result = client.download_dataset_file(
|
|
339
|
+
workspaceId=workspace_id,
|
|
340
|
+
datasetId=dataset_id,
|
|
341
|
+
filepath=args.filepath,
|
|
342
|
+
localDir=args.outputdir
|
|
343
|
+
)
|
|
344
|
+
else:
|
|
345
|
+
result = client.download_dataset(
|
|
346
|
+
workspaceId=workspace_id,
|
|
347
|
+
datasetId=dataset_id,
|
|
348
|
+
localDir=args.outputdir
|
|
349
|
+
)
|
|
290
350
|
output_json({"downloadPath": result})
|
|
291
351
|
|
|
292
352
|
|
|
@@ -1026,45 +1086,270 @@ def cmd_graphs_stage(args):
|
|
|
1026
1086
|
|
|
1027
1087
|
|
|
1028
1088
|
# =============================================================================
|
|
1029
|
-
#
|
|
1089
|
+
# DATASET-VIEWER
|
|
1030
1090
|
# =============================================================================
|
|
1031
1091
|
|
|
1032
|
-
|
|
1033
|
-
|
|
1034
|
-
client = get_client()
|
|
1035
|
-
workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
|
|
1036
|
-
graph_id = require_arg(args, 'graphid', 'Graph ID')
|
|
1037
|
-
directory = args.outputdir or os.getcwd()
|
|
1092
|
+
DATASET_VIEWER_TRIGGER_PATH = os.path.join(os.path.expanduser('~'), '.theia', 'dataset-viewer-open')
|
|
1093
|
+
DATASET_VIEWER_STATUS_PATH = os.path.join(os.path.expanduser('~'), '.theia', 'dataset-viewer-status.json')
|
|
1038
1094
|
|
|
1039
|
-
|
|
1040
|
-
|
|
1041
|
-
|
|
1042
|
-
|
|
1095
|
+
|
|
1096
|
+
def _write_dataset_viewer_trigger(action: str, payload: dict):
|
|
1097
|
+
"""Write a trigger file for the dataset viewer extension to pick up."""
|
|
1098
|
+
trigger_data = {"action": action, **payload}
|
|
1099
|
+
trigger_dir = os.path.dirname(DATASET_VIEWER_TRIGGER_PATH)
|
|
1100
|
+
os.makedirs(trigger_dir, exist_ok=True)
|
|
1101
|
+
with open(DATASET_VIEWER_TRIGGER_PATH, 'w') as f:
|
|
1102
|
+
json.dump(trigger_data, f, indent=2)
|
|
1103
|
+
return trigger_data
|
|
1104
|
+
|
|
1105
|
+
|
|
1106
|
+
def cmd_dataset_viewer_open(args):
|
|
1107
|
+
"""Open a dataset folder in the Annotation Viewer."""
|
|
1108
|
+
dataset_path = getattr(args, 'path', None)
|
|
1109
|
+
if not dataset_path:
|
|
1110
|
+
output_error("Dataset path is required. Use --path", "MISSING_PATH")
|
|
1111
|
+
sys.exit(1)
|
|
1112
|
+
|
|
1113
|
+
dataset_path = os.path.abspath(dataset_path)
|
|
1114
|
+
if not os.path.isdir(dataset_path):
|
|
1115
|
+
output_error(f"Directory not found: {dataset_path}", "PATH_NOT_FOUND")
|
|
1116
|
+
sys.exit(1)
|
|
1117
|
+
|
|
1118
|
+
images_dir = os.path.join(dataset_path, 'images')
|
|
1119
|
+
if not os.path.isdir(images_dir):
|
|
1120
|
+
output_error(f"No images/ directory found in {dataset_path}", "NO_IMAGES_DIR")
|
|
1121
|
+
sys.exit(1)
|
|
1122
|
+
|
|
1123
|
+
image_index = getattr(args, 'index', None) or 0
|
|
1124
|
+
|
|
1125
|
+
trigger_data = _write_dataset_viewer_trigger('open', {
|
|
1126
|
+
'datasetPath': dataset_path,
|
|
1127
|
+
'imageIndex': int(image_index),
|
|
1128
|
+
})
|
|
1129
|
+
|
|
1130
|
+
output_json({
|
|
1131
|
+
"status": "ok",
|
|
1132
|
+
"action": "open",
|
|
1133
|
+
"datasetPath": dataset_path,
|
|
1134
|
+
"imageIndex": int(image_index),
|
|
1135
|
+
"triggerPath": DATASET_VIEWER_TRIGGER_PATH,
|
|
1136
|
+
})
|
|
1137
|
+
|
|
1138
|
+
|
|
1139
|
+
def cmd_dataset_viewer_next(args):
|
|
1140
|
+
"""Navigate to the next image in the dataset viewer."""
|
|
1141
|
+
trigger_data = _write_dataset_viewer_trigger('next', {})
|
|
1142
|
+
output_json({"status": "ok", "action": "next", "triggerPath": DATASET_VIEWER_TRIGGER_PATH})
|
|
1143
|
+
|
|
1144
|
+
|
|
1145
|
+
def cmd_dataset_viewer_prev(args):
|
|
1146
|
+
"""Navigate to the previous image in the dataset viewer."""
|
|
1147
|
+
trigger_data = _write_dataset_viewer_trigger('prev', {})
|
|
1148
|
+
output_json({"status": "ok", "action": "prev", "triggerPath": DATASET_VIEWER_TRIGGER_PATH})
|
|
1149
|
+
|
|
1150
|
+
|
|
1151
|
+
def cmd_dataset_viewer_goto(args):
|
|
1152
|
+
"""Navigate to a specific image by index or name."""
|
|
1153
|
+
index = getattr(args, 'index', None)
|
|
1154
|
+
name = getattr(args, 'name', None)
|
|
1155
|
+
|
|
1156
|
+
if index is None and not name:
|
|
1157
|
+
output_error("Specify --index or --name", "MISSING_TARGET")
|
|
1158
|
+
sys.exit(1)
|
|
1159
|
+
|
|
1160
|
+
payload = {}
|
|
1161
|
+
if index is not None:
|
|
1162
|
+
payload['imageIndex'] = int(index)
|
|
1163
|
+
if name:
|
|
1164
|
+
payload['imageName'] = name
|
|
1165
|
+
|
|
1166
|
+
trigger_data = _write_dataset_viewer_trigger('goto', payload)
|
|
1167
|
+
output_json({
|
|
1168
|
+
"status": "ok",
|
|
1169
|
+
"action": "goto",
|
|
1170
|
+
**payload,
|
|
1171
|
+
"triggerPath": DATASET_VIEWER_TRIGGER_PATH,
|
|
1172
|
+
})
|
|
1173
|
+
|
|
1174
|
+
|
|
1175
|
+
def cmd_dataset_viewer_annotations(args):
|
|
1176
|
+
"""Set which annotation types are enabled in the viewer."""
|
|
1177
|
+
types_str = getattr(args, 'types', None)
|
|
1178
|
+
if not types_str:
|
|
1179
|
+
output_error("Annotation types required. Use --types (comma-separated: bbox,bbox3d,segmentation,centroid,mask)", "MISSING_TYPES")
|
|
1180
|
+
sys.exit(1)
|
|
1181
|
+
|
|
1182
|
+
valid_types = {'bbox', 'bbox3d', 'segmentation', 'centroid', 'mask'}
|
|
1183
|
+
types = [t.strip() for t in types_str.split(',')]
|
|
1184
|
+
invalid = [t for t in types if t not in valid_types]
|
|
1185
|
+
if invalid:
|
|
1186
|
+
output_error(f"Invalid annotation types: {', '.join(invalid)}. Valid: {', '.join(sorted(valid_types))}", "INVALID_TYPES")
|
|
1187
|
+
sys.exit(1)
|
|
1188
|
+
|
|
1189
|
+
trigger_data = _write_dataset_viewer_trigger('setAnnotations', {'annotations': types})
|
|
1190
|
+
output_json({
|
|
1191
|
+
"status": "ok",
|
|
1192
|
+
"action": "setAnnotations",
|
|
1193
|
+
"annotations": types,
|
|
1194
|
+
"triggerPath": DATASET_VIEWER_TRIGGER_PATH,
|
|
1195
|
+
})
|
|
1196
|
+
|
|
1197
|
+
|
|
1198
|
+
def cmd_dataset_viewer_filter(args):
|
|
1199
|
+
"""Set object type filter in the viewer."""
|
|
1200
|
+
types_str = getattr(args, 'types', None)
|
|
1201
|
+
|
|
1202
|
+
if types_str:
|
|
1203
|
+
types = [t.strip() for t in types_str.split(',')]
|
|
1204
|
+
else:
|
|
1205
|
+
# No types = show all (clear filter)
|
|
1206
|
+
types = []
|
|
1207
|
+
|
|
1208
|
+
trigger_data = _write_dataset_viewer_trigger('setFilter', {'objectTypes': types})
|
|
1209
|
+
output_json({
|
|
1210
|
+
"status": "ok",
|
|
1211
|
+
"action": "setFilter",
|
|
1212
|
+
"objectTypes": types,
|
|
1213
|
+
"triggerPath": DATASET_VIEWER_TRIGGER_PATH,
|
|
1214
|
+
})
|
|
1215
|
+
|
|
1216
|
+
|
|
1217
|
+
def cmd_dataset_viewer_status(args):
|
|
1218
|
+
"""Get the current status of the dataset annotation viewer."""
|
|
1219
|
+
if not os.path.exists(DATASET_VIEWER_STATUS_PATH):
|
|
1220
|
+
output_json({
|
|
1221
|
+
"status": "no_status_file",
|
|
1222
|
+
"message": "No dataset viewer status file found. The viewer may not have been opened yet.",
|
|
1223
|
+
"statusPath": DATASET_VIEWER_STATUS_PATH,
|
|
1224
|
+
"sessions": [],
|
|
1225
|
+
})
|
|
1043
1226
|
return
|
|
1044
1227
|
|
|
1045
|
-
|
|
1046
|
-
|
|
1047
|
-
|
|
1048
|
-
|
|
1228
|
+
try:
|
|
1229
|
+
with open(DATASET_VIEWER_STATUS_PATH, 'r') as f:
|
|
1230
|
+
status_data = json.load(f)
|
|
1231
|
+
except json.JSONDecodeError as e:
|
|
1232
|
+
output_error(f"Failed to parse status file: {e}", "PARSE_ERROR")
|
|
1233
|
+
return
|
|
1234
|
+
except IOError as e:
|
|
1235
|
+
output_error(f"Failed to read status file: {e}", "READ_ERROR")
|
|
1049
1236
|
return
|
|
1050
1237
|
|
|
1051
|
-
#
|
|
1052
|
-
|
|
1238
|
+
# Optionally filter by dataset path
|
|
1239
|
+
filter_path = getattr(args, 'path', None)
|
|
1240
|
+
if filter_path:
|
|
1241
|
+
filter_path = os.path.abspath(filter_path)
|
|
1242
|
+
sessions = status_data.get('sessions', {})
|
|
1243
|
+
if isinstance(sessions, dict):
|
|
1244
|
+
filtered = {k: v for k, v in sessions.items() if os.path.abspath(k) == filter_path}
|
|
1245
|
+
status_data['sessions'] = filtered
|
|
1053
1246
|
|
|
1054
|
-
|
|
1055
|
-
|
|
1056
|
-
|
|
1057
|
-
|
|
1247
|
+
output_json({
|
|
1248
|
+
"status": "ok",
|
|
1249
|
+
"statusPath": DATASET_VIEWER_STATUS_PATH,
|
|
1250
|
+
**status_data,
|
|
1251
|
+
})
|
|
1252
|
+
|
|
1253
|
+
|
|
1254
|
+
# =============================================================================
|
|
1255
|
+
# GRAPH-EDITOR
|
|
1256
|
+
# =============================================================================
|
|
1257
|
+
|
|
1258
|
+
def cmd_graph_editor_open(args):
|
|
1259
|
+
"""Open a graph in the graph editor.
|
|
1260
|
+
|
|
1261
|
+
Supports two modes:
|
|
1262
|
+
1. Download from platform: --workspaceid and --graphid (downloads graph and schema)
|
|
1263
|
+
2. Local files: --graphfile and --schemafile (uses existing local files)
|
|
1264
|
+
"""
|
|
1265
|
+
# Get arguments
|
|
1266
|
+
workspace_id = getattr(args, 'workspaceid', None)
|
|
1267
|
+
graph_id = getattr(args, 'graphid', None)
|
|
1268
|
+
graph_file = getattr(args, 'graphfile', None)
|
|
1269
|
+
schema_file = getattr(args, 'schemafile', None)
|
|
1270
|
+
|
|
1271
|
+
# Determine mode and validate arguments
|
|
1272
|
+
has_platform_args = workspace_id or graph_id
|
|
1273
|
+
has_local_args = graph_file or schema_file
|
|
1274
|
+
|
|
1275
|
+
if has_platform_args and has_local_args:
|
|
1276
|
+
output_error(
|
|
1277
|
+
"Cannot use both platform arguments (--workspaceid/--graphid) and local file arguments (--graphfile/--schemafile)",
|
|
1278
|
+
"INVALID_ARGS"
|
|
1279
|
+
)
|
|
1280
|
+
return
|
|
1058
1281
|
|
|
1059
|
-
|
|
1060
|
-
|
|
1061
|
-
|
|
1062
|
-
|
|
1282
|
+
if not has_platform_args and not has_local_args:
|
|
1283
|
+
output_error(
|
|
1284
|
+
"Must provide either --workspaceid and --graphid (to download from platform) or --graphfile and --schemafile (to use local files)",
|
|
1285
|
+
"MISSING_ARGS"
|
|
1286
|
+
)
|
|
1063
1287
|
return
|
|
1064
1288
|
|
|
1065
|
-
|
|
1066
|
-
|
|
1067
|
-
|
|
1289
|
+
# Mode 1: Local files
|
|
1290
|
+
if has_local_args:
|
|
1291
|
+
if not graph_file:
|
|
1292
|
+
output_error("--graphfile is required when using local files", "MISSING_GRAPHFILE")
|
|
1293
|
+
return
|
|
1294
|
+
if not schema_file:
|
|
1295
|
+
output_error("--schemafile is required when using local files", "MISSING_SCHEMAFILE")
|
|
1296
|
+
return
|
|
1297
|
+
|
|
1298
|
+
# Validate files exist
|
|
1299
|
+
if not os.path.exists(graph_file):
|
|
1300
|
+
output_error(f"Graph file not found: {graph_file}", "FILE_NOT_FOUND")
|
|
1301
|
+
return
|
|
1302
|
+
if not os.path.exists(schema_file):
|
|
1303
|
+
output_error(f"Schema file not found: {schema_file}", "FILE_NOT_FOUND")
|
|
1304
|
+
return
|
|
1305
|
+
|
|
1306
|
+
graph_path = graph_file
|
|
1307
|
+
schema_path = schema_file
|
|
1308
|
+
graph_id = None
|
|
1309
|
+
channel_id = None
|
|
1310
|
+
graph_name = os.path.basename(graph_file)
|
|
1311
|
+
|
|
1312
|
+
# Mode 2: Download from platform
|
|
1313
|
+
else:
|
|
1314
|
+
if not workspace_id:
|
|
1315
|
+
output_error("--workspaceid is required when downloading from platform", "MISSING_WORKSPACEID")
|
|
1316
|
+
return
|
|
1317
|
+
if not graph_id:
|
|
1318
|
+
output_error("--graphid is required when downloading from platform", "MISSING_GRAPHID")
|
|
1319
|
+
return
|
|
1320
|
+
|
|
1321
|
+
client = get_client()
|
|
1322
|
+
directory = args.outputdir or os.getcwd()
|
|
1323
|
+
|
|
1324
|
+
# Get graph metadata to find channelId
|
|
1325
|
+
graphs = client.get_graphs(workspaceId=workspace_id, graphId=graph_id)
|
|
1326
|
+
if not graphs:
|
|
1327
|
+
output_error(f"Graph {graph_id} not found", "GRAPH_NOT_FOUND")
|
|
1328
|
+
return
|
|
1329
|
+
|
|
1330
|
+
graph_info = graphs[0]
|
|
1331
|
+
channel_id = graph_info.get('channelId')
|
|
1332
|
+
if not channel_id:
|
|
1333
|
+
output_error("Graph has no associated channel", "NO_CHANNEL")
|
|
1334
|
+
return
|
|
1335
|
+
|
|
1336
|
+
# Create output directory if needed
|
|
1337
|
+
os.makedirs(directory, exist_ok=True)
|
|
1338
|
+
|
|
1339
|
+
# Download graph
|
|
1340
|
+
graph_name = graph_info.get('name', graph_id).replace(' ', '_')
|
|
1341
|
+
graph_path = os.path.join(directory, f"{graph_name}.yaml")
|
|
1342
|
+
client.download_graph(workspaceId=workspace_id, graphId=graph_id, filepath=graph_path)
|
|
1343
|
+
|
|
1344
|
+
# Download channel schema
|
|
1345
|
+
schema = client.get_channel_nodes(channelId=channel_id)
|
|
1346
|
+
if not schema:
|
|
1347
|
+
output_error("Failed to fetch channel schema", "SCHEMA_ERROR")
|
|
1348
|
+
return
|
|
1349
|
+
|
|
1350
|
+
schema_path = os.path.join(directory, f"{channel_id}_schema.json")
|
|
1351
|
+
with open(schema_path, 'w') as f:
|
|
1352
|
+
json.dump(schema, f, indent=2)
|
|
1068
1353
|
|
|
1069
1354
|
# Write trigger file to open in graph editor
|
|
1070
1355
|
trigger_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-open')
|
|
@@ -1079,13 +1364,865 @@ def cmd_graph_editor_open(args):
|
|
|
1079
1364
|
with open(trigger_path, 'w') as f:
|
|
1080
1365
|
json.dump(trigger_data, f)
|
|
1081
1366
|
|
|
1082
|
-
|
|
1367
|
+
# Build output
|
|
1368
|
+
result = {
|
|
1083
1369
|
"graphPath": os.path.abspath(graph_path),
|
|
1084
1370
|
"schemaPath": os.path.abspath(schema_path),
|
|
1085
|
-
"triggerPath": trigger_path
|
|
1086
|
-
|
|
1087
|
-
|
|
1088
|
-
|
|
1371
|
+
"triggerPath": trigger_path
|
|
1372
|
+
}
|
|
1373
|
+
|
|
1374
|
+
# Add platform-specific fields if downloaded
|
|
1375
|
+
if graph_id:
|
|
1376
|
+
result["graphId"] = graph_id
|
|
1377
|
+
if channel_id:
|
|
1378
|
+
result["channelId"] = channel_id
|
|
1379
|
+
if has_local_args:
|
|
1380
|
+
result["mode"] = "local"
|
|
1381
|
+
else:
|
|
1382
|
+
result["mode"] = "platform"
|
|
1383
|
+
result["graphName"] = graph_info.get('name')
|
|
1384
|
+
|
|
1385
|
+
output_json(result)
|
|
1386
|
+
|
|
1387
|
+
|
|
1388
|
+
def cmd_graph_editor_edit_node(args):
|
|
1389
|
+
"""Edit a node's values in a local graph file.
|
|
1390
|
+
|
|
1391
|
+
Modifies the values of an existing node in the graph. Use --values to pass
|
|
1392
|
+
a JSON object with the key-value pairs to update. Existing values not
|
|
1393
|
+
specified in --values are preserved.
|
|
1394
|
+
"""
|
|
1395
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1396
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1397
|
+
values_json = require_arg(args, 'values', 'Values JSON')
|
|
1398
|
+
|
|
1399
|
+
graph = load_graph_file(filepath)
|
|
1400
|
+
if graph is None:
|
|
1401
|
+
return
|
|
1402
|
+
|
|
1403
|
+
nodes = graph.get('nodes', {})
|
|
1404
|
+
if node_name not in nodes:
|
|
1405
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1406
|
+
return
|
|
1407
|
+
|
|
1408
|
+
# Parse the values JSON
|
|
1409
|
+
try:
|
|
1410
|
+
new_values = json.loads(values_json)
|
|
1411
|
+
except json.JSONDecodeError as e:
|
|
1412
|
+
output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
|
|
1413
|
+
return
|
|
1414
|
+
|
|
1415
|
+
if not isinstance(new_values, dict):
|
|
1416
|
+
output_error("--values must be a JSON object", "INVALID_VALUES")
|
|
1417
|
+
return
|
|
1418
|
+
|
|
1419
|
+
# Update the node's values
|
|
1420
|
+
node = nodes[node_name]
|
|
1421
|
+
if 'values' not in node:
|
|
1422
|
+
node['values'] = {}
|
|
1423
|
+
|
|
1424
|
+
node['values'].update(new_values)
|
|
1425
|
+
|
|
1426
|
+
if not save_graph_file(filepath, graph):
|
|
1427
|
+
return
|
|
1428
|
+
|
|
1429
|
+
output_json({
|
|
1430
|
+
"success": True,
|
|
1431
|
+
"file": os.path.abspath(filepath),
|
|
1432
|
+
"node": node_name,
|
|
1433
|
+
"updatedValues": new_values,
|
|
1434
|
+
"allValues": node['values']
|
|
1435
|
+
})
|
|
1436
|
+
|
|
1437
|
+
|
|
1438
|
+
def cmd_graph_editor_add_node(args):
|
|
1439
|
+
"""Add a new node to a local graph file.
|
|
1440
|
+
|
|
1441
|
+
Adds a node with the specified nodeClass. Use --name for a custom node name
|
|
1442
|
+
(defaults to nodeClass_N where N is auto-incremented). Use --values to set
|
|
1443
|
+
initial parameter values, --location to set x,y position.
|
|
1444
|
+
"""
|
|
1445
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1446
|
+
node_class = require_arg(args, 'nodeclass', 'Node class')
|
|
1447
|
+
|
|
1448
|
+
graph = load_graph_file(filepath)
|
|
1449
|
+
if graph is None:
|
|
1450
|
+
return
|
|
1451
|
+
|
|
1452
|
+
nodes = graph.get('nodes', {})
|
|
1453
|
+
|
|
1454
|
+
# Generate node name if not provided
|
|
1455
|
+
node_name = args.name
|
|
1456
|
+
if not node_name:
|
|
1457
|
+
# Find the next available number for this node class
|
|
1458
|
+
counter = 1
|
|
1459
|
+
while f"{node_class}_{counter}" in nodes:
|
|
1460
|
+
counter += 1
|
|
1461
|
+
node_name = f"{node_class}_{counter}"
|
|
1462
|
+
|
|
1463
|
+
if node_name in nodes:
|
|
1464
|
+
output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
|
|
1465
|
+
return
|
|
1466
|
+
|
|
1467
|
+
# Parse optional values
|
|
1468
|
+
values = {}
|
|
1469
|
+
if args.values:
|
|
1470
|
+
try:
|
|
1471
|
+
values = json.loads(args.values)
|
|
1472
|
+
if not isinstance(values, dict):
|
|
1473
|
+
output_error("--values must be a JSON object", "INVALID_VALUES")
|
|
1474
|
+
return
|
|
1475
|
+
except json.JSONDecodeError as e:
|
|
1476
|
+
output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
|
|
1477
|
+
return
|
|
1478
|
+
|
|
1479
|
+
# Parse optional location
|
|
1480
|
+
location = {"x": 0, "y": 0}
|
|
1481
|
+
if args.location:
|
|
1482
|
+
try:
|
|
1483
|
+
loc = json.loads(args.location)
|
|
1484
|
+
if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
|
|
1485
|
+
location = {"x": loc['x'], "y": loc['y']}
|
|
1486
|
+
elif isinstance(loc, list) and len(loc) >= 2:
|
|
1487
|
+
location = {"x": loc[0], "y": loc[1]}
|
|
1488
|
+
else:
|
|
1489
|
+
output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
|
|
1490
|
+
return
|
|
1491
|
+
except json.JSONDecodeError as e:
|
|
1492
|
+
output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
|
|
1493
|
+
return
|
|
1494
|
+
|
|
1495
|
+
# Create the new node
|
|
1496
|
+
new_node = {
|
|
1497
|
+
"name": node_name,
|
|
1498
|
+
"nodeClass": node_class,
|
|
1499
|
+
"color": args.color or "#808080",
|
|
1500
|
+
"links": {},
|
|
1501
|
+
"location": location,
|
|
1502
|
+
"ports": {
|
|
1503
|
+
"inputs": [],
|
|
1504
|
+
"outputs": []
|
|
1505
|
+
},
|
|
1506
|
+
"values": values
|
|
1507
|
+
}
|
|
1508
|
+
|
|
1509
|
+
if args.tooltip:
|
|
1510
|
+
new_node["tooltip"] = args.tooltip
|
|
1511
|
+
|
|
1512
|
+
nodes[node_name] = new_node
|
|
1513
|
+
graph['nodes'] = nodes
|
|
1514
|
+
|
|
1515
|
+
if not save_graph_file(filepath, graph):
|
|
1516
|
+
return
|
|
1517
|
+
|
|
1518
|
+
output_json({
|
|
1519
|
+
"success": True,
|
|
1520
|
+
"file": os.path.abspath(filepath),
|
|
1521
|
+
"node": node_name,
|
|
1522
|
+
"nodeClass": node_class,
|
|
1523
|
+
"location": location,
|
|
1524
|
+
"values": values
|
|
1525
|
+
})
|
|
1526
|
+
|
|
1527
|
+
|
|
1528
|
+
def cmd_graph_editor_add_link(args):
|
|
1529
|
+
"""Add a link between two nodes in a local graph file.
|
|
1530
|
+
|
|
1531
|
+
Creates a connection from a source node's output port to a target node's
|
|
1532
|
+
input port. The link is stored on the target node under its links property.
|
|
1533
|
+
"""
|
|
1534
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1535
|
+
source_node = require_arg(args, 'source', 'Source node name')
|
|
1536
|
+
output_port = require_arg(args, 'output', 'Output port name')
|
|
1537
|
+
target_node = require_arg(args, 'target', 'Target node name')
|
|
1538
|
+
input_port = require_arg(args, 'input', 'Input port name')
|
|
1539
|
+
|
|
1540
|
+
graph = load_graph_file(filepath)
|
|
1541
|
+
if graph is None:
|
|
1542
|
+
return
|
|
1543
|
+
|
|
1544
|
+
nodes = graph.get('nodes', {})
|
|
1545
|
+
|
|
1546
|
+
# Validate source node exists
|
|
1547
|
+
if source_node not in nodes:
|
|
1548
|
+
output_error(f"Source node '{source_node}' not found in graph", "SOURCE_NOT_FOUND")
|
|
1549
|
+
return
|
|
1550
|
+
|
|
1551
|
+
# Validate target node exists
|
|
1552
|
+
if target_node not in nodes:
|
|
1553
|
+
output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
|
|
1554
|
+
return
|
|
1555
|
+
|
|
1556
|
+
target = nodes[target_node]
|
|
1557
|
+
|
|
1558
|
+
# Initialize links if not present
|
|
1559
|
+
if 'links' not in target:
|
|
1560
|
+
target['links'] = {}
|
|
1561
|
+
|
|
1562
|
+
# Add or append to the input port's links
|
|
1563
|
+
if input_port not in target['links']:
|
|
1564
|
+
target['links'][input_port] = []
|
|
1565
|
+
|
|
1566
|
+
# Check if this exact link already exists
|
|
1567
|
+
new_link = {
|
|
1568
|
+
"outputPort": output_port,
|
|
1569
|
+
"sourceNode": source_node
|
|
1570
|
+
}
|
|
1571
|
+
|
|
1572
|
+
for existing_link in target['links'][input_port]:
|
|
1573
|
+
if (existing_link.get('outputPort') == output_port and
|
|
1574
|
+
existing_link.get('sourceNode') == source_node):
|
|
1575
|
+
output_error(
|
|
1576
|
+
f"Link already exists: {source_node}.{output_port} -> {target_node}.{input_port}",
|
|
1577
|
+
"LINK_EXISTS"
|
|
1578
|
+
)
|
|
1579
|
+
return
|
|
1580
|
+
|
|
1581
|
+
target['links'][input_port].append(new_link)
|
|
1582
|
+
|
|
1583
|
+
if not save_graph_file(filepath, graph):
|
|
1584
|
+
return
|
|
1585
|
+
|
|
1586
|
+
output_json({
|
|
1587
|
+
"success": True,
|
|
1588
|
+
"file": os.path.abspath(filepath),
|
|
1589
|
+
"link": {
|
|
1590
|
+
"source": source_node,
|
|
1591
|
+
"outputPort": output_port,
|
|
1592
|
+
"target": target_node,
|
|
1593
|
+
"inputPort": input_port
|
|
1594
|
+
}
|
|
1595
|
+
})
|
|
1596
|
+
|
|
1597
|
+
|
|
1598
|
+
def cmd_graph_editor_remove_node(args):
|
|
1599
|
+
"""Remove a node from a local graph file.
|
|
1600
|
+
|
|
1601
|
+
Removes the specified node and all links connected to it (both incoming
|
|
1602
|
+
links on this node and outgoing links from other nodes that reference it).
|
|
1603
|
+
"""
|
|
1604
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1605
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1606
|
+
|
|
1607
|
+
graph = load_graph_file(filepath)
|
|
1608
|
+
if graph is None:
|
|
1609
|
+
return
|
|
1610
|
+
|
|
1611
|
+
nodes = graph.get('nodes', {})
|
|
1612
|
+
|
|
1613
|
+
if node_name not in nodes:
|
|
1614
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1615
|
+
return
|
|
1616
|
+
|
|
1617
|
+
# Remove the node
|
|
1618
|
+
removed_node = nodes.pop(node_name)
|
|
1619
|
+
|
|
1620
|
+
# Remove all links that reference this node from other nodes
|
|
1621
|
+
removed_links = []
|
|
1622
|
+
for other_name, other_node in nodes.items():
|
|
1623
|
+
links = other_node.get('links', {})
|
|
1624
|
+
for port_name, port_links in list(links.items()):
|
|
1625
|
+
original_count = len(port_links)
|
|
1626
|
+
port_links[:] = [
|
|
1627
|
+
link for link in port_links
|
|
1628
|
+
if link.get('sourceNode') != node_name
|
|
1629
|
+
]
|
|
1630
|
+
if len(port_links) < original_count:
|
|
1631
|
+
removed_links.append({
|
|
1632
|
+
"target": other_name,
|
|
1633
|
+
"inputPort": port_name,
|
|
1634
|
+
"source": node_name
|
|
1635
|
+
})
|
|
1636
|
+
# Clean up empty link lists
|
|
1637
|
+
if not port_links:
|
|
1638
|
+
del links[port_name]
|
|
1639
|
+
|
|
1640
|
+
if not save_graph_file(filepath, graph):
|
|
1641
|
+
return
|
|
1642
|
+
|
|
1643
|
+
output_json({
|
|
1644
|
+
"success": True,
|
|
1645
|
+
"file": os.path.abspath(filepath),
|
|
1646
|
+
"removedNode": node_name,
|
|
1647
|
+
"nodeClass": removed_node.get('nodeClass'),
|
|
1648
|
+
"removedLinks": removed_links
|
|
1649
|
+
})
|
|
1650
|
+
|
|
1651
|
+
|
|
1652
|
+
def cmd_graph_editor_remove_link(args):
|
|
1653
|
+
"""Remove a link between two nodes in a local graph file.
|
|
1654
|
+
|
|
1655
|
+
Removes the connection from the source node's output port to the target
|
|
1656
|
+
node's input port.
|
|
1657
|
+
"""
|
|
1658
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1659
|
+
source_node = require_arg(args, 'source', 'Source node name')
|
|
1660
|
+
output_port = require_arg(args, 'output', 'Output port name')
|
|
1661
|
+
target_node = require_arg(args, 'target', 'Target node name')
|
|
1662
|
+
input_port = require_arg(args, 'input', 'Input port name')
|
|
1663
|
+
|
|
1664
|
+
graph = load_graph_file(filepath)
|
|
1665
|
+
if graph is None:
|
|
1666
|
+
return
|
|
1667
|
+
|
|
1668
|
+
nodes = graph.get('nodes', {})
|
|
1669
|
+
|
|
1670
|
+
# Validate target node exists
|
|
1671
|
+
if target_node not in nodes:
|
|
1672
|
+
output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
|
|
1673
|
+
return
|
|
1674
|
+
|
|
1675
|
+
target = nodes[target_node]
|
|
1676
|
+
links = target.get('links', {})
|
|
1677
|
+
|
|
1678
|
+
if input_port not in links:
|
|
1679
|
+
output_error(
|
|
1680
|
+
f"No links found on input port '{input_port}' of node '{target_node}'",
|
|
1681
|
+
"LINK_NOT_FOUND"
|
|
1682
|
+
)
|
|
1683
|
+
return
|
|
1684
|
+
|
|
1685
|
+
# Find and remove the specific link
|
|
1686
|
+
port_links = links[input_port]
|
|
1687
|
+
original_count = len(port_links)
|
|
1688
|
+
|
|
1689
|
+
port_links[:] = [
|
|
1690
|
+
link for link in port_links
|
|
1691
|
+
if not (link.get('outputPort') == output_port and
|
|
1692
|
+
link.get('sourceNode') == source_node)
|
|
1693
|
+
]
|
|
1694
|
+
|
|
1695
|
+
if len(port_links) == original_count:
|
|
1696
|
+
output_error(
|
|
1697
|
+
f"Link not found: {source_node}.{output_port} -> {target_node}.{input_port}",
|
|
1698
|
+
"LINK_NOT_FOUND"
|
|
1699
|
+
)
|
|
1700
|
+
return
|
|
1701
|
+
|
|
1702
|
+
# Clean up empty link lists
|
|
1703
|
+
if not port_links:
|
|
1704
|
+
del links[input_port]
|
|
1705
|
+
|
|
1706
|
+
if not save_graph_file(filepath, graph):
|
|
1707
|
+
return
|
|
1708
|
+
|
|
1709
|
+
output_json({
|
|
1710
|
+
"success": True,
|
|
1711
|
+
"file": os.path.abspath(filepath),
|
|
1712
|
+
"removedLink": {
|
|
1713
|
+
"source": source_node,
|
|
1714
|
+
"outputPort": output_port,
|
|
1715
|
+
"target": target_node,
|
|
1716
|
+
"inputPort": input_port
|
|
1717
|
+
}
|
|
1718
|
+
})
|
|
1719
|
+
|
|
1720
|
+
|
|
1721
|
+
def cmd_graph_editor_add_volume_file(args):
    """Add a VolumeFile node to a local graph file.

    Creates a VolumeFile node that references a file in a Rendered.ai volume.
    The volume reference uses the format volumeId:/path/to/file.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    volume_path = require_arg(args, 'path', 'File path in volume')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Auto-generate a unique node name when none was supplied.
    node_name = args.name
    if not node_name:
        suffix = 1
        while f"VolumeFile_{suffix}" in nodes:
            suffix += 1
        node_name = f"VolumeFile_{suffix}"

    if node_name in nodes:
        output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
        return

    # Resolve placement from the optional --location argument:
    # either {"x": N, "y": N} or a two-element [x, y] list.
    location = {"x": 0, "y": 0}
    if args.location:
        try:
            parsed = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(parsed, dict) and 'x' in parsed and 'y' in parsed:
            location = {"x": parsed['x'], "y": parsed['y']}
        elif isinstance(parsed, list) and len(parsed) >= 2:
            location = {"x": parsed[0], "y": parsed[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return

    # Volume references are always rooted at '/'.
    if not volume_path.startswith('/'):
        volume_path = '/' + volume_path

    volume_ref = f"{volume_id}:{volume_path}"

    # The tooltip prefers the human-readable volume name when available.
    volume_display = args.volumename or volume_id
    tooltip = f"{volume_display}:{volume_path}"

    # Assemble the VolumeFile node; the File value carries the volume reference.
    nodes[node_name] = {
        "name": node_name,
        "nodeClass": "VolumeFile",
        "color": "#246BB3",
        "hash": "8d56c9b8e4bae85fd61620e1d4d44a24",
        "links": {},
        "location": location,
        "ports": {
            "inputs": [
                {
                    "name": "File",
                    "description": "",
                    "default": volume_ref,
                    "hidden": True
                }
            ],
            "outputs": [
                {
                    "name": "File",
                    "description": ""
                }
            ]
        },
        "tooltip": tooltip,
        "values": {
            "File": volume_ref
        }
    }
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": "VolumeFile",
        "volumeId": volume_id,
        "path": volume_path,
        "volumeRef": volume_ref,
        "location": location
    })
|
|
1822
|
+
|
|
1823
|
+
|
|
1824
|
+
def cmd_graph_editor_add_volume_directory(args):
    """Add a VolumeDirectory node to a local graph file.

    Creates a VolumeDirectory node that references a directory in a Rendered.ai volume.
    The volume reference uses the format volumeId:/path/to/directory.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    volume_id = require_arg(args, 'volumeid', 'Volume ID')
    # Unlike VolumeFile, the path defaults to the volume root.
    volume_path = args.path or '/'

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    # Auto-generate a unique node name when none was supplied.
    node_name = args.name
    if not node_name:
        suffix = 1
        while f"VolumeDirectory_{suffix}" in nodes:
            suffix += 1
        node_name = f"VolumeDirectory_{suffix}"

    if node_name in nodes:
        output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
        return

    # Resolve placement from the optional --location argument:
    # either {"x": N, "y": N} or a two-element [x, y] list.
    location = {"x": 0, "y": 0}
    if args.location:
        try:
            parsed = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(parsed, dict) and 'x' in parsed and 'y' in parsed:
            location = {"x": parsed['x'], "y": parsed['y']}
        elif isinstance(parsed, list) and len(parsed) >= 2:
            location = {"x": parsed[0], "y": parsed[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return

    # Volume references are always rooted at '/'.
    if not volume_path.startswith('/'):
        volume_path = '/' + volume_path

    volume_ref = f"{volume_id}:{volume_path}"

    # The tooltip prefers the human-readable volume name when available.
    volume_display = args.volumename or volume_id
    tooltip = f"{volume_display}:{volume_path}"

    # Assemble the VolumeDirectory node; the Directory value carries the reference.
    nodes[node_name] = {
        "name": node_name,
        "nodeClass": "VolumeDirectory",
        "color": "#246BB3",
        "hash": "a7c19eb160150ee04d82af60c9332d104f0a7f89",
        "links": {},
        "location": location,
        "ports": {
            "inputs": [
                {
                    "name": "Directory",
                    "description": "",
                    "default": volume_ref,
                    "hidden": True
                }
            ],
            "outputs": [
                {
                    "name": "Directory",
                    "description": ""
                }
            ]
        },
        "tooltip": tooltip,
        "values": {
            "Directory": volume_ref
        }
    }
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": "VolumeDirectory",
        "volumeId": volume_id,
        "path": volume_path,
        "volumeRef": volume_ref,
        "location": location
    })
|
|
1925
|
+
|
|
1926
|
+
|
|
1927
|
+
def cmd_graph_editor_list_nodes(args):
    """List all nodes in a local graph file.

    Returns a summary of all nodes including their name, class, location,
    and connection counts. Useful for understanding graph structure.
    """
    filepath = require_arg(args, 'file', 'Graph file path')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})

    summaries = []
    for name, data in nodes.items():
        # Incoming links are stored on the node itself, keyed by input port.
        sources = [
            f"{link.get('sourceNode')}.{link.get('outputPort')}"
            for port_links in data.get('links', {}).values()
            for link in port_links
        ]

        # Outgoing links require scanning every other node for references back here.
        targets = [
            f"{other_name}.{port}"
            for other_name, other_data in nodes.items()
            if other_name != name
            for port, port_links in other_data.get('links', {}).items()
            for link in port_links
            if link.get('sourceNode') == name
        ]

        entry = {
            "name": name,
            "nodeClass": data.get('nodeClass'),
            "location": data.get('location'),
            "incomingLinks": len(sources),
            "outgoingLinks": len(targets),
            "hasValues": bool(data.get('values'))
        }

        # Full endpoint lists only when verbose output was requested.
        if args.verbose:
            entry["linkSources"] = sources
            entry["linkTargets"] = targets

        summaries.append(entry)

    # Sort by name for deterministic output.
    summaries.sort(key=lambda entry: entry['name'])

    output_json({
        "file": os.path.abspath(filepath),
        "nodeCount": len(summaries),
        "nodes": summaries
    })
|
|
1987
|
+
|
|
1988
|
+
|
|
1989
|
+
def cmd_graph_editor_get_node(args):
    """Get detailed information about a specific node in a local graph file.

    Returns full node data including all values, ports, links, and metadata.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    node_data = nodes[node_name]

    # Outgoing links live on the *target* nodes, so scan every other node
    # for links whose sourceNode points back at this one.
    outgoing_links = []
    for other_name, other_data in nodes.items():
        if other_name == node_name:
            continue
        for port_name, port_links in other_data.get('links', {}).items():
            outgoing_links.extend(
                {
                    "targetNode": other_name,
                    "targetPort": port_name,
                    "outputPort": link.get('outputPort')
                }
                for link in port_links
                if link.get('sourceNode') == node_name
            )

    output_json({
        "file": os.path.abspath(filepath),
        "node": node_name,
        "nodeClass": node_data.get('nodeClass'),
        "color": node_data.get('color'),
        "location": node_data.get('location'),
        "tooltip": node_data.get('tooltip'),
        "hash": node_data.get('hash'),
        "values": node_data.get('values', {}),
        "ports": node_data.get('ports', {}),
        "incomingLinks": node_data.get('links', {}),
        "outgoingLinks": outgoing_links
    })
|
|
2036
|
+
|
|
2037
|
+
|
|
2038
|
+
def cmd_graph_editor_move_node(args):
    """Move a node to a new location in a local graph file.

    Updates the node's x,y coordinates for visual positioning in the graph editor.
    """
    filepath = require_arg(args, 'file', 'Graph file path')
    node_name = require_arg(args, 'node', 'Node name')
    location_str = require_arg(args, 'location', 'Location')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if node_name not in nodes:
        output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
        return

    # Accept either {"x": N, "y": N} or a two-element [x, y] list.
    try:
        parsed = json.loads(location_str)
    except json.JSONDecodeError as e:
        output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
        return
    if isinstance(parsed, dict) and 'x' in parsed and 'y' in parsed:
        location = {"x": parsed['x'], "y": parsed['y']}
    elif isinstance(parsed, list) and len(parsed) >= 2:
        location = {"x": parsed[0], "y": parsed[1]}
    else:
        output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
        return

    # Remember the previous position so the caller can see what changed.
    previous = nodes[node_name].get('location', {})
    nodes[node_name]['location'] = location

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "node": node_name,
        "oldLocation": previous,
        "newLocation": location
    })
|
|
2084
|
+
|
|
2085
|
+
|
|
2086
|
+
def cmd_graph_editor_clone_node(args):
    """Clone an existing node in a local graph file.

    Creates a copy of a node with a new name. The cloned node has the same
    nodeClass, values, ports, and color, but no links (links must be added
    separately). The location is offset slightly from the original.
    """
    import copy

    filepath = require_arg(args, 'file', 'Graph file path')
    source_node = require_arg(args, 'source', 'Source node name')

    graph = load_graph_file(filepath)
    if graph is None:
        return

    nodes = graph.get('nodes', {})
    if source_node not in nodes:
        output_error(f"Source node '{source_node}' not found in graph", "NODE_NOT_FOUND")
        return

    source_data = nodes[source_node]
    node_class = source_data.get('nodeClass', 'Unknown')

    # Auto-name the clone from its class when no name was supplied.
    new_name = args.name
    if not new_name:
        suffix = 1
        while f"{node_class}_{suffix}" in nodes:
            suffix += 1
        new_name = f"{node_class}_{suffix}"

    if new_name in nodes:
        output_error(f"Node '{new_name}' already exists in graph", "NODE_EXISTS")
        return

    # Explicit --location wins; otherwise offset diagonally from the source.
    if args.location:
        try:
            parsed = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(parsed, dict) and 'x' in parsed and 'y' in parsed:
            location = {"x": parsed['x'], "y": parsed['y']}
        elif isinstance(parsed, list) and len(parsed) >= 2:
            location = {"x": parsed[0], "y": parsed[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return
    else:
        source_loc = source_data.get('location', {"x": 0, "y": 0})
        location = {
            "x": source_loc.get('x', 0) + 50,
            "y": source_loc.get('y', 0) + 50
        }

    # Deep-copy ports/values so later edits to the clone never alias the source.
    cloned = {
        "name": new_name,
        "nodeClass": node_class,
        "color": source_data.get('color', '#808080'),
        "hash": source_data.get('hash'),
        "links": {},  # links are intentionally not cloned
        "location": location,
        "ports": copy.deepcopy(source_data.get('ports', {"inputs": [], "outputs": []})),
        "values": copy.deepcopy(source_data.get('values', {}))
    }
    if source_data.get('tooltip'):
        cloned['tooltip'] = source_data['tooltip']

    nodes[new_name] = cloned
    graph['nodes'] = nodes

    if not save_graph_file(filepath, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(filepath),
        "sourceNode": source_node,
        "clonedNode": new_name,
        "nodeClass": node_class,
        "location": location
    })
|
|
2173
|
+
|
|
2174
|
+
|
|
2175
|
+
def cmd_graph_editor_status(args):
    """Get the status and validation errors from the graph editor.

    Reads the graph editor status file to check for validation errors in
    open editor sessions. Returns session info including any errors.
    """
    status_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-status.json')

    # A missing file is a normal state (editor never opened), not an error.
    if not os.path.exists(status_path):
        output_json({
            "status": "no_status_file",
            "message": "No graph editor status file found. The graph editor may not have been opened yet.",
            "statusPath": status_path,
            "sessions": []
        })
        return

    try:
        with open(status_path, 'r') as f:
            status_data = json.load(f)
    except json.JSONDecodeError as e:
        output_error(f"Failed to parse status file: {str(e)}", "PARSE_ERROR")
        return
    except Exception as e:
        output_error(f"Failed to read status file: {str(e)}", "READ_ERROR")
        return

    sessions = status_data.get('sessions', [])

    # Narrow to the session for a specific graph file when requested.
    if args.file:
        wanted = os.path.abspath(args.file)
        sessions = [s for s in sessions if s.get('graphPath') == wanted]

    # Summarize validation errors across the remaining sessions.
    error_counts = [len(s.get('errors', [])) for s in sessions]
    total_errors = sum(error_counts)
    sessions_with_errors = sum(1 for count in error_counts if count)

    output_json({
        "status": "ok",
        "statusPath": status_path,
        "lastUpdated": status_data.get('lastUpdated'),
        "sessionCount": len(sessions),
        "sessionsWithErrors": sessions_with_errors,
        "totalErrors": total_errors,
        "sessions": sessions
    })
|
|
1090
2227
|
|
|
1091
2228
|
|
|
@@ -1116,7 +2253,62 @@ def cmd_channels_schema(args):
|
|
|
1116
2253
|
channelId=channel_id,
|
|
1117
2254
|
fields=parse_list_arg(args.fields) if args.fields else None
|
|
1118
2255
|
)
|
|
1119
|
-
|
|
2256
|
+
|
|
2257
|
+
# Apply filters if specified
|
|
2258
|
+
if args.category or args.subcategory or args.search:
|
|
2259
|
+
filtered = []
|
|
2260
|
+
search_term = args.search.lower() if args.search else None
|
|
2261
|
+
|
|
2262
|
+
for node in result:
|
|
2263
|
+
# Filter by category
|
|
2264
|
+
if args.category and node.get('category', '').lower() != args.category.lower():
|
|
2265
|
+
continue
|
|
2266
|
+
|
|
2267
|
+
# Filter by subcategory
|
|
2268
|
+
if args.subcategory and node.get('subcategory', '').lower() != args.subcategory.lower():
|
|
2269
|
+
continue
|
|
2270
|
+
|
|
2271
|
+
# Search across multiple fields
|
|
2272
|
+
if search_term:
|
|
2273
|
+
searchable_fields = [
|
|
2274
|
+
node.get('name', ''),
|
|
2275
|
+
node.get('category', ''),
|
|
2276
|
+
node.get('subcategory', ''),
|
|
2277
|
+
node.get('tooltip', ''),
|
|
2278
|
+
node.get('description', ''),
|
|
2279
|
+
]
|
|
2280
|
+
# Also search in input/output names and descriptions
|
|
2281
|
+
for inp in node.get('inputs', []):
|
|
2282
|
+
searchable_fields.append(inp.get('name', ''))
|
|
2283
|
+
searchable_fields.append(inp.get('description', ''))
|
|
2284
|
+
for out in node.get('outputs', []):
|
|
2285
|
+
searchable_fields.append(out.get('name', ''))
|
|
2286
|
+
searchable_fields.append(out.get('description', ''))
|
|
2287
|
+
|
|
2288
|
+
combined = ' '.join(str(f) for f in searchable_fields).lower()
|
|
2289
|
+
if search_term not in combined:
|
|
2290
|
+
continue
|
|
2291
|
+
|
|
2292
|
+
filtered.append(node)
|
|
2293
|
+
|
|
2294
|
+
result = filtered
|
|
2295
|
+
|
|
2296
|
+
# List categories and subcategories if requested
|
|
2297
|
+
if args.list_categories:
|
|
2298
|
+
categories = sorted(set(node.get('category', '') for node in result if node.get('category')))
|
|
2299
|
+
subcategories = sorted(set(node.get('subcategory', '') for node in result if node.get('subcategory')))
|
|
2300
|
+
output_json({
|
|
2301
|
+
"categories": categories,
|
|
2302
|
+
"subcategories": subcategories
|
|
2303
|
+
})
|
|
2304
|
+
return
|
|
2305
|
+
|
|
2306
|
+
# Output names only if requested
|
|
2307
|
+
if args.names_only:
|
|
2308
|
+
names = sorted([node.get('name', '') for node in result])
|
|
2309
|
+
output_json(names)
|
|
2310
|
+
else:
|
|
2311
|
+
output_json(result)
|
|
1120
2312
|
|
|
1121
2313
|
|
|
1122
2314
|
def cmd_channels_nodes(args):
|
|
@@ -1456,6 +2648,104 @@ def cmd_annotations_edit(args):
|
|
|
1456
2648
|
output_json({"success": result})
|
|
1457
2649
|
|
|
1458
2650
|
|
|
2651
|
+
def cmd_annotations_view(args):
    """Generate an image with annotations overlayed.

    This command draws annotations (bounding boxes, 3D boxes, or segmentation outlines)
    on an image from a dataset. The image must be part of a dataset directory structure
    that includes annotations/ and metadata/ folders with ANA-format annotation files.
    """
    from anatools.annotations import annotations

    image_path = require_arg(args, 'imagepath', 'Image path')
    out_dir = require_arg(args, 'outdir', 'Output directory')

    # Multiple draw types may be requested as a comma-separated list.
    draw_types = parse_list_arg(args.drawtype) if args.drawtype else ['box_2d']
    valid_types = ['box_2d', 'box_3d', 'segmentation']
    for dt in draw_types:
        if dt not in valid_types:
            output_error(f"Invalid draw type '{dt}'. Must be one of: {', '.join(valid_types)}", "INVALID_DRAW_TYPE")
            sys.exit(1)

    # Optional filters restricting which objects get drawn.
    object_ids = None
    object_types = None
    if args.objectids:
        try:
            object_ids = [int(x) for x in parse_list_arg(args.objectids)]
        except ValueError:
            output_error("Object IDs must be integers", "INVALID_OBJECT_IDS")
            sys.exit(1)
    if args.objecttypes:
        object_types = parse_list_arg(args.objecttypes)

    # Colors arrive as JSON; the drawing API expects tuples, not lists.
    colors = None
    if args.colors:
        colors = parse_json_arg(args.colors)
        for key in colors:
            if isinstance(colors[key], list):
                colors[key] = tuple(colors[key])

    line_thickness = args.thickness if args.thickness else 1

    ann = annotations()

    # Map each validated draw type to the annotations method that renders it.
    renderers = {
        'box_2d': ann.bounding_box_2d,
        'box_3d': ann.bounding_box_3d,
        'segmentation': ann.segmentation,
    }

    output_paths = []
    try:
        for draw_type in draw_types:
            output_path = renderers[draw_type](
                image_path=image_path,
                out_dir=out_dir,
                object_ids=object_ids,
                object_types=object_types,
                line_thickness=line_thickness,
                colors=colors,
                quiet=True
            )
            if output_path is None:
                output_error(f"Failed to generate annotated image for draw type '{draw_type}'", "ANNOTATION_ERROR")
                sys.exit(1)
            output_paths.append(output_path)

        output_json({
            "success": True,
            "outputPaths": output_paths,
            "drawTypes": draw_types
        })
    except FileNotFoundError as e:
        output_error(f"File not found: {str(e)}", "FILE_NOT_FOUND")
        sys.exit(1)
    except Exception as e:
        output_error(f"Failed to generate annotated image: {str(e)}", "ANNOTATION_ERROR")
        sys.exit(1)
|
|
2747
|
+
|
|
2748
|
+
|
|
1459
2749
|
# =============================================================================
|
|
1460
2750
|
# ANNOTATION-MAPS
|
|
1461
2751
|
# =============================================================================
|
|
@@ -1914,6 +3204,20 @@ def cmd_preview_create(args):
|
|
|
1914
3204
|
output_json({"previewId": result})
|
|
1915
3205
|
|
|
1916
3206
|
|
|
3207
|
+
def cmd_preview_log(args):
    """Get preview job log."""
    client = get_client()
    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
    preview_id = require_arg(args, 'previewid', 'Preview ID')

    # Only request specific fields when the caller asked for them.
    fields = parse_list_arg(args.fields) if args.fields else None

    result = client.get_preview_log(
        workspaceId=workspace_id,
        previewId=preview_id,
        fields=fields
    )
    output_json(result)
|
|
3219
|
+
|
|
3220
|
+
|
|
1917
3221
|
# =============================================================================
|
|
1918
3222
|
# AGENTS
|
|
1919
3223
|
# =============================================================================
|
|
@@ -2171,9 +3475,10 @@ Examples:
|
|
|
2171
3475
|
ds_cancel.set_defaults(func=cmd_datasets_cancel)
|
|
2172
3476
|
|
|
2173
3477
|
# datasets download
|
|
2174
|
-
ds_download = datasets_sub.add_parser('download', help='Download a dataset')
|
|
3478
|
+
ds_download = datasets_sub.add_parser('download', help='Download a dataset or a single file from a dataset')
|
|
2175
3479
|
ds_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2176
3480
|
ds_download.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
3481
|
+
ds_download.add_argument('--filepath', help='Relative path to a specific file within the dataset (e.g., "images/000000-1-image.png"). If not provided, downloads the entire dataset.')
|
|
2177
3482
|
ds_download.add_argument('--outputdir', help='Output directory')
|
|
2178
3483
|
ds_download.set_defaults(func=cmd_datasets_download)
|
|
2179
3484
|
|
|
@@ -2389,12 +3694,151 @@ Examples:
|
|
|
2389
3694
|
graph_editor_sub = graph_editor.add_subparsers(dest='action', help='Action')
|
|
2390
3695
|
|
|
2391
3696
|
# graph-editor open
|
|
2392
|
-
ge_open = graph_editor_sub.add_parser('open', help='
|
|
2393
|
-
|
|
2394
|
-
ge_open.add_argument('--
|
|
2395
|
-
ge_open.add_argument('--
|
|
3697
|
+
ge_open = graph_editor_sub.add_parser('open', help='Open graph in editor (download from platform or use local files)')
|
|
3698
|
+
# Option 1: Download from platform
|
|
3699
|
+
ge_open.add_argument('--workspaceid', help='Workspace ID (use with --graphid to download from platform)')
|
|
3700
|
+
ge_open.add_argument('--graphid', help='Graph ID (use with --workspaceid to download from platform)')
|
|
3701
|
+
ge_open.add_argument('--outputdir', help='Output directory for downloaded files (default: current directory)')
|
|
3702
|
+
# Option 2: Use local files
|
|
3703
|
+
ge_open.add_argument('--graphfile', help='Path to local graph file (use with --schemafile)')
|
|
3704
|
+
ge_open.add_argument('--schemafile', help='Path to local schema file (use with --graphfile)')
|
|
2396
3705
|
ge_open.set_defaults(func=cmd_graph_editor_open)
|
|
2397
3706
|
|
|
3707
|
+
# graph-editor edit-node
|
|
3708
|
+
ge_edit_node = graph_editor_sub.add_parser('edit-node', help='Edit a node\'s values in a local graph file')
|
|
3709
|
+
ge_edit_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3710
|
+
ge_edit_node.add_argument('--node', required=True, help='Name of the node to edit')
|
|
3711
|
+
ge_edit_node.add_argument('--values', required=True, help='JSON object with values to update (e.g., \'{"param": "value"}\')')
|
|
3712
|
+
ge_edit_node.set_defaults(func=cmd_graph_editor_edit_node)
|
|
3713
|
+
|
|
3714
|
+
# graph-editor add-node
|
|
3715
|
+
ge_add_node = graph_editor_sub.add_parser('add-node', help='Add a new node to a local graph file')
|
|
3716
|
+
ge_add_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3717
|
+
ge_add_node.add_argument('--nodeclass', required=True, help='Node class to instantiate')
|
|
3718
|
+
ge_add_node.add_argument('--name', help='Custom node name (default: nodeClass_N)')
|
|
3719
|
+
ge_add_node.add_argument('--values', help='JSON object with initial values')
|
|
3720
|
+
ge_add_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3721
|
+
ge_add_node.add_argument('--color', help='Node color as hex (default: #808080)')
|
|
3722
|
+
ge_add_node.add_argument('--tooltip', help='Node tooltip/description')
|
|
3723
|
+
ge_add_node.set_defaults(func=cmd_graph_editor_add_node)
|
|
3724
|
+
|
|
3725
|
+
# graph-editor add-link
|
|
3726
|
+
ge_add_link = graph_editor_sub.add_parser('add-link', help='Add a link between two nodes')
|
|
3727
|
+
ge_add_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3728
|
+
ge_add_link.add_argument('--source', required=True, help='Source node name')
|
|
3729
|
+
ge_add_link.add_argument('--output', required=True, help='Output port name on source node')
|
|
3730
|
+
ge_add_link.add_argument('--target', required=True, help='Target node name')
|
|
3731
|
+
ge_add_link.add_argument('--input', required=True, help='Input port name on target node')
|
|
3732
|
+
ge_add_link.set_defaults(func=cmd_graph_editor_add_link)
|
|
3733
|
+
|
|
3734
|
+
# graph-editor remove-node
|
|
3735
|
+
ge_remove_node = graph_editor_sub.add_parser('remove-node', help='Remove a node and its links from a graph')
|
|
3736
|
+
ge_remove_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3737
|
+
ge_remove_node.add_argument('--node', required=True, help='Name of the node to remove')
|
|
3738
|
+
ge_remove_node.set_defaults(func=cmd_graph_editor_remove_node)
|
|
3739
|
+
|
|
3740
|
+
# graph-editor remove-link
|
|
3741
|
+
ge_remove_link = graph_editor_sub.add_parser('remove-link', help='Remove a link between two nodes')
|
|
3742
|
+
ge_remove_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3743
|
+
ge_remove_link.add_argument('--source', required=True, help='Source node name')
|
|
3744
|
+
ge_remove_link.add_argument('--output', required=True, help='Output port name on source node')
|
|
3745
|
+
ge_remove_link.add_argument('--target', required=True, help='Target node name')
|
|
3746
|
+
ge_remove_link.add_argument('--input', required=True, help='Input port name on target node')
|
|
3747
|
+
ge_remove_link.set_defaults(func=cmd_graph_editor_remove_link)
|
|
3748
|
+
|
|
3749
|
+
# graph-editor add-volume-file
|
|
3750
|
+
ge_add_vol_file = graph_editor_sub.add_parser('add-volume-file', help='Add a VolumeFile node referencing a file in a volume')
|
|
3751
|
+
ge_add_vol_file.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3752
|
+
ge_add_vol_file.add_argument('--volumeid', required=True, help='Volume UUID')
|
|
3753
|
+
ge_add_vol_file.add_argument('--path', required=True, help='File path within the volume (e.g., /models/model.blend)')
|
|
3754
|
+
ge_add_vol_file.add_argument('--name', help='Custom node name (default: VolumeFile_N)')
|
|
3755
|
+
ge_add_vol_file.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
|
|
3756
|
+
ge_add_vol_file.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3757
|
+
ge_add_vol_file.set_defaults(func=cmd_graph_editor_add_volume_file)
|
|
3758
|
+
|
|
3759
|
+
# graph-editor add-volume-directory
|
|
3760
|
+
ge_add_vol_dir = graph_editor_sub.add_parser('add-volume-directory', help='Add a VolumeDirectory node referencing a directory in a volume')
|
|
3761
|
+
ge_add_vol_dir.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3762
|
+
ge_add_vol_dir.add_argument('--volumeid', required=True, help='Volume UUID')
|
|
3763
|
+
ge_add_vol_dir.add_argument('--path', help='Directory path within the volume (default: /)')
|
|
3764
|
+
ge_add_vol_dir.add_argument('--name', help='Custom node name (default: VolumeDirectory_N)')
|
|
3765
|
+
ge_add_vol_dir.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
|
|
3766
|
+
ge_add_vol_dir.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3767
|
+
ge_add_vol_dir.set_defaults(func=cmd_graph_editor_add_volume_directory)
|
|
3768
|
+
|
|
3769
|
+
# graph-editor list-nodes
|
|
3770
|
+
ge_list_nodes = graph_editor_sub.add_parser('list-nodes', help='List all nodes in a local graph file')
|
|
3771
|
+
ge_list_nodes.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3772
|
+
ge_list_nodes.add_argument('--verbose', '-v', action='store_true', help='Include link details')
|
|
3773
|
+
ge_list_nodes.set_defaults(func=cmd_graph_editor_list_nodes)
|
|
3774
|
+
|
|
3775
|
+
# graph-editor get-node
|
|
3776
|
+
ge_get_node = graph_editor_sub.add_parser('get-node', help='Get detailed info about a specific node')
|
|
3777
|
+
ge_get_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3778
|
+
ge_get_node.add_argument('--node', required=True, help='Name of the node to inspect')
|
|
3779
|
+
ge_get_node.set_defaults(func=cmd_graph_editor_get_node)
|
|
3780
|
+
|
|
3781
|
+
# graph-editor move-node
|
|
3782
|
+
ge_move_node = graph_editor_sub.add_parser('move-node', help='Move a node to a new location')
|
|
3783
|
+
ge_move_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3784
|
+
ge_move_node.add_argument('--node', required=True, help='Name of the node to move')
|
|
3785
|
+
ge_move_node.add_argument('--location', required=True, help='New position as {"x": N, "y": N} or [x, y]')
|
|
3786
|
+
ge_move_node.set_defaults(func=cmd_graph_editor_move_node)
|
|
3787
|
+
|
|
3788
|
+
# graph-editor clone-node
|
|
3789
|
+
ge_clone_node = graph_editor_sub.add_parser('clone-node', help='Clone an existing node')
|
|
3790
|
+
ge_clone_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3791
|
+
ge_clone_node.add_argument('--source', required=True, help='Name of the node to clone')
|
|
3792
|
+
ge_clone_node.add_argument('--name', help='Name for the cloned node (default: nodeClass_N)')
|
|
3793
|
+
ge_clone_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y] (default: offset from source)')
|
|
3794
|
+
ge_clone_node.set_defaults(func=cmd_graph_editor_clone_node)
|
|
3795
|
+
|
|
3796
|
+
# graph-editor status
|
|
3797
|
+
ge_status = graph_editor_sub.add_parser('status', help='Get graph editor status and validation errors')
|
|
3798
|
+
ge_status.add_argument('--file', help='Filter to a specific graph file path')
|
|
3799
|
+
ge_status.set_defaults(func=cmd_graph_editor_status)
|
|
3800
|
+
|
|
3801
|
+
# -------------------------------------------------------------------------
|
|
3802
|
+
# DATASET-VIEWER
|
|
3803
|
+
# -------------------------------------------------------------------------
|
|
3804
|
+
dataset_viewer = subparsers.add_parser('dataset-viewer', help='Dataset annotation viewer integration')
|
|
3805
|
+
dv_sub = dataset_viewer.add_subparsers(dest='action', help='Action')
|
|
3806
|
+
|
|
3807
|
+
# dataset-viewer open
|
|
3808
|
+
dv_open = dv_sub.add_parser('open', help='Open a dataset folder in the Annotation Viewer')
|
|
3809
|
+
dv_open.add_argument('--path', required=True, help='Path to dataset directory (must contain images/ subdirectory)')
|
|
3810
|
+
dv_open.add_argument('--index', type=int, default=0, help='Initial image index (default: 0)')
|
|
3811
|
+
dv_open.set_defaults(func=cmd_dataset_viewer_open)
|
|
3812
|
+
|
|
3813
|
+
# dataset-viewer next
|
|
3814
|
+
dv_next = dv_sub.add_parser('next', help='Navigate to the next image')
|
|
3815
|
+
dv_next.set_defaults(func=cmd_dataset_viewer_next)
|
|
3816
|
+
|
|
3817
|
+
# dataset-viewer prev
|
|
3818
|
+
dv_prev = dv_sub.add_parser('prev', help='Navigate to the previous image')
|
|
3819
|
+
dv_prev.set_defaults(func=cmd_dataset_viewer_prev)
|
|
3820
|
+
|
|
3821
|
+
# dataset-viewer goto
|
|
3822
|
+
dv_goto = dv_sub.add_parser('goto', help='Navigate to a specific image by index or name')
|
|
3823
|
+
dv_goto.add_argument('--index', type=int, help='Image index (0-based)')
|
|
3824
|
+
dv_goto.add_argument('--name', help='Image filename (or partial match)')
|
|
3825
|
+
dv_goto.set_defaults(func=cmd_dataset_viewer_goto)
|
|
3826
|
+
|
|
3827
|
+
# dataset-viewer annotations
|
|
3828
|
+
dv_annotations = dv_sub.add_parser('annotations', help='Set which annotation types are displayed')
|
|
3829
|
+
dv_annotations.add_argument('--types', required=True, help='Comma-separated annotation types: bbox,bbox3d,segmentation,centroid,mask')
|
|
3830
|
+
dv_annotations.set_defaults(func=cmd_dataset_viewer_annotations)
|
|
3831
|
+
|
|
3832
|
+
# dataset-viewer filter
|
|
3833
|
+
dv_filter = dv_sub.add_parser('filter', help='Filter visible objects by type (omit --types to show all)')
|
|
3834
|
+
dv_filter.add_argument('--types', help='Comma-separated object type names to show (omit to clear filter)')
|
|
3835
|
+
dv_filter.set_defaults(func=cmd_dataset_viewer_filter)
|
|
3836
|
+
|
|
3837
|
+
# dataset-viewer status
|
|
3838
|
+
dv_status = dv_sub.add_parser('status', help='Get dataset viewer status')
|
|
3839
|
+
dv_status.add_argument('--path', help='Filter to a specific dataset path')
|
|
3840
|
+
dv_status.set_defaults(func=cmd_dataset_viewer_status)
|
|
3841
|
+
|
|
2398
3842
|
# -------------------------------------------------------------------------
|
|
2399
3843
|
# CHANNELS
|
|
2400
3844
|
# -------------------------------------------------------------------------
|
|
@@ -2414,6 +3858,11 @@ Examples:
|
|
|
2414
3858
|
ch_schema = channels_sub.add_parser('schema', help='Get channel schema')
|
|
2415
3859
|
ch_schema.add_argument('--channelid', required=True, help='Channel ID')
|
|
2416
3860
|
ch_schema.add_argument('--fields', help='Comma-separated fields to return')
|
|
3861
|
+
ch_schema.add_argument('--category', help='Filter by category (e.g., Objects, Backgrounds, Sensors)')
|
|
3862
|
+
ch_schema.add_argument('--subcategory', help='Filter by subcategory (e.g., Aircraft, Vehicles, Ships)')
|
|
3863
|
+
ch_schema.add_argument('--search', help='Case-insensitive search across name, category, subcategory, tooltip, and descriptions')
|
|
3864
|
+
ch_schema.add_argument('--names-only', action='store_true', help='Output only node names (sorted)')
|
|
3865
|
+
ch_schema.add_argument('--list-categories', action='store_true', help='List all available categories and subcategories')
|
|
2417
3866
|
ch_schema.set_defaults(func=cmd_channels_schema)
|
|
2418
3867
|
|
|
2419
3868
|
# channels nodes
|
|
@@ -2612,6 +4061,17 @@ Examples:
|
|
|
2612
4061
|
ann_edit.add_argument('--tags', required=True, help='Comma-separated tags')
|
|
2613
4062
|
ann_edit.set_defaults(func=cmd_annotations_edit)
|
|
2614
4063
|
|
|
4064
|
+
# annotations view
|
|
4065
|
+
ann_view = annotations_sub.add_parser('view', help='Generate image with annotations overlayed')
|
|
4066
|
+
ann_view.add_argument('--imagepath', required=True, help='Path to the image file in the dataset directory')
|
|
4067
|
+
ann_view.add_argument('--outdir', required=True, help='Output directory for the annotated image')
|
|
4068
|
+
ann_view.add_argument('--drawtype', default='box_2d', help='Annotation type(s) to draw: box_2d, box_3d, segmentation (comma-separated for multiple)')
|
|
4069
|
+
ann_view.add_argument('--objectids', help='Comma-separated list of object IDs to annotate (filter)')
|
|
4070
|
+
ann_view.add_argument('--objecttypes', help='Comma-separated list of object types to annotate (filter)')
|
|
4071
|
+
ann_view.add_argument('--thickness', type=int, default=1, help='Line thickness for annotations (default: 1)')
|
|
4072
|
+
ann_view.add_argument('--colors', help='JSON dict of object type to RGB color, e.g. \'{"Car": [255, 0, 0]}\'')
|
|
4073
|
+
ann_view.set_defaults(func=cmd_annotations_view)
|
|
4074
|
+
|
|
2615
4075
|
# -------------------------------------------------------------------------
|
|
2616
4076
|
# ANNOTATION MAPS
|
|
2617
4077
|
# -------------------------------------------------------------------------
|
|
@@ -2904,6 +4364,13 @@ Examples:
|
|
|
2904
4364
|
prv_create.add_argument('--graphid', required=True, help='Graph ID')
|
|
2905
4365
|
prv_create.set_defaults(func=cmd_preview_create)
|
|
2906
4366
|
|
|
4367
|
+
# preview log
|
|
4368
|
+
prv_log = preview_sub.add_parser('log', help='Get preview job log')
|
|
4369
|
+
prv_log.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
4370
|
+
prv_log.add_argument('--previewid', required=True, help='Preview ID')
|
|
4371
|
+
prv_log.add_argument('--fields', help='Comma-separated fields')
|
|
4372
|
+
prv_log.set_defaults(func=cmd_preview_log)
|
|
4373
|
+
|
|
2907
4374
|
# -------------------------------------------------------------------------
|
|
2908
4375
|
# AGENTS
|
|
2909
4376
|
# -------------------------------------------------------------------------
|