anatools 6.0.0__py3-none-any.whl → 6.0.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anatools/__init__.py +1 -1
- anatools/anaclient/api/api.py +5 -1
- anatools/anaclient/api/datasets.py +21 -3
- anatools/anaclient/datasets.py +61 -1
- anatools/annotations/annotations.py +39 -18
- anatools/annotations/draw.py +34 -18
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anadeploy +2 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/renderedai +1174 -9
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/METADATA +1 -1
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/RECORD +21 -21
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/WHEEL +1 -1
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/ana +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anamount +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anaprofile +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anarules +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anaserver +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anatransfer +0 -0
- {anatools-6.0.0.data → anatools-6.0.1.data}/scripts/anautils +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/entry_points.txt +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/licenses/LICENSE +0 -0
- {anatools-6.0.0.dist-info → anatools-6.0.1.dist-info}/top_level.txt +0 -0
|
@@ -37,7 +37,9 @@ import argparse
|
|
|
37
37
|
import json
|
|
38
38
|
import os
|
|
39
39
|
import sys
|
|
40
|
-
from typing import Any, List
|
|
40
|
+
from typing import Any, Dict, List, Optional
|
|
41
|
+
|
|
42
|
+
import yaml
|
|
41
43
|
|
|
42
44
|
|
|
43
45
|
def get_client():
|
|
@@ -104,6 +106,55 @@ def parse_list_arg(value: str) -> List[str]:
|
|
|
104
106
|
return [item.strip() for item in value.split(',')]
|
|
105
107
|
|
|
106
108
|
|
|
109
|
+
def load_graph_file(filepath: str) -> Optional[Dict[str, Any]]:
|
|
110
|
+
"""Load a graph from a YAML or JSON file.
|
|
111
|
+
|
|
112
|
+
Args:
|
|
113
|
+
filepath: Path to the graph file (.yaml, .yml, or .json)
|
|
114
|
+
|
|
115
|
+
Returns:
|
|
116
|
+
Dict containing the graph data, or None if loading failed
|
|
117
|
+
"""
|
|
118
|
+
if not os.path.exists(filepath):
|
|
119
|
+
output_error(f"File not found: {filepath}", "FILE_NOT_FOUND")
|
|
120
|
+
return None
|
|
121
|
+
|
|
122
|
+
try:
|
|
123
|
+
with open(filepath, 'r') as f:
|
|
124
|
+
if filepath.endswith('.json'):
|
|
125
|
+
return json.load(f)
|
|
126
|
+
else: # .yaml or .yml
|
|
127
|
+
return yaml.safe_load(f)
|
|
128
|
+
except (json.JSONDecodeError, yaml.YAMLError) as e:
|
|
129
|
+
output_error(f"Failed to parse file: {str(e)}", "PARSE_ERROR")
|
|
130
|
+
return None
|
|
131
|
+
except Exception as e:
|
|
132
|
+
output_error(f"Failed to read file: {str(e)}", "READ_ERROR")
|
|
133
|
+
return None
|
|
134
|
+
|
|
135
|
+
|
|
136
|
+
def save_graph_file(filepath: str, data: Dict[str, Any]) -> bool:
|
|
137
|
+
"""Save a graph to a YAML or JSON file.
|
|
138
|
+
|
|
139
|
+
Args:
|
|
140
|
+
filepath: Path to save the graph file (.yaml, .yml, or .json)
|
|
141
|
+
data: Graph data to save
|
|
142
|
+
|
|
143
|
+
Returns:
|
|
144
|
+
True if save succeeded, False otherwise
|
|
145
|
+
"""
|
|
146
|
+
try:
|
|
147
|
+
with open(filepath, 'w') as f:
|
|
148
|
+
if filepath.endswith('.json'):
|
|
149
|
+
json.dump(data, f, indent=2)
|
|
150
|
+
else: # .yaml or .yml
|
|
151
|
+
yaml.dump(data, f, default_flow_style=False, sort_keys=False, allow_unicode=True)
|
|
152
|
+
return True
|
|
153
|
+
except Exception as e:
|
|
154
|
+
output_error(f"Failed to save file: {str(e)}", "WRITE_ERROR")
|
|
155
|
+
return False
|
|
156
|
+
|
|
157
|
+
|
|
107
158
|
# =============================================================================
|
|
108
159
|
# WORKSPACES
|
|
109
160
|
# =============================================================================
|
|
@@ -277,16 +328,25 @@ def cmd_datasets_cancel(args):
|
|
|
277
328
|
|
|
278
329
|
|
|
279
330
|
def cmd_datasets_download(args):
|
|
280
|
-
"""Download a dataset."""
|
|
331
|
+
"""Download a dataset or a single file from a dataset."""
|
|
281
332
|
client = get_client()
|
|
282
333
|
workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
|
|
283
334
|
dataset_id = require_arg(args, 'datasetid', 'Dataset ID')
|
|
284
335
|
|
|
285
|
-
|
|
286
|
-
|
|
287
|
-
|
|
288
|
-
|
|
289
|
-
|
|
336
|
+
# If filepath is provided, download a single file; otherwise download the entire dataset
|
|
337
|
+
if args.filepath:
|
|
338
|
+
result = client.download_dataset_file(
|
|
339
|
+
workspaceId=workspace_id,
|
|
340
|
+
datasetId=dataset_id,
|
|
341
|
+
filepath=args.filepath,
|
|
342
|
+
localDir=args.outputdir
|
|
343
|
+
)
|
|
344
|
+
else:
|
|
345
|
+
result = client.download_dataset(
|
|
346
|
+
workspaceId=workspace_id,
|
|
347
|
+
datasetId=dataset_id,
|
|
348
|
+
localDir=args.outputdir
|
|
349
|
+
)
|
|
290
350
|
output_json({"downloadPath": result})
|
|
291
351
|
|
|
292
352
|
|
|
@@ -1089,6 +1149,847 @@ def cmd_graph_editor_open(args):
|
|
|
1089
1149
|
})
|
|
1090
1150
|
|
|
1091
1151
|
|
|
1152
|
+
def cmd_graph_editor_edit_node(args):
|
|
1153
|
+
"""Edit a node's values in a local graph file.
|
|
1154
|
+
|
|
1155
|
+
Modifies the values of an existing node in the graph. Use --values to pass
|
|
1156
|
+
a JSON object with the key-value pairs to update. Existing values not
|
|
1157
|
+
specified in --values are preserved.
|
|
1158
|
+
"""
|
|
1159
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1160
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1161
|
+
values_json = require_arg(args, 'values', 'Values JSON')
|
|
1162
|
+
|
|
1163
|
+
graph = load_graph_file(filepath)
|
|
1164
|
+
if graph is None:
|
|
1165
|
+
return
|
|
1166
|
+
|
|
1167
|
+
nodes = graph.get('nodes', {})
|
|
1168
|
+
if node_name not in nodes:
|
|
1169
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1170
|
+
return
|
|
1171
|
+
|
|
1172
|
+
# Parse the values JSON
|
|
1173
|
+
try:
|
|
1174
|
+
new_values = json.loads(values_json)
|
|
1175
|
+
except json.JSONDecodeError as e:
|
|
1176
|
+
output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
|
|
1177
|
+
return
|
|
1178
|
+
|
|
1179
|
+
if not isinstance(new_values, dict):
|
|
1180
|
+
output_error("--values must be a JSON object", "INVALID_VALUES")
|
|
1181
|
+
return
|
|
1182
|
+
|
|
1183
|
+
# Update the node's values
|
|
1184
|
+
node = nodes[node_name]
|
|
1185
|
+
if 'values' not in node:
|
|
1186
|
+
node['values'] = {}
|
|
1187
|
+
|
|
1188
|
+
node['values'].update(new_values)
|
|
1189
|
+
|
|
1190
|
+
if not save_graph_file(filepath, graph):
|
|
1191
|
+
return
|
|
1192
|
+
|
|
1193
|
+
output_json({
|
|
1194
|
+
"success": True,
|
|
1195
|
+
"file": os.path.abspath(filepath),
|
|
1196
|
+
"node": node_name,
|
|
1197
|
+
"updatedValues": new_values,
|
|
1198
|
+
"allValues": node['values']
|
|
1199
|
+
})
|
|
1200
|
+
|
|
1201
|
+
|
|
1202
|
+
def cmd_graph_editor_add_node(args):
|
|
1203
|
+
"""Add a new node to a local graph file.
|
|
1204
|
+
|
|
1205
|
+
Adds a node with the specified nodeClass. Use --name for a custom node name
|
|
1206
|
+
(defaults to nodeClass_N where N is auto-incremented). Use --values to set
|
|
1207
|
+
initial parameter values, --location to set x,y position.
|
|
1208
|
+
"""
|
|
1209
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1210
|
+
node_class = require_arg(args, 'nodeclass', 'Node class')
|
|
1211
|
+
|
|
1212
|
+
graph = load_graph_file(filepath)
|
|
1213
|
+
if graph is None:
|
|
1214
|
+
return
|
|
1215
|
+
|
|
1216
|
+
nodes = graph.get('nodes', {})
|
|
1217
|
+
|
|
1218
|
+
# Generate node name if not provided
|
|
1219
|
+
node_name = args.name
|
|
1220
|
+
if not node_name:
|
|
1221
|
+
# Find the next available number for this node class
|
|
1222
|
+
counter = 1
|
|
1223
|
+
while f"{node_class}_{counter}" in nodes:
|
|
1224
|
+
counter += 1
|
|
1225
|
+
node_name = f"{node_class}_{counter}"
|
|
1226
|
+
|
|
1227
|
+
if node_name in nodes:
|
|
1228
|
+
output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
|
|
1229
|
+
return
|
|
1230
|
+
|
|
1231
|
+
# Parse optional values
|
|
1232
|
+
values = {}
|
|
1233
|
+
if args.values:
|
|
1234
|
+
try:
|
|
1235
|
+
values = json.loads(args.values)
|
|
1236
|
+
if not isinstance(values, dict):
|
|
1237
|
+
output_error("--values must be a JSON object", "INVALID_VALUES")
|
|
1238
|
+
return
|
|
1239
|
+
except json.JSONDecodeError as e:
|
|
1240
|
+
output_error(f"Invalid JSON for --values: {str(e)}", "INVALID_JSON")
|
|
1241
|
+
return
|
|
1242
|
+
|
|
1243
|
+
# Parse optional location
|
|
1244
|
+
location = {"x": 0, "y": 0}
|
|
1245
|
+
if args.location:
|
|
1246
|
+
try:
|
|
1247
|
+
loc = json.loads(args.location)
|
|
1248
|
+
if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
|
|
1249
|
+
location = {"x": loc['x'], "y": loc['y']}
|
|
1250
|
+
elif isinstance(loc, list) and len(loc) >= 2:
|
|
1251
|
+
location = {"x": loc[0], "y": loc[1]}
|
|
1252
|
+
else:
|
|
1253
|
+
output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
|
|
1254
|
+
return
|
|
1255
|
+
except json.JSONDecodeError as e:
|
|
1256
|
+
output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
|
|
1257
|
+
return
|
|
1258
|
+
|
|
1259
|
+
# Create the new node
|
|
1260
|
+
new_node = {
|
|
1261
|
+
"name": node_name,
|
|
1262
|
+
"nodeClass": node_class,
|
|
1263
|
+
"color": args.color or "#808080",
|
|
1264
|
+
"links": {},
|
|
1265
|
+
"location": location,
|
|
1266
|
+
"ports": {
|
|
1267
|
+
"inputs": [],
|
|
1268
|
+
"outputs": []
|
|
1269
|
+
},
|
|
1270
|
+
"values": values
|
|
1271
|
+
}
|
|
1272
|
+
|
|
1273
|
+
if args.tooltip:
|
|
1274
|
+
new_node["tooltip"] = args.tooltip
|
|
1275
|
+
|
|
1276
|
+
nodes[node_name] = new_node
|
|
1277
|
+
graph['nodes'] = nodes
|
|
1278
|
+
|
|
1279
|
+
if not save_graph_file(filepath, graph):
|
|
1280
|
+
return
|
|
1281
|
+
|
|
1282
|
+
output_json({
|
|
1283
|
+
"success": True,
|
|
1284
|
+
"file": os.path.abspath(filepath),
|
|
1285
|
+
"node": node_name,
|
|
1286
|
+
"nodeClass": node_class,
|
|
1287
|
+
"location": location,
|
|
1288
|
+
"values": values
|
|
1289
|
+
})
|
|
1290
|
+
|
|
1291
|
+
|
|
1292
|
+
def cmd_graph_editor_add_link(args):
|
|
1293
|
+
"""Add a link between two nodes in a local graph file.
|
|
1294
|
+
|
|
1295
|
+
Creates a connection from a source node's output port to a target node's
|
|
1296
|
+
input port. The link is stored on the target node under its links property.
|
|
1297
|
+
"""
|
|
1298
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1299
|
+
source_node = require_arg(args, 'source', 'Source node name')
|
|
1300
|
+
output_port = require_arg(args, 'output', 'Output port name')
|
|
1301
|
+
target_node = require_arg(args, 'target', 'Target node name')
|
|
1302
|
+
input_port = require_arg(args, 'input', 'Input port name')
|
|
1303
|
+
|
|
1304
|
+
graph = load_graph_file(filepath)
|
|
1305
|
+
if graph is None:
|
|
1306
|
+
return
|
|
1307
|
+
|
|
1308
|
+
nodes = graph.get('nodes', {})
|
|
1309
|
+
|
|
1310
|
+
# Validate source node exists
|
|
1311
|
+
if source_node not in nodes:
|
|
1312
|
+
output_error(f"Source node '{source_node}' not found in graph", "SOURCE_NOT_FOUND")
|
|
1313
|
+
return
|
|
1314
|
+
|
|
1315
|
+
# Validate target node exists
|
|
1316
|
+
if target_node not in nodes:
|
|
1317
|
+
output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
|
|
1318
|
+
return
|
|
1319
|
+
|
|
1320
|
+
target = nodes[target_node]
|
|
1321
|
+
|
|
1322
|
+
# Initialize links if not present
|
|
1323
|
+
if 'links' not in target:
|
|
1324
|
+
target['links'] = {}
|
|
1325
|
+
|
|
1326
|
+
# Add or append to the input port's links
|
|
1327
|
+
if input_port not in target['links']:
|
|
1328
|
+
target['links'][input_port] = []
|
|
1329
|
+
|
|
1330
|
+
# Check if this exact link already exists
|
|
1331
|
+
new_link = {
|
|
1332
|
+
"outputPort": output_port,
|
|
1333
|
+
"sourceNode": source_node
|
|
1334
|
+
}
|
|
1335
|
+
|
|
1336
|
+
for existing_link in target['links'][input_port]:
|
|
1337
|
+
if (existing_link.get('outputPort') == output_port and
|
|
1338
|
+
existing_link.get('sourceNode') == source_node):
|
|
1339
|
+
output_error(
|
|
1340
|
+
f"Link already exists: {source_node}.{output_port} -> {target_node}.{input_port}",
|
|
1341
|
+
"LINK_EXISTS"
|
|
1342
|
+
)
|
|
1343
|
+
return
|
|
1344
|
+
|
|
1345
|
+
target['links'][input_port].append(new_link)
|
|
1346
|
+
|
|
1347
|
+
if not save_graph_file(filepath, graph):
|
|
1348
|
+
return
|
|
1349
|
+
|
|
1350
|
+
output_json({
|
|
1351
|
+
"success": True,
|
|
1352
|
+
"file": os.path.abspath(filepath),
|
|
1353
|
+
"link": {
|
|
1354
|
+
"source": source_node,
|
|
1355
|
+
"outputPort": output_port,
|
|
1356
|
+
"target": target_node,
|
|
1357
|
+
"inputPort": input_port
|
|
1358
|
+
}
|
|
1359
|
+
})
|
|
1360
|
+
|
|
1361
|
+
|
|
1362
|
+
def cmd_graph_editor_remove_node(args):
|
|
1363
|
+
"""Remove a node from a local graph file.
|
|
1364
|
+
|
|
1365
|
+
Removes the specified node and all links connected to it (both incoming
|
|
1366
|
+
links on this node and outgoing links from other nodes that reference it).
|
|
1367
|
+
"""
|
|
1368
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1369
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1370
|
+
|
|
1371
|
+
graph = load_graph_file(filepath)
|
|
1372
|
+
if graph is None:
|
|
1373
|
+
return
|
|
1374
|
+
|
|
1375
|
+
nodes = graph.get('nodes', {})
|
|
1376
|
+
|
|
1377
|
+
if node_name not in nodes:
|
|
1378
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1379
|
+
return
|
|
1380
|
+
|
|
1381
|
+
# Remove the node
|
|
1382
|
+
removed_node = nodes.pop(node_name)
|
|
1383
|
+
|
|
1384
|
+
# Remove all links that reference this node from other nodes
|
|
1385
|
+
removed_links = []
|
|
1386
|
+
for other_name, other_node in nodes.items():
|
|
1387
|
+
links = other_node.get('links', {})
|
|
1388
|
+
for port_name, port_links in list(links.items()):
|
|
1389
|
+
original_count = len(port_links)
|
|
1390
|
+
port_links[:] = [
|
|
1391
|
+
link for link in port_links
|
|
1392
|
+
if link.get('sourceNode') != node_name
|
|
1393
|
+
]
|
|
1394
|
+
if len(port_links) < original_count:
|
|
1395
|
+
removed_links.append({
|
|
1396
|
+
"target": other_name,
|
|
1397
|
+
"inputPort": port_name,
|
|
1398
|
+
"source": node_name
|
|
1399
|
+
})
|
|
1400
|
+
# Clean up empty link lists
|
|
1401
|
+
if not port_links:
|
|
1402
|
+
del links[port_name]
|
|
1403
|
+
|
|
1404
|
+
if not save_graph_file(filepath, graph):
|
|
1405
|
+
return
|
|
1406
|
+
|
|
1407
|
+
output_json({
|
|
1408
|
+
"success": True,
|
|
1409
|
+
"file": os.path.abspath(filepath),
|
|
1410
|
+
"removedNode": node_name,
|
|
1411
|
+
"nodeClass": removed_node.get('nodeClass'),
|
|
1412
|
+
"removedLinks": removed_links
|
|
1413
|
+
})
|
|
1414
|
+
|
|
1415
|
+
|
|
1416
|
+
def cmd_graph_editor_remove_link(args):
|
|
1417
|
+
"""Remove a link between two nodes in a local graph file.
|
|
1418
|
+
|
|
1419
|
+
Removes the connection from the source node's output port to the target
|
|
1420
|
+
node's input port.
|
|
1421
|
+
"""
|
|
1422
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1423
|
+
source_node = require_arg(args, 'source', 'Source node name')
|
|
1424
|
+
output_port = require_arg(args, 'output', 'Output port name')
|
|
1425
|
+
target_node = require_arg(args, 'target', 'Target node name')
|
|
1426
|
+
input_port = require_arg(args, 'input', 'Input port name')
|
|
1427
|
+
|
|
1428
|
+
graph = load_graph_file(filepath)
|
|
1429
|
+
if graph is None:
|
|
1430
|
+
return
|
|
1431
|
+
|
|
1432
|
+
nodes = graph.get('nodes', {})
|
|
1433
|
+
|
|
1434
|
+
# Validate target node exists
|
|
1435
|
+
if target_node not in nodes:
|
|
1436
|
+
output_error(f"Target node '{target_node}' not found in graph", "TARGET_NOT_FOUND")
|
|
1437
|
+
return
|
|
1438
|
+
|
|
1439
|
+
target = nodes[target_node]
|
|
1440
|
+
links = target.get('links', {})
|
|
1441
|
+
|
|
1442
|
+
if input_port not in links:
|
|
1443
|
+
output_error(
|
|
1444
|
+
f"No links found on input port '{input_port}' of node '{target_node}'",
|
|
1445
|
+
"LINK_NOT_FOUND"
|
|
1446
|
+
)
|
|
1447
|
+
return
|
|
1448
|
+
|
|
1449
|
+
# Find and remove the specific link
|
|
1450
|
+
port_links = links[input_port]
|
|
1451
|
+
original_count = len(port_links)
|
|
1452
|
+
|
|
1453
|
+
port_links[:] = [
|
|
1454
|
+
link for link in port_links
|
|
1455
|
+
if not (link.get('outputPort') == output_port and
|
|
1456
|
+
link.get('sourceNode') == source_node)
|
|
1457
|
+
]
|
|
1458
|
+
|
|
1459
|
+
if len(port_links) == original_count:
|
|
1460
|
+
output_error(
|
|
1461
|
+
f"Link not found: {source_node}.{output_port} -> {target_node}.{input_port}",
|
|
1462
|
+
"LINK_NOT_FOUND"
|
|
1463
|
+
)
|
|
1464
|
+
return
|
|
1465
|
+
|
|
1466
|
+
# Clean up empty link lists
|
|
1467
|
+
if not port_links:
|
|
1468
|
+
del links[input_port]
|
|
1469
|
+
|
|
1470
|
+
if not save_graph_file(filepath, graph):
|
|
1471
|
+
return
|
|
1472
|
+
|
|
1473
|
+
output_json({
|
|
1474
|
+
"success": True,
|
|
1475
|
+
"file": os.path.abspath(filepath),
|
|
1476
|
+
"removedLink": {
|
|
1477
|
+
"source": source_node,
|
|
1478
|
+
"outputPort": output_port,
|
|
1479
|
+
"target": target_node,
|
|
1480
|
+
"inputPort": input_port
|
|
1481
|
+
}
|
|
1482
|
+
})
|
|
1483
|
+
|
|
1484
|
+
|
|
1485
|
+
def cmd_graph_editor_add_volume_file(args):
|
|
1486
|
+
"""Add a VolumeFile node to a local graph file.
|
|
1487
|
+
|
|
1488
|
+
Creates a VolumeFile node that references a file in a Rendered.ai volume.
|
|
1489
|
+
The volume reference uses the format volumeId:/path/to/file.
|
|
1490
|
+
"""
|
|
1491
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1492
|
+
volume_id = require_arg(args, 'volumeid', 'Volume ID')
|
|
1493
|
+
volume_path = require_arg(args, 'path', 'File path in volume')
|
|
1494
|
+
|
|
1495
|
+
graph = load_graph_file(filepath)
|
|
1496
|
+
if graph is None:
|
|
1497
|
+
return
|
|
1498
|
+
|
|
1499
|
+
nodes = graph.get('nodes', {})
|
|
1500
|
+
|
|
1501
|
+
# Generate node name if not provided
|
|
1502
|
+
node_name = args.name
|
|
1503
|
+
if not node_name:
|
|
1504
|
+
counter = 1
|
|
1505
|
+
while f"VolumeFile_{counter}" in nodes:
|
|
1506
|
+
counter += 1
|
|
1507
|
+
node_name = f"VolumeFile_{counter}"
|
|
1508
|
+
|
|
1509
|
+
if node_name in nodes:
|
|
1510
|
+
output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
|
|
1511
|
+
return
|
|
1512
|
+
|
|
1513
|
+
# Parse optional location
|
|
1514
|
+
location = {"x": 0, "y": 0}
|
|
1515
|
+
if args.location:
|
|
1516
|
+
try:
|
|
1517
|
+
loc = json.loads(args.location)
|
|
1518
|
+
if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
|
|
1519
|
+
location = {"x": loc['x'], "y": loc['y']}
|
|
1520
|
+
elif isinstance(loc, list) and len(loc) >= 2:
|
|
1521
|
+
location = {"x": loc[0], "y": loc[1]}
|
|
1522
|
+
else:
|
|
1523
|
+
output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
|
|
1524
|
+
return
|
|
1525
|
+
except json.JSONDecodeError as e:
|
|
1526
|
+
output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
|
|
1527
|
+
return
|
|
1528
|
+
|
|
1529
|
+
# Normalize path (ensure it starts with /)
|
|
1530
|
+
if not volume_path.startswith('/'):
|
|
1531
|
+
volume_path = '/' + volume_path
|
|
1532
|
+
|
|
1533
|
+
# Build the volume reference
|
|
1534
|
+
volume_ref = f"{volume_id}:{volume_path}"
|
|
1535
|
+
|
|
1536
|
+
# Build tooltip (use volume name if provided, otherwise volume ID)
|
|
1537
|
+
volume_display = args.volumename or volume_id
|
|
1538
|
+
tooltip = f"{volume_display}:{volume_path}"
|
|
1539
|
+
|
|
1540
|
+
# Create the VolumeFile node
|
|
1541
|
+
new_node = {
|
|
1542
|
+
"name": node_name,
|
|
1543
|
+
"nodeClass": "VolumeFile",
|
|
1544
|
+
"color": "#246BB3",
|
|
1545
|
+
"hash": "8d56c9b8e4bae85fd61620e1d4d44a24",
|
|
1546
|
+
"links": {},
|
|
1547
|
+
"location": location,
|
|
1548
|
+
"ports": {
|
|
1549
|
+
"inputs": [
|
|
1550
|
+
{
|
|
1551
|
+
"name": "File",
|
|
1552
|
+
"description": "",
|
|
1553
|
+
"default": volume_ref,
|
|
1554
|
+
"hidden": True
|
|
1555
|
+
}
|
|
1556
|
+
],
|
|
1557
|
+
"outputs": [
|
|
1558
|
+
{
|
|
1559
|
+
"name": "File",
|
|
1560
|
+
"description": ""
|
|
1561
|
+
}
|
|
1562
|
+
]
|
|
1563
|
+
},
|
|
1564
|
+
"tooltip": tooltip,
|
|
1565
|
+
"values": {
|
|
1566
|
+
"File": volume_ref
|
|
1567
|
+
}
|
|
1568
|
+
}
|
|
1569
|
+
|
|
1570
|
+
nodes[node_name] = new_node
|
|
1571
|
+
graph['nodes'] = nodes
|
|
1572
|
+
|
|
1573
|
+
if not save_graph_file(filepath, graph):
|
|
1574
|
+
return
|
|
1575
|
+
|
|
1576
|
+
output_json({
|
|
1577
|
+
"success": True,
|
|
1578
|
+
"file": os.path.abspath(filepath),
|
|
1579
|
+
"node": node_name,
|
|
1580
|
+
"nodeClass": "VolumeFile",
|
|
1581
|
+
"volumeId": volume_id,
|
|
1582
|
+
"path": volume_path,
|
|
1583
|
+
"volumeRef": volume_ref,
|
|
1584
|
+
"location": location
|
|
1585
|
+
})
|
|
1586
|
+
|
|
1587
|
+
|
|
1588
|
+
def cmd_graph_editor_add_volume_directory(args):
|
|
1589
|
+
"""Add a VolumeDirectory node to a local graph file.
|
|
1590
|
+
|
|
1591
|
+
Creates a VolumeDirectory node that references a directory in a Rendered.ai volume.
|
|
1592
|
+
The volume reference uses the format volumeId:/path/to/directory.
|
|
1593
|
+
"""
|
|
1594
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1595
|
+
volume_id = require_arg(args, 'volumeid', 'Volume ID')
|
|
1596
|
+
volume_path = args.path or '/'
|
|
1597
|
+
|
|
1598
|
+
graph = load_graph_file(filepath)
|
|
1599
|
+
if graph is None:
|
|
1600
|
+
return
|
|
1601
|
+
|
|
1602
|
+
nodes = graph.get('nodes', {})
|
|
1603
|
+
|
|
1604
|
+
# Generate node name if not provided
|
|
1605
|
+
node_name = args.name
|
|
1606
|
+
if not node_name:
|
|
1607
|
+
counter = 1
|
|
1608
|
+
while f"VolumeDirectory_{counter}" in nodes:
|
|
1609
|
+
counter += 1
|
|
1610
|
+
node_name = f"VolumeDirectory_{counter}"
|
|
1611
|
+
|
|
1612
|
+
if node_name in nodes:
|
|
1613
|
+
output_error(f"Node '{node_name}' already exists in graph", "NODE_EXISTS")
|
|
1614
|
+
return
|
|
1615
|
+
|
|
1616
|
+
# Parse optional location
|
|
1617
|
+
location = {"x": 0, "y": 0}
|
|
1618
|
+
if args.location:
|
|
1619
|
+
try:
|
|
1620
|
+
loc = json.loads(args.location)
|
|
1621
|
+
if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
|
|
1622
|
+
location = {"x": loc['x'], "y": loc['y']}
|
|
1623
|
+
elif isinstance(loc, list) and len(loc) >= 2:
|
|
1624
|
+
location = {"x": loc[0], "y": loc[1]}
|
|
1625
|
+
else:
|
|
1626
|
+
output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
|
|
1627
|
+
return
|
|
1628
|
+
except json.JSONDecodeError as e:
|
|
1629
|
+
output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
|
|
1630
|
+
return
|
|
1631
|
+
|
|
1632
|
+
# Normalize path (ensure it starts with /)
|
|
1633
|
+
if not volume_path.startswith('/'):
|
|
1634
|
+
volume_path = '/' + volume_path
|
|
1635
|
+
|
|
1636
|
+
# Build the volume reference
|
|
1637
|
+
volume_ref = f"{volume_id}:{volume_path}"
|
|
1638
|
+
|
|
1639
|
+
# Build tooltip (use volume name if provided, otherwise volume ID)
|
|
1640
|
+
volume_display = args.volumename or volume_id
|
|
1641
|
+
tooltip = f"{volume_display}:{volume_path}"
|
|
1642
|
+
|
|
1643
|
+
# Create the VolumeDirectory node
|
|
1644
|
+
new_node = {
|
|
1645
|
+
"name": node_name,
|
|
1646
|
+
"nodeClass": "VolumeDirectory",
|
|
1647
|
+
"color": "#246BB3",
|
|
1648
|
+
"hash": "a7c19eb160150ee04d82af60c9332d104f0a7f89",
|
|
1649
|
+
"links": {},
|
|
1650
|
+
"location": location,
|
|
1651
|
+
"ports": {
|
|
1652
|
+
"inputs": [
|
|
1653
|
+
{
|
|
1654
|
+
"name": "Directory",
|
|
1655
|
+
"description": "",
|
|
1656
|
+
"default": volume_ref,
|
|
1657
|
+
"hidden": True
|
|
1658
|
+
}
|
|
1659
|
+
],
|
|
1660
|
+
"outputs": [
|
|
1661
|
+
{
|
|
1662
|
+
"name": "Directory",
|
|
1663
|
+
"description": ""
|
|
1664
|
+
}
|
|
1665
|
+
]
|
|
1666
|
+
},
|
|
1667
|
+
"tooltip": tooltip,
|
|
1668
|
+
"values": {
|
|
1669
|
+
"Directory": volume_ref
|
|
1670
|
+
}
|
|
1671
|
+
}
|
|
1672
|
+
|
|
1673
|
+
nodes[node_name] = new_node
|
|
1674
|
+
graph['nodes'] = nodes
|
|
1675
|
+
|
|
1676
|
+
if not save_graph_file(filepath, graph):
|
|
1677
|
+
return
|
|
1678
|
+
|
|
1679
|
+
output_json({
|
|
1680
|
+
"success": True,
|
|
1681
|
+
"file": os.path.abspath(filepath),
|
|
1682
|
+
"node": node_name,
|
|
1683
|
+
"nodeClass": "VolumeDirectory",
|
|
1684
|
+
"volumeId": volume_id,
|
|
1685
|
+
"path": volume_path,
|
|
1686
|
+
"volumeRef": volume_ref,
|
|
1687
|
+
"location": location
|
|
1688
|
+
})
|
|
1689
|
+
|
|
1690
|
+
|
|
1691
|
+
def cmd_graph_editor_list_nodes(args):
|
|
1692
|
+
"""List all nodes in a local graph file.
|
|
1693
|
+
|
|
1694
|
+
Returns a summary of all nodes including their name, class, location,
|
|
1695
|
+
and connection counts. Useful for understanding graph structure.
|
|
1696
|
+
"""
|
|
1697
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1698
|
+
|
|
1699
|
+
graph = load_graph_file(filepath)
|
|
1700
|
+
if graph is None:
|
|
1701
|
+
return
|
|
1702
|
+
|
|
1703
|
+
nodes = graph.get('nodes', {})
|
|
1704
|
+
|
|
1705
|
+
# Build summary for each node
|
|
1706
|
+
node_list = []
|
|
1707
|
+
for node_name, node_data in nodes.items():
|
|
1708
|
+
# Count incoming links
|
|
1709
|
+
incoming_links = 0
|
|
1710
|
+
link_sources = []
|
|
1711
|
+
for port_name, port_links in node_data.get('links', {}).items():
|
|
1712
|
+
incoming_links += len(port_links)
|
|
1713
|
+
for link in port_links:
|
|
1714
|
+
link_sources.append(f"{link.get('sourceNode')}.{link.get('outputPort')}")
|
|
1715
|
+
|
|
1716
|
+
# Count outgoing links (links from other nodes to this one)
|
|
1717
|
+
outgoing_links = 0
|
|
1718
|
+
link_targets = []
|
|
1719
|
+
for other_name, other_data in nodes.items():
|
|
1720
|
+
if other_name == node_name:
|
|
1721
|
+
continue
|
|
1722
|
+
for port_name, port_links in other_data.get('links', {}).items():
|
|
1723
|
+
for link in port_links:
|
|
1724
|
+
if link.get('sourceNode') == node_name:
|
|
1725
|
+
outgoing_links += 1
|
|
1726
|
+
link_targets.append(f"{other_name}.{port_name}")
|
|
1727
|
+
|
|
1728
|
+
node_summary = {
|
|
1729
|
+
"name": node_name,
|
|
1730
|
+
"nodeClass": node_data.get('nodeClass'),
|
|
1731
|
+
"location": node_data.get('location'),
|
|
1732
|
+
"incomingLinks": incoming_links,
|
|
1733
|
+
"outgoingLinks": outgoing_links,
|
|
1734
|
+
"hasValues": bool(node_data.get('values'))
|
|
1735
|
+
}
|
|
1736
|
+
|
|
1737
|
+
if args.verbose:
|
|
1738
|
+
node_summary["linkSources"] = link_sources
|
|
1739
|
+
node_summary["linkTargets"] = link_targets
|
|
1740
|
+
|
|
1741
|
+
node_list.append(node_summary)
|
|
1742
|
+
|
|
1743
|
+
# Sort by name for consistent output
|
|
1744
|
+
node_list.sort(key=lambda x: x['name'])
|
|
1745
|
+
|
|
1746
|
+
output_json({
|
|
1747
|
+
"file": os.path.abspath(filepath),
|
|
1748
|
+
"nodeCount": len(node_list),
|
|
1749
|
+
"nodes": node_list
|
|
1750
|
+
})
|
|
1751
|
+
|
|
1752
|
+
|
|
1753
|
+
def cmd_graph_editor_get_node(args):
|
|
1754
|
+
"""Get detailed information about a specific node in a local graph file.
|
|
1755
|
+
|
|
1756
|
+
Returns full node data including all values, ports, links, and metadata.
|
|
1757
|
+
"""
|
|
1758
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1759
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1760
|
+
|
|
1761
|
+
graph = load_graph_file(filepath)
|
|
1762
|
+
if graph is None:
|
|
1763
|
+
return
|
|
1764
|
+
|
|
1765
|
+
nodes = graph.get('nodes', {})
|
|
1766
|
+
|
|
1767
|
+
if node_name not in nodes:
|
|
1768
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1769
|
+
return
|
|
1770
|
+
|
|
1771
|
+
node_data = nodes[node_name]
|
|
1772
|
+
|
|
1773
|
+
# Find outgoing links (other nodes that link to this one)
|
|
1774
|
+
outgoing_links = []
|
|
1775
|
+
for other_name, other_data in nodes.items():
|
|
1776
|
+
if other_name == node_name:
|
|
1777
|
+
continue
|
|
1778
|
+
for port_name, port_links in other_data.get('links', {}).items():
|
|
1779
|
+
for link in port_links:
|
|
1780
|
+
if link.get('sourceNode') == node_name:
|
|
1781
|
+
outgoing_links.append({
|
|
1782
|
+
"targetNode": other_name,
|
|
1783
|
+
"targetPort": port_name,
|
|
1784
|
+
"outputPort": link.get('outputPort')
|
|
1785
|
+
})
|
|
1786
|
+
|
|
1787
|
+
output_json({
|
|
1788
|
+
"file": os.path.abspath(filepath),
|
|
1789
|
+
"node": node_name,
|
|
1790
|
+
"nodeClass": node_data.get('nodeClass'),
|
|
1791
|
+
"color": node_data.get('color'),
|
|
1792
|
+
"location": node_data.get('location'),
|
|
1793
|
+
"tooltip": node_data.get('tooltip'),
|
|
1794
|
+
"hash": node_data.get('hash'),
|
|
1795
|
+
"values": node_data.get('values', {}),
|
|
1796
|
+
"ports": node_data.get('ports', {}),
|
|
1797
|
+
"incomingLinks": node_data.get('links', {}),
|
|
1798
|
+
"outgoingLinks": outgoing_links
|
|
1799
|
+
})
|
|
1800
|
+
|
|
1801
|
+
|
|
1802
|
+
def cmd_graph_editor_move_node(args):
|
|
1803
|
+
"""Move a node to a new location in a local graph file.
|
|
1804
|
+
|
|
1805
|
+
Updates the node's x,y coordinates for visual positioning in the graph editor.
|
|
1806
|
+
"""
|
|
1807
|
+
filepath = require_arg(args, 'file', 'Graph file path')
|
|
1808
|
+
node_name = require_arg(args, 'node', 'Node name')
|
|
1809
|
+
location_str = require_arg(args, 'location', 'Location')
|
|
1810
|
+
|
|
1811
|
+
graph = load_graph_file(filepath)
|
|
1812
|
+
if graph is None:
|
|
1813
|
+
return
|
|
1814
|
+
|
|
1815
|
+
nodes = graph.get('nodes', {})
|
|
1816
|
+
|
|
1817
|
+
if node_name not in nodes:
|
|
1818
|
+
output_error(f"Node '{node_name}' not found in graph", "NODE_NOT_FOUND")
|
|
1819
|
+
return
|
|
1820
|
+
|
|
1821
|
+
# Parse location
|
|
1822
|
+
try:
|
|
1823
|
+
loc = json.loads(location_str)
|
|
1824
|
+
if isinstance(loc, dict) and 'x' in loc and 'y' in loc:
|
|
1825
|
+
location = {"x": loc['x'], "y": loc['y']}
|
|
1826
|
+
elif isinstance(loc, list) and len(loc) >= 2:
|
|
1827
|
+
location = {"x": loc[0], "y": loc[1]}
|
|
1828
|
+
else:
|
|
1829
|
+
output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
|
|
1830
|
+
return
|
|
1831
|
+
except json.JSONDecodeError as e:
|
|
1832
|
+
output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
|
|
1833
|
+
return
|
|
1834
|
+
|
|
1835
|
+
old_location = nodes[node_name].get('location', {})
|
|
1836
|
+
nodes[node_name]['location'] = location
|
|
1837
|
+
|
|
1838
|
+
if not save_graph_file(filepath, graph):
|
|
1839
|
+
return
|
|
1840
|
+
|
|
1841
|
+
output_json({
|
|
1842
|
+
"success": True,
|
|
1843
|
+
"file": os.path.abspath(filepath),
|
|
1844
|
+
"node": node_name,
|
|
1845
|
+
"oldLocation": old_location,
|
|
1846
|
+
"newLocation": location
|
|
1847
|
+
})
|
|
1848
|
+
|
|
1849
|
+
|
|
1850
|
+
def cmd_graph_editor_clone_node(args):
    """Duplicate an existing node in a local graph file.

    The copy shares the original's nodeClass, values, ports, and color but
    carries no links -- those must be wired up separately. Unless an explicit
    --location is given, the clone is placed 50 units down-right of the source.
    """
    import copy

    path = require_arg(args, 'file', 'Graph file path')
    original_name = require_arg(args, 'source', 'Source node name')

    graph = load_graph_file(path)
    if graph is None:
        return

    node_table = graph.get('nodes', {})

    if original_name not in node_table:
        output_error(f"Source node '{original_name}' not found in graph", "NODE_NOT_FOUND")
        return

    original = node_table[original_name]
    node_class = original.get('nodeClass', 'Unknown')

    # Pick a free auto-generated name when the caller did not supply one.
    clone_name = args.name
    if not clone_name:
        suffix = 1
        while f"{node_class}_{suffix}" in node_table:
            suffix += 1
        clone_name = f"{node_class}_{suffix}"

    if clone_name in node_table:
        output_error(f"Node '{clone_name}' already exists in graph", "NODE_EXISTS")
        return

    # Resolve the clone's placement: explicit --location, or offset from source.
    if args.location:
        try:
            parsed = json.loads(args.location)
        except json.JSONDecodeError as e:
            output_error(f"Invalid JSON for --location: {str(e)}", "INVALID_JSON")
            return
        if isinstance(parsed, dict) and 'x' in parsed and 'y' in parsed:
            placement = {"x": parsed['x'], "y": parsed['y']}
        elif isinstance(parsed, list) and len(parsed) >= 2:
            placement = {"x": parsed[0], "y": parsed[1]}
        else:
            output_error("--location must be {\"x\": N, \"y\": N} or [x, y]", "INVALID_LOCATION")
            return
    else:
        anchor = original.get('location', {"x": 0, "y": 0})
        placement = {
            "x": anchor.get('x', 0) + 50,
            "y": anchor.get('y', 0) + 50
        }

    # Deep-copy mutable sub-structures so edits to the clone never leak back
    # into the source node (links deliberately start empty).
    duplicate = {
        "name": clone_name,
        "nodeClass": node_class,
        "color": original.get('color', '#808080'),
        "hash": original.get('hash'),
        "links": {},
        "location": placement,
        "ports": copy.deepcopy(original.get('ports', {"inputs": [], "outputs": []})),
        "values": copy.deepcopy(original.get('values', {}))
    }

    if original.get('tooltip'):
        duplicate['tooltip'] = original['tooltip']

    node_table[clone_name] = duplicate
    graph['nodes'] = node_table

    if not save_graph_file(path, graph):
        return

    output_json({
        "success": True,
        "file": os.path.abspath(path),
        "sourceNode": original_name,
        "clonedNode": clone_name,
        "nodeClass": node_class,
        "location": placement
    })
|
|
1937
|
+
|
|
1938
|
+
|
|
1939
|
+
def cmd_graph_editor_status(args):
    """Report graph-editor session status and validation errors.

    Reads ~/.theia/graph-editor-status.json (written by editor sessions) and
    summarizes the recorded sessions, optionally narrowed to a single graph
    file via --file.
    """
    status_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-status.json')

    # A missing status file is not an error -- the editor may never have run.
    if not os.path.exists(status_path):
        output_json({
            "status": "no_status_file",
            "message": "No graph editor status file found. The graph editor may not have been opened yet.",
            "statusPath": status_path,
            "sessions": []
        })
        return

    try:
        with open(status_path, 'r') as handle:
            payload = json.load(handle)
    except json.JSONDecodeError as e:
        output_error(f"Failed to parse status file: {str(e)}", "PARSE_ERROR")
        return
    except Exception as e:
        output_error(f"Failed to read status file: {str(e)}", "READ_ERROR")
        return

    sessions = payload.get('sessions', [])

    # Narrow to the one session matching --file, when supplied.
    if args.file:
        wanted = os.path.abspath(args.file)
        sessions = [s for s in sessions if s.get('graphPath') == wanted]

    # Summarize validation errors across the (possibly filtered) sessions.
    error_counts = [len(s.get('errors', [])) for s in sessions]
    sessions_with_errors = sum(1 for count in error_counts if count)
    total_errors = sum(error_counts)

    output_json({
        "status": "ok",
        "statusPath": status_path,
        "lastUpdated": payload.get('lastUpdated'),
        "sessionCount": len(sessions),
        "sessionsWithErrors": sessions_with_errors,
        "totalErrors": total_errors,
        "sessions": sessions
    })
|
|
1991
|
+
|
|
1992
|
+
|
|
1092
1993
|
# =============================================================================
|
|
1093
1994
|
# CHANNELS
|
|
1094
1995
|
# =============================================================================
|
|
@@ -1116,7 +2017,62 @@ def cmd_channels_schema(args):
|
|
|
1116
2017
|
channelId=channel_id,
|
|
1117
2018
|
fields=parse_list_arg(args.fields) if args.fields else None
|
|
1118
2019
|
)
|
|
1119
|
-
|
|
2020
|
+
|
|
2021
|
+
# Apply filters if specified
|
|
2022
|
+
if args.category or args.subcategory or args.search:
|
|
2023
|
+
filtered = []
|
|
2024
|
+
search_term = args.search.lower() if args.search else None
|
|
2025
|
+
|
|
2026
|
+
for node in result:
|
|
2027
|
+
# Filter by category
|
|
2028
|
+
if args.category and node.get('category', '').lower() != args.category.lower():
|
|
2029
|
+
continue
|
|
2030
|
+
|
|
2031
|
+
# Filter by subcategory
|
|
2032
|
+
if args.subcategory and node.get('subcategory', '').lower() != args.subcategory.lower():
|
|
2033
|
+
continue
|
|
2034
|
+
|
|
2035
|
+
# Search across multiple fields
|
|
2036
|
+
if search_term:
|
|
2037
|
+
searchable_fields = [
|
|
2038
|
+
node.get('name', ''),
|
|
2039
|
+
node.get('category', ''),
|
|
2040
|
+
node.get('subcategory', ''),
|
|
2041
|
+
node.get('tooltip', ''),
|
|
2042
|
+
node.get('description', ''),
|
|
2043
|
+
]
|
|
2044
|
+
# Also search in input/output names and descriptions
|
|
2045
|
+
for inp in node.get('inputs', []):
|
|
2046
|
+
searchable_fields.append(inp.get('name', ''))
|
|
2047
|
+
searchable_fields.append(inp.get('description', ''))
|
|
2048
|
+
for out in node.get('outputs', []):
|
|
2049
|
+
searchable_fields.append(out.get('name', ''))
|
|
2050
|
+
searchable_fields.append(out.get('description', ''))
|
|
2051
|
+
|
|
2052
|
+
combined = ' '.join(str(f) for f in searchable_fields).lower()
|
|
2053
|
+
if search_term not in combined:
|
|
2054
|
+
continue
|
|
2055
|
+
|
|
2056
|
+
filtered.append(node)
|
|
2057
|
+
|
|
2058
|
+
result = filtered
|
|
2059
|
+
|
|
2060
|
+
# List categories and subcategories if requested
|
|
2061
|
+
if args.list_categories:
|
|
2062
|
+
categories = sorted(set(node.get('category', '') for node in result if node.get('category')))
|
|
2063
|
+
subcategories = sorted(set(node.get('subcategory', '') for node in result if node.get('subcategory')))
|
|
2064
|
+
output_json({
|
|
2065
|
+
"categories": categories,
|
|
2066
|
+
"subcategories": subcategories
|
|
2067
|
+
})
|
|
2068
|
+
return
|
|
2069
|
+
|
|
2070
|
+
# Output names only if requested
|
|
2071
|
+
if args.names_only:
|
|
2072
|
+
names = sorted([node.get('name', '') for node in result])
|
|
2073
|
+
output_json(names)
|
|
2074
|
+
else:
|
|
2075
|
+
output_json(result)
|
|
1120
2076
|
|
|
1121
2077
|
|
|
1122
2078
|
def cmd_channels_nodes(args):
|
|
@@ -1456,6 +2412,104 @@ def cmd_annotations_edit(args):
|
|
|
1456
2412
|
output_json({"success": result})
|
|
1457
2413
|
|
|
1458
2414
|
|
|
2415
|
+
def cmd_annotations_view(args):
    """Render a dataset image with its annotations drawn on top.

    Supports 2D boxes, 3D boxes, and segmentation outlines, producing one
    output image per requested draw type. The image must live inside an
    ANA-format dataset directory that includes annotations/ and metadata/
    folders with the matching annotation files.
    """
    from anatools.annotations import annotations

    image_path = require_arg(args, 'imagepath', 'Image path')
    out_dir = require_arg(args, 'outdir', 'Output directory')

    # One or more draw types, comma-separated; default is 2D bounding boxes.
    draw_types = parse_list_arg(args.drawtype) if args.drawtype else ['box_2d']
    valid_types = ['box_2d', 'box_3d', 'segmentation']
    for dt in draw_types:
        if dt not in valid_types:
            output_error(f"Invalid draw type '{dt}'. Must be one of: {', '.join(valid_types)}", "INVALID_DRAW_TYPE")
            sys.exit(1)

    # Optional object filters (by numeric ID and/or by type name).
    object_ids = None
    object_types = None
    if args.objectids:
        try:
            object_ids = [int(token) for token in parse_list_arg(args.objectids)]
        except ValueError:
            output_error("Object IDs must be integers", "INVALID_OBJECT_IDS")
            sys.exit(1)
    if args.objecttypes:
        object_types = parse_list_arg(args.objecttypes)

    # Optional per-type colors given as JSON; JSON lists become RGB tuples.
    colors = None
    if args.colors:
        colors = parse_json_arg(args.colors)
        for key in colors:
            if isinstance(colors[key], list):
                colors[key] = tuple(colors[key])

    line_thickness = args.thickness if args.thickness else 1

    ann = annotations()
    # Every draw type maps onto a drawing method with an identical signature,
    # so a dispatch table replaces the branch-per-type chain.
    renderers = {
        'box_2d': ann.bounding_box_2d,
        'box_3d': ann.bounding_box_3d,
        'segmentation': ann.segmentation,
    }

    output_paths = []
    try:
        for draw_type in draw_types:
            output_path = renderers[draw_type](
                image_path=image_path,
                out_dir=out_dir,
                object_ids=object_ids,
                object_types=object_types,
                line_thickness=line_thickness,
                colors=colors,
                quiet=True
            )
            if output_path is None:
                output_error(f"Failed to generate annotated image for draw type '{draw_type}'", "ANNOTATION_ERROR")
                sys.exit(1)
            output_paths.append(output_path)

        output_json({
            "success": True,
            "outputPaths": output_paths,
            "drawTypes": draw_types
        })
    except FileNotFoundError as e:
        output_error(f"File not found: {str(e)}", "FILE_NOT_FOUND")
        sys.exit(1)
    except Exception as e:
        output_error(f"Failed to generate annotated image: {str(e)}", "ANNOTATION_ERROR")
        sys.exit(1)
|
|
2511
|
+
|
|
2512
|
+
|
|
1459
2513
|
# =============================================================================
|
|
1460
2514
|
# ANNOTATION-MAPS
|
|
1461
2515
|
# =============================================================================
|
|
@@ -2171,9 +3225,10 @@ Examples:
|
|
|
2171
3225
|
ds_cancel.set_defaults(func=cmd_datasets_cancel)
|
|
2172
3226
|
|
|
2173
3227
|
# datasets download
|
|
2174
|
-
ds_download = datasets_sub.add_parser('download', help='Download a dataset')
|
|
3228
|
+
ds_download = datasets_sub.add_parser('download', help='Download a dataset or a single file from a dataset')
|
|
2175
3229
|
ds_download.add_argument('--workspaceid', required=True, help='Workspace ID')
|
|
2176
3230
|
ds_download.add_argument('--datasetid', required=True, help='Dataset ID')
|
|
3231
|
+
ds_download.add_argument('--filepath', help='Relative path to a specific file within the dataset (e.g., "images/000000-1-image.png"). If not provided, downloads the entire dataset.')
|
|
2177
3232
|
ds_download.add_argument('--outputdir', help='Output directory')
|
|
2178
3233
|
ds_download.set_defaults(func=cmd_datasets_download)
|
|
2179
3234
|
|
|
@@ -2395,6 +3450,100 @@ Examples:
|
|
|
2395
3450
|
ge_open.add_argument('--outputdir', help='Output directory (default: current directory)')
|
|
2396
3451
|
ge_open.set_defaults(func=cmd_graph_editor_open)
|
|
2397
3452
|
|
|
3453
|
+
# graph-editor edit-node
|
|
3454
|
+
ge_edit_node = graph_editor_sub.add_parser('edit-node', help='Edit a node\'s values in a local graph file')
|
|
3455
|
+
ge_edit_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3456
|
+
ge_edit_node.add_argument('--node', required=True, help='Name of the node to edit')
|
|
3457
|
+
ge_edit_node.add_argument('--values', required=True, help='JSON object with values to update (e.g., \'{"param": "value"}\')')
|
|
3458
|
+
ge_edit_node.set_defaults(func=cmd_graph_editor_edit_node)
|
|
3459
|
+
|
|
3460
|
+
# graph-editor add-node
|
|
3461
|
+
ge_add_node = graph_editor_sub.add_parser('add-node', help='Add a new node to a local graph file')
|
|
3462
|
+
ge_add_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3463
|
+
ge_add_node.add_argument('--nodeclass', required=True, help='Node class to instantiate')
|
|
3464
|
+
ge_add_node.add_argument('--name', help='Custom node name (default: nodeClass_N)')
|
|
3465
|
+
ge_add_node.add_argument('--values', help='JSON object with initial values')
|
|
3466
|
+
ge_add_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3467
|
+
ge_add_node.add_argument('--color', help='Node color as hex (default: #808080)')
|
|
3468
|
+
ge_add_node.add_argument('--tooltip', help='Node tooltip/description')
|
|
3469
|
+
ge_add_node.set_defaults(func=cmd_graph_editor_add_node)
|
|
3470
|
+
|
|
3471
|
+
# graph-editor add-link
|
|
3472
|
+
ge_add_link = graph_editor_sub.add_parser('add-link', help='Add a link between two nodes')
|
|
3473
|
+
ge_add_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3474
|
+
ge_add_link.add_argument('--source', required=True, help='Source node name')
|
|
3475
|
+
ge_add_link.add_argument('--output', required=True, help='Output port name on source node')
|
|
3476
|
+
ge_add_link.add_argument('--target', required=True, help='Target node name')
|
|
3477
|
+
ge_add_link.add_argument('--input', required=True, help='Input port name on target node')
|
|
3478
|
+
ge_add_link.set_defaults(func=cmd_graph_editor_add_link)
|
|
3479
|
+
|
|
3480
|
+
# graph-editor remove-node
|
|
3481
|
+
ge_remove_node = graph_editor_sub.add_parser('remove-node', help='Remove a node and its links from a graph')
|
|
3482
|
+
ge_remove_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3483
|
+
ge_remove_node.add_argument('--node', required=True, help='Name of the node to remove')
|
|
3484
|
+
ge_remove_node.set_defaults(func=cmd_graph_editor_remove_node)
|
|
3485
|
+
|
|
3486
|
+
# graph-editor remove-link
|
|
3487
|
+
ge_remove_link = graph_editor_sub.add_parser('remove-link', help='Remove a link between two nodes')
|
|
3488
|
+
ge_remove_link.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3489
|
+
ge_remove_link.add_argument('--source', required=True, help='Source node name')
|
|
3490
|
+
ge_remove_link.add_argument('--output', required=True, help='Output port name on source node')
|
|
3491
|
+
ge_remove_link.add_argument('--target', required=True, help='Target node name')
|
|
3492
|
+
ge_remove_link.add_argument('--input', required=True, help='Input port name on target node')
|
|
3493
|
+
ge_remove_link.set_defaults(func=cmd_graph_editor_remove_link)
|
|
3494
|
+
|
|
3495
|
+
# graph-editor add-volume-file
|
|
3496
|
+
ge_add_vol_file = graph_editor_sub.add_parser('add-volume-file', help='Add a VolumeFile node referencing a file in a volume')
|
|
3497
|
+
ge_add_vol_file.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3498
|
+
ge_add_vol_file.add_argument('--volumeid', required=True, help='Volume UUID')
|
|
3499
|
+
ge_add_vol_file.add_argument('--path', required=True, help='File path within the volume (e.g., /models/model.blend)')
|
|
3500
|
+
ge_add_vol_file.add_argument('--name', help='Custom node name (default: VolumeFile_N)')
|
|
3501
|
+
ge_add_vol_file.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
|
|
3502
|
+
ge_add_vol_file.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3503
|
+
ge_add_vol_file.set_defaults(func=cmd_graph_editor_add_volume_file)
|
|
3504
|
+
|
|
3505
|
+
# graph-editor add-volume-directory
|
|
3506
|
+
ge_add_vol_dir = graph_editor_sub.add_parser('add-volume-directory', help='Add a VolumeDirectory node referencing a directory in a volume')
|
|
3507
|
+
ge_add_vol_dir.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3508
|
+
ge_add_vol_dir.add_argument('--volumeid', required=True, help='Volume UUID')
|
|
3509
|
+
ge_add_vol_dir.add_argument('--path', help='Directory path within the volume (default: /)')
|
|
3510
|
+
ge_add_vol_dir.add_argument('--name', help='Custom node name (default: VolumeDirectory_N)')
|
|
3511
|
+
ge_add_vol_dir.add_argument('--volumename', help='Volume display name for tooltip (default: uses volumeid)')
|
|
3512
|
+
ge_add_vol_dir.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y]')
|
|
3513
|
+
ge_add_vol_dir.set_defaults(func=cmd_graph_editor_add_volume_directory)
|
|
3514
|
+
|
|
3515
|
+
# graph-editor list-nodes
|
|
3516
|
+
ge_list_nodes = graph_editor_sub.add_parser('list-nodes', help='List all nodes in a local graph file')
|
|
3517
|
+
ge_list_nodes.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3518
|
+
ge_list_nodes.add_argument('--verbose', '-v', action='store_true', help='Include link details')
|
|
3519
|
+
ge_list_nodes.set_defaults(func=cmd_graph_editor_list_nodes)
|
|
3520
|
+
|
|
3521
|
+
# graph-editor get-node
|
|
3522
|
+
ge_get_node = graph_editor_sub.add_parser('get-node', help='Get detailed info about a specific node')
|
|
3523
|
+
ge_get_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3524
|
+
ge_get_node.add_argument('--node', required=True, help='Name of the node to inspect')
|
|
3525
|
+
ge_get_node.set_defaults(func=cmd_graph_editor_get_node)
|
|
3526
|
+
|
|
3527
|
+
# graph-editor move-node
|
|
3528
|
+
ge_move_node = graph_editor_sub.add_parser('move-node', help='Move a node to a new location')
|
|
3529
|
+
ge_move_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3530
|
+
ge_move_node.add_argument('--node', required=True, help='Name of the node to move')
|
|
3531
|
+
ge_move_node.add_argument('--location', required=True, help='New position as {"x": N, "y": N} or [x, y]')
|
|
3532
|
+
ge_move_node.set_defaults(func=cmd_graph_editor_move_node)
|
|
3533
|
+
|
|
3534
|
+
# graph-editor clone-node
|
|
3535
|
+
ge_clone_node = graph_editor_sub.add_parser('clone-node', help='Clone an existing node')
|
|
3536
|
+
ge_clone_node.add_argument('--file', required=True, help='Path to graph file (.yaml, .yml, or .json)')
|
|
3537
|
+
ge_clone_node.add_argument('--source', required=True, help='Name of the node to clone')
|
|
3538
|
+
ge_clone_node.add_argument('--name', help='Name for the cloned node (default: nodeClass_N)')
|
|
3539
|
+
ge_clone_node.add_argument('--location', help='Position as {"x": N, "y": N} or [x, y] (default: offset from source)')
|
|
3540
|
+
ge_clone_node.set_defaults(func=cmd_graph_editor_clone_node)
|
|
3541
|
+
|
|
3542
|
+
# graph-editor status
|
|
3543
|
+
ge_status = graph_editor_sub.add_parser('status', help='Get graph editor status and validation errors')
|
|
3544
|
+
ge_status.add_argument('--file', help='Filter to a specific graph file path')
|
|
3545
|
+
ge_status.set_defaults(func=cmd_graph_editor_status)
|
|
3546
|
+
|
|
2398
3547
|
# -------------------------------------------------------------------------
|
|
2399
3548
|
# CHANNELS
|
|
2400
3549
|
# -------------------------------------------------------------------------
|
|
@@ -2414,6 +3563,11 @@ Examples:
|
|
|
2414
3563
|
ch_schema = channels_sub.add_parser('schema', help='Get channel schema')
|
|
2415
3564
|
ch_schema.add_argument('--channelid', required=True, help='Channel ID')
|
|
2416
3565
|
ch_schema.add_argument('--fields', help='Comma-separated fields to return')
|
|
3566
|
+
ch_schema.add_argument('--category', help='Filter by category (e.g., Objects, Backgrounds, Sensors)')
|
|
3567
|
+
ch_schema.add_argument('--subcategory', help='Filter by subcategory (e.g., Aircraft, Vehicles, Ships)')
|
|
3568
|
+
ch_schema.add_argument('--search', help='Case-insensitive search across name, category, subcategory, tooltip, and descriptions')
|
|
3569
|
+
ch_schema.add_argument('--names-only', action='store_true', help='Output only node names (sorted)')
|
|
3570
|
+
ch_schema.add_argument('--list-categories', action='store_true', help='List all available categories and subcategories')
|
|
2417
3571
|
ch_schema.set_defaults(func=cmd_channels_schema)
|
|
2418
3572
|
|
|
2419
3573
|
# channels nodes
|
|
@@ -2612,6 +3766,17 @@ Examples:
|
|
|
2612
3766
|
ann_edit.add_argument('--tags', required=True, help='Comma-separated tags')
|
|
2613
3767
|
ann_edit.set_defaults(func=cmd_annotations_edit)
|
|
2614
3768
|
|
|
3769
|
+
# annotations view
|
|
3770
|
+
ann_view = annotations_sub.add_parser('view', help='Generate image with annotations overlayed')
|
|
3771
|
+
ann_view.add_argument('--imagepath', required=True, help='Path to the image file in the dataset directory')
|
|
3772
|
+
ann_view.add_argument('--outdir', required=True, help='Output directory for the annotated image')
|
|
3773
|
+
ann_view.add_argument('--drawtype', default='box_2d', help='Annotation type(s) to draw: box_2d, box_3d, segmentation (comma-separated for multiple)')
|
|
3774
|
+
ann_view.add_argument('--objectids', help='Comma-separated list of object IDs to annotate (filter)')
|
|
3775
|
+
ann_view.add_argument('--objecttypes', help='Comma-separated list of object types to annotate (filter)')
|
|
3776
|
+
ann_view.add_argument('--thickness', type=int, default=1, help='Line thickness for annotations (default: 1)')
|
|
3777
|
+
ann_view.add_argument('--colors', help='JSON dict of object type to RGB color, e.g. \'{"Car": [255, 0, 0]}\'')
|
|
3778
|
+
ann_view.set_defaults(func=cmd_annotations_view)
|
|
3779
|
+
|
|
2615
3780
|
# -------------------------------------------------------------------------
|
|
2616
3781
|
# ANNOTATION MAPS
|
|
2617
3782
|
# -------------------------------------------------------------------------
|