anatools 6.0.1__py3-none-any.whl → 6.0.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- anatools/__init__.py +1 -1
- anatools/anaclient/api/preview.py +24 -3
- anatools/anaclient/preview.py +26 -4
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anadeploy +1 -1
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anarules +6 -4
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/renderedai +339 -37
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/METADATA +2 -2
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/RECORD +18 -18
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/ana +0 -0
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anamount +0 -0
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anaprofile +0 -0
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anaserver +0 -0
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anatransfer +0 -0
- {anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anautils +0 -0
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/WHEEL +0 -0
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/entry_points.txt +0 -0
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/licenses/LICENSE +0 -0
- {anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/top_level.txt +0 -0
anatools/anaclient/api/preview.py
CHANGED
@@ -25,17 +25,38 @@ def getPreview(self, workspaceId, previewId, fields=None):

def createPreview(self, workspaceId, graphId):
    response = self.session.post(
-        url = self.url,
-        headers = self.headers,
+        url = self.url,
+        headers = self.headers,
        json = {
            "operationName": "createPreview",
            "variables": {
                "workspaceId": workspaceId,
                "graphId": graphId
            },
-            "query": """mutation
+            "query": """mutation
                createPreview($workspaceId: String!, $graphId: String!) {
                    createPreview(workspaceId: $workspaceId, graphId: $graphId)
                }"""})
    return self.errorhandler(response, "createPreview")

+
+def getPreviewLog(self, workspaceId, previewId, fields=None):
+    if fields is None: fields = self.getTypeFields("PreviewLog")
+    fields = "\n".join(fields)
+    response = self.session.post(
+        url = self.url,
+        headers = self.headers,
+        json = {
+            "operationName": "getPreviewLog",
+            "variables": {
+                "workspaceId": workspaceId,
+                "previewId": previewId
+            },
+            "query": f"""query
+                getPreviewLog($workspaceId: String!, $previewId: String!) {{
+                    getPreviewLog(workspaceId: $workspaceId, previewId: $previewId) {{
+                        {fields}
+                    }}
+                }}"""})
+    return self.errorhandler(response, "getPreviewLog")
+
anatools/anaclient/preview.py
CHANGED

@@ -27,14 +27,14 @@ def get_preview(self, previewId, workspaceId=None, fields=None):

def create_preview(self, graphId, workspaceId=None):
    """Creates a preview job.
-
+
    Parameters
    ----------
    graphId: str
        The unique identifier for the graph.
-    workspaceId : str
-        Workspace ID create the preview in. If none is provided, the default workspace will get used.
-
+    workspaceId : str
+        Workspace ID create the preview in. If none is provided, the default workspace will get used.
+
    Returns
    -------
    str
@@ -43,4 +43,26 @@ def create_preview(self, graphId, workspaceId=None):
    if self.check_logout(): return
    if workspaceId is None: workspaceId = self.workspace
    return self.ana_api.createPreview(workspaceId=workspaceId, graphId=graphId)
+
+
+def get_preview_log(self, previewId, workspaceId=None, fields=None):
+    """Fetches the logs for a preview job.
+
+    Parameters
+    ----------
+    previewId : str
+        The unique identifier for the preview job.
+    workspaceId : str
+        Workspace the preview job was run in. If none is provided, the default workspace will get used.
+    fields : list
+        List of fields to return, leave empty to get all fields.
+
+    Returns
+    -------
+    dict
+        Preview log information containing previewId, status, and logs.
+    """
+    if self.check_logout(): return
+    if workspaceId is None: workspaceId = self.workspace
+    return self.ana_api.getPreviewLog(workspaceId=workspaceId, previewId=previewId, fields=fields)

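
Taken together, the API and client changes surface preview job logs end to end. A minimal usage sketch of the new client call, assuming the usual anatools.client() login flow; the workspace and graph IDs are placeholders, and the exact keys in the returned dict depend on the platform's PreviewLog type:

import anatools

client = anatools.client()                     # assumed interactive login to the Rendered.ai platform
workspace_id = "<workspace-id>"                # placeholder IDs
graph_id = "<graph-id>"

# Create a preview job for a graph, then fetch its log with the method added in 6.0.2.
preview_id = client.create_preview(graphId=graph_id, workspaceId=workspace_id)
preview = client.get_preview(previewId=preview_id, workspaceId=workspace_id)   # existing call, e.g. to check status
log = client.get_preview_log(previewId=preview_id, workspaceId=workspace_id)
print(log)                                     # dict with previewId, status, and logs per the docstring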

{anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anadeploy
CHANGED

@@ -523,7 +523,7 @@ try:
    print(f'\033[1F\033[FRegistering Service Image...done. [{time.time()-registerstart:.3f}s]\033[K\n\033[K')
    print_color(f"The service has been deployed and is ready to use!", color='brand')
    print(f'Deployment Time: {time.time()-starttime:.3f}s\n')
-    client.edit_service(serviceId=remoteservice['serviceId'], schema=json.dumps(localservice.schemas))
+    client.edit_service(serviceId=remoteservice['serviceId'], description=localservice.description, schema=json.dumps(localservice.schemas))

    remoteconfig = {
        "serviceId": remoteservice['serviceId'],

{anatools-6.0.1.data → anatools-6.0.2.data}/scripts/anarules
CHANGED

@@ -107,10 +107,12 @@ if args.workspace is not None:
        try:
            schema = json.loads(service['schema'])
            if schema and isinstance(schema, dict):
-
-
-
-
+                tools = schema.get('tools', schema.get('execs', schema))
+                if isinstance(tools, dict):
+                    for tool in tools.keys():
+                        description = "No description"
+                        if isinstance(tools[tool], dict) and 'description' in tools[tool]: description = tools[tool]['description']
+                        rules+=f"\t\t- {tool} : {description}\n"
        except (json.JSONDecodeError, TypeError) as e:
            if args.verbose: print(f"Warning: Failed to parse schema for service {service['name']}: {e}")
            pass
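
For reference, a small stand-alone sketch of what the rewritten listing loop above produces; the service name, schema shape, and tool entries are invented for illustration, and only the parsing logic mirrors the diff:

import json

# Hypothetical service record carrying a JSON schema string, as anarules receives it.
service = {"name": "example-service",
           "schema": json.dumps({"tools": {"detect": {"description": "Run object detection"},
                                           "export": {}}})}

rules = ""
schema = json.loads(service['schema'])
if schema and isinstance(schema, dict):
    tools = schema.get('tools', schema.get('execs', schema))
    if isinstance(tools, dict):
        for tool in tools.keys():
            description = "No description"
            if isinstance(tools[tool], dict) and 'description' in tools[tool]: description = tools[tool]['description']
            rules += f"\t\t- {tool} : {description}\n"

print(rules)   # "- detect : Run object detection" and "- export : No description", tab-indented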

{anatools-6.0.1.data → anatools-6.0.2.data}/scripts/renderedai
CHANGED

@@ -1085,46 +1085,271 @@ def cmd_graphs_stage(args):
    output_json({"graphId": result})


+# =============================================================================
+# DATASET-VIEWER
+# =============================================================================
+
+DATASET_VIEWER_TRIGGER_PATH = os.path.join(os.path.expanduser('~'), '.theia', 'dataset-viewer-open')
+DATASET_VIEWER_STATUS_PATH = os.path.join(os.path.expanduser('~'), '.theia', 'dataset-viewer-status.json')
+
+
+def _write_dataset_viewer_trigger(action: str, payload: dict):
+    """Write a trigger file for the dataset viewer extension to pick up."""
+    trigger_data = {"action": action, **payload}
+    trigger_dir = os.path.dirname(DATASET_VIEWER_TRIGGER_PATH)
+    os.makedirs(trigger_dir, exist_ok=True)
+    with open(DATASET_VIEWER_TRIGGER_PATH, 'w') as f:
+        json.dump(trigger_data, f, indent=2)
+    return trigger_data
+
+
+def cmd_dataset_viewer_open(args):
+    """Open a dataset folder in the Annotation Viewer."""
+    dataset_path = getattr(args, 'path', None)
+    if not dataset_path:
+        output_error("Dataset path is required. Use --path", "MISSING_PATH")
+        sys.exit(1)
+
+    dataset_path = os.path.abspath(dataset_path)
+    if not os.path.isdir(dataset_path):
+        output_error(f"Directory not found: {dataset_path}", "PATH_NOT_FOUND")
+        sys.exit(1)
+
+    images_dir = os.path.join(dataset_path, 'images')
+    if not os.path.isdir(images_dir):
+        output_error(f"No images/ directory found in {dataset_path}", "NO_IMAGES_DIR")
+        sys.exit(1)
+
+    image_index = getattr(args, 'index', None) or 0
+
+    trigger_data = _write_dataset_viewer_trigger('open', {
+        'datasetPath': dataset_path,
+        'imageIndex': int(image_index),
+    })
+
+    output_json({
+        "status": "ok",
+        "action": "open",
+        "datasetPath": dataset_path,
+        "imageIndex": int(image_index),
+        "triggerPath": DATASET_VIEWER_TRIGGER_PATH,
+    })
+
+
+def cmd_dataset_viewer_next(args):
+    """Navigate to the next image in the dataset viewer."""
+    trigger_data = _write_dataset_viewer_trigger('next', {})
+    output_json({"status": "ok", "action": "next", "triggerPath": DATASET_VIEWER_TRIGGER_PATH})
+
+
+def cmd_dataset_viewer_prev(args):
+    """Navigate to the previous image in the dataset viewer."""
+    trigger_data = _write_dataset_viewer_trigger('prev', {})
+    output_json({"status": "ok", "action": "prev", "triggerPath": DATASET_VIEWER_TRIGGER_PATH})
+
+
+def cmd_dataset_viewer_goto(args):
+    """Navigate to a specific image by index or name."""
+    index = getattr(args, 'index', None)
+    name = getattr(args, 'name', None)
+
+    if index is None and not name:
+        output_error("Specify --index or --name", "MISSING_TARGET")
+        sys.exit(1)
+
+    payload = {}
+    if index is not None:
+        payload['imageIndex'] = int(index)
+    if name:
+        payload['imageName'] = name
+
+    trigger_data = _write_dataset_viewer_trigger('goto', payload)
+    output_json({
+        "status": "ok",
+        "action": "goto",
+        **payload,
+        "triggerPath": DATASET_VIEWER_TRIGGER_PATH,
+    })
+
+
+def cmd_dataset_viewer_annotations(args):
+    """Set which annotation types are enabled in the viewer."""
+    types_str = getattr(args, 'types', None)
+    if not types_str:
+        output_error("Annotation types required. Use --types (comma-separated: bbox,bbox3d,segmentation,centroid,mask)", "MISSING_TYPES")
+        sys.exit(1)
+
+    valid_types = {'bbox', 'bbox3d', 'segmentation', 'centroid', 'mask'}
+    types = [t.strip() for t in types_str.split(',')]
+    invalid = [t for t in types if t not in valid_types]
+    if invalid:
+        output_error(f"Invalid annotation types: {', '.join(invalid)}. Valid: {', '.join(sorted(valid_types))}", "INVALID_TYPES")
+        sys.exit(1)
+
+    trigger_data = _write_dataset_viewer_trigger('setAnnotations', {'annotations': types})
+    output_json({
+        "status": "ok",
+        "action": "setAnnotations",
+        "annotations": types,
+        "triggerPath": DATASET_VIEWER_TRIGGER_PATH,
+    })
+
+
+def cmd_dataset_viewer_filter(args):
+    """Set object type filter in the viewer."""
+    types_str = getattr(args, 'types', None)
+
+    if types_str:
+        types = [t.strip() for t in types_str.split(',')]
+    else:
+        # No types = show all (clear filter)
+        types = []
+
+    trigger_data = _write_dataset_viewer_trigger('setFilter', {'objectTypes': types})
+    output_json({
+        "status": "ok",
+        "action": "setFilter",
+        "objectTypes": types,
+        "triggerPath": DATASET_VIEWER_TRIGGER_PATH,
+    })
+
+
+def cmd_dataset_viewer_status(args):
+    """Get the current status of the dataset annotation viewer."""
+    if not os.path.exists(DATASET_VIEWER_STATUS_PATH):
+        output_json({
+            "status": "no_status_file",
+            "message": "No dataset viewer status file found. The viewer may not have been opened yet.",
+            "statusPath": DATASET_VIEWER_STATUS_PATH,
+            "sessions": [],
+        })
+        return
+
+    try:
+        with open(DATASET_VIEWER_STATUS_PATH, 'r') as f:
+            status_data = json.load(f)
+    except json.JSONDecodeError as e:
+        output_error(f"Failed to parse status file: {e}", "PARSE_ERROR")
+        return
+    except IOError as e:
+        output_error(f"Failed to read status file: {e}", "READ_ERROR")
+        return
+
+    # Optionally filter by dataset path
+    filter_path = getattr(args, 'path', None)
+    if filter_path:
+        filter_path = os.path.abspath(filter_path)
+        sessions = status_data.get('sessions', {})
+        if isinstance(sessions, dict):
+            filtered = {k: v for k, v in sessions.items() if os.path.abspath(k) == filter_path}
+            status_data['sessions'] = filtered
+
+    output_json({
+        "status": "ok",
+        "statusPath": DATASET_VIEWER_STATUS_PATH,
+        **status_data,
+    })
+
+
# =============================================================================
# GRAPH-EDITOR
# =============================================================================

def cmd_graph_editor_open(args):
-    """
-    client = get_client()
-    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
-    graph_id = require_arg(args, 'graphid', 'Graph ID')
-    directory = args.outputdir or os.getcwd()
+    """Open a graph in the graph editor.

-
-
-
-
+    Supports two modes:
+    1. Download from platform: --workspaceid and --graphid (downloads graph and schema)
+    2. Local files: --graphfile and --schemafile (uses existing local files)
+    """
+    # Get arguments
+    workspace_id = getattr(args, 'workspaceid', None)
+    graph_id = getattr(args, 'graphid', None)
+    graph_file = getattr(args, 'graphfile', None)
+    schema_file = getattr(args, 'schemafile', None)
+
+    # Determine mode and validate arguments
+    has_platform_args = workspace_id or graph_id
+    has_local_args = graph_file or schema_file
+
+    if has_platform_args and has_local_args:
+        output_error(
+            "Cannot use both platform arguments (--workspaceid/--graphid) and local file arguments (--graphfile/--schemafile)",
+            "INVALID_ARGS"
+        )
        return

-
-
-
-
+    if not has_platform_args and not has_local_args:
+        output_error(
+            "Must provide either --workspaceid and --graphid (to download from platform) or --graphfile and --schemafile (to use local files)",
+            "MISSING_ARGS"
+        )
        return

-    #
-
+    # Mode 1: Local files
+    if has_local_args:
+        if not graph_file:
+            output_error("--graphfile is required when using local files", "MISSING_GRAPHFILE")
+            return
+        if not schema_file:
+            output_error("--schemafile is required when using local files", "MISSING_SCHEMAFILE")
+            return

-
-
-
-
+        # Validate files exist
+        if not os.path.exists(graph_file):
+            output_error(f"Graph file not found: {graph_file}", "FILE_NOT_FOUND")
+            return
+        if not os.path.exists(schema_file):
+            output_error(f"Schema file not found: {schema_file}", "FILE_NOT_FOUND")
+            return

-
-
-
-
-
+        graph_path = graph_file
+        schema_path = schema_file
+        graph_id = None
+        channel_id = None
+        graph_name = os.path.basename(graph_file)
+
+    # Mode 2: Download from platform
+    else:
+        if not workspace_id:
+            output_error("--workspaceid is required when downloading from platform", "MISSING_WORKSPACEID")
+            return
+        if not graph_id:
+            output_error("--graphid is required when downloading from platform", "MISSING_GRAPHID")
+            return
+
+        client = get_client()
+        directory = args.outputdir or os.getcwd()

-
-
-
+        # Get graph metadata to find channelId
+        graphs = client.get_graphs(workspaceId=workspace_id, graphId=graph_id)
+        if not graphs:
+            output_error(f"Graph {graph_id} not found", "GRAPH_NOT_FOUND")
+            return
+
+        graph_info = graphs[0]
+        channel_id = graph_info.get('channelId')
+        if not channel_id:
+            output_error("Graph has no associated channel", "NO_CHANNEL")
+            return
+
+        # Create output directory if needed
+        os.makedirs(directory, exist_ok=True)
+
+        # Download graph
+        graph_name = graph_info.get('name', graph_id).replace(' ', '_')
+        graph_path = os.path.join(directory, f"{graph_name}.yaml")
+        client.download_graph(workspaceId=workspace_id, graphId=graph_id, filepath=graph_path)
+
+        # Download channel schema
+        schema = client.get_channel_nodes(channelId=channel_id)
+        if not schema:
+            output_error("Failed to fetch channel schema", "SCHEMA_ERROR")
+            return
+
+        schema_path = os.path.join(directory, f"{channel_id}_schema.json")
+        with open(schema_path, 'w') as f:
+            json.dump(schema, f, indent=2)

    # Write trigger file to open in graph editor
    trigger_path = os.path.join(os.path.expanduser('~'), '.theia', 'graph-editor-open')

@@ -1139,14 +1364,25 @@ def cmd_graph_editor_open(args):
    with open(trigger_path, 'w') as f:
        json.dump(trigger_data, f)

-
+    # Build output
+    result = {
        "graphPath": os.path.abspath(graph_path),
        "schemaPath": os.path.abspath(schema_path),
-        "triggerPath": trigger_path
-
-
-
-
+        "triggerPath": trigger_path
+    }
+
+    # Add platform-specific fields if downloaded
+    if graph_id:
+        result["graphId"] = graph_id
+    if channel_id:
+        result["channelId"] = channel_id
+    if has_local_args:
+        result["mode"] = "local"
+    else:
+        result["mode"] = "platform"
+        result["graphName"] = graph_info.get('name')
+
+    output_json(result)


def cmd_graph_editor_edit_node(args):

@@ -2968,6 +3204,20 @@ def cmd_preview_create(args):
    output_json({"previewId": result})


+def cmd_preview_log(args):
+    """Get preview job log."""
+    client = get_client()
+    workspace_id = require_arg(args, 'workspaceid', 'Workspace ID')
+    preview_id = require_arg(args, 'previewid', 'Preview ID')
+
+    result = client.get_preview_log(
+        workspaceId=workspace_id,
+        previewId=preview_id,
+        fields=parse_list_arg(args.fields) if args.fields else None
+    )
+    output_json(result)
+
+
# =============================================================================
# AGENTS
# =============================================================================

@@ -3444,10 +3694,14 @@ Examples:
    graph_editor_sub = graph_editor.add_subparsers(dest='action', help='Action')

    # graph-editor open
-    ge_open = graph_editor_sub.add_parser('open', help='
-
-    ge_open.add_argument('--
-    ge_open.add_argument('--
+    ge_open = graph_editor_sub.add_parser('open', help='Open graph in editor (download from platform or use local files)')
+    # Option 1: Download from platform
+    ge_open.add_argument('--workspaceid', help='Workspace ID (use with --graphid to download from platform)')
+    ge_open.add_argument('--graphid', help='Graph ID (use with --workspaceid to download from platform)')
+    ge_open.add_argument('--outputdir', help='Output directory for downloaded files (default: current directory)')
+    # Option 2: Use local files
+    ge_open.add_argument('--graphfile', help='Path to local graph file (use with --schemafile)')
+    ge_open.add_argument('--schemafile', help='Path to local schema file (use with --graphfile)')
    ge_open.set_defaults(func=cmd_graph_editor_open)

    # graph-editor edit-node

@@ -3544,6 +3798,47 @@ Examples:
    ge_status.add_argument('--file', help='Filter to a specific graph file path')
    ge_status.set_defaults(func=cmd_graph_editor_status)

+    # -------------------------------------------------------------------------
+    # DATASET-VIEWER
+    # -------------------------------------------------------------------------
+    dataset_viewer = subparsers.add_parser('dataset-viewer', help='Dataset annotation viewer integration')
+    dv_sub = dataset_viewer.add_subparsers(dest='action', help='Action')
+
+    # dataset-viewer open
+    dv_open = dv_sub.add_parser('open', help='Open a dataset folder in the Annotation Viewer')
+    dv_open.add_argument('--path', required=True, help='Path to dataset directory (must contain images/ subdirectory)')
+    dv_open.add_argument('--index', type=int, default=0, help='Initial image index (default: 0)')
+    dv_open.set_defaults(func=cmd_dataset_viewer_open)
+
+    # dataset-viewer next
+    dv_next = dv_sub.add_parser('next', help='Navigate to the next image')
+    dv_next.set_defaults(func=cmd_dataset_viewer_next)
+
+    # dataset-viewer prev
+    dv_prev = dv_sub.add_parser('prev', help='Navigate to the previous image')
+    dv_prev.set_defaults(func=cmd_dataset_viewer_prev)
+
+    # dataset-viewer goto
+    dv_goto = dv_sub.add_parser('goto', help='Navigate to a specific image by index or name')
+    dv_goto.add_argument('--index', type=int, help='Image index (0-based)')
+    dv_goto.add_argument('--name', help='Image filename (or partial match)')
+    dv_goto.set_defaults(func=cmd_dataset_viewer_goto)
+
+    # dataset-viewer annotations
+    dv_annotations = dv_sub.add_parser('annotations', help='Set which annotation types are displayed')
+    dv_annotations.add_argument('--types', required=True, help='Comma-separated annotation types: bbox,bbox3d,segmentation,centroid,mask')
+    dv_annotations.set_defaults(func=cmd_dataset_viewer_annotations)
+
+    # dataset-viewer filter
+    dv_filter = dv_sub.add_parser('filter', help='Filter visible objects by type (omit --types to show all)')
+    dv_filter.add_argument('--types', help='Comma-separated object type names to show (omit to clear filter)')
+    dv_filter.set_defaults(func=cmd_dataset_viewer_filter)
+
+    # dataset-viewer status
+    dv_status = dv_sub.add_parser('status', help='Get dataset viewer status')
+    dv_status.add_argument('--path', help='Filter to a specific dataset path')
+    dv_status.set_defaults(func=cmd_dataset_viewer_status)
+
    # -------------------------------------------------------------------------
    # CHANNELS
    # -------------------------------------------------------------------------
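
For clarity, the sketch below recreates the trigger file that "renderedai dataset-viewer open --path <dataset> --index 3" writes via _write_dataset_viewer_trigger above; the dataset path is a placeholder, and this is only a restatement of the handler, not a separate interface:

import json, os

# Placeholder dataset directory; a real dataset must contain an images/ subdirectory.
dataset_path = os.path.abspath("/tmp/example-dataset")

trigger_path = os.path.join(os.path.expanduser('~'), '.theia', 'dataset-viewer-open')
os.makedirs(os.path.dirname(trigger_path), exist_ok=True)

# Same payload cmd_dataset_viewer_open passes for the 'open' action.
with open(trigger_path, 'w') as f:
    json.dump({"action": "open", "datasetPath": dataset_path, "imageIndex": 3}, f, indent=2)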

@@ -4069,6 +4364,13 @@ Examples:
    prv_create.add_argument('--graphid', required=True, help='Graph ID')
    prv_create.set_defaults(func=cmd_preview_create)

+    # preview log
+    prv_log = preview_sub.add_parser('log', help='Get preview job log')
+    prv_log.add_argument('--workspaceid', required=True, help='Workspace ID')
+    prv_log.add_argument('--previewid', required=True, help='Preview ID')
+    prv_log.add_argument('--fields', help='Comma-separated fields')
+    prv_log.set_defaults(func=cmd_preview_log)
+
    # -------------------------------------------------------------------------
    # AGENTS
    # -------------------------------------------------------------------------
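
The new preview log subcommand can be scripted like any other renderedai command; a minimal sketch with placeholder IDs, assuming the renderedai script is on PATH in the workspace environment:

import json, subprocess

# Equivalent of: renderedai preview log --workspaceid <workspace-id> --previewid <preview-id>
proc = subprocess.run(
    ["renderedai", "preview", "log",
     "--workspaceid", "<workspace-id>",
     "--previewid", "<preview-id>"],
    capture_output=True, text=True)

print(json.loads(proc.stdout))   # cmd_preview_log emits the log dict as JSON via output_json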

{anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
Metadata-Version: 2.4
Name: anatools
-Version: 6.0.1
+Version: 6.0.2
Summary: Tools for development with the Rendered.ai Platform.
Home-page: https://rendered.ai
Author: Rendered AI, Inc
@@ -14,7 +14,7 @@ Requires-Dist: docker
Requires-Dist: numpy
Requires-Dist: pillow
Requires-Dist: pyyaml
-Requires-Dist: requests
+Requires-Dist: requests<3.0.0,>=2.32.3
Requires-Dist: colorama
Dynamic: author
Dynamic: author-email

{anatools-6.0.1.dist-info → anatools-6.0.2.dist-info}/RECORD
CHANGED

@@ -1,4 +1,4 @@
-anatools/__init__.py,sha256=
+anatools/__init__.py,sha256=oRyTj8OARogJ21SdosiOWxpYkQ8RGuINWDzlOOBS09Y,953
anatools/anacreate.py,sha256=wK1GKxGSzFdeQtqBkGhpai1sFjSJaKZ2686zrVJPBd0,5622
anatools/anaclient/__init__.py,sha256=tjCd-MMkWOjhjY1mlbGtHNhEdHLj-W687GiYJHsf_J0,50
anatools/anaclient/_menu.py,sha256=WSD8h7_SPRq9F6I0ohzkA4L0aLUx7L9YtIihk9eiD_o,5965
@@ -18,7 +18,7 @@ anatools/anaclient/inpaint.py,sha256=OfAALqhDE7mUAVERySE4j4JQIp9g5nq_IdzKEGUo2G4
anatools/anaclient/llm.py,sha256=-dKeqZ58SFnPa1PKm1iIVxym11ihywaDRizqMWA8lNo,2095
anatools/anaclient/ml.py,sha256=8lGoDY8SAcSI-lZT6Or4wXeZjc414DSqiQu3P3Laj7M,11799
anatools/anaclient/organizations.py,sha256=IMq_D1vRRtQJ3WxnqvgK-zGG3VPylfrozxAS55Vca-I,8301
-anatools/anaclient/preview.py,sha256=
+anatools/anaclient/preview.py,sha256=wT5sdKEr-9SNHmQeEyHN4czR1hrXuJJEMllFvXJF2z0,2035
anatools/anaclient/rules.py,sha256=BA5KHLfyh4ddQYRdGmRwX-hGITL3GolMVzDuBX-hZno,3903
anatools/anaclient/services.py,sha256=GAHGwxJtD3bcniJTVL0ROgnDg8PK8Paf1cRUz5AbEQA,19887
anatools/anaclient/umap.py,sha256=Dy37yqoNb8HQWPmCh8ZqEIBMijSTqv9tcRcfO0_IXWQ,4144
@@ -42,7 +42,7 @@ anatools/anaclient/api/llm.py,sha256=ZJKT91JzLaUxVM6RrIe2Z1rJ3UhJZyxlK9hm52t8u60
anatools/anaclient/api/members.py,sha256=S2Cyx6eAZdyVAux-5wpyZ3nw1uZeDz-Anhd6Ha7M4qI,3494
anatools/anaclient/api/ml.py,sha256=Jv_kFknGR8rj8xxjnnSrqPcCuyHO6KOJ9EeBh06r4as,11603
anatools/anaclient/api/organizations.py,sha256=FCXdJORKEnZlsPd3K4WCxBOsCayBd7tZ6sXxNihRLfM,1509
-anatools/anaclient/api/preview.py,sha256=
+anatools/anaclient/api/preview.py,sha256=ipgU0eVRr49KHsqaAK3Edq8oqx8jMjF8jlxYR5-b6p8,2140
anatools/anaclient/api/rules.py,sha256=yvxbk48m0ZZrWoHpmwS6kC4E2m7iSaY7iaLp0d6xmoc,4780
anatools/anaclient/api/services.py,sha256=htSjiRz7-kiDivt-xl5wtCSBZPLjDrPjKMKVHcdAM_o,11459
anatools/anaclient/api/umap.py,sha256=iMRE_z4Umg4-U_uEvdxho93QXOocF3-lN96qHfgbu64,3490
@@ -140,18 +140,18 @@ anatools/nodes/volume_directory.py,sha256=oe721h7qOplRj6N6NpGuyY1HCM277NSYA-6Uo3
anatools/nodes/volume_directory.yml,sha256=MbyuLUlcqWIlQadYcn4Rvf6roypqh5IiP3dP57TilbY,901
anatools/nodes/volume_file.py,sha256=YA4zCyRvVzF_9mMefGx29JLE7o9i6-NPaC40BFGv4_c,557
anatools/nodes/volume_file.yml,sha256=i8bo9QeQmTLeWjv9Rh4EDoDqwOBULNPV7SMLO5AK8DI,862
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
-anatools-6.0.
+anatools-6.0.2.data/scripts/ana,sha256=qe7LDHRgJRPyomzAumdcYy0D5SuaUVag0N8SVevpxcU,5739
+anatools-6.0.2.data/scripts/anadeploy,sha256=8A3JAJDztq_wf0EFJf_BgLBpikUwzaDJr2afsX2IAHk,38720
+anatools-6.0.2.data/scripts/anamount,sha256=AiaUgaaVVREFY2FLYRmSA8xgSwbfiF9NJYi91SWRA1I,6186
+anatools-6.0.2.data/scripts/anaprofile,sha256=1YUUwHiSa4ORQsxVf2HaSakayNTwMTNeF2snEMSJQAM,9779
+anatools-6.0.2.data/scripts/anarules,sha256=FDHAiEqSELiEUAYyulRMmgN_WI6fD1tyo34SVjjg8Eo,9117
+anatools-6.0.2.data/scripts/anaserver,sha256=QB1k_vhXAFGMOi9SNIFwgzkzN5LzJeVLtIVkp1oHq4I,8343
+anatools-6.0.2.data/scripts/anatransfer,sha256=GbMLjgA3TP4Oo2mbUxWnkkSC4nKpw1DWta-WVfcNftw,14564
+anatools-6.0.2.data/scripts/anautils,sha256=fziapZuKuBO0VKRgb4C4Js8p9zxxh8OHQmmkNdo3t3E,9530
+anatools-6.0.2.data/scripts/renderedai,sha256=qM8GraquQgPlacIlGfnT_Ia86418BJ_k2XymN_hbfq0,169196
+anatools-6.0.2.dist-info/licenses/LICENSE,sha256=aw0uaPvFzrHLJxBvuRqUcE2_srfM32-1suya9HbZCY8,1072
+anatools-6.0.2.dist-info/METADATA,sha256=mFdurueuJbirRpf0N9lnagRLQkvF8xwseVrnTDWVSpA,8232
+anatools-6.0.2.dist-info/WHEEL,sha256=wUyA8OaulRlbfwMtmQsvNngGrxQHAvkKcvRmdizlJi0,92
+anatools-6.0.2.dist-info/entry_points.txt,sha256=KsZUmvbH3HXC2CdVpE2GZNR2u_cJNVIbm6BnD658FgM,54
+anatools-6.0.2.dist-info/top_level.txt,sha256=p7xa5bG7NX8pSMJOvRunSz1d7rGPGsBd5-A4gzD4r6w,9
+anatools-6.0.2.dist-info/RECORD,,