supervisely-6.73.465-py3-none-any.whl → supervisely-6.73.467-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of supervisely might be problematic.
- supervisely/api/dataset_api.py +74 -12
- supervisely/nn/inference/inference.py +10 -9
- supervisely/nn/inference/inference_request.py +3 -9
- supervisely/nn/inference/session.py +3 -1
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/METADATA +1 -1
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/RECORD +10 -10
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/LICENSE +0 -0
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/WHEEL +0 -0
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/top_level.txt +0 -0
supervisely/api/dataset_api.py
CHANGED
@@ -1021,13 +1021,66 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
 
         return dataset_tree
 
-    def
+    def _yield_tree(
+        self, tree: Dict[DatasetInfo, Dict], path: List[str]
+    ) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
+        """
+        Helper method for recursive tree traversal.
+        Yields tuples of (path, dataset) for all datasets in the tree. For each node (dataset) at the current level,
+        yields its (path, dataset) before recursively traversing and yielding from its children.
+
+        :param tree: Tree structure to yield from.
+        :type tree: Dict[DatasetInfo, Dict]
+        :param path: Current path (used for recursion).
+        :type path: List[str]
+        :return: Generator of tuples of (path, dataset).
+        :rtype: Generator[Tuple[List[str], DatasetInfo], None, None]
+        """
+        for dataset, children in tree.items():
+            yield path, dataset
+            new_path = path + [dataset.name]
+            if children:
+                yield from self._yield_tree(children, new_path)
+
+    def _find_dataset_in_tree(
+        self, tree: Dict[DatasetInfo, Dict], target_id: int, path: List[str] = None
+    ) -> Tuple[Optional[DatasetInfo], Optional[Dict], List[str]]:
+        """Find a specific dataset in the tree and return its subtree and path.
+
+        :param tree: Tree structure to search in.
+        :type tree: Dict[DatasetInfo, Dict]
+        :param target_id: ID of the dataset to find.
+        :type target_id: int
+        :param path: Current path (used for recursion).
+        :type path: List[str], optional
+        :return: Tuple of (found_dataset, its_subtree, path_to_dataset).
+        :rtype: Tuple[Optional[DatasetInfo], Optional[Dict], List[str]]
+        """
+        if path is None:
+            path = []
+
+        for dataset, children in tree.items():
+            if dataset.id == target_id:
+                return dataset, children, path
+            # Search in children
+            if children:
+                found_dataset, found_children, found_path = self._find_dataset_in_tree(
+                    children, target_id, path + [dataset.name]
+                )
+                if found_dataset is not None:
+                    return found_dataset, found_children, found_path
+        return None, None, []
+
+    def tree(self, project_id: int, dataset_id: Optional[int] = None) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
         """Yields tuples of (path, dataset) for all datasets in the project.
         Path of the dataset is a list of parents, e.g. ["ds1", "ds2", "ds3"].
         For root datasets, the path is an empty list.
 
         :param project_id: Project ID in which the Dataset is located.
         :type project_id: int
+        :param dataset_id: Optional Dataset ID to start the tree from. If provided, only yields
+            the subtree starting from this dataset (including the dataset itself and all its children).
+        :type dataset_id: Optional[int]
         :return: Generator of tuples of (path, dataset).
         :rtype: Generator[Tuple[List[str], DatasetInfo], None, None]
         :Usage example:
@@ -1040,11 +1093,17 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
 
             project_id = 123
 
+            # Get all datasets in the project
             for parents, dataset in api.dataset.tree(project_id):
                 parents: List[str]
                 dataset: sly.DatasetInfo
                 print(parents, dataset.name)
 
+            # Get only a specific branch starting from dataset_id = 456
+            for parents, dataset in api.dataset.tree(project_id, dataset_id=456):
+                parents: List[str]
+                dataset: sly.DatasetInfo
+                print(parents, dataset.name)
 
             # Output:
             # [] ds1
@@ -1052,17 +1111,20 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
             # ["ds1", "ds2"] ds3
         """
 
-
-
-
-
-
-
-
-
-
-
-
+        full_tree = self.get_tree(project_id)
+
+        if dataset_id is None:
+            # Return the full tree
+            yield from self._yield_tree(full_tree, [])
+        else:
+            # Find the specific dataset and return only its subtree
+            target_dataset, subtree, dataset_path = self._find_dataset_in_tree(full_tree, dataset_id)
+            if target_dataset is not None:
+                # Yield the target dataset first, then its children
+                yield dataset_path, target_dataset
+                if subtree:
+                    new_path = dataset_path + [target_dataset.name]
+                    yield from self._yield_tree(subtree, new_path)
 
     def get_nested(self, project_id: int, dataset_id: int) -> List[DatasetInfo]:
         """Returns a list of all nested datasets in the specified dataset.
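The new optional `dataset_id` argument turns `DatasetApi.tree()` into a subtree generator: the target dataset is yielded first with the path to its parents, and its nested children follow via the `_yield_tree` recursion. A minimal usage sketch, assuming an authenticated `sly.Api` built from environment variables and purely illustrative project/dataset IDs (123 and 456):

import supervisely as sly

api = sly.Api.from_env()  # assumes SERVER_ADDRESS and API_TOKEN are set in the environment
project_id = 123          # illustrative project ID
branch_root_id = 456      # illustrative dataset ID

# Full traversal: map every dataset ID to a "parent/child" style path string.
id_to_path = {}
for parents, dataset in api.dataset.tree(project_id):
    id_to_path[dataset.id] = "/".join(parents + [dataset.name])

# Restricted traversal: only the chosen dataset and its descendants are yielded,
# with the target dataset emitted first.
branch = [(parents, ds.name) for parents, ds in api.dataset.tree(project_id, dataset_id=branch_root_id)]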
supervisely/nn/inference/inference.py
CHANGED
@@ -1374,6 +1374,7 @@ class Inference:
 
         if tracker == "botsort":
             from supervisely.nn.tracker import BotSortTracker
+
             device = tracker_settings.get("device", self.device)
             logger.debug(f"Initializing BotSort tracker with device: {device}")
             return BotSortTracker(settings=tracker_settings, device=device)
@@ -1419,7 +1420,7 @@ class Inference:
     def get_tracking_settings(self) -> Dict[str, Dict[str, Any]]:
         """
         Get default parameters for all available tracking algorithms.
-
+
         Returns:
             {"botsort": {"track_high_thresh": 0.6, ...}}
             Empty dict if tracking not supported.
@@ -1437,6 +1438,7 @@ class Inference:
         try:
             if tracker_name == "botsort":
                 from supervisely.nn.tracker import BotSortTracker
+
                 trackers_params[tracker_name] = BotSortTracker.get_default_params()
             # Add other trackers here as elif blocks
             else:
@@ -1448,7 +1450,7 @@ class Inference:
         for tracker_name, params in trackers_params.items():
            trackers_params[tracker_name] = {
                k: v for k, v in params.items() if k not in INTERNAL_FIELDS
-
+            }
        return trackers_params
 
    def get_human_readable_info(self, replace_none_with: Optional[str] = None):
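`get_tracking_settings()` above returns per-tracker defaults with internal fields filtered out. A hedged sketch of how a caller might copy and override one of those defaults, assuming `model` is an `Inference` subclass that supports tracking; the parameter value is illustrative, not a recommendation:

from typing import Any, Dict

def overridden_botsort_settings(model, track_high_thresh: float = 0.7) -> Dict[str, Any]:
    # `model` is assumed to be an Inference subclass with tracking support.
    defaults = model.get_tracking_settings()           # e.g. {"botsort": {"track_high_thresh": 0.6, ...}}
    settings = dict(defaults.get("botsort", {}))       # copy so the defaults stay untouched
    settings["track_high_thresh"] = track_high_thresh  # illustrative override of a single parameter
    return settings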
@@ -2277,8 +2279,8 @@ class Inference:
                         frame_index=frame_index,
                         video_id=video_info.id,
                         dataset_id=video_info.dataset_id,
-
-
+                        project_id=video_info.project_id,
+                    )
                 for ann, frame_index in zip(anns, batch)
             ]
             for pred, this_slides_data in zip(predictions, slides_data):
@@ -3671,10 +3673,7 @@ class Inference:
             data = {**inference_request.to_json(), **log_extra}
             if inference_request.stage != InferenceRequest.Stage.INFERENCE:
                 data["progress"] = {"current": 0, "total": 1}
-            logger.debug(
-                f"Sending inference progress with uuid:",
-                extra=data,
-            )
+            logger.debug(f"Sending inference progress with uuid:", extra=data)
             return data
 
         @server.post(f"/pop_inference_results")
@@ -4667,6 +4666,7 @@ def _filter_duplicated_predictions_from_ann_cpu(
 
     return pred_ann.clone(labels=new_labels)
 
+
 def _filter_duplicated_predictions_from_ann(
     gt_ann: Annotation, pred_ann: Annotation, iou_threshold: float
 ) -> Annotation:
@@ -5367,7 +5367,8 @@ def get_value_for_keys(data: dict, keys: List, ignore_none: bool = False):
             return data[key]
     return None
 
-
+
+
 def torch_load_safe(checkpoint_path: str, device: str = "cpu"):
     import torch  # pylint: disable=import-error
 
     # TODO: handle torch.load(weights_only=True) - change in torch 2.6.0
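The `torch_load_safe` helper and its TODO refer to torch 2.6 flipping the default of `torch.load(weights_only=...)` to True, which rejects checkpoints containing pickled non-tensor objects. The following is not the library's implementation, only a sketch of the compatibility pattern that comment alludes to:

import torch

def load_checkpoint_sketch(checkpoint_path: str, device: str = "cpu"):
    # Try the safe path first: weights_only=True refuses to unpickle arbitrary objects.
    try:
        return torch.load(checkpoint_path, map_location=device, weights_only=True)
    except Exception:
        # Fall back to legacy unpickling for trusted checkpoints only
        # (the pre-2.6 default behaviour).
        return torch.load(checkpoint_path, map_location=device, weights_only=False)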
supervisely/nn/inference/inference_request.py
CHANGED
@@ -14,13 +14,6 @@ from supervisely.sly_logger import logger
 from supervisely.task.progress import Progress
 
 
-def generate_uuid(self) -> str:
-    """
-    Generates a unique UUID for the inference request.
-    """
-    return uuid.uuid5(namespace=uuid.NAMESPACE_URL, name=f"{time.time()}-{rand_str(10)}").hex
-
-
 class InferenceRequest:
     class Stage:
         PREPARING = "Preparing model for inference..."
@@ -59,7 +52,7 @@ class InferenceRequest:
         self._created_at = time.monotonic()
         self._updated_at = self._created_at
         self._finished = False
-
+
         self.tracker = None
 
         self.global_progress = None
@@ -252,7 +245,8 @@ class InferenceRequest:
             status_data.pop(key, None)
         status_data.update(self.get_usage())
         return status_data
-
+
+
 class GlobalProgress:
     def __init__(self):
         self.progress = Progress(message="Ready", total_cnt=1)
supervisely/nn/inference/session.py
CHANGED
@@ -543,7 +543,9 @@ class SessionJSON:
         t0 = time.time()
         while not has_started and not timeout_exceeded:
             resp = self._get_inference_progress()
-
+            pending_results = resp.get("pending_results", None)
+            has_results = bool(pending_results)
+            has_started = bool(resp.get("result")) or resp["progress"]["total"] != 1 or has_results
             if not has_started:
                 time.sleep(delay)
             timeout_exceeded = timeout and time.time() - t0 > timeout
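The readiness check in `SessionJSON` now also treats already-available `pending_results` as evidence that inference has started. A standalone sketch of the same polling logic, assuming `get_progress()` returns a dict shaped like the inference-progress response ("result", "progress" with a "total" counter, and "pending_results"):

import time

def wait_until_started(get_progress, timeout=60.0, delay=1.0):
    # Illustrative restatement of the updated condition, not the library code.
    t0 = time.time()
    while True:
        resp = get_progress()
        has_results = bool(resp.get("pending_results"))
        has_started = bool(resp.get("result")) or resp["progress"]["total"] != 1 or has_results
        if has_started:
            return resp
        if timeout and time.time() - t0 > timeout:
            raise TimeoutError("inference did not start before the timeout")
        time.sleep(delay)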
{supervisely-6.73.465.dist-info → supervisely-6.73.467.dist-info}/RECORD
CHANGED
@@ -26,7 +26,7 @@ supervisely/api/annotation_api.py,sha256=TNOqVGE94FqDc7WDLBZiDY3aSIiyTQwn_bZv5_C
 supervisely/api/api.py,sha256=_ZiC1R2lu2eHXw_vBMUiylr-jQcU9-nZZvH5jJHVqoc,68828
 supervisely/api/app_api.py,sha256=OMgmZM7I5nmTn7P9J0F6fpNwWnFE-UO3wzlL1Rciqh4,79038
 supervisely/api/constants.py,sha256=WfqIcEpRnU4Mcfb6q0njeRs2VVSoTAJaIyrqBkBjP8I,253
-supervisely/api/dataset_api.py,sha256=
+supervisely/api/dataset_api.py,sha256=VxJH59G_EApGeFCw-Eu4RB2qowb5wMIj0eHusvxeGKM,51946
 supervisely/api/entities_collection_api.py,sha256=Be13HsfMFLmq9XpiOfQog0Y569kbUn52hXv6x5vX3Vg,22624
 supervisely/api/file_api.py,sha256=gNXNsikocSYRojoZrVmXIqXycqXm0e320piAwaLN6JI,92978
 supervisely/api/github_api.py,sha256=NIexNjEer9H5rf5sw2LEZd7C1WR-tK4t6IZzsgeAAwQ,623
@@ -906,9 +906,9 @@ supervisely/nn/benchmark/visualization/widgets/table/__init__.py,sha256=47DEQpj8
 supervisely/nn/benchmark/visualization/widgets/table/table.py,sha256=atmDnF1Af6qLQBUjLhK18RMDKAYlxnsuVHMSEa5a-e8,4319
 supervisely/nn/inference/__init__.py,sha256=QFukX2ip-U7263aEPCF_UCFwj6EujbMnsgrXp5Bbt8I,1623
 supervisely/nn/inference/cache.py,sha256=Hkxvu70rrB-j7ztQ4TBOxQePAxiKS7Erdb2FmK7aetY,35795
-supervisely/nn/inference/inference.py,sha256=
-supervisely/nn/inference/inference_request.py,sha256=
-supervisely/nn/inference/session.py,sha256=
+supervisely/nn/inference/inference.py,sha256=c-jzwBuPiFdMfu78mSc6qw6SO59LZ9IwePyafEkiL04,230468
+supervisely/nn/inference/inference_request.py,sha256=Hyf5R8gxaUCoxljNReIhbf5bK7ZMRtpV6C-flh8mgcQ,14660
+supervisely/nn/inference/session.py,sha256=17abxb237JMftb12gPV-R07N6bUZwIaL4N39K-BupAQ,36357
 supervisely/nn/inference/uploader.py,sha256=Dn5MfMRq7tclEWpP0B9fJjTiQPBpwumfXxC8-lOYgnM,5659
 supervisely/nn/inference/video_inference.py,sha256=8Bshjr6rDyLay5Za8IB8Dr6FURMO2R_v7aELasO8pR4,5746
 supervisely/nn/inference/gui/__init__.py,sha256=wCxd-lF5Zhcwsis-wScDA8n1Gk_1O00PKgDviUZ3F1U,221
@@ -1129,9 +1129,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
 supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
 supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
 supervisely_lib/__init__.py,sha256=yRwzEQmVwSd6lUQoAUdBngKEOlnoQ6hA9ZcoZGJRNC4,331
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
+supervisely-6.73.467.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+supervisely-6.73.467.dist-info/METADATA,sha256=y8n5gJ_mKQyF43P0JvdgAS6xVG3IJ_WxW1XhH1Nax8k,35604
+supervisely-6.73.467.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+supervisely-6.73.467.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
+supervisely-6.73.467.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
+supervisely-6.73.467.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|