supervisely-6.73.410-py3-none-any.whl → supervisely-6.73.470-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of supervisely might be problematic; see the package registry page for more details.

Files changed (190)
  1. supervisely/__init__.py +136 -1
  2. supervisely/_utils.py +81 -0
  3. supervisely/annotation/json_geometries_map.py +2 -0
  4. supervisely/annotation/label.py +80 -3
  5. supervisely/api/annotation_api.py +9 -9
  6. supervisely/api/api.py +67 -43
  7. supervisely/api/app_api.py +72 -5
  8. supervisely/api/dataset_api.py +108 -33
  9. supervisely/api/entity_annotation/figure_api.py +113 -49
  10. supervisely/api/image_api.py +82 -0
  11. supervisely/api/module_api.py +10 -0
  12. supervisely/api/nn/deploy_api.py +15 -9
  13. supervisely/api/nn/ecosystem_models_api.py +201 -0
  14. supervisely/api/nn/neural_network_api.py +12 -3
  15. supervisely/api/pointcloud/pointcloud_api.py +38 -0
  16. supervisely/api/pointcloud/pointcloud_episode_annotation_api.py +3 -0
  17. supervisely/api/project_api.py +213 -6
  18. supervisely/api/task_api.py +11 -1
  19. supervisely/api/video/video_annotation_api.py +4 -2
  20. supervisely/api/video/video_api.py +79 -1
  21. supervisely/api/video/video_figure_api.py +24 -11
  22. supervisely/api/volume/volume_api.py +38 -0
  23. supervisely/app/__init__.py +1 -1
  24. supervisely/app/content.py +14 -6
  25. supervisely/app/fastapi/__init__.py +1 -0
  26. supervisely/app/fastapi/custom_static_files.py +1 -1
  27. supervisely/app/fastapi/multi_user.py +88 -0
  28. supervisely/app/fastapi/subapp.py +175 -42
  29. supervisely/app/fastapi/templating.py +1 -1
  30. supervisely/app/fastapi/websocket.py +77 -9
  31. supervisely/app/singleton.py +21 -0
  32. supervisely/app/v1/app_service.py +18 -2
  33. supervisely/app/v1/constants.py +7 -1
  34. supervisely/app/widgets/__init__.py +11 -1
  35. supervisely/app/widgets/agent_selector/template.html +1 -0
  36. supervisely/app/widgets/card/card.py +20 -0
  37. supervisely/app/widgets/dataset_thumbnail/dataset_thumbnail.py +11 -2
  38. supervisely/app/widgets/dataset_thumbnail/template.html +3 -1
  39. supervisely/app/widgets/deploy_model/deploy_model.py +750 -0
  40. supervisely/app/widgets/dialog/dialog.py +12 -0
  41. supervisely/app/widgets/dialog/template.html +2 -1
  42. supervisely/app/widgets/dropdown_checkbox_selector/__init__.py +0 -0
  43. supervisely/app/widgets/dropdown_checkbox_selector/dropdown_checkbox_selector.py +87 -0
  44. supervisely/app/widgets/dropdown_checkbox_selector/template.html +12 -0
  45. supervisely/app/widgets/ecosystem_model_selector/__init__.py +0 -0
  46. supervisely/app/widgets/ecosystem_model_selector/ecosystem_model_selector.py +195 -0
  47. supervisely/app/widgets/experiment_selector/experiment_selector.py +454 -263
  48. supervisely/app/widgets/fast_table/fast_table.py +713 -126
  49. supervisely/app/widgets/fast_table/script.js +492 -95
  50. supervisely/app/widgets/fast_table/style.css +54 -0
  51. supervisely/app/widgets/fast_table/template.html +45 -5
  52. supervisely/app/widgets/heatmap/__init__.py +0 -0
  53. supervisely/app/widgets/heatmap/heatmap.py +523 -0
  54. supervisely/app/widgets/heatmap/script.js +378 -0
  55. supervisely/app/widgets/heatmap/style.css +227 -0
  56. supervisely/app/widgets/heatmap/template.html +21 -0
  57. supervisely/app/widgets/input_tag/input_tag.py +102 -15
  58. supervisely/app/widgets/input_tag_list/__init__.py +0 -0
  59. supervisely/app/widgets/input_tag_list/input_tag_list.py +274 -0
  60. supervisely/app/widgets/input_tag_list/template.html +70 -0
  61. supervisely/app/widgets/radio_table/radio_table.py +10 -2
  62. supervisely/app/widgets/radio_tabs/radio_tabs.py +18 -2
  63. supervisely/app/widgets/radio_tabs/template.html +1 -0
  64. supervisely/app/widgets/select/select.py +6 -4
  65. supervisely/app/widgets/select_dataset/select_dataset.py +6 -0
  66. supervisely/app/widgets/select_dataset_tree/select_dataset_tree.py +83 -7
  67. supervisely/app/widgets/table/table.py +68 -13
  68. supervisely/app/widgets/tabs/tabs.py +22 -6
  69. supervisely/app/widgets/tabs/template.html +5 -1
  70. supervisely/app/widgets/transfer/style.css +3 -0
  71. supervisely/app/widgets/transfer/template.html +3 -1
  72. supervisely/app/widgets/transfer/transfer.py +48 -45
  73. supervisely/app/widgets/tree_select/tree_select.py +2 -0
  74. supervisely/convert/image/csv/csv_converter.py +24 -15
  75. supervisely/convert/pointcloud/nuscenes_conv/nuscenes_converter.py +43 -41
  76. supervisely/convert/pointcloud_episodes/nuscenes_conv/nuscenes_converter.py +75 -51
  77. supervisely/convert/pointcloud_episodes/nuscenes_conv/nuscenes_helper.py +137 -124
  78. supervisely/convert/video/video_converter.py +2 -2
  79. supervisely/geometry/polyline_3d.py +110 -0
  80. supervisely/io/env.py +161 -1
  81. supervisely/nn/artifacts/__init__.py +1 -1
  82. supervisely/nn/artifacts/artifacts.py +10 -2
  83. supervisely/nn/artifacts/detectron2.py +1 -0
  84. supervisely/nn/artifacts/hrda.py +1 -0
  85. supervisely/nn/artifacts/mmclassification.py +20 -0
  86. supervisely/nn/artifacts/mmdetection.py +5 -3
  87. supervisely/nn/artifacts/mmsegmentation.py +1 -0
  88. supervisely/nn/artifacts/ritm.py +1 -0
  89. supervisely/nn/artifacts/rtdetr.py +1 -0
  90. supervisely/nn/artifacts/unet.py +1 -0
  91. supervisely/nn/artifacts/utils.py +3 -0
  92. supervisely/nn/artifacts/yolov5.py +2 -0
  93. supervisely/nn/artifacts/yolov8.py +1 -0
  94. supervisely/nn/benchmark/semantic_segmentation/metric_provider.py +18 -18
  95. supervisely/nn/experiments.py +9 -0
  96. supervisely/nn/inference/cache.py +37 -17
  97. supervisely/nn/inference/gui/serving_gui_template.py +39 -13
  98. supervisely/nn/inference/inference.py +953 -211
  99. supervisely/nn/inference/inference_request.py +15 -8
  100. supervisely/nn/inference/instance_segmentation/instance_segmentation.py +1 -0
  101. supervisely/nn/inference/object_detection/object_detection.py +1 -0
  102. supervisely/nn/inference/predict_app/__init__.py +0 -0
  103. supervisely/nn/inference/predict_app/gui/__init__.py +0 -0
  104. supervisely/nn/inference/predict_app/gui/classes_selector.py +160 -0
  105. supervisely/nn/inference/predict_app/gui/gui.py +915 -0
  106. supervisely/nn/inference/predict_app/gui/input_selector.py +344 -0
  107. supervisely/nn/inference/predict_app/gui/model_selector.py +77 -0
  108. supervisely/nn/inference/predict_app/gui/output_selector.py +179 -0
  109. supervisely/nn/inference/predict_app/gui/preview.py +93 -0
  110. supervisely/nn/inference/predict_app/gui/settings_selector.py +881 -0
  111. supervisely/nn/inference/predict_app/gui/tags_selector.py +110 -0
  112. supervisely/nn/inference/predict_app/gui/utils.py +399 -0
  113. supervisely/nn/inference/predict_app/predict_app.py +176 -0
  114. supervisely/nn/inference/session.py +47 -39
  115. supervisely/nn/inference/tracking/bbox_tracking.py +5 -1
  116. supervisely/nn/inference/tracking/point_tracking.py +5 -1
  117. supervisely/nn/inference/tracking/tracker_interface.py +4 -0
  118. supervisely/nn/inference/uploader.py +9 -5
  119. supervisely/nn/model/model_api.py +44 -22
  120. supervisely/nn/model/prediction.py +15 -1
  121. supervisely/nn/model/prediction_session.py +70 -14
  122. supervisely/nn/prediction_dto.py +7 -0
  123. supervisely/nn/tracker/__init__.py +6 -8
  124. supervisely/nn/tracker/base_tracker.py +54 -0
  125. supervisely/nn/tracker/botsort/__init__.py +1 -0
  126. supervisely/nn/tracker/botsort/botsort_config.yaml +30 -0
  127. supervisely/nn/tracker/botsort/osnet_reid/__init__.py +0 -0
  128. supervisely/nn/tracker/botsort/osnet_reid/osnet.py +566 -0
  129. supervisely/nn/tracker/botsort/osnet_reid/osnet_reid_interface.py +88 -0
  130. supervisely/nn/tracker/botsort/tracker/__init__.py +0 -0
  131. supervisely/nn/tracker/{bot_sort → botsort/tracker}/basetrack.py +1 -2
  132. supervisely/nn/tracker/{utils → botsort/tracker}/gmc.py +51 -59
  133. supervisely/nn/tracker/{deep_sort/deep_sort → botsort/tracker}/kalman_filter.py +71 -33
  134. supervisely/nn/tracker/botsort/tracker/matching.py +202 -0
  135. supervisely/nn/tracker/{bot_sort/bot_sort.py → botsort/tracker/mc_bot_sort.py} +68 -81
  136. supervisely/nn/tracker/botsort_tracker.py +273 -0
  137. supervisely/nn/tracker/calculate_metrics.py +264 -0
  138. supervisely/nn/tracker/utils.py +273 -0
  139. supervisely/nn/tracker/visualize.py +520 -0
  140. supervisely/nn/training/gui/gui.py +152 -49
  141. supervisely/nn/training/gui/hyperparameters_selector.py +1 -1
  142. supervisely/nn/training/gui/model_selector.py +8 -6
  143. supervisely/nn/training/gui/train_val_splits_selector.py +144 -71
  144. supervisely/nn/training/gui/training_artifacts.py +3 -1
  145. supervisely/nn/training/train_app.py +225 -46
  146. supervisely/project/pointcloud_episode_project.py +12 -8
  147. supervisely/project/pointcloud_project.py +12 -8
  148. supervisely/project/project.py +221 -75
  149. supervisely/template/experiment/experiment.html.jinja +105 -55
  150. supervisely/template/experiment/experiment_generator.py +258 -112
  151. supervisely/template/experiment/header.html.jinja +31 -13
  152. supervisely/template/experiment/sly-style.css +7 -2
  153. supervisely/versions.json +3 -1
  154. supervisely/video/sampling.py +42 -20
  155. supervisely/video/video.py +41 -12
  156. supervisely/video_annotation/video_figure.py +38 -4
  157. supervisely/volume/stl_converter.py +2 -0
  158. supervisely/worker_api/agent_rpc.py +24 -1
  159. supervisely/worker_api/rpc_servicer.py +31 -7
  160. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/METADATA +22 -14
  161. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/RECORD +167 -148
  162. supervisely_lib/__init__.py +6 -1
  163. supervisely/app/widgets/experiment_selector/style.css +0 -27
  164. supervisely/app/widgets/experiment_selector/template.html +0 -61
  165. supervisely/nn/tracker/bot_sort/__init__.py +0 -21
  166. supervisely/nn/tracker/bot_sort/fast_reid_interface.py +0 -152
  167. supervisely/nn/tracker/bot_sort/matching.py +0 -127
  168. supervisely/nn/tracker/bot_sort/sly_tracker.py +0 -401
  169. supervisely/nn/tracker/deep_sort/__init__.py +0 -6
  170. supervisely/nn/tracker/deep_sort/deep_sort/__init__.py +0 -1
  171. supervisely/nn/tracker/deep_sort/deep_sort/detection.py +0 -49
  172. supervisely/nn/tracker/deep_sort/deep_sort/iou_matching.py +0 -81
  173. supervisely/nn/tracker/deep_sort/deep_sort/linear_assignment.py +0 -202
  174. supervisely/nn/tracker/deep_sort/deep_sort/nn_matching.py +0 -176
  175. supervisely/nn/tracker/deep_sort/deep_sort/track.py +0 -166
  176. supervisely/nn/tracker/deep_sort/deep_sort/tracker.py +0 -145
  177. supervisely/nn/tracker/deep_sort/deep_sort.py +0 -301
  178. supervisely/nn/tracker/deep_sort/generate_clip_detections.py +0 -90
  179. supervisely/nn/tracker/deep_sort/preprocessing.py +0 -70
  180. supervisely/nn/tracker/deep_sort/sly_tracker.py +0 -273
  181. supervisely/nn/tracker/tracker.py +0 -285
  182. supervisely/nn/tracker/utils/kalman_filter.py +0 -492
  183. supervisely/nn/tracking/__init__.py +0 -1
  184. supervisely/nn/tracking/boxmot.py +0 -114
  185. supervisely/nn/tracking/tracking.py +0 -24
  186. /supervisely/{nn/tracker/utils → app/widgets/deploy_model}/__init__.py +0 -0
  187. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/LICENSE +0 -0
  188. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/WHEEL +0 -0
  189. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/entry_points.txt +0 -0
  190. {supervisely-6.73.410.dist-info → supervisely-6.73.470.dist-info}/top_level.txt +0 -0
@@ -140,7 +140,7 @@ def check_workflow_compatibility(api, min_instance_version: str) -> bool:
140
140
  "instance_version", api.instance_version
141
141
  )
142
142
 
143
- if instance_version == "unknown":
143
+ if instance_version is None or instance_version == "unknown":
144
144
  # to check again on the next call
145
145
  del _workflow_compatibility_version_cache["instance_version"]
146
146
  logger.info(
@@ -1394,9 +1394,75 @@ class AppApi(TaskApi):
1394
1394
  """get_url"""
1395
1395
  return f"/apps/sessions/{task_id}"
1396
1396
 
1397
- def download_git_file(self, app_id, version, file_path, save_path):
1398
- """download_git_file"""
1399
- raise NotImplementedError()
1397
+ def download_git_file(
1398
+ self,
1399
+ module_id,
1400
+ save_path,
1401
+ app_id=None,
1402
+ version=None,
1403
+ file_path=None,
1404
+ file_key=None,
1405
+ log_progress=True,
1406
+ ext_logger=None,
1407
+ ):
1408
+ """
1409
+ Download a file from app repository. File should be added in the app config under `files` key.
1410
+
1411
+ :param module_id: ID of the module
1412
+ :type module_id: int
1413
+ :param save_path: Path to save the file
1414
+ :type save_path: str
1415
+ :param app_id: ID of the app
1416
+ :type app_id: int
1417
+ :param version: Version of the app
1418
+ :type version: str
1419
+ :param file_path: Path to the file in the app github repository
1420
+ :type file_path: str
1421
+ :param file_key: Key of the file in the app github repository
1422
+ :type file_key: str
1423
+ :param log_progress: If True, will log the progress of the download
1424
+ :type log_progress: bool
1425
+ :param ext_logger: Logger to use for logging
1426
+ :type ext_logger: Logger
1427
+ :return: None
1428
+ :rtype: None
1429
+ """
1430
+ if file_path is None and file_key is None:
1431
+ raise ValueError("Either file_path or file_key must be provided")
1432
+ payload = {
1433
+ ApiField.MODULE_ID: module_id,
1434
+ }
1435
+ if version is not None:
1436
+ payload[ApiField.VERSION] = version
1437
+ if app_id is not None:
1438
+ payload[ApiField.APP_ID] = app_id
1439
+ if file_path is not None:
1440
+ payload[ApiField.FILE_PATH] = file_path
1441
+ if file_key is not None:
1442
+ payload[ApiField.FILE_KEY] = file_key
1443
+
1444
+ response = self._api.post("ecosystem.file.download", payload, stream=True)
1445
+ progress = None
1446
+ if log_progress:
1447
+ if ext_logger is None:
1448
+ ext_logger = logger
1449
+
1450
+ length = None
1451
+ # Content-Length
1452
+ if "Content-Length" in response.headers:
1453
+ length = int(response.headers["Content-Length"])
1454
+ progress = Progress("Downloading: ", length, ext_logger=ext_logger, is_size=True)
1455
+
1456
+ mb1 = 1024 * 1024
1457
+ ensure_base_path(save_path)
1458
+ with open(save_path, "wb") as fd:
1459
+ log_size = 0
1460
+ for chunk in response.iter_content(chunk_size=mb1):
1461
+ fd.write(chunk)
1462
+ log_size += len(chunk)
1463
+ if log_progress and log_size > mb1 and progress is not None:
1464
+ progress.iters_done_report(log_size)
1465
+ log_size = 0
1400
1466
 
1401
1467
  def download_git_archive(
1402
1468
  self,
@@ -1418,6 +1484,7 @@ class AppApi(TaskApi):
1418
1484
  payload[ApiField.APP_ID] = app_id
1419
1485
 
1420
1486
  response = self._api.post("ecosystem.file.download", payload, stream=True)
1487
+ progress = None
1421
1488
  if log_progress:
1422
1489
  if ext_logger is None:
1423
1490
  ext_logger = logger
@@ -1435,7 +1502,7 @@ class AppApi(TaskApi):
1435
1502
  for chunk in response.iter_content(chunk_size=mb1):
1436
1503
  fd.write(chunk)
1437
1504
  log_size += len(chunk)
1438
- if log_progress and log_size > mb1:
1505
+ if log_progress and log_size > mb1 and progress is not None:
1439
1506
  progress.iters_done_report(log_size)
1440
1507
  log_size = 0
1441
1508
 
@@ -185,6 +185,7 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
185
185
  filters: Optional[List[Dict[str, str]]] = None,
186
186
  recursive: Optional[bool] = False,
187
187
  parent_id: Optional[int] = None,
188
+ include_custom_data: Optional[bool] = False,
188
189
  ) -> List[DatasetInfo]:
189
190
  """
190
191
  Returns list of dataset in the given project, or list of nested datasets
@@ -200,6 +201,9 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
200
201
  :type recursive: bool, optional
201
202
  :param parent_id: Parent Dataset ID. If set to None, the search will be performed at the top level of the Project,
202
203
  otherwise the search will be performed in the specified Dataset.
204
+ :type parent_id: Union[int, None], optional
205
+ :param include_custom_data: If True, the response will include the `custom_data` field for each Dataset.
206
+ :type include_custom_data: bool, optional
203
207
  :return: List of all Datasets with information for the given Project. See :class:`info_sequence<info_sequence>`
204
208
  :rtype: :class:`List[DatasetInfo]`
205
209
  :Usage example:
@@ -246,14 +250,16 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
246
250
  filters.append({"field": ApiField.PARENT_ID, "operator": "=", "value": parent_id})
247
251
  recursive = True
248
252
 
249
- return self.get_list_all_pages(
250
- "datasets.list",
251
- {
252
- ApiField.PROJECT_ID: project_id,
253
- ApiField.FILTER: filters,
254
- ApiField.RECURSIVE: recursive,
255
- },
256
- )
253
+ method = "datasets.list"
254
+ data = {
255
+ ApiField.PROJECT_ID: project_id,
256
+ ApiField.FILTER: filters,
257
+ ApiField.RECURSIVE: recursive,
258
+ }
259
+ if include_custom_data:
260
+ data[ApiField.EXTRA_FIELDS] = [ApiField.CUSTOM_DATA]
261
+
262
+ return self.get_list_all_pages(method, data)
257
263
 
258
264
  def get_info_by_id(self, id: int, raise_error: Optional[bool] = False) -> DatasetInfo:
259
265
  """
@@ -304,6 +310,7 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
304
310
  description: Optional[str] = "",
305
311
  change_name_if_conflict: Optional[bool] = False,
306
312
  parent_id: Optional[int] = None,
313
+ custom_data: Optional[Dict[Any, Any]] = None,
307
314
  ) -> DatasetInfo:
308
315
  """
309
316
  Create Dataset with given name in the given Project.
@@ -318,6 +325,9 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
318
325
  :type change_name_if_conflict: bool, optional
319
326
  :param parent_id: Parent Dataset ID. If set to None, then the Dataset will be created at
320
327
  the top level of the Project, otherwise the Dataset will be created in a specified Dataset.
328
+ :type parent_id: Union[int, None]
329
+ :param custom_data: Custom data to store in the Dataset.
330
+ :type custom_data: Dict[Any, Any], optional
321
331
  :return: Information about Dataset. See :class:`info_sequence<info_sequence>`
322
332
  :rtype: :class:`DatasetInfo`
323
333
  :Usage example:
@@ -345,15 +355,16 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
345
355
  change_name_if_conflict=change_name_if_conflict,
346
356
  parent_id=parent_id,
347
357
  )
348
- response = self._api.post(
349
- "datasets.add",
350
- {
351
- ApiField.PROJECT_ID: project_id,
352
- ApiField.NAME: effective_name,
353
- ApiField.DESCRIPTION: description,
354
- ApiField.PARENT_ID: parent_id,
355
- },
356
- )
358
+ method = "datasets.add"
359
+ payload = {
360
+ ApiField.PROJECT_ID: project_id,
361
+ ApiField.NAME: effective_name,
362
+ ApiField.DESCRIPTION: description,
363
+ ApiField.PARENT_ID: parent_id,
364
+ }
365
+ if custom_data is not None:
366
+ payload[ApiField.CUSTOM_DATA] = custom_data
367
+ response = self._api.post(method, payload)
357
368
  return self._convert_json_info(response.json())
358
369
 
359
370
  def get_or_create(
@@ -564,6 +575,7 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
564
575
  new_dataset_name,
565
576
  dataset.description,
566
577
  change_name_if_conflict=change_name_if_conflict,
578
+ custom_data=dataset.custom_data,
567
579
  )
568
580
  items_api.copy_batch(
569
581
  new_dataset.id, src_item_ids, change_name_if_conflict, with_annotations
@@ -797,6 +809,7 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
797
809
  sort_order: Optional[str] = None,
798
810
  per_page: Optional[int] = None,
799
811
  page: Union[int, Literal["all"]] = "all",
812
+ include_custom_data: Optional[bool] = False,
800
813
  ) -> dict:
801
814
  """
802
815
  List all available datasets from all available teams for the user that match the specified filtering criteria.
@@ -807,22 +820,20 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
807
820
  - 'operator': Takes values '=', 'eq', '!=', 'not', 'in', '!in', '>', 'gt', '>=', 'gte', '<', 'lt', '<=', 'lte'
808
821
  - 'value': Takes on values according to the meaning of 'field' or null
809
822
  :type filters: List[Dict[str, str]], optional
810
-
811
823
  :param sort: Specifies by which parameter to sort the project list.
812
824
  Takes values 'id', 'name', 'size', 'createdAt', 'updatedAt'
813
825
  :type sort: str, optional
814
-
815
826
  :param sort_order: Determines which value to list from.
816
827
  :type sort_order: str, optional
817
-
818
828
  :param per_page: Number of first items found to be returned.
819
829
  'None' will return the first page with a default size of 20000 datasets.
820
830
  :type per_page: int, optional
821
-
822
831
  :param page: Page number, used to retrieve the following items if the number of them found is more than per_page.
823
832
  The default value is 'all', which retrieves all available datasets.
824
833
  'None' will return the first page with datasets, the amount of which is set in param 'per_page'.
825
834
  :type page: Union[int, Literal["all"]], optional
835
+ :param include_custom_data: If True, the response will include the `custom_data` field for each Dataset.
836
+ :type include_custom_data: bool, optional
826
837
 
827
838
  :return: Search response information and 'DatasetInfo' of all datasets that are searched by a given criterion.
828
839
  :rtype: dict
@@ -899,6 +910,8 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
899
910
  request_body[ApiField.PER_PAGE] = per_page
900
911
  if page is not None and page != "all":
901
912
  request_body[ApiField.PAGE] = page
913
+ if include_custom_data:
914
+ request_body[ApiField.EXTRA_FIELDS] = [ApiField.CUSTOM_DATA]
902
915
 
903
916
  first_response = self._api.post(method, request_body).json()
904
917
 
@@ -1008,13 +1021,66 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
1008
1021
 
1009
1022
  return dataset_tree
1010
1023
 
1011
- def tree(self, project_id: int) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
1024
+ def _yield_tree(
1025
+ self, tree: Dict[DatasetInfo, Dict], path: List[str]
1026
+ ) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
1027
+ """
1028
+ Helper method for recursive tree traversal.
1029
+ Yields tuples of (path, dataset) for all datasets in the tree. For each node (dataset) at the current level,
1030
+ yields its (path, dataset) before recursively traversing and yielding from its children.
1031
+
1032
+ :param tree: Tree structure to yield from.
1033
+ :type tree: Dict[DatasetInfo, Dict]
1034
+ :param path: Current path (used for recursion).
1035
+ :type path: List[str]
1036
+ :return: Generator of tuples of (path, dataset).
1037
+ :rtype: Generator[Tuple[List[str], DatasetInfo], None, None]
1038
+ """
1039
+ for dataset, children in tree.items():
1040
+ yield path, dataset
1041
+ new_path = path + [dataset.name]
1042
+ if children:
1043
+ yield from self._yield_tree(children, new_path)
1044
+
1045
+ def _find_dataset_in_tree(
1046
+ self, tree: Dict[DatasetInfo, Dict], target_id: int, path: List[str] = None
1047
+ ) -> Tuple[Optional[DatasetInfo], Optional[Dict], List[str]]:
1048
+ """Find a specific dataset in the tree and return its subtree and path.
1049
+
1050
+ :param tree: Tree structure to search in.
1051
+ :type tree: Dict[DatasetInfo, Dict]
1052
+ :param target_id: ID of the dataset to find.
1053
+ :type target_id: int
1054
+ :param path: Current path (used for recursion).
1055
+ :type path: List[str], optional
1056
+ :return: Tuple of (found_dataset, its_subtree, path_to_dataset).
1057
+ :rtype: Tuple[Optional[DatasetInfo], Optional[Dict], List[str]]
1058
+ """
1059
+ if path is None:
1060
+ path = []
1061
+
1062
+ for dataset, children in tree.items():
1063
+ if dataset.id == target_id:
1064
+ return dataset, children, path
1065
+ # Search in children
1066
+ if children:
1067
+ found_dataset, found_children, found_path = self._find_dataset_in_tree(
1068
+ children, target_id, path + [dataset.name]
1069
+ )
1070
+ if found_dataset is not None:
1071
+ return found_dataset, found_children, found_path
1072
+ return None, None, []
1073
+
1074
+ def tree(self, project_id: int, dataset_id: Optional[int] = None) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
1012
1075
  """Yields tuples of (path, dataset) for all datasets in the project.
1013
1076
  Path of the dataset is a list of parents, e.g. ["ds1", "ds2", "ds3"].
1014
1077
  For root datasets, the path is an empty list.
1015
1078
 
1016
1079
  :param project_id: Project ID in which the Dataset is located.
1017
1080
  :type project_id: int
1081
+ :param dataset_id: Optional Dataset ID to start the tree from. If provided, only yields
1082
+ the subtree starting from this dataset (including the dataset itself and all its children).
1083
+ :type dataset_id: Optional[int]
1018
1084
  :return: Generator of tuples of (path, dataset).
1019
1085
  :rtype: Generator[Tuple[List[str], DatasetInfo], None, None]
1020
1086
  :Usage example:
@@ -1027,11 +1093,17 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
1027
1093
 
1028
1094
  project_id = 123
1029
1095
 
1096
+ # Get all datasets in the project
1030
1097
  for parents, dataset in api.dataset.tree(project_id):
1031
1098
  parents: List[str]
1032
1099
  dataset: sly.DatasetInfo
1033
1100
  print(parents, dataset.name)
1034
1101
 
1102
+ # Get only a specific branch starting from dataset_id = 456
1103
+ for parents, dataset in api.dataset.tree(project_id, dataset_id=456):
1104
+ parents: List[str]
1105
+ dataset: sly.DatasetInfo
1106
+ print(parents, dataset.name)
1035
1107
 
1036
1108
  # Output:
1037
1109
  # [] ds1
@@ -1039,17 +1111,20 @@ class DatasetApi(UpdateableModule, RemoveableModuleApi):
1039
1111
  # ["ds1", "ds2"] ds3
1040
1112
  """
1041
1113
 
1042
- def yield_tree(
1043
- tree: Dict[DatasetInfo, Dict], path: List[str]
1044
- ) -> Generator[Tuple[List[str], DatasetInfo], None, None]:
1045
- """Yields tuples of (path, dataset) for all datasets in the tree."""
1046
- for dataset, children in tree.items():
1047
- yield path, dataset
1048
- new_path = path + [dataset.name]
1049
- if children:
1050
- yield from yield_tree(children, new_path)
1051
-
1052
- yield from yield_tree(self.get_tree(project_id), [])
1114
+ full_tree = self.get_tree(project_id)
1115
+
1116
+ if dataset_id is None:
1117
+ # Return the full tree
1118
+ yield from self._yield_tree(full_tree, [])
1119
+ else:
1120
+ # Find the specific dataset and return only its subtree
1121
+ target_dataset, subtree, dataset_path = self._find_dataset_in_tree(full_tree, dataset_id)
1122
+ if target_dataset is not None:
1123
+ # Yield the target dataset first, then its children
1124
+ yield dataset_path, target_dataset
1125
+ if subtree:
1126
+ new_path = dataset_path + [target_dataset.name]
1127
+ yield from self._yield_tree(subtree, new_path)
1053
1128
 
1054
1129
  def get_nested(self, project_id: int, dataset_id: int) -> List[DatasetInfo]:
1055
1130
  """Returns a list of all nested datasets in the specified dataset.
@@ -24,6 +24,7 @@ from requests_toolbelt import MultipartDecoder, MultipartEncoder
24
24
  from tqdm import tqdm
25
25
 
26
26
  from supervisely._utils import batched, logger, run_coroutine
27
+ from supervisely.annotation.label import LabelingStatus
27
28
  from supervisely.api.module_api import ApiField, ModuleApi, RemoveableBulkModuleApi
28
29
  from supervisely.geometry.rectangle import Rectangle
29
30
  from supervisely.video_annotation.key_id_map import KeyIdMap
@@ -221,6 +222,8 @@ class FigureApi(RemoveableBulkModuleApi):
221
222
  "meta",
222
223
  "area",
223
224
  "priority",
225
+ "nnCreated",
226
+ "nnUpdated",
224
227
  ]
225
228
  return self._get_info_by_id(id, "figures.info", {ApiField.FIELDS: fields})
226
229
 
@@ -233,6 +236,7 @@ class FigureApi(RemoveableBulkModuleApi):
233
236
  geometry_type: str,
234
237
  track_id: Optional[int] = None,
235
238
  custom_data: Optional[dict] = None,
239
+ status: Optional[LabelingStatus] = None,
236
240
  ) -> int:
237
241
  """"""
238
242
  input_figure = {
@@ -242,6 +246,13 @@ class FigureApi(RemoveableBulkModuleApi):
242
246
  ApiField.GEOMETRY: geometry_json,
243
247
  }
244
248
 
249
+ if status is None:
250
+ status = LabelingStatus.MANUAL
251
+
252
+ nn_created, nn_updated = LabelingStatus.to_flags(status)
253
+ input_figure[ApiField.NN_CREATED] = nn_created
254
+ input_figure[ApiField.NN_UPDATED] = nn_updated
255
+
245
256
  if track_id is not None:
246
257
  input_figure[ApiField.TRACK_ID] = track_id
247
258
 
@@ -376,6 +387,8 @@ class FigureApi(RemoveableBulkModuleApi):
376
387
  ApiField.AREA,
377
388
  ApiField.PRIORITY,
378
389
  ApiField.CUSTOM_DATA,
390
+ ApiField.NN_CREATED,
391
+ ApiField.NN_UPDATED,
379
392
  ]
380
393
  figures_infos = self.get_list_all_pages(
381
394
  "figures.list",
@@ -496,6 +509,8 @@ class FigureApi(RemoveableBulkModuleApi):
496
509
  ApiField.AREA,
497
510
  ApiField.PRIORITY,
498
511
  ApiField.CUSTOM_DATA,
512
+ ApiField.NN_CREATED,
513
+ ApiField.NN_UPDATED,
499
514
  ]
500
515
  if skip_geometry is True:
501
516
  fields = [x for x in fields if x != ApiField.GEOMETRY]
@@ -580,10 +595,13 @@ class FigureApi(RemoveableBulkModuleApi):
580
595
  """
581
596
  geometries = {}
582
597
  for idx, part in self._download_geometries_generator(ids):
583
- if progress_cb is not None:
584
- progress_cb(len(part.content))
585
- geometry_json = json.loads(part.content)
586
- geometries[idx] = geometry_json
598
+ try:
599
+ if progress_cb is not None:
600
+ progress_cb(len(part.content))
601
+ geometry_json = json.loads(part.content)
602
+ geometries[idx] = geometry_json
603
+ except Exception as e:
604
+ raise RuntimeError(f"Failed to decode geometry for figure ID {idx}") from e
587
605
 
588
606
  if len(geometries) != len(ids):
589
607
  raise RuntimeError("Not all geometries were downloaded")
@@ -800,6 +818,7 @@ class FigureApi(RemoveableBulkModuleApi):
800
818
  skip_geometry: bool = False,
801
819
  semaphore: Optional[asyncio.Semaphore] = None,
802
820
  log_progress: bool = True,
821
+ batch_size: int = 300,
803
822
  ) -> Dict[int, List[FigureInfo]]:
804
823
  """
805
824
  Asynchronously download figures for the given dataset ID. Can be filtered by image IDs.
@@ -815,6 +834,10 @@ class FigureApi(RemoveableBulkModuleApi):
815
834
  :type semaphore: Optional[asyncio.Semaphore], optional
816
835
  :param log_progress: If True, log the progress of the download.
817
836
  :type log_progress: bool, optional
837
+ :param batch_size: Size of the batch for downloading figures per 1 request. Default is 300.
838
+ Used for batching image_ids when filtering by specific images.
839
+ Adjust this value for optimal performance, value cannot exceed 500.
840
+ :type batch_size: int, optional
818
841
  :return: A dictionary where keys are image IDs and values are lists of figures.
819
842
  :rtype: Dict[int, List[FigureInfo]]
820
843
 
@@ -849,75 +872,110 @@ class FigureApi(RemoveableBulkModuleApi):
849
872
  ApiField.AREA,
850
873
  ApiField.PRIORITY,
851
874
  ApiField.CUSTOM_DATA,
875
+ ApiField.NN_CREATED,
876
+ ApiField.NN_UPDATED,
852
877
  ]
853
878
  if skip_geometry is True:
854
879
  fields = [x for x in fields if x != ApiField.GEOMETRY]
855
880
 
856
- if image_ids is None:
857
- filters = []
858
- else:
859
- filters = [
860
- {
861
- ApiField.FIELD: ApiField.ENTITY_ID,
862
- ApiField.OPERATOR: "in",
863
- ApiField.VALUE: image_ids,
864
- }
865
- ]
866
-
867
- data = {
881
+ # Base data setup
882
+ base_data = {
868
883
  ApiField.DATASET_ID: dataset_id,
869
884
  ApiField.FIELDS: fields,
870
- ApiField.FILTER: filters,
871
885
  }
872
886
 
873
- # Get first page to determine total pages
874
887
  if semaphore is None:
875
888
  semaphore = self._api.get_default_semaphore()
876
- images_figures = defaultdict(list)
877
- pages_count = None
878
- total = 0
879
- tasks = []
880
889
 
881
- async def _get_page(page_data, page_num):
890
+ async def _get_page_figures(page_data, semaphore, progress_cb: tqdm = None):
891
+ """Helper function to get figures from a single page"""
882
892
  async with semaphore:
883
893
  response = await self._api.post_async("figures.list", page_data)
884
894
  response_json = response.json()
885
- nonlocal pages_count, total
886
- pages_count = response_json["pagesCount"]
887
- if page_num == 1:
888
- total = response_json["total"]
889
895
 
890
896
  page_figures = []
891
897
  for info in response_json["entities"]:
892
898
  figure_info = self._convert_json_info(info, True)
893
899
  page_figures.append(figure_info)
900
+ if progress_cb is not None:
901
+ progress_cb.update(len(response_json["entities"]))
894
902
  return page_figures
895
903
 
896
- # Get first page
897
- data[ApiField.PAGE] = 1
898
- first_page_figures = await _get_page(data, 1)
904
+ async def _get_all_pages(ids_filter, progress_cb: tqdm = None):
905
+ """Internal function to process all pages for given filter"""
906
+ data = base_data.copy()
907
+ data[ApiField.FILTER] = ids_filter
908
+
909
+ # Get first page to determine pagination
910
+ data[ApiField.PAGE] = 1
911
+ async with semaphore:
912
+ response = await self._api.post_async("figures.list", data)
913
+ response_json = response.json()
914
+
915
+ pages_count = response_json["pagesCount"]
916
+ all_figures = []
917
+
918
+ # Process first page
919
+ for info in response_json["entities"]:
920
+ figure_info = self._convert_json_info(info, True)
921
+ all_figures.append(figure_info)
922
+ if progress_cb is not None:
923
+ progress_cb.update(len(response_json["entities"]))
924
+
925
+ # Process remaining pages in parallel if needed
926
+ if pages_count > 1:
927
+ tasks = []
928
+ for page in range(2, pages_count + 1):
929
+ page_data = data.copy()
930
+ page_data[ApiField.PAGE] = page
931
+ tasks.append(
932
+ asyncio.create_task(
933
+ _get_page_figures(page_data, semaphore, progress_cb=progress_cb)
934
+ )
935
+ )
936
+
937
+ if tasks:
938
+ page_results = await asyncio.gather(*tasks)
939
+ for page_figures in page_results:
940
+ all_figures.extend(page_figures)
941
+
942
+ return all_figures
899
943
 
900
944
  if log_progress:
901
- progress_cb = tqdm(total=total, desc="Downloading figures")
945
+ progress_cb = tqdm(desc="Downloading figures", unit="figure", total=0)
946
+ else:
947
+ progress_cb = None
902
948
 
903
- for figure in first_page_figures:
904
- images_figures[figure.entity_id].append(figure)
905
- if log_progress:
906
- progress_cb.update(1)
907
-
908
- # Get rest of the pages in parallel
909
- if pages_count > 1:
910
- for page in range(2, pages_count + 1):
911
- page_data = data.copy()
912
- page_data[ApiField.PAGE] = page
913
- tasks.append(asyncio.create_task(_get_page(page_data, page)))
914
-
915
- for task in asyncio.as_completed(tasks):
916
- page_figures = await task
917
- for figure in page_figures:
918
- images_figures[figure.entity_id].append(figure)
919
- if log_progress:
920
- progress_cb.update(1)
949
+ # Strategy: batch processing based on image_ids
950
+ tasks = []
951
+
952
+ if image_ids is None:
953
+ # Single task for all figures in dataset
954
+ filters = []
955
+ tasks.append(_get_all_pages(filters, progress_cb=progress_cb))
956
+ else:
957
+ # Batch image_ids and create tasks for each batch
958
+ for batch_ids in batched(image_ids, batch_size):
959
+ filters = [
960
+ {
961
+ ApiField.FIELD: ApiField.ENTITY_ID,
962
+ ApiField.OPERATOR: "in",
963
+ ApiField.VALUE: list(batch_ids),
964
+ }
965
+ ]
966
+ tasks.append(_get_all_pages(filters, progress_cb=progress_cb))
967
+ # Small delay between batches to reduce server load
968
+ await asyncio.sleep(0.02)
969
+
970
+ # Execute all tasks in parallel and collect results
971
+ all_results = await asyncio.gather(*tasks)
972
+
973
+ # Combine results from all batches
974
+ images_figures = defaultdict(list)
975
+
976
+ for batch_figures in all_results:
977
+ for figure in batch_figures:
978
+ images_figures[figure.entity_id].append(figure)
921
979
 
922
980
  return dict(images_figures)
923
981
 
@@ -928,6 +986,7 @@ class FigureApi(RemoveableBulkModuleApi):
928
986
  skip_geometry: bool = False,
929
987
  semaphore: Optional[asyncio.Semaphore] = None,
930
988
  log_progress: bool = True,
989
+ batch_size: int = 300,
931
990
  ) -> Dict[int, List[FigureInfo]]:
932
991
  """
933
992
  Download figures for the given dataset ID. Can be filtered by image IDs.
@@ -945,6 +1004,10 @@ class FigureApi(RemoveableBulkModuleApi):
945
1004
  :type semaphore: Optional[asyncio.Semaphore], optional
946
1005
  :param log_progress: If True, log the progress of the download.
947
1006
  :type log_progress: bool, optional
1007
+ :param batch_size: Size of the batch for downloading figures per 1 request. Default is 300.
1008
+ Used for batching image_ids when filtering by specific images.
1009
+ Adjust this value for optimal performance, value cannot exceed 500.
1010
+ :type batch_size: int, optional
948
1011
 
949
1012
  :return: A dictionary where keys are image IDs and values are lists of figures.
950
1013
  :rtype: Dict[int, List[FigureInfo]]
@@ -970,6 +1033,7 @@ class FigureApi(RemoveableBulkModuleApi):
970
1033
  skip_geometry=skip_geometry,
971
1034
  semaphore=semaphore,
972
1035
  log_progress=log_progress,
1036
+ batch_size=batch_size,
973
1037
  )
974
1038
  )
975
1039
  except Exception: