supervisely 6.73.363__py3-none-any.whl → 6.73.364__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -1,3 +1,6 @@
1
+ # coding: utf-8
2
+ """deploy pretrained and custom models"""
3
+
1
4
  from __future__ import annotations
2
5
 
3
6
  import time
@@ -1,3 +1,6 @@
1
+ # coding: utf-8
2
+ """deploy and connect to running inference apps"""
3
+
1
4
  from __future__ import annotations
2
5
 
3
6
  from typing import Dict, List
supervisely/io/fs.py CHANGED
@@ -1153,6 +1153,12 @@ def download(
1153
1153
  def copy_dir_recursively(
1154
1154
  src_dir: str, dst_dir: str, progress_cb: Optional[Union[tqdm, Callable]] = None
1155
1155
  ) -> List[str]:
1156
+ mkdir(dst_dir)
1157
+
1158
+ for rel_sub_dir in get_subdirs(src_dir, recursive=True):
1159
+ dst_sub_dir = os.path.join(dst_dir, rel_sub_dir)
1160
+ mkdir(dst_sub_dir)
1161
+
1156
1162
  files = list_files_recursively(src_dir)
1157
1163
  for src_file_path in files:
1158
1164
  dst_file_path = os.path.normpath(src_file_path.replace(src_dir, dst_dir))
@@ -1,3 +1,6 @@
1
+ # coding: utf-8
2
+ """load and inference models"""
3
+
1
4
  from __future__ import annotations
2
5
  import os
3
6
  from os import PathLike
@@ -1,3 +1,6 @@
1
+ # coding: utf-8
2
+ """single prediction object"""
3
+
1
4
  from __future__ import annotations
2
5
 
3
6
  import atexit
@@ -65,6 +65,7 @@ from supervisely.project.download import (
65
65
  download_fast,
66
66
  download_to_cache,
67
67
  get_cache_size,
68
+ get_dataset_path,
68
69
  is_cached,
69
70
  )
70
71
 
@@ -925,10 +926,15 @@ class TrainApp:
925
926
  :param total_images: Total number of images to download.
926
927
  :type total_images: int
927
928
  """
929
+ ds_paths = {
930
+ info.id: get_dataset_path(self._api, dataset_infos, info.id) for info in dataset_infos
931
+ }
928
932
  to_download = [
929
- info for info in dataset_infos if not is_cached(self.project_info.id, info.name)
933
+ info for info in dataset_infos if not is_cached(self.project_info.id, ds_paths[info.id])
934
+ ]
935
+ cached = [
936
+ info for info in dataset_infos if is_cached(self.project_info.id, ds_paths[info.id])
930
937
  ]
931
- cached = [info for info in dataset_infos if is_cached(self.project_info.id, info.name)]
932
938
 
933
939
  logger.info(self._get_cache_log_message(cached, to_download))
934
940
  with self.progress_bar_main(message="Downloading input data", total=total_images) as pbar:
@@ -955,7 +961,7 @@ class TrainApp:
955
961
  copy_from_cache(
956
962
  project_id=self.project_info.id,
957
963
  dest_dir=self.project_dir,
958
- dataset_names=[ds_info.name for ds_info in dataset_infos],
964
+ dataset_names=[ds_paths[info.id] for info in dataset_infos],
959
965
  progress_cb=pbar.update,
960
966
  )
961
967
  self.progress_bar_main.hide()
@@ -415,7 +415,7 @@ def _get_ds_full_name(
415
415
  if dataset_info.parent_id is None:
416
416
  return dataset_info.name + suffix
417
417
  parent = next((ds_info for ds_info in all_ds_infos if ds_info.id == dataset_info.parent_id))
418
- return _get_ds_full_name(parent, all_ds_infos, "/" + dataset_info.name)
418
+ return _get_ds_full_name(parent, all_ds_infos, "/" + dataset_info.name + suffix)
419
419
 
420
420
 
421
421
  def _validate_dataset(
@@ -501,11 +501,11 @@ def _validate(
501
501
  ):
502
502
  project_id = project_info.id
503
503
  to_download, cached = _split_by_cache(
504
- project_id, [_get_dataset_path(api, dataset_infos, info.id) for info in dataset_infos]
504
+ project_id, [get_dataset_path(api, dataset_infos, info.id) for info in dataset_infos]
505
505
  )
506
506
  to_download, cached = set(to_download), set(cached)
507
507
  for dataset_info in dataset_infos:
508
- ds_path = _get_dataset_path(api, dataset_infos, dataset_info.id)
508
+ ds_path = get_dataset_path(api, dataset_infos, dataset_info.id)
509
509
  if ds_path in to_download:
510
510
  continue
511
511
  if not _validate_dataset(
@@ -615,7 +615,7 @@ def download_to_cache(
615
615
  dataset_infos = all_ds_infos
616
616
  else:
617
617
  dataset_infos = [ds_info for ds_info in all_ds_infos if ds_info.id in dataset_ids]
618
- path_to_info = {_get_dataset_path(api, dataset_infos, info.id): info for info in dataset_infos}
618
+ path_to_info = {get_dataset_path(api, dataset_infos, info.id): info for info in dataset_infos}
619
619
  to_download, cached = _validate(api, project_info, project_meta, dataset_infos, all_ds_infos)
620
620
  if progress_cb is not None:
621
621
  cached_items_n = sum(path_to_info[ds_path].items_count for ds_path in cached)
@@ -632,24 +632,28 @@ def download_to_cache(
632
632
  return to_download, cached
633
633
 
634
634
 
635
- def _get_dataset_parents(api, dataset_infos, dataset_id):
635
+ def _get_dataset_parents(api: Api, dataset_infos: List[DatasetInfo], dataset_id):
636
636
  dataset_infos_dict = {info.id: info for info in dataset_infos}
637
- this_dataset_info = dataset_infos_dict.get(dataset_id, api.dataset.get_info_by_id(dataset_id))
637
+ this_dataset_info = dataset_infos_dict.get(dataset_id, None)
638
+ if this_dataset_info is None:
639
+ this_dataset_info = api.dataset.get_info_by_id(dataset_id)
638
640
  if this_dataset_info.parent_id is None:
639
641
  return []
640
642
  parent = _get_dataset_parents(
641
643
  api, list(dataset_infos_dict.values()), this_dataset_info.parent_id
642
644
  )
643
- this_parent_name = dataset_infos_dict.get(
644
- this_dataset_info.parent_id, api.dataset.get_info_by_id(dataset_id)
645
- ).name
646
- return [*parent, this_parent_name]
645
+ this_parent = dataset_infos_dict.get(this_dataset_info.parent_id, None)
646
+ if this_parent is None:
647
+ this_parent = api.dataset.get_info_by_id(this_dataset_info.parent_id)
648
+ return [*parent, this_parent.name]
647
649
 
648
650
 
649
- def _get_dataset_path(api: Api, dataset_infos: List[DatasetInfo], dataset_id: int) -> str:
651
+ def get_dataset_path(api: Api, dataset_infos: List[DatasetInfo], dataset_id: int) -> str:
650
652
  parents = _get_dataset_parents(api, dataset_infos, dataset_id)
651
653
  dataset_infos_dict = {info.id: info for info in dataset_infos}
652
- this_dataset_info = dataset_infos_dict.get(dataset_id, api.dataset.get_info_by_id(dataset_id))
654
+ this_dataset_info = dataset_infos_dict.get(dataset_id, None)
655
+ if this_dataset_info is None:
656
+ this_dataset_info = api.dataset.get_info_by_id(dataset_id)
653
657
  return Dataset._get_dataset_path(this_dataset_info.name, parents)
654
658
 
655
659
 
@@ -612,7 +612,7 @@ class Dataset(KeyObject):
612
612
  logger.warning(f"Failed to read blob offset file {offset_file_path}: {str(e)}")
613
613
 
614
614
  if len(img_names) == 0 and len(raw_ann_names) == 0:
615
- logger.info("Dataset {!r} is empty".format(self.name))
615
+ logger.debug(f"Dataset '{self.name}' is empty")
616
616
  # raise RuntimeError("Dataset {!r} is empty".format(self.name))
617
617
 
618
618
  if len(img_names) == 0: # items_names polyfield
@@ -1641,10 +1641,10 @@ class Dataset(KeyObject):
1641
1641
  """
1642
1642
  if self.item_exists(item_name):
1643
1643
  data_path, ann_path = self.get_item_paths(item_name)
1644
- img_info_path = self.get_img_info_path(item_name)
1644
+ item_info_path = self.get_item_info_path(item_name)
1645
1645
  silent_remove(data_path)
1646
1646
  silent_remove(ann_path)
1647
- silent_remove(img_info_path)
1647
+ silent_remove(item_info_path)
1648
1648
  self._item_to_ann.pop(item_name)
1649
1649
  return True
1650
1650
  return False
@@ -1318,6 +1318,7 @@ def download_video_project(
1318
1318
 
1319
1319
  key_id_map = KeyIdMap()
1320
1320
 
1321
+ project_fs = None
1321
1322
  meta = ProjectMeta.from_json(api.project.get_meta(project_id, with_settings=True))
1322
1323
  if os.path.exists(dest_dir) and resume_download:
1323
1324
  dump_json_file(meta.to_json(), os.path.join(dest_dir, "meta.json"))
@@ -1337,12 +1338,16 @@ def download_video_project(
1337
1338
  log_progress = False
1338
1339
 
1339
1340
  dataset_ids = set(dataset_ids) if (dataset_ids is not None) else None
1341
+ existing_datasets = {dataset.path: dataset for dataset in project_fs.datasets}
1340
1342
  for parents, dataset in api.dataset.tree(project_id):
1341
1343
  if dataset_ids is not None and dataset.id not in dataset_ids:
1342
1344
  continue
1343
1345
 
1344
1346
  dataset_path = Dataset._get_dataset_path(dataset.name, parents)
1345
- dataset_fs = project_fs.create_dataset(dataset.name, dataset_path)
1347
+ if dataset_path in existing_datasets:
1348
+ dataset_fs = existing_datasets[dataset_path]
1349
+ else:
1350
+ dataset_fs = project_fs.create_dataset(dataset.name, dataset_path)
1346
1351
  videos = api.video.get_list(dataset.id)
1347
1352
 
1348
1353
  ds_progress = progress_cb
@@ -1499,6 +1504,9 @@ def upload_video_project(
1499
1504
  item_paths.append(video_path)
1500
1505
  ann_paths.append(ann_path)
1501
1506
 
1507
+ if len(item_paths) == 0:
1508
+ continue
1509
+
1502
1510
  ds_progress = progress_cb
1503
1511
  if log_progress is True:
1504
1512
  ds_progress = tqdm_sly(
@@ -1622,6 +1630,7 @@ async def download_video_project_async(
1622
1630
 
1623
1631
  key_id_map = KeyIdMap()
1624
1632
 
1633
+ project_fs = None
1625
1634
  meta = ProjectMeta.from_json(api.project.get_meta(project_id, with_settings=True))
1626
1635
  if os.path.exists(dest_dir) and resume_download:
1627
1636
  dump_json_file(meta.to_json(), os.path.join(dest_dir, "meta.json"))
@@ -1647,7 +1656,11 @@ async def download_video_project_async(
1647
1656
 
1648
1657
  dataset_path = Dataset._get_dataset_path(dataset.name, parents)
1649
1658
 
1650
- dataset_fs = project_fs.create_dataset(dataset.name, dataset_path)
1659
+ existing_datasets = {dataset.path: dataset for dataset in project_fs.datasets}
1660
+ if dataset_path in existing_datasets:
1661
+ dataset_fs = existing_datasets[dataset_path]
1662
+ else:
1663
+ dataset_fs = project_fs.create_dataset(dataset.name, dataset_path)
1651
1664
  videos = api.video.get_list(dataset.id)
1652
1665
 
1653
1666
  if log_progress is True:
@@ -1769,7 +1782,7 @@ async def _download_project_item_async(
1769
1782
  try:
1770
1783
  await dataset_fs.add_item_file_async(
1771
1784
  video.name,
1772
- video_file_path,
1785
+ None,
1773
1786
  ann=video_ann,
1774
1787
  _validate_item=False,
1775
1788
  _use_hardlink=True,
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: supervisely
3
- Version: 6.73.363
3
+ Version: 6.73.364
4
4
  Summary: Supervisely Python SDK.
5
5
  Home-page: https://github.com/supervisely/supervisely
6
6
  Author: Supervisely
@@ -55,8 +55,8 @@ supervisely/api/entity_annotation/figure_api.py,sha256=rmsE3L_JfqN94sLN637pQ0syi
55
55
  supervisely/api/entity_annotation/object_api.py,sha256=gbcNvN_KY6G80Me8fHKQgryc2Co7VU_kfFd1GYILZ4E,8875
56
56
  supervisely/api/entity_annotation/tag_api.py,sha256=h19YsJzJLDp0VIicQzoYCRyVhY149KY7pUysb4XX0RI,11114
57
57
  supervisely/api/nn/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
58
- supervisely/api/nn/deploy_api.py,sha256=2n0dKGfgte4Hh5WnYQTNhuUE2PjPgr5REqT5J6XkJXQ,34267
59
- supervisely/api/nn/neural_network_api.py,sha256=GqdrfwSNfES7JiVCL0hJZsDJ_lXGGkWJmAarq8lkyDE,10395
58
+ supervisely/api/nn/deploy_api.py,sha256=V7zWO8yoroFDifIqLIFYmZA72tLQriH5kYAhN0-50co,34326
59
+ supervisely/api/nn/neural_network_api.py,sha256=vZyYBaKQzLJX9G3SAt09LmsxNLC8h88oYJ9b_PACzp0,10466
60
60
  supervisely/api/pointcloud/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
61
61
  supervisely/api/pointcloud/pointcloud_annotation_api.py,sha256=x2Bw_1ZaGZffc89k670LWQiwMhlb4CbB-6suDpHJRgg,11256
62
62
  supervisely/api/pointcloud/pointcloud_api.py,sha256=pn72znCr5hkAfgniXxfD6Vi8-HqRb1Nrf6l23-HQ7Bc,53277
@@ -720,7 +720,7 @@ supervisely/io/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
720
720
  supervisely/io/docker_utils.py,sha256=hb_HXGM8IYB0PF-nD7NxMwaHgzaxIFxofsUzQ_RCUZI,7935
721
721
  supervisely/io/env.py,sha256=DLsoRhouPT-y5wJzzJBs7zhJ2UxOIvIcQVbVLP5Yx7U,18256
722
722
  supervisely/io/exception_handlers.py,sha256=_nAgMFeE94bCxEvWakR82hMtdOJUyn7Gc7OymMxI9WI,36484
723
- supervisely/io/fs.py,sha256=RWqw-HbKgam9-q1DeDCom94E1rnc1WEKorpx8mpUivw,63381
723
+ supervisely/io/fs.py,sha256=GSgD6dMYd-NECTYlN7BB1jWlxQXWkHGCN5ZtqUpWcpo,63547
724
724
  supervisely/io/fs_cache.py,sha256=985gvBGzveLcDudgz10E4EWVjP9jxdU1Pa0GFfCBoCA,6520
725
725
  supervisely/io/github_utils.py,sha256=jGmvQJ5bjtACuSFABzrxL0jJdh14SezovrHp8T-9y8g,1779
726
726
  supervisely/io/json.py,sha256=25gBqA8nkKZW1xvssdmRYuJrO5fmIR0Z5cZGePfrJV4,8539
@@ -963,8 +963,8 @@ supervisely/nn/legacy/pytorch/weights.py,sha256=Zb9kcpUCg6ykr7seO53CkKSQa2K44wo8
963
963
  supervisely/nn/legacy/training/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
964
964
  supervisely/nn/legacy/training/eval_planner.py,sha256=zN9b0_CX7sWGdC8e6riTvD-NOUc3_Xduyhj00S7PEIo,1311
965
965
  supervisely/nn/model/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
966
- supervisely/nn/model/model_api.py,sha256=3dh0rLVT_u7B_ZDdt7kmYGVTmmetwTIRcmGYHg3VGj4,9617
967
- supervisely/nn/model/prediction.py,sha256=2Skw5bd5OXrDYS_6-TuxDIfNum8qNFzTExJJGZsC-EA,11259
966
+ supervisely/nn/model/model_api.py,sha256=rIne6ymvL2aVwzSTJMGxAbevE0U-f-WKhDciqaneqRc,9666
967
+ supervisely/nn/model/prediction.py,sha256=N3oO9s3NDiC5CFvW8utfU8rz3bfpCl37Sk4VEBH94Bc,11307
968
968
  supervisely/nn/model/prediction_session.py,sha256=3a3seJ7vCVfoUbbSqLxfomqYqUffpEkkeT11YRnBA3o,24438
969
969
  supervisely/nn/tracker/__init__.py,sha256=LiojByb5kGsTQ49lWuboEh7B4JUwM1vfz81J8kJlLYo,337
970
970
  supervisely/nn/tracker/tracker.py,sha256=Hs0c07l9IVF86jvjloNAGJsSZAHuNQZf0kVaUHfw3Fs,9694
@@ -994,7 +994,7 @@ supervisely/nn/tracking/__init__.py,sha256=Ld1ed7ZZQZPkhX-5Xr-UbHZx5zLCm2-tInHnP
994
994
  supervisely/nn/tracking/boxmot.py,sha256=H9cQjYGL9nX_TLrfKDChhljTIiE9lffcgbwWCf_4PJU,4277
995
995
  supervisely/nn/tracking/tracking.py,sha256=WNrNm02B1pspA3d_AmzSJ-54RZTqWV2NZiC7FHe88bo,857
996
996
  supervisely/nn/training/__init__.py,sha256=gY4PCykJ-42MWKsqb9kl-skemKa8yB6t_fb5kzqR66U,111
997
- supervisely/nn/training/train_app.py,sha256=lhFQ_bm7dxDuiKHMDmU3wWP5lTRniGXkjw6JlXARrk8,112067
997
+ supervisely/nn/training/train_app.py,sha256=23nBt9fxa8LBIzFT_8cKwXoxRu2kkCmAC1-9dnUFgPg,112259
998
998
  supervisely/nn/training/gui/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
999
999
  supervisely/nn/training/gui/classes_selector.py,sha256=Bpp-RFDQqcZ0kLJmS6ZnExkdscWwRusvF4vbWjEsKlQ,3926
1000
1000
  supervisely/nn/training/gui/gui.py,sha256=_8l7dXoQGs3c5iIAUzWPGC6AQbN920iLW1riBDDV3hY,43183
@@ -1032,16 +1032,16 @@ supervisely/pointcloud_episodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
1032
1032
  supervisely/pointcloud_episodes/pointcloud_episodes.py,sha256=cRXdtw7bMsbsdVQjxfWxFSESrO-LGiqqsZyyExl2Mbg,3430
1033
1033
  supervisely/project/__init__.py,sha256=hlzdj9Pgy53Q3qdP8LMtGTChvZHQuuShdtui2eRUQeE,2601
1034
1034
  supervisely/project/data_version.py,sha256=P5Lui6i64pYeJWmAdGJDv8GRXxjfpSSZ8zT_MxIrynE,19553
1035
- supervisely/project/download.py,sha256=nhxID-kbsNTgIY9l1lnRuUlzKrsJw80X07jEElyl3sE,28466
1035
+ supervisely/project/download.py,sha256=b6mpvkKA37yuVrRyv2ctfnTuS5j161LREfxlt6KkT1A,28677
1036
1036
  supervisely/project/pointcloud_episode_project.py,sha256=yiWdNBQiI6f1O9sr1pg8JHW6O-w3XUB1rikJNn3Oung,41866
1037
1037
  supervisely/project/pointcloud_project.py,sha256=Kx1Vaes-krwG3BiRRtHRLQxb9G5m5bTHPN9IzRqmNWo,49399
1038
- supervisely/project/project.py,sha256=RokArUC17P7HXssoYXSKVCBo6DJvclwD-ozW7ysMWww,238002
1038
+ supervisely/project/project.py,sha256=a--aXygVMQDHVLKpLQBh5kVo4ZKEH9pnCrzYPOCoNA8,237998
1039
1039
  supervisely/project/project_meta.py,sha256=26s8IiHC5Pg8B1AQi6_CrsWteioJP2in00cRNe8QlW0,51423
1040
1040
  supervisely/project/project_settings.py,sha256=NLThzU_DCynOK6hkHhVdFyezwprn9UqlnrLDe_3qhkY,9347
1041
1041
  supervisely/project/project_type.py,sha256=7mQ7zg6r7Bm2oFn5aR8n_PeLqMmOaPZd6ph7Z8ZISTw,608
1042
1042
  supervisely/project/readme_template.md,sha256=VovSn591tcpz2xiwGgErm34iGHVxuGxXGPX6-iDiS88,9251
1043
1043
  supervisely/project/upload.py,sha256=AjgHYgVZwUE25ygC5pqvFjdAladbyB8T78mlet5Qpho,3750
1044
- supervisely/project/video_project.py,sha256=zAtB3YpW9tC9Tc3qfapbQ9O2nhAWU2wDjMuS5sepXqc,65297
1044
+ supervisely/project/video_project.py,sha256=FNMaj5ZD54PlIZBFXag1ZooUQAKEQrw8hbiggZsN_XM,65797
1045
1045
  supervisely/project/volume_project.py,sha256=Kn9VEvWuKKZvL2nx6B6bjSvHuoZhAOxEc6DvPRexUco,22666
1046
1046
  supervisely/pyscripts_utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
1047
1047
  supervisely/pyscripts_utils/utils.py,sha256=scEwHJvHRQa8NHIOn2eTwH6-Zc8CGdLoxM-WzH9jcRo,314
@@ -1097,9 +1097,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
1097
1097
  supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
1098
1098
  supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
1099
1099
  supervisely_lib/__init__.py,sha256=7-3QnN8Zf0wj8NCr2oJmqoQWMKKPKTECvjH9pd2S5vY,159
1100
- supervisely-6.73.363.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
1101
- supervisely-6.73.363.dist-info/METADATA,sha256=6PAf_uEkpJWOi9CeWKDgUCmCGH7wEJeK7Gu41pIYqu8,35151
1102
- supervisely-6.73.363.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
1103
- supervisely-6.73.363.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
1104
- supervisely-6.73.363.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
1105
- supervisely-6.73.363.dist-info/RECORD,,
1100
+ supervisely-6.73.364.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
1101
+ supervisely-6.73.364.dist-info/METADATA,sha256=0BL3k_htpIh5B0XQb8NCxjYct8s-iRkC2OLOxRV48Yo,35151
1102
+ supervisely-6.73.364.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
1103
+ supervisely-6.73.364.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
1104
+ supervisely-6.73.364.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
1105
+ supervisely-6.73.364.dist-info/RECORD,,