supervisely 6.73.323__py3-none-any.whl → 6.73.325__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
supervisely/_utils.py CHANGED
@@ -471,9 +471,20 @@ def get_or_create_event_loop() -> asyncio.AbstractEventLoop:
  return loop
 
 
- def sync_call(coro):
+ def run_coroutine(coroutine):
  """
- This function is used to run asynchronous functions in synchronous context.
+ Runs an asynchronous coroutine in a synchronous context and waits for its result.
+
+ This function checks if an event loop is already running:
+ - If a loop is running, it schedules the coroutine using `asyncio.run_coroutine_threadsafe()`
+ and waits for the result.
+ - If no loop is running, it creates one and executes the coroutine with `run_until_complete()`.
+
+ This ensures compatibility with both synchronous and asynchronous environments
+ without creating unnecessary event loops.
+
+ ⚠️ Note: This method is preferable when working with `asyncio` objects like `Semaphore`,
+ since it avoids issues with mismatched event loops.
 
  :param coro: Asynchronous function.
  :type coro: Coroutine
@@ -484,13 +495,13 @@ def sync_call(coro):
 
  .. code-block:: python
 
- from supervisely.utils import sync_call
+ from supervisely._utils import run_coroutine
 
  async def async_function():
  await asyncio.sleep(1)
  return "Hello, World!"
  coro = async_function()
- result = sync_call(coro)
+ result = run_coroutine(coro)
  print(result)
  # Output: Hello, World!
  """
@@ -498,10 +509,10 @@ def sync_call(coro):
  loop = get_or_create_event_loop()
 
  if loop.is_running():
- future = asyncio.run_coroutine_threadsafe(coro, loop=loop)
+ future = asyncio.run_coroutine_threadsafe(coroutine, loop=loop)
  return future.result()
  else:
- return loop.run_until_complete(coro)
+ return loop.run_until_complete(coroutine)
 
 
  def get_filename_from_headers(url):
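For orientation, a minimal sketch of calling the renamed helper from synchronous code (names are taken from the diff above; the Semaphore detail follows the note in the new docstring):

    import asyncio

    from supervisely._utils import run_coroutine

    async def fetch(sem: asyncio.Semaphore) -> str:
        # the semaphore and the coroutine end up on the same default loop,
        # which is why the docstring prefers run_coroutine over run_sync here
        async with sem:
            await asyncio.sleep(0.1)
            return "done"

    semaphore = asyncio.Semaphore(2)
    print(run_coroutine(fetch(semaphore)))  # "done"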
supervisely/api/file_api.py CHANGED
@@ -23,8 +23,9 @@ from typing_extensions import Literal
 
  import supervisely.io.env as env
  import supervisely.io.fs as sly_fs
- from supervisely._utils import batched, rand_str
+ from supervisely._utils import batched, rand_str, run_coroutine
  from supervisely.api.module_api import ApiField, ModuleApiBase
+ from supervisely.api.remote_storage_api import RemoteStorageApi
  from supervisely.io.fs import (
  ensure_base_path,
  get_file_ext,
@@ -1420,7 +1421,8 @@ class FileApi(ModuleApiBase):
  api.file.upload_directory(9, local_path, path_to_dir)
  """
  if not remote_dir.startswith("/"):
- remote_dir = "/" + remote_dir
+ if not RemoteStorageApi.is_bucket_url(remote_dir):
+ remote_dir = "/" + remote_dir
 
  if self.dir_exists(team_id, remote_dir):
  if change_name_if_conflict is True:
@@ -2302,3 +2304,89 @@ class FileApi(ModuleApiBase):
  else:
  raise e
  return res_remote_dir
+
+ def upload_directory_fast(
+ self,
+ team_id: int,
+ local_dir: str,
+ remote_dir: str,
+ change_name_if_conflict: Optional[bool] = True,
+ progress_cb: Optional[Union[tqdm, Callable]] = None,
+ replace_if_conflict: Optional[bool] = False,
+ enable_fallback: Optional[bool] = True,
+ ) -> str:
+ """
+ Upload Directory to Team Files from local path in fast mode.
+ Files are uploaded asynchronously. If an error occurs, the method will fallback to synchronous upload.
+
+ :param team_id: Team ID in Supervisely.
+ :type team_id: int
+ :param local_dir: Path to local Directory.
+ :type local_dir: str
+ :param remote_dir: Path to Directory in Team Files.
+ :type remote_dir: str
+ :param change_name_if_conflict: Checks if given name already exists and adds suffix to the end of the name.
+ :type change_name_if_conflict: bool, optional
+ :param progress_cb: Function for tracking download progress in bytes.
+ :type progress_cb: Progress, optional
+ :param replace_if_conflict: If True, replace existing dir.
+ :type replace_if_conflict: bool, optional
+ :param enable_fallback: If True, the method will fallback to synchronous upload if an error occurs.
+ :type enable_fallback: bool, optional
+ :return: Path to Directory in Team Files
+ :rtype: :class:`str`
+ """
+ coroutine = self.upload_directory_async(
+ team_id=team_id,
+ local_dir=local_dir,
+ remote_dir=remote_dir,
+ change_name_if_conflict=change_name_if_conflict,
+ progress_size_cb=progress_cb,
+ replace_if_conflict=replace_if_conflict,
+ enable_fallback=enable_fallback,
+ )
+ return run_coroutine(coroutine)
+
+ def upload_bulk_fast(
+ self,
+ team_id: int,
+ src_paths: List[str],
+ dst_paths: List[str],
+ semaphore: Optional[asyncio.Semaphore] = None,
+ progress_cb: Optional[Union[tqdm, Callable]] = None,
+ progress_cb_type: Literal["number", "size"] = "size",
+ enable_fallback: Optional[bool] = True,
+ ) -> None:
+ """
+ Upload multiple files from local paths to Team Files in fast mode.
+ Files are uploaded asynchronously. If an error occurs, the method will fallback to synchronous upload.
+
+ :param team_id: Team ID in Supervisely.
+ :type team_id: int
+ :param src_paths: List of local paths to files.
+ :type src_paths: List[str]
+ :param dst_paths: List of paths to save files in Team Files.
+ :type dst_paths: List[str]
+ :param semaphore: Semaphore for limiting the number of simultaneous uploads.
+ :type semaphore: asyncio.Semaphore, optional
+ :param progress_cb: Function for tracking download progress.
+ :type progress_cb: tqdm or callable, optional
+ :param progress_cb_type: Type of progress callback. Can be "number" or "size". Default is "size".
+ "size" is used to track the number of transferred bytes.
+ "number" is used to track the number of transferred files.
+ :type progress_cb_type: Literal["number", "size"], optional
+ :param enable_fallback: If True, the method will fallback to synchronous upload if an error occurs.
+ :type enable_fallback: bool, optional
+ :return: None
+ :rtype: :class:`NoneType`
+ """
+ coroutine = self.upload_bulk_async(
+ team_id=team_id,
+ src_paths=src_paths,
+ dst_paths=dst_paths,
+ semaphore=semaphore,
+ progress_cb=progress_cb,
+ progress_cb_type=progress_cb_type,
+ enable_fallback=enable_fallback,
+ )
+ return run_coroutine(coroutine)
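A hedged usage sketch of the two new wrappers (team id and paths below are placeholders, not from the diff):

    import supervisely as sly

    api = sly.Api.from_env()
    team_id = 8  # hypothetical team

    # wraps upload_directory_async and waits for the result via run_coroutine
    remote_dir = api.file.upload_directory_fast(team_id, "/tmp/artifacts", "/my-artifacts")

    # bulk variant: src_paths and dst_paths must be the same length
    api.file.upload_bulk_fast(
        team_id,
        src_paths=["/tmp/a.json", "/tmp/b.json"],
        dst_paths=["/my-artifacts/a.json", "/my-artifacts/b.json"],
    )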
supervisely/app/fastapi/utils.py CHANGED
@@ -3,6 +3,20 @@ import concurrent.futures
 
 
  def run_sync(coroutine):
+ """
+ Runs an asynchronous coroutine in a separate thread and waits for its result.
+ It is useful for running async functions in a synchronous
+ environment.
+
+ This method creates a new thread using ThreadPoolExecutor and executes the coroutine
+ inside a new event loop.
+
+ ⚠️ Note: This function creates a new event loop every time it is called,
+ which can cause issues when using objects tied to a specific loop (e.g., asyncio.Semaphore).
+
+ :param coroutine: coroutine to run
+ :return: result of coroutine
+ """
  try:
  with concurrent.futures.ThreadPoolExecutor(max_workers=1) as executor:
  result = executor.submit(
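To illustrate the note above, a small sketch contrasting the two helpers (assuming run_sync lives in supervisely.app.fastapi.utils, as the RECORD section below suggests):

    import asyncio

    from supervisely._utils import run_coroutine

    sem = asyncio.Semaphore(1)

    async def work():
        async with sem:
            return 42

    # run_sync would execute this in a fresh event loop inside a worker thread; on older
    # Python versions a Semaphore created here may be bound to a different loop and fail.
    # run_coroutine reuses one loop, so loop-bound objects behave consistently:
    print(run_coroutine(work()))  # 42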
supervisely/convert/converter.py CHANGED
@@ -57,12 +57,12 @@ class ImportManager:
  self._labeling_interface = labeling_interface
  self._upload_as_links = upload_as_links
  self._remote_files_map = {}
+ self._modality = project_type
 
  self._input_data = self._prepare_input_data(input_data)
  self._unpack_archives(self._input_data)
  remove_junk_from_dir(self._input_data)
 
- self._modality = project_type
  self._converter = self.get_converter()
  if isinstance(self._converter, (HighColorDepthImageConverter, CSVConverter)):
  self._converter.team_id = self._team_id
@@ -112,17 +112,27 @@ class ImportManager:
  logger.info(f"Input data is a local file: {input_data}. Will use its directory")
  return os.path.dirname(input_data)
  elif self._api.storage.exists(self._team_id, input_data):
- if self._upload_as_links:
+ if self._upload_as_links and str(self._modality) in [
+ ProjectType.IMAGES.value,
+ ProjectType.VIDEOS.value,
+ ]:
  logger.info(f"Input data is a remote file: {input_data}. Scanning...")
- return self._scan_remote_files(input_data)
+ return self._reproduce_remote_files(input_data)
  else:
+ if self._upload_as_links and str(self._modality) == ProjectType.VOLUMES.value:
+ self._scan_remote_files(input_data)
  logger.info(f"Input data is a remote file: {input_data}. Downloading...")
  return self._download_input_data(input_data)
  elif self._api.storage.dir_exists(self._team_id, input_data):
- if self._upload_as_links:
+ if self._upload_as_links and str(self._modality) in [
+ ProjectType.IMAGES.value,
+ ProjectType.VIDEOS.value,
+ ]:
  logger.info(f"Input data is a remote directory: {input_data}. Scanning...")
- return self._scan_remote_files(input_data, is_dir=True)
+ return self._reproduce_remote_files(input_data, is_dir=True)
  else:
+ if self._upload_as_links and str(self._modality) == ProjectType.VOLUMES.value:
+ self._scan_remote_files(input_data, is_dir=True)
  logger.info(f"Input data is a remote directory: {input_data}. Downloading...")
  return self._download_input_data(input_data, is_dir=True)
  else:
@@ -160,7 +170,35 @@ class ImportManager:
  return local_path
 
  def _scan_remote_files(self, remote_path, is_dir=False):
- """Scan remote directory and create dummy structure locally"""
+ """
+ Scan remote directory. Collect local-remote paths mapping
+ Will be used to save relations between uploaded files and remote files (for volumes).
+ """
+
+ dir_path = remote_path.rstrip("/") if is_dir else os.path.dirname(remote_path)
+ dir_name = os.path.basename(dir_path)
+
+ local_path = os.path.join(get_data_dir(), dir_name)
+
+ if is_dir:
+ files = self._api.storage.list(self._team_id, remote_path, include_folders=False)
+ else:
+ files = [self._api.storage.get_info_by_path(self._team_id, remote_path)]
+
+ unique_directories = set()
+ for file in files:
+ new_path = file.path.replace(dir_path, local_path)
+ self._remote_files_map[new_path] = file.path
+ unique_directories.add(str(Path(file.path).parent))
+
+ logger.info(f"Scanned remote directories:\n - " + "\n - ".join(unique_directories))
+ return local_path
+
+ def _reproduce_remote_files(self, remote_path, is_dir=False):
+ """
+ Scan remote directory and create dummy structure locally.
+ Will be used to detect annotation format (by dataset structure) remotely.
+ """
 
  dir_path = remote_path.rstrip("/") if is_dir else os.path.dirname(remote_path)
  dir_name = os.path.basename(dir_path)
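A hypothetical illustration of the local-to-remote mapping that _scan_remote_files builds (paths are made up, not from the diff):

    import os
    from pathlib import Path

    remote_files = ["/import/vols/ds1/CTChest.nii.gz", "/import/vols/ds1/lung.nii.gz"]
    dir_path = "/import/vols"
    local_path = os.path.join("/sly_data", os.path.basename(dir_path))

    # same replace-based mapping as in the method above: local dummy path -> remote path
    remote_files_map = {f.replace(dir_path, local_path): f for f in remote_files}
    unique_directories = {str(Path(f).parent) for f in remote_files}
    # {'/sly_data/vols/ds1/CTChest.nii.gz': '/import/vols/ds1/CTChest.nii.gz', ...}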
supervisely/convert/volume/__init__.py CHANGED
@@ -2,3 +2,6 @@
  from supervisely.convert.volume.sly.sly_volume_converter import SLYVolumeConverter
  from supervisely.convert.volume.dicom.dicom_converter import DICOMConverter
  from supervisely.convert.volume.nii.nii_volume_converter import NiiConverter
+ from supervisely.convert.volume.nii.nii_planes_volume_converter import (
+ NiiPlaneStructuredConverter,
+ )
supervisely/convert/volume/nii/nii_planes_volume_converter.py ADDED
@@ -0,0 +1,114 @@
+ import os
+ from collections import defaultdict
+ from pathlib import Path
+
+ from supervisely import ProjectMeta, logger
+ from supervisely.annotation.obj_class import ObjClass
+ from supervisely.convert.volume.nii import nii_volume_helper as helper
+ from supervisely.convert.volume.nii.nii_volume_converter import NiiConverter
+ from supervisely.convert.volume.volume_converter import VolumeConverter
+ from supervisely.geometry.mask_3d import Mask3D
+ from supervisely.io.fs import get_file_name
+ from supervisely.volume.volume import is_nifti_file
+ from supervisely.volume_annotation.volume_annotation import VolumeAnnotation
+ from supervisely.volume_annotation.volume_object import VolumeObject
+
+
+ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
+ """Convert NIfTI 3D volume file to Supervisely format.
+ The NIfTI file should be structured as follows:
+ - <prefix>_anatomic_<idx>.nii (or .nii.gz)
+ - <prefix>_inference_<idx>.nii (or .nii.gz)
+ where <prefix> is one of the following: cor, sag, axl
+ <idx> is the index of the volume (to match volumes with annotations)
+
+ Supports .nii and .nii.gz files.
+
+ Example:
+ 📂 .
+ ├── 🩻 axl_anatomic_1.nii
+ ├── 🩻 axl_inference_1.nii class 1 (may contain multiple instances of the same class)
+ ├── 🩻 cor_anatomic_1.nii
+ ├── 🩻 cor_inference_1.nii class 1
+ ├── 🩻 sag_anatomic_1.nii
+ ├── 🩻 sag_inference_1.nii class 1
+ ├── 🩻 sag_inference_2.nii class 2
+ └── 🩻 sag_inference_3.nii class 3
+ """
+
+ def validate_format(self) -> bool:
+ # create Items
+ converted_dir_name = "converted"
+
+ volumes_dict = defaultdict(list)
+ ann_dict = defaultdict(list)
+
+ for root, _, files in os.walk(self._input_data):
+ if converted_dir_name in root:
+ continue
+ for file in files:
+ path = os.path.join(root, file)
+ if is_nifti_file(path):
+ full_name = get_file_name(path)
+ if full_name.endswith(".nii"):
+ full_name = get_file_name(full_name)
+ prefix = full_name.split("_")[0]
+ if prefix not in helper.PlanePrefix.values():
+ continue
+ name = full_name.split("_")[1]
+ idx = 1 if len(name.split("_")) < 3 else int(name.split("_")[2])
+ if name in helper.LABEL_NAME or name[:-1] in helper.LABEL_NAME:
+ ann_dict[prefix].append(path)
+ else:
+ volumes_dict[prefix].append(path)
+
+ self._items = []
+ for prefix, paths in volumes_dict.items():
+ if len(paths) == 1:
+ item = self.Item(item_path=paths[0])
+ item.ann_data = ann_dict.get(prefix)
+ self._items.append(item)
+ elif len(paths) > 1:
+ logger.info(
+ f"Found {len(paths)} volumes with prefix {prefix}. Will try to match them by directories."
+ )
+ for path in paths:
+ item = self.Item(item_path=path)
+ possible_ann_paths = []
+ for ann_path in ann_dict.get(prefix):
+ if Path(ann_path).parent == Path(path).parent:
+ possible_ann_paths.append(ann_path)
+ item.ann_data = possible_ann_paths
+ self._items.append(item)
+ self._meta = ProjectMeta()
+ return self.items_count > 0
+
+ def to_supervisely(
+ self,
+ item: VolumeConverter.Item,
+ meta: ProjectMeta = None,
+ renamed_classes: dict = None,
+ renamed_tags: dict = None,
+ ) -> VolumeAnnotation:
+ """Convert to Supervisely format."""
+
+ try:
+ objs = []
+ spatial_figures = []
+ for idx, ann_path in enumerate(item.ann_data, start=1):
+ for mask, _ in helper.get_annotation_from_nii(ann_path):
+ class_name = f"Segment_{idx}"
+ class_name = renamed_classes.get(class_name, class_name)
+ obj_class = meta.get_obj_class(class_name)
+ if obj_class is None:
+ obj_class = ObjClass(class_name, Mask3D)
+ meta = meta.add_obj_class(obj_class)
+ self._meta_changed = True
+ self._meta = meta
+ obj = VolumeObject(obj_class, mask_3d=mask)
+ spatial_figures.append(obj.figure)
+ objs.append(obj)
+ return VolumeAnnotation(item.volume_meta, objects=objs, spatial_figures=spatial_figures)
+ except Exception as e:
+ logger.warning(f"Failed to convert {item.path} to Supervisely format: {e}")
+ return item.create_empty_annotation()
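A hypothetical restatement of the naming rule that validate_format() above relies on (the helper below is illustrative, not part of the SDK):

    from pathlib import Path

    PLANES = {"cor", "sag", "axl"}
    LABEL_WORDS = {"inference", "label", "annotation", "mask", "segmentation"}

    def classify(filename: str):
        stem = Path(filename).name.replace(".nii.gz", "").replace(".nii", "")
        prefix, role = stem.split("_")[0], stem.split("_")[1]
        if prefix not in PLANES:
            return None  # not part of the plane-structured layout
        kind = "annotation" if (role in LABEL_WORDS or role[:-1] in LABEL_WORDS) else "volume"
        return prefix, kind

    print(classify("axl_anatomic_1.nii"))      # ('axl', 'volume')
    print(classify("sag_inference_2.nii.gz"))  # ('sag', 'annotation')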
supervisely/convert/volume/nii/nii_volume_converter.py CHANGED
@@ -1,12 +1,9 @@
  import os
  from pathlib import Path
 
- import magic
-
  from supervisely import ProjectMeta, generate_free_name, logger
  from supervisely._utils import batched, is_development
  from supervisely.annotation.obj_class import ObjClass
- from supervisely.annotation.obj_class_collection import ObjClassCollection
  from supervisely.api.api import Api
  from supervisely.convert.base_converter import AvailableVolumeConverters
  from supervisely.convert.volume.nii import nii_volume_helper as helper
@@ -18,12 +15,47 @@ from supervisely.io.fs import (
  get_file_name_with_ext,
  list_files,
  )
- from supervisely.volume.volume import is_nifti_file
+ from supervisely.task.progress import tqdm_sly
+ from supervisely.volume.volume import is_nifti_file, read_nrrd_serie_volume_np
  from supervisely.volume_annotation.volume_annotation import VolumeAnnotation
  from supervisely.volume_annotation.volume_object import VolumeObject
 
 
  class NiiConverter(VolumeConverter):
+ """
+ Convert NIfTI 3D volume file to Supervisely format.
+ Supports .nii and .nii.gz files.
+
+ The NIfTI file should be structured as follows:
+ - <volume_name>.nii
+ - <volume_name>/
+ - <cls_name_1>.nii
+ - <cls_name_2>.nii
+ - ...
+ - ...
+
+ where <volume_name> is the name of the volume
+ If the volume has annotations, they should be in the corresponding directory
+ with the same name as the volume (without extension)
+ <cls_name> is the name of the annotation class
+ <cls_name>.nii:
+ - represent objects of the single class
+ - should be unique for the current volume (e.g. tumor.nii.gz, lung.nii.gz)
+ - can contain multiple objects of the class (each object should be represented by a different value in the mask)
+
+ Example:
+ 📂 .
+ ├── 📂 CTChest
+ │ ├── 🩻 lung.nii.gz
+ │ └── 🩻 tumor.nii.gz
+ ├── 🩻 CTChest.nii.gz
+ └── 🩻 Spine.nii.gz
+ """
+
+ def __init__(self, *args, **kwargs):
+ super().__init__(*args, **kwargs)
+ self._supports_links = True
+ self._meta_changed = False
 
  def __str__(self) -> str:
  return AvailableVolumeConverters.NII
@@ -34,6 +66,9 @@ class NiiConverter(VolumeConverter):
  # nrrds_dict = {}
  nifti_dict = {}
  nifti_dirs = {}
+
+ planes_detected = {p: False for p in helper.PlanePrefix.values()}
+
  for root, _, files in os.walk(self._input_data):
  dir_name = os.path.basename(root)
  nifti_dirs[dir_name] = root
@@ -41,13 +76,17 @@ class NiiConverter(VolumeConverter):
  continue
  for file in files:
  path = os.path.join(root, file)
- mime = magic.from_file(path, mime=True)
- if mime == "application/gzip" or mime == "application/octet-stream":
- if is_nifti_file(path): # is nifti
- name = get_file_name(path)
- if name.endswith(".nii"):
- name = get_file_name(name)
- nifti_dict[name] = path
+ if is_nifti_file(path): # is nifti
+ name = get_file_name(path)
+ if name.endswith(".nii"):
+ name = get_file_name(name)
+ nifti_dict[name] = path
+ for prefix in planes_detected.keys():
+ if name.startswith(prefix):
+ planes_detected[prefix] = True
+
+ if any(planes_detected.values()):
+ return False
 
  self._items = []
  skip_files = []
@@ -69,6 +108,39 @@ class NiiConverter(VolumeConverter):
  self._meta = ProjectMeta()
  return self.items_count > 0
 
+ def to_supervisely(
+ self,
+ item: VolumeConverter.Item,
+ meta: ProjectMeta = None,
+ renamed_classes: dict = None,
+ renamed_tags: dict = None,
+ ) -> VolumeAnnotation:
+ """Convert to Supervisely format."""
+
+ try:
+ objs = []
+ spatial_figures = []
+ for ann_path in item.ann_data:
+ ann_name = get_file_name(ann_path)
+ if ann_name.endswith(".nii"):
+ ann_name = get_file_name(ann_name)
+
+ ann_name = renamed_classes.get(ann_name, ann_name)
+ for mask, _ in helper.get_annotation_from_nii(ann_path):
+ obj_class = meta.get_obj_class(ann_name)
+ if obj_class is None:
+ obj_class = ObjClass(ann_name, Mask3D)
+ meta = meta.add_obj_class(obj_class)
+ self._meta_changed = True
+ self._meta = meta
+ obj = VolumeObject(obj_class, mask_3d=mask)
+ spatial_figures.append(obj.figure)
+ objs.append(obj)
+ return VolumeAnnotation(item.volume_meta, objects=objs, spatial_figures=spatial_figures)
+ except Exception as e:
+ logger.warning(f"Failed to convert {item.path} to Supervisely format: {e}")
+ return item.create_empty_annotation()
+
  def upload_dataset(
  self,
  api: Api,
@@ -78,7 +150,7 @@ class NiiConverter(VolumeConverter):
  ):
  """Upload converted data to Supervisely"""
 
- meta, renamed_classes, renamed_tags = self.merge_metas_with_conflicts(api, dataset_id)
+ meta, renamed_classes, _ = self.merge_metas_with_conflicts(api, dataset_id)
 
  existing_names = set([vol.name for vol in api.volume.get_list(dataset_id)])
 
@@ -91,13 +163,16 @@ class NiiConverter(VolumeConverter):
 
  converted_dir_name = "converted"
  converted_dir = os.path.join(self._input_data, converted_dir_name)
- meta_changed = False
 
  for batch in batched(self._items, batch_size=batch_size):
  item_names = []
  item_paths = []
 
  for item in batch:
+ if self._upload_as_links:
+ remote_path = self.remote_files_map.get(item.path)
+ if remote_path is not None:
+ item.custom_data = {"remote_path": remote_path}
  # nii_path = item.path
  item.path = helper.nifti_to_nrrd(item.path, converted_dir)
  ext = get_file_ext(item.path)
@@ -112,35 +187,28 @@ class NiiConverter(VolumeConverter):
  item_names.append(item.name)
  item_paths.append(item.path)
 
- volume_info = api.volume.upload_nrrd_serie_path(
- dataset_id, name=item.name, path=item.path
+ # upload volume
+ volume_np, volume_meta = read_nrrd_serie_volume_np(item.path)
+ progress_nrrd = tqdm_sly(
+ desc=f"Uploading volume '{item.name}'",
+ total=sum(volume_np.shape),
+ leave=True if progress_cb is None else False,
+ position=1,
  )
+ if item.custom_data is not None:
+ volume_meta.update(item.custom_data)
+ api.volume.upload_np(dataset_id, item.name, volume_np, volume_meta, progress_nrrd)
+ info = api.volume.get_info_by_name(dataset_id, item.name)
+ item.volume_meta = info.meta
 
- if isinstance(item.ann_data, list) and len(item.ann_data) > 0:
- objs = []
- spatial_figures = []
- for ann_path in item.ann_data:
- ann_name = get_file_name(ann_path)
- if ann_name.endswith(".nii"):
- ann_name = get_file_name(ann_name)
- for mask, _ in helper.get_annotation_from_nii(ann_path):
- obj_class = meta.get_obj_class(ann_name)
- if obj_class is None:
- obj_class = ObjClass(ann_name, Mask3D)
- meta = meta.add_obj_class(obj_class)
- meta_changed = True
- obj = VolumeObject(obj_class, mask_3d=mask)
- spatial_figures.append(obj.figure)
- objs.append(obj)
- ann = VolumeAnnotation(
- volume_info.meta, objects=objs, spatial_figures=spatial_figures
- )
-
- if meta_changed:
- self._meta = meta
- _, _, _ = self.merge_metas_with_conflicts(api, dataset_id)
+ # create and upload annotation
+ if item.ann_data is not None:
+ ann = self.to_supervisely(item, meta, renamed_classes, None)
+
+ if self._meta_changed:
+ meta, renamed_classes, _ = self.merge_metas_with_conflicts(api, dataset_id)
 
- api.volume.annotation.append(volume_info.id, ann)
+ api.volume.annotation.append(info.id, ann)
 
  if log_progress:
  progress_cb(len(batch))
supervisely/convert/volume/nii/nii_volume_helper.py CHANGED
@@ -4,10 +4,22 @@ from typing import Generator
  import nrrd
  import numpy as np
 
+ from supervisely.collection.str_enum import StrEnum
  from supervisely.geometry.mask_3d import Mask3D
  from supervisely.io.fs import ensure_base_path, get_file_ext, get_file_name
  from supervisely.volume.volume import convert_3d_nifti_to_nrrd
 
+ VOLUME_NAME = "anatomic"
+ LABEL_NAME = ["inference", "label", "annotation", "mask", "segmentation"]
+
+
+ class PlanePrefix(str, StrEnum):
+ """Prefix for plane names."""
+
+ CORONAL = "cor"
+ SAGITTAL = "sag"
+ AXIAL = "axl"
+
 
  def nifti_to_nrrd(nii_file_path: str, converted_dir: str) -> str:
  """Convert NIfTI 3D volume file to NRRD 3D volume file."""
supervisely/nn/training/train_app.py CHANGED
@@ -39,7 +39,7 @@ from supervisely import (
  is_production,
  logger,
  )
- from supervisely._utils import abs_url, get_filename_from_headers, sync_call
+ from supervisely._utils import abs_url, get_filename_from_headers
  from supervisely.api.file_api import FileInfo
  from supervisely.app import get_synced_data_dir
  from supervisely.app.widgets import Progress
@@ -60,7 +60,7 @@ from supervisely.nn.utils import ModelSource
  from supervisely.output import set_directory
  from supervisely.project.download import (
  copy_from_cache,
- download_async_or_sync,
+ download_fast,
  download_to_cache,
  get_cache_size,
  is_cached,
@@ -806,7 +806,7 @@ class TrainApp:
  with self.progress_bar_main(message="Downloading input data", total=total_images) as pbar:
  logger.debug("Downloading project data without cache")
  self.progress_bar_main.show()
- download_async_or_sync(
+ download_fast(
  api=self._api,
  project_id=self.project_id,
  dest_dir=self.project_dir,
@@ -1619,14 +1619,13 @@ class TrainApp:
  unit_scale=True,
  ) as upload_artifacts_pbar:
  self.progress_bar_main.show()
- remote_dir = sync_call(
- self._api.file.upload_directory_async(
- team_id=self.team_id,
- local_dir=local_demo_dir,
- remote_dir=remote_demo_dir,
- progress_size_cb=upload_artifacts_pbar.update,
- )
+ remote_dir = self._api.file.upload_directory_fast(
+ team_id=self.team_id,
+ local_dir=local_demo_dir,
+ remote_dir=remote_demo_dir,
+ progress_cb=upload_artifacts_pbar.update,
  )
+
  self.progress_bar_main.hide()
 
  def _get_train_val_splits_for_app_state(self) -> Dict:
@@ -1733,13 +1732,11 @@ class TrainApp:
  unit_scale=True,
  ) as upload_artifacts_pbar:
  self.progress_bar_main.show()
- remote_dir = sync_call(
- self._api.file.upload_directory_async(
- team_id=self.team_id,
- local_dir=self.output_dir,
- remote_dir=remote_artifacts_dir,
- progress_size_cb=upload_artifacts_pbar.update,
- )
+ remote_dir = self._api.file.upload_directory_fast(
+ team_id=self.team_id,
+ local_dir=self.output_dir,
+ remote_dir=remote_artifacts_dir,
+ progress_cb=upload_artifacts_pbar.update,
  )
  self.progress_bar_main.hide()
 
@@ -2524,13 +2521,11 @@ class TrainApp:
  logger.debug(f"Uploading {len(export_weights)} export weights of size {size} bytes")
  logger.debug(f"Destination paths: {file_dest_paths}")
  self.progress_bar_main.show()
- sync_call(
- self._api.file.upload_bulk_async(
- team_id=self.team_id,
- src_paths=export_weights.values(),
- dst_paths=file_dest_paths,
- progress_cb=export_upload_main_pbar.update,
- )
+ self._api.file.upload_bulk_fast(
+ team_id=self.team_id,
+ src_paths=export_weights.values(),
+ dst_paths=file_dest_paths,
+ progress_cb=export_upload_main_pbar.update,
  )
 
  self.progress_bar_main.hide()
supervisely/project/download.py CHANGED
@@ -281,6 +281,34 @@ def download_async_or_sync(
  )
 
 
+ def download_fast(
+ api: Api,
+ project_id: int,
+ dest_dir: str,
+ dataset_ids: Optional[List[int]] = None,
+ log_progress: bool = True,
+ progress_cb: Optional[Union[tqdm, Callable]] = None,
+ semaphore: Optional[asyncio.Semaphore] = None,
+ **kwargs,
+ ) -> None:
+ """
+ Download project in a fast mode.
+ Items are downloaded asynchronously. If an error occurs, the method will fallback to synchronous download.
+ Automatically detects project type.
+ You can pass :class:`ProjectInfo` as `project_info` kwarg to avoid additional API requests.
+ """
+ download_async_or_sync(
+ api=api,
+ project_id=project_id,
+ dest_dir=dest_dir,
+ dataset_ids=dataset_ids,
+ log_progress=log_progress,
+ progress_cb=progress_cb,
+ semaphore=semaphore,
+ **kwargs,
+ )
+
+
  def _get_cache_dir(project_id: int, dataset_path: str = None) -> str:
  p = os.path.join(apps_cache_dir(), str(project_id))
  if dataset_path is not None:
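A short usage sketch of the new wrapper (project id and destination path below are placeholders):

    import supervisely as sly
    from supervisely.project.download import download_fast

    api = sly.Api.from_env()

    # thin wrapper over download_async_or_sync with the same arguments
    download_fast(api, project_id=123, dest_dir="/tmp/my_project", log_progress=True)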
@@ -468,7 +496,7 @@ def _download_project_to_cache(
  if len(dataset_infos) == 0:
  logger.debug("No datasets to download")
  return
- download_async_or_sync(
+ download_fast(
  api=api,
  project_id=project_id,
  dest_dir=cached_project_dir,
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: supervisely
- Version: 6.73.323
+ Version: 6.73.325
  Summary: Supervisely Python SDK.
  Home-page: https://github.com/supervisely/supervisely
  Author: Supervisely
@@ -1,6 +1,6 @@
  supervisely/README.md,sha256=XM-DiMC6To3I9RjQZ0c61905EFRR_jnCUx2q3uNR-X8,3331
  supervisely/__init__.py,sha256=mtgVKiRSlnRU7yKG0Re130mBL10wCzsNfOfi-w-Kj4c,10833
- supervisely/_utils.py,sha256=hYzGRVAh-cB2RmqixHbaJQZHy4byNip4KZm2Gdt8P7k,16849
+ supervisely/_utils.py,sha256=KWfbw8XFfV2uxLzhuku_J0UQKpG-D80Hp6UOQD316P8,17460
  supervisely/function_wrapper.py,sha256=R5YajTQ0GnRp2vtjwfC9hINkzQc0JiyGsu8TER373xY,1912
  supervisely/sly_logger.py,sha256=z92Vu5hmC0GgTIJO1n6kPDayRW9__8ix8hL6poDZj-Y,6274
  supervisely/tiny_timer.py,sha256=hkpe_7FE6bsKL79blSs7WBaktuPavEVu67IpEPrfmjE,183
@@ -25,7 +25,7 @@ supervisely/api/annotation_api.py,sha256=kuk4qwojTJxYr2iqAKbW-QhWw_DFc4TsjA2Wc2M
  supervisely/api/api.py,sha256=6TczKT1t0MWlbArSW31RmeyWP04pqngfUO_NrG5FETE,66287
  supervisely/api/app_api.py,sha256=RsbVej8WxWVn9cNo5s3Fqd1symsCdsfOaKVBKEUapRY,71927
  supervisely/api/dataset_api.py,sha256=GH7prDRJKyJlTv_7_Y-RkTwJN7ED4EkXNqqmi3iIdI4,41352
- supervisely/api/file_api.py,sha256=xVM4fFeIc52aKnxduCIU7L6Rgd7Rh36rzTJ8hVT8hw4,88925
+ supervisely/api/file_api.py,sha256=bVWv6kf3B5n6qlB14HmUa6iUr8ara5cr-pPK8QC7XWg,92932
  supervisely/api/github_api.py,sha256=NIexNjEer9H5rf5sw2LEZd7C1WR-tK4t6IZzsgeAAwQ,623
  supervisely/api/image_annotation_tool_api.py,sha256=YcUo78jRDBJYvIjrd-Y6FJAasLta54nnxhyaGyanovA,5237
  supervisely/api/image_api.py,sha256=WIML_6N1qgOWBm3acexmGSWz4hAaSxlYmUtbytROaP8,192375
@@ -95,7 +95,7 @@ supervisely/app/fastapi/offline.py,sha256=CwMMkJ1frD6wiZS-SEoNDtQ1UJcJe1Ob6ohE3r
  supervisely/app/fastapi/request.py,sha256=NU7rKmxJ1pfkDZ7_yHckRcRAueJRQIqCor11UO2OHr8,766
  supervisely/app/fastapi/subapp.py,sha256=5lMfFLYBfHzE1OmITHsogB9hScyTJFjGV45AKY67Hkg,45647
  supervisely/app/fastapi/templating.py,sha256=JOAW8U-14GD47E286mzFi3mZSPbm_csJGqtXWLRM4rc,2929
- supervisely/app/fastapi/utils.py,sha256=GZuTWLcVRGVx8TL3jVEYUOZIT2FawbwIe2kAOBLw9ho,398
+ supervisely/app/fastapi/utils.py,sha256=t_UquzlFrdkKtAJmH6eJ279pE8Aa3BaIu4XjX-SEaIE,946
  supervisely/app/fastapi/websocket.py,sha256=TlRSPOAhRItTv1HGvdukK1ZvhRjMUxRa-lJlsRR9rJw,1308
  supervisely/app/v1/__init__.py,sha256=OdU0PYv6hLwahYoyaLFO8m3cbJSchvPbqxuG1N3T734,848
  supervisely/app/v1/app_config.md,sha256=-8GKbiQoX25RhEj3EDJ7TxiYuFw5wL2TO3qV5AJLZTs,2536
@@ -566,7 +566,7 @@ supervisely/collection/key_indexed_collection.py,sha256=x2UVlkprspWhhae9oLUzjTWB
  supervisely/collection/str_enum.py,sha256=Zp29yFGvnxC6oJRYNNlXhO2lTSdsriU1wiGHj6ahEJE,1250
  supervisely/convert/__init__.py,sha256=ropgB1eebG2bfLoJyf2jp8Vv9UkFujaW3jVX-71ho1g,1353
  supervisely/convert/base_converter.py,sha256=O2SP4I_Hd0aSn8kbOUocy8orkc_-iD-TQ-z4ieUqabA,18579
- supervisely/convert/converter.py,sha256=tWxTDfFv7hwzQhUQrBxzfr6WP8FUGFX_ewg5T2HbUYo,8959
+ supervisely/convert/converter.py,sha256=ymhjzy75bhtpOTJSB7Xfq5tcfZjK_DMxJXIa_uuEitA,10668
  supervisely/convert/image/__init__.py,sha256=JEuyaBiiyiYmEUYqdn8Mog5FVXpz0H1zFubKkOOm73I,1395
  supervisely/convert/image/image_converter.py,sha256=8vak8ZoKTN1ye2ZmCTvCZ605-Rw1AFLIEo7bJMfnR68,10426
  supervisely/convert/image/image_helper.py,sha256=fdV0edQD6hVGQ8TXn2JGDzsnrAXPDMacHBQsApzOME8,3677
@@ -658,14 +658,15 @@ supervisely/convert/video/mot/mot_converter.py,sha256=wXbv-9Psc2uVnhzHuOt5VnRIvS
  supervisely/convert/video/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  supervisely/convert/video/sly/sly_video_converter.py,sha256=S2qif7JFxqIi9VN_ez_iBtoJXpG9W6Ky2k5Er3-DtUo,4418
  supervisely/convert/video/sly/sly_video_helper.py,sha256=D8PgoXpi0y3z-VEqvBLDf_gSUQ2hTL3irrfJyGhaV0Y,6758
- supervisely/convert/volume/__init__.py,sha256=NjVfOa9uH1BdYvB-RynW6L28x0f_tqL9p7tHSIQ6Sso,245
+ supervisely/convert/volume/__init__.py,sha256=RpSYjufciJT6AdhI9Oqp70b3XoFTtSkxFNexoqeOPW4,353
  supervisely/convert/volume/volume_converter.py,sha256=3jpt2Yn_G4FSP_vHFsJHQfYNQpT7q6ar_sRyr_xrPnA,5335
  supervisely/convert/volume/dicom/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  supervisely/convert/volume/dicom/dicom_converter.py,sha256=__QP8fMAaq_BdWFYh1_nAYT2gpY1WwZzdlDj39YwHhw,3195
  supervisely/convert/volume/dicom/dicom_helper.py,sha256=1EXmxl5Z8Xi3ZkZnfJ4EbiPCVyITSXUc0Cn_oo02pPE,1284
  supervisely/convert/volume/nii/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- supervisely/convert/volume/nii/nii_volume_converter.py,sha256=kI2JmeFuLfLWgYGCEozoaka1QH4TocnfgyN0em6maa0,5946
- supervisely/convert/volume/nii/nii_volume_helper.py,sha256=kzh20fsdeI8efA0vawW0M6Wh48nMlCLzHBQFuSNVFmc,1136
+ supervisely/convert/volume/nii/nii_planes_volume_converter.py,sha256=SV1OAoel92qOvWTfcVqzee9LQypAQGQ-hJEMJ9j7nLg,4995
+ supervisely/convert/volume/nii/nii_volume_converter.py,sha256=nVHKK7wVhMvBiX1awMoXOwt2f5MHujqVmof0v_zW5E4,8502
+ supervisely/convert/volume/nii/nii_volume_helper.py,sha256=wy8GqjgnFJaEgKKSOgKw_QPETdiYYY5c9BBfyJE3Hqo,1417
  supervisely/convert/volume/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  supervisely/convert/volume/sly/sly_volume_converter.py,sha256=XmSuxnRqxchG87b244f3h0UHvOt6IkajMquL1drWlCM,5595
  supervisely/convert/volume/sly/sly_volume_helper.py,sha256=gUY0GW3zDMlO2y-zQQG36uoXMrKkKz4-ErM1CDxFCxE,5620
@@ -979,7 +980,7 @@ supervisely/nn/tracker/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NM
  supervisely/nn/tracker/utils/gmc.py,sha256=3JX8979H3NA-YHNaRQyj9Z-xb9qtyMittPEjGw8y2Jo,11557
  supervisely/nn/tracker/utils/kalman_filter.py,sha256=eSFmCjM0mikHCAFvj-KCVzw-0Jxpoc3Cfc2NWEjJC1Q,17268
  supervisely/nn/training/__init__.py,sha256=gY4PCykJ-42MWKsqb9kl-skemKa8yB6t_fb5kzqR66U,111
- supervisely/nn/training/train_app.py,sha256=yVsMdMlV6OHCRMJ63-hPmTfdsC3Z1-0ohphsMtDMpPw,104944
+ supervisely/nn/training/train_app.py,sha256=lFvYxt2Zsd7FrqfeA2C-eNX0tQVLtz3V8om3DwvNFtM,104720
  supervisely/nn/training/gui/__init__.py,sha256=Nqnn8clbgv-5l0PgxcTOldg8mkMKrFn4TvPL-rYUUGg,1
  supervisely/nn/training/gui/classes_selector.py,sha256=8UgzA4aogOAr1s42smwEcDbgaBj_i0JLhjwlZ9bFdIA,3772
  supervisely/nn/training/gui/gui.py,sha256=CnT_QhihrxdSHKybpI0pXhPLwCaXEana_qdn0DhXByg,25558
@@ -1016,7 +1017,7 @@ supervisely/pointcloud_episodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm
  supervisely/pointcloud_episodes/pointcloud_episodes.py,sha256=cRXdtw7bMsbsdVQjxfWxFSESrO-LGiqqsZyyExl2Mbg,3430
  supervisely/project/__init__.py,sha256=hlzdj9Pgy53Q3qdP8LMtGTChvZHQuuShdtui2eRUQeE,2601
  supervisely/project/data_version.py,sha256=6vOz5ovBeCIiMAKUG7lGQ5IXvQnU1GbcnrWxdOvaVlo,19311
- supervisely/project/download.py,sha256=zb8sb4XZ6Qi3CP7fmtLRUAYzaxs_W0WnOfe2x3ZVRMs,24639
+ supervisely/project/download.py,sha256=GQFYN3KCdM_egXDzoyZrzl6Yeg2QshYQNFNlKi8Nh8A,25471
  supervisely/project/pointcloud_episode_project.py,sha256=yiWdNBQiI6f1O9sr1pg8JHW6O-w3XUB1rikJNn3Oung,41866
  supervisely/project/pointcloud_project.py,sha256=Kx1Vaes-krwG3BiRRtHRLQxb9G5m5bTHPN9IzRqmNWo,49399
  supervisely/project/project.py,sha256=nKnnYVrSx_MWh5G_fObaAegkRxLFJg_J074SaduEYGo,205871
@@ -1081,9 +1082,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
  supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
  supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
  supervisely_lib/__init__.py,sha256=7-3QnN8Zf0wj8NCr2oJmqoQWMKKPKTECvjH9pd2S5vY,159
- supervisely-6.73.323.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
- supervisely-6.73.323.dist-info/METADATA,sha256=uIqQoH6i-OiLhSZSLt6SqL7O1ZWPV0D6ZRJICli80eE,33596
- supervisely-6.73.323.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
- supervisely-6.73.323.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
- supervisely-6.73.323.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
- supervisely-6.73.323.dist-info/RECORD,,
+ supervisely-6.73.325.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+ supervisely-6.73.325.dist-info/METADATA,sha256=wRj0QjkNc2OGdL0AtTslp2JAlgDO8-1f3XeeUfoZfOY,33596
+ supervisely-6.73.325.dist-info/WHEEL,sha256=P9jw-gEje8ByB7_hXoICnHtVCrEwMQh-630tKvQWehc,91
+ supervisely-6.73.325.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
+ supervisely-6.73.325.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
+ supervisely-6.73.325.dist-info/RECORD,,