supervisely-6.73.344-py3-none-any.whl → supervisely-6.73.346-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- supervisely/convert/image/sly/fast_sly_image_converter.py +11 -5
- supervisely/convert/image/sly/sly_image_converter.py +12 -1
- supervisely/convert/volume/__init__.py +1 -1
- supervisely/convert/volume/nii/nii_planes_volume_converter.py +187 -5
- supervisely/convert/volume/nii/nii_volume_converter.py +1 -1
- supervisely/convert/volume/nii/nii_volume_helper.py +207 -0
- supervisely/project/data_version.py +7 -1
- supervisely/project/project.py +89 -8
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/METADATA +1 -1
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/RECORD +14 -14
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/LICENSE +0 -0
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/WHEEL +0 -0
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/entry_points.txt +0 -0
- {supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/top_level.txt +0 -0

supervisely/convert/image/sly/fast_sly_image_converter.py CHANGED

@@ -1,21 +1,23 @@
 import os
+from pathlib import Path
 
+import supervisely.convert.image.sly.sly_image_helper as helper
 from supervisely import (
     Annotation,
     Api,
-    ProjectMeta,
     Label,
+    Project,
+    ProjectMeta,
     Rectangle,
     batched,
     is_development,
     logger,
 )
-from supervisely.convert.image.sly.sly_image_converter import SLYImageConverter
-import supervisely.convert.image.sly.sly_image_helper as helper
 from supervisely.convert.image.image_converter import ImageConverter
-from supervisely.io.fs import get_file_ext
-from supervisely.io.json import load_json_file
 from supervisely.convert.image.image_helper import validate_image_bounds
+from supervisely.convert.image.sly.sly_image_converter import SLYImageConverter
+from supervisely.io.fs import dir_empty, dir_exists, get_file_ext
+from supervisely.io.json import load_json_file
 
 
 class FastSlyImageConverter(SLYImageConverter, ImageConverter):

@@ -29,7 +31,11 @@ class FastSlyImageConverter(SLYImageConverter, ImageConverter):
         detected_ann_cnt = 0
         self._items = []
         meta = ProjectMeta()
+
         for root, _, files in os.walk(self._input_data):
+            if Path(root).name == Project.blob_dir_name:
+                logger.debug("FastSlyImageConverter: Detected blob directory. Skipping...")
+                return False
             for file in files:
                 full_path = os.path.join(root, file)
                 ext = get_file_ext(full_path)

supervisely/convert/image/sly/sly_image_converter.py CHANGED

@@ -291,8 +291,19 @@ class SLYImageConverter(ImageConverter):
             self.upload_project(api, dataset_id, batch_size, log_progress)
         elif self.blob_project:
             dataset_info = api.dataset.get_info_by_id(dataset_id, raise_error=True)
+            project_dirs = [d for d in find_project_dirs(self._input_data)]
+            if len(project_dirs) == 0:
+                raise RuntimeError(
+                    "Failed to find Supervisely project with blobs in the input data"
+                )
+            project_dir = project_dirs[0]
+            if len(project_dirs) > 1:
+                logger.info(
+                    "Found multiple possible Supervisely projects with blobs in the input data. "
+                    f"Only the first one will be uploaded: {project_dir}"
+                )
             upload_project_fs(
-                dir=
+                dir=project_dir,
                 api=api,
                 workspace_id=dataset_info.workspace_id,
                 log_progress=log_progress,

supervisely/convert/volume/__init__.py CHANGED

@@ -3,5 +3,5 @@ from supervisely.convert.volume.sly.sly_volume_converter import SLYVolumeConvert
 from supervisely.convert.volume.dicom.dicom_converter import DICOMConverter
 from supervisely.convert.volume.nii.nii_volume_converter import NiiConverter
 from supervisely.convert.volume.nii.nii_planes_volume_converter import (
-    NiiPlaneStructuredConverter,
+    NiiPlaneStructuredConverter, NiiPlaneStructuredAnnotationConverter
 )

supervisely/convert/volume/nii/nii_planes_volume_converter.py CHANGED

@@ -2,16 +2,17 @@ import os
 from collections import defaultdict
 from pathlib import Path
 
-from supervisely import ProjectMeta, logger
+from supervisely import ProjectMeta, logger, Api
 from supervisely.annotation.obj_class import ObjClass
 from supervisely.convert.volume.nii import nii_volume_helper as helper
 from supervisely.convert.volume.nii.nii_volume_converter import NiiConverter
 from supervisely.convert.volume.volume_converter import VolumeConverter
 from supervisely.geometry.mask_3d import Mask3D
-from supervisely.io.fs import get_file_ext, get_file_name
+from supervisely.io.fs import get_file_ext, get_file_name, list_files_recursively
 from supervisely.volume.volume import is_nifti_file
 from supervisely.volume_annotation.volume_annotation import VolumeAnnotation
 from supervisely.volume_annotation.volume_object import VolumeObject
+from supervisely._utils import batched, is_development
 
 
 class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
@@ -58,6 +59,7 @@ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
         def __init__(self, *args, **kwargs):
             super().__init__(*args, **kwargs)
             self._is_semantic = False
+            self.volume_meta = None
 
         @property
         def is_semantic(self) -> bool:
@@ -67,6 +69,12 @@ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
         def is_semantic(self, value: bool):
             self._is_semantic = value
 
+        def create_empty_annotation(self):
+            return VolumeAnnotation(self.volume_meta)
+
+    def __str__(self):
+        return "nii_custom"
+
     def validate_format(self) -> bool:
         # create Items
         converted_dir_name = "converted"
@@ -87,8 +95,7 @@ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
                 prefix = full_name.split("_")[0]
                 if prefix not in helper.PlanePrefix.values():
                     continue
-
-                if name in helper.LABEL_NAME or name[:-1] in helper.LABEL_NAME:
+                if any(label_name in full_name for label_name in helper.LABEL_NAME):
                     ann_dict[prefix].append(path)
                 else:
                     volumes_dict[prefix].append(path)
@@ -114,7 +121,7 @@ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
             for path in paths:
                 item = self.Item(item_path=path)
                 possible_ann_paths = []
-                for ann_path in ann_dict.get(prefix):
+                for ann_path in ann_dict.get(prefix, []):
                     if Path(ann_path).parent == Path(path).parent:
                         possible_ann_paths.append(ann_path)
                 item.ann_data = possible_ann_paths
@@ -160,3 +167,178 @@ class NiiPlaneStructuredConverter(NiiConverter, VolumeConverter):
         except Exception as e:
             logger.warning(f"Failed to convert {item.path} to Supervisely format: {e}")
             return item.create_empty_annotation()
+
+
+class NiiPlaneStructuredAnnotationConverter(NiiConverter, VolumeConverter):
+    """
+    Upload NIfTI Annotations
+    """
+
+    class Item(VolumeConverter.BaseItem):
+        def __init__(self, *args, **kwargs):
+            super().__init__(*args, **kwargs)
+            self._is_semantic = False
+            self.volume_meta = None
+
+        @property
+        def is_semantic(self) -> bool:
+            return self._is_semantic
+
+        @is_semantic.setter
+        def is_semantic(self, value: bool):
+            self._is_semantic = value
+
+        def create_empty_annotation(self):
+            return VolumeAnnotation(self.volume_meta)
+
+    def __init__(self, *args, **kwargs):
+        super().__init__(*args, **kwargs)
+        self._json_map = None
+
+    def __str__(self):
+        return "nii_custom_ann"
+
+    def validate_format(self) -> bool:
+        try:
+            from nibabel import load, filebasedimages
+        except ImportError:
+            raise ImportError(
+                "No module named nibabel. Please make sure that module is installed from pip and try again."
+            )
+        cls_color_map = None
+
+        has_volumes = lambda x: helper.VOLUME_NAME in x
+        if list_files_recursively(self._input_data, filter_fn=has_volumes):
+            return False
+
+        txts = list_files_recursively(self._input_data, [".txt"], None, True)
+        cls_color_map = next(iter(txts), None)
+        if cls_color_map is not None:
+            cls_color_map = helper.read_cls_color_map(cls_color_map)
+
+        jsons = list_files_recursively(self._input_data, [".json"], None, True)
+        json_map = next(iter(jsons), None)
+        if json_map is not None:
+            self._json_map = helper.read_json_map(json_map)
+
+        is_ann = lambda x: any(label_name in x for label_name in helper.LABEL_NAME)
+        for root, _, files in os.walk(self._input_data):
+            for file in files:
+                path = os.path.join(root, file)
+                if is_ann(file):
+                    prefix = get_file_name(path).split("_")[0]
+                    if prefix not in helper.PlanePrefix.values():
+                        continue
+                    try:
+                        nii = load(path)
+                    except filebasedimages.ImageFileError:
+                        continue
+                    item = self.Item(item_path=None, ann_data=path)
+                    item.set_shape(nii.shape)
+                    if cls_color_map is not None:
+                        item.custom_data["cls_color_map"] = cls_color_map
+                    self._items.append(item)
+
+        obj_classes = None
+        if cls_color_map is not None:
+            obj_classes = [ObjClass(name, Mask3D, color) for name, color in cls_color_map.values()]
+
+        self._meta = ProjectMeta(obj_classes=obj_classes)
+        return len(self._items) > 0
+
+    def to_supervisely(
+        self,
+        item: VolumeConverter.Item,
+        meta: ProjectMeta = None,
+        renamed_classes: dict = None,
+        renamed_tags: dict = None,
+    ) -> VolumeAnnotation:
+        """Convert to Supervisely format."""
+        import re
+        try:
+            objs = []
+            spatial_figures = []
+            ann_path = item.ann_data
+            ann_idx = 0
+            match = re.search(r"_(\d+)(?:\.[^.]+)+$", ann_path)
+            if match:
+                ann_idx = int(match.group(1))
+            for mask, pixel_id in helper.get_annotation_from_nii(ann_path):
+                class_id = pixel_id if item.is_semantic else ann_idx
+                class_name = f"Segment_{class_id}"
+                color = None
+                if item.custom_data.get("cls_color_map") is not None:
+                    class_info = item.custom_data["cls_color_map"].get(class_id)
+                    if class_info is not None:
+                        class_name, color = class_info
+                class_name = renamed_classes.get(class_name, class_name)
+                obj_class = meta.get_obj_class(class_name)
+                if obj_class is None:
+                    obj_class = ObjClass(class_name, Mask3D, color)
+                    meta = meta.add_obj_class(obj_class)
+                    self._meta_changed = True
+                    self._meta = meta
+                obj = VolumeObject(obj_class, mask_3d=mask)
+                spatial_figures.append(obj.figure)
+                objs.append(obj)
+            return VolumeAnnotation(item.volume_meta, objects=objs, spatial_figures=spatial_figures)
+        except Exception as e:
+            logger.warning(f"Failed to convert {item.ann_data} to Supervisely format: {e}")
+            return item.create_empty_annotation()
+
+    def upload_dataset(
+        self, api: Api, dataset_id: int, batch_size: int = 50, log_progress=True
+    ) -> None:
+        meta, renamed_classes, _ = self.merge_metas_with_conflicts(api, dataset_id)
+
+        matcher = helper.AnnotationMatcher(self._items, dataset_id)
+        if self._json_map is not None:
+            try:
+                matched_dict = matcher.match_from_json(api, self._json_map)
+            except Exception as e:
+                logger.error(f"Failed to match annotations from a json map: {e}")
+                matched_dict = {}
+        else:
+            matcher.get_volumes(api)
+            matched_dict = matcher.match_items()
+        if len(matched_dict) != len(self._items):
+            extra = {
+                "items count": len(self._items),
+                "matched count": len(matched_dict),
+                "unmatched count": len(self._items) - len(matched_dict),
+            }
+            logger.warning(
+                "Not all items were matched with volumes. Some items may be skipped.",
+                extra=extra,
+            )
+        if len(matched_dict) == 0:
+            raise RuntimeError(
+                "No items were matched with volumes. Please check the input data and try again."
+            )
+
+        if log_progress:
+            progress, progress_cb = self.get_progress(
+                len(matched_dict), "Uploading volumes annotations..."
+            )
+        else:
+            progress_cb = None
+
+        for item, volume in matched_dict.items():
+            item.volume_meta = volume.meta
+            ann = self.to_supervisely(item, meta, renamed_classes, None)
+            if self._meta_changed:
+                meta, renamed_classes, _ = self.merge_metas_with_conflicts(api, dataset_id)
+                self._meta_changed = False
+            api.volume.annotation.append(volume.id, ann, volume_info=volume)
+            progress_cb(1) if log_progress else None
+
+        res_ds_info = api.dataset.get_info_by_id(dataset_id)
+        if res_ds_info.items_count == 0:
+            logger.info("Resulting dataset is empty. Removing it.")
+            api.dataset.remove(dataset_id)
+
+
+        if log_progress:
+            if is_development():
+                progress.close()
+            logger.info(f"Successfully uploaded {len(matched_dict)} annotations.")
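Note: the new NiiPlaneStructuredAnnotationConverter only activates when the input contains annotation files and no volume files. Based on the checks in validate_format and on the docstring of AnnotationMatcher.match_from_json (further below), an input layout it would accept could look roughly like the sketch that follows; the directory and file names are hypothetical, and the exact LABEL_NAME/VOLUME_NAME markers live in nii_volume_helper.py and are not shown in this diff.

# Hypothetical input layout for the annotation-only converter (illustrative sketch).
# Each annotation file starts with a plane prefix (cor/sag/axl) and contains a label marker;
# the parent folder is treated as the dataset name by AnnotationMatcher.
input_paths = [
    "input/ds1/cor_inference_1.nii",   # coronal annotation, object index 1
    "input/ds1/sag_mask_2.nii",        # sagittal annotation, object index 2
    "input/ds1/classes.txt",           # optional class-color map, parsed by read_cls_color_map
    "input/ds1/volumes_map.json",      # optional annotation-to-volume map, parsed by read_json_map
]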

supervisely/convert/volume/nii/nii_volume_converter.py CHANGED

@@ -207,7 +207,7 @@ class NiiConverter(VolumeConverter):
 
             if self._meta_changed:
                 meta, renamed_classes, _ = self.merge_metas_with_conflicts(api, dataset_id)
-
+                self._meta_changed = False
             api.volume.annotation.append(info.id, ann)
 
         if log_progress:

supervisely/convert/volume/nii/nii_volume_helper.py CHANGED

@@ -3,7 +3,10 @@ from typing import Generator
 
 import nrrd
 import numpy as np
+from pathlib import Path
+from collections import defaultdict, namedtuple
 
+from supervisely import Api
 from supervisely.collection.str_enum import StrEnum
 from supervisely.geometry.mask_3d import Mask3D
 from supervisely.io.fs import ensure_base_path, get_file_ext, get_file_name
@@ -69,6 +72,20 @@ def read_cls_color_map(path: str) -> dict:
         return None
     return cls_color_map
 
+def read_json_map(path: str) -> dict:
+    import json
+
+    """Read JSON map from file."""
+    if not os.path.exists(path):
+        return None
+    try:
+        with open(path, "r") as file:
+            json_map = json.load(file)
+    except Exception as e:
+        logger.warning(f"Failed to read JSON map from {path}: {e}")
+        return None
+    return json_map
+
 
 def nifti_to_nrrd(nii_file_path: str, converted_dir: str) -> str:
     """Convert NIfTI 3D volume file to NRRD 3D volume file."""
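Note: read_json_map is what later feeds AnnotationMatcher.match_from_json. Judging from that method's docstring, a minimal map file simply pairs annotation file names (optionally prefixed with a dataset name) with existing volume IDs. A hedged sketch, with a hypothetical path and made-up IDs:

# Hypothetical call; the converter picks the first *.json file found in the input data.
json_map = read_json_map("/data/input/volumes_map.json")
# Expected shape of the returned dict (volume IDs are made up):
# {"cor_inference_1.nii": 123, "dataset2/sag_mask_2.nii": 456}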
@@ -97,3 +114,193 @@ def get_annotation_from_nii(path: str) -> Generator[Mask3D, None, None]:
             continue
         mask = Mask3D(data == class_id)
         yield mask, class_id
+
+class AnnotationMatcher:
+    def __init__(self, items, dataset_id):
+        self._items = items
+        self._ds_id = dataset_id
+        self._ann_paths = defaultdict(list)
+
+        self._item_by_filename = {}
+        self._item_by_path = {}
+
+        for item in items:
+            path = Path(item.ann_data)
+            dataset_name = path.parts[-2]
+            filename = path.name
+
+            self._ann_paths[dataset_name].append(filename)
+            self._item_by_filename[filename] = item
+            self._item_by_path[(dataset_name, filename)] = item
+
+        self._project_wide = False
+        self._volumes = None
+
+    def get_volumes(self, api: Api):
+        dataset_info = api.dataset.get_info_by_id(self._ds_id)
+        datasets = {dataset_info.name: dataset_info}
+        project_id = dataset_info.project_id
+        if dataset_info.items_count > 0 and len(self._ann_paths.keys()) == 1:
+            self._project_wide = False
+        else:
+            datasets = {dsinfo.name: dsinfo for dsinfo in api.dataset.get_list(project_id, recursive=True)}
+            self._project_wide = True
+
+        volumes = defaultdict(lambda: {})
+        ds_filter = lambda ds_name: ds_name in self._ann_paths if self._project_wide else True
+        for ds_name, ds_info in datasets.items():
+            if ds_filter(ds_name):
+                volumes[ds_name].update(
+                    {info.name: info for info in api.volume.get_list(ds_info.id)}
+                )
+
+        if len(volumes) == 0:
+            err_msg = "Failed to retrieve volumes from the project. Perhaps the input data structure is incorrect."
+            raise RuntimeError(err_msg)
+
+        self._volumes = volumes
+
+    def match_items(self):
+        """Match annotation files with corresponding volumes using regex-based matching."""
+        import re
+
+        def extract_prefix(ann_file):
+            import re
+            pattern = r'^(?P<prefix>cor|sag|axl).*?(?:' + "|".join(LABEL_NAME) + r')'
+            m = re.match(pattern, ann_file, re.IGNORECASE)
+            if m:
+                return m.group("prefix").lower()
+            return None
+
+        def is_volume_match(volume_name, prefix):
+            pattern = r'^' + re.escape(prefix) + r'.*?anatomic'
+            return re.match(pattern, volume_name, re.IGNORECASE) is not None
+
+        def find_best_volume_match(prefix, available_volumes):
+            candidates = {name: volume for name, volume in available_volumes.items() if is_volume_match(name, prefix)}
+            if not candidates:
+                return None, None
+
+            # Prefer an exact candidate
+            ann_name_no_ext = ann_file.split(".")[0]
+            exact_candidate = re.sub(r'(' + '|'.join(LABEL_NAME) + r')', 'anatomic', ann_name_no_ext, flags=re.IGNORECASE)
+            for name in candidates:
+                if re.fullmatch(re.escape(exact_candidate), name, re.IGNORECASE):
+                    return name, candidates[name]
+
+            # Otherwise, choose the candidate with the shortest name
+            best_match = sorted(candidates.keys(), key=len)[0]
+            return best_match, candidates[best_match]
+
+        item_to_volume = {}
+
+        def process_annotation_file(ann_file, dataset_name, volumes):
+            prefix = extract_prefix(ann_file)
+            if prefix is None:
+                logger.warning(f"Failed to extract prefix from annotation file {ann_file}. Skipping.")
+                return
+
+            matched_name, matched_volume = find_best_volume_match(prefix, volumes)
+            if not matched_volume:
+                logger.warning(f"No matching volume found for annotation with prefix '{prefix}' in dataset {dataset_name}.")
+                return
+
+            # Retrieve the correct item based on matching mode.
+            item = (
+                self._item_by_path.get((dataset_name, ann_file))
+                if self._project_wide
+                else self._item_by_filename.get(ann_file)
+            )
+            if not item:
+                logger.warning(f"Item not found for annotation file {ann_file} in {'dataset ' + dataset_name if self._project_wide else 'single dataset mode'}.")
+                return
+
+            item_to_volume[item] = matched_volume
+            ann_file = ann_file.split(".")[0]
+            ann_supposed_match = re.sub(r'(' + '|'.join(LABEL_NAME) + r')', 'anatomic', ann_file, flags=re.IGNORECASE)
+            if matched_name.lower() != ann_supposed_match:
+                logger.debug(f"Fuzzy matched {ann_file} to volume {matched_name} using prefix '{prefix}'.")
+
+        # Perform matching
+        for dataset_name, volumes in self._volumes.items():
+            ann_files = self._ann_paths.get(dataset_name, []) if self._project_wide else list(self._ann_paths.values())[0]
+            for ann_file in ann_files:
+                process_annotation_file(ann_file, dataset_name, volumes)
+
+        # Mark volumes having only one matching item as semantic and validate shape.
+        volume_to_items = defaultdict(list)
+        for item, volume in item_to_volume.items():
+            volume_to_items[volume.id].append(item)
+
+        for volume_id, items in volume_to_items.items():
+            if len(items) == 1:
+                items[0].is_semantic = True
+
+        items_to_remove = []
+        for item, volume in item_to_volume.items():
+            volume_shape = tuple(volume.file_meta["sizes"])
+            if item.shape != volume_shape:
+                logger.warning(f"Volume shape mismatch: {item.shape} != {volume_shape}")
+                # items_to_remove.append(item)
+        for item in items_to_remove:
+            del item_to_volume[item]
+
+        return item_to_volume
+
+    def match_from_json(self, api: Api, json_map: dict):
+        """
+        Match annotation files with corresponding volumes based on a JSON map.
+
+        Example json structure:
+        {
+            "cor_inference_1.nii": 123,
+            "sag_mask_2.nii": 456
+        }
+        Where key is the annotation file name and value is the volume ID.
+
+        For project-wide matching, the key should include dataset name:
+        {
+            "dataset1/cor_inference_1.nii": 123,
+            "dataset2/sag_mask_2.nii": 456
+        }
+        """
+        item_to_volume = {}
+
+        for ann_path, volume_id in json_map.items():
+            # Check if it's a project-wide path (contains dataset name)
+            path_parts = Path(ann_path)
+            if len(path_parts.parts) > 1:
+                # Project-wide format: "dataset_name/filename.nii"
+                dataset_name = path_parts.parts[-2]
+                ann_name = path_parts.name
+                item = self._item_by_path.get((dataset_name, ann_name))
+            else:
+                # Single dataset format: "filename.nii"
+                ann_name = path_parts.name
+                item = self._item_by_filename.get(ann_name)
+
+            if item:
+                volume = api.volume.get_info_by_id(volume_id)
+                if volume:
+                    item_to_volume[item] = volume
+
+                    # Validate shape
+                    volume_shape = tuple(volume.file_meta["sizes"])
+                    if item.shape != volume_shape:
+                        logger.warning(
+                            f"Volume shape mismatch: {item.shape} != {volume_shape} for {ann_path}. Using anyway."
+                        )
+                else:
+                    logger.warning(f"Volume {volume_id} not found for {ann_path}.")
+            else:
+                logger.warning(f"Item not found for annotation file {ann_path}.")
+
+        # Set semantic flag for volumes with only one associated item
+        volume_to_items = defaultdict(list)
+        for item, volume in item_to_volume.items():
+            volume_to_items[volume.id].append(item)
+        for volume_id, items in volume_to_items.items():
+            if len(items) == 1:
+                items[0].is_semantic = True
+
+        return item_to_volume
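Note: as a rough illustration of the regex matching in match_items above, the plane prefix is pulled from the annotation file name, and a volume whose name starts with the same prefix and contains "anatomic" is preferred. A standalone sketch with made-up file names and an assumed LABEL_NAME list (the real constant is defined elsewhere in nii_volume_helper.py and is not part of this diff):

import re

LABEL_NAME = ["label", "mask", "inference"]  # assumption: illustrative marker list only

def extract_prefix(ann_file):
    # Same pattern as AnnotationMatcher.match_items: plane prefix followed by a label marker.
    pattern = r"^(?P<prefix>cor|sag|axl).*?(?:" + "|".join(LABEL_NAME) + r")"
    m = re.match(pattern, ann_file, re.IGNORECASE)
    return m.group("prefix").lower() if m else None

print(extract_prefix("cor_inference_1.nii"))                                  # -> "cor"
print(bool(re.match(r"^cor.*?anatomic", "cor_anatomic.nii", re.IGNORECASE)))  # -> True (volume match)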

supervisely/project/data_version.py CHANGED

@@ -17,7 +17,9 @@ from supervisely.io.fs import remove_dir, silent_remove
 
 
 class VersionInfo(NamedTuple):
-    """
+    """
+    Object with image parameters from Supervisely that describes the version of the project.
+    """
 
     id: int
     project_id: int
@@ -34,6 +36,10 @@ class VersionInfo(NamedTuple):
 
 
 class DataVersion(ModuleApiBase):
+    """
+    Class for managing project versions.
+    This class provides methods for creating, restoring, and managing project versions.
+    """
 
     def __init__(self, api):
         """

supervisely/project/project.py CHANGED

@@ -579,21 +579,24 @@ class Dataset(KeyObject):
         Consistency checks. Every item must have an annotation, and the correspondence must be one to one.
         If not - it generate exception error.
         """
-
+        blob_offset_paths = list_files(
+            self.directory, filter_fn=lambda x: x.endswith(OFFSETS_PKL_SUFFIX)
+        )
+        has_blob_offsets = len(blob_offset_paths) > 0
+
+        if not dir_exists(self.item_dir) and not has_blob_offsets:
             raise FileNotFoundError("Item directory not found: {!r}".format(self.item_dir))
         if not dir_exists(self.ann_dir):
             raise FileNotFoundError("Annotation directory not found: {!r}".format(self.ann_dir))
 
         raw_ann_paths = list_files(self.ann_dir, [ANN_EXT])
-        img_paths = list_files(self.item_dir, filter_fn=self._has_valid_ext)
-
         raw_ann_names = set(os.path.basename(path) for path in raw_ann_paths)
-        img_names = [os.path.basename(path) for path in img_paths]
 
-
-        self.
-
-
+        if dir_exists(self.item_dir):
+            img_paths = list_files(self.item_dir, filter_fn=self._has_valid_ext)
+            img_names = [os.path.basename(path) for path in img_paths]
+        else:
+            img_names = []
 
         # If we have blob offset files, add the image names from those
         if has_blob_offsets:
@@ -2061,6 +2064,84 @@ class Dataset(KeyObject):
             progress_cb=progress_cb,
         )
 
+    def get_blob_img_bytes(self, image_name: str) -> bytes:
+        """
+        Get image bytes from blob file.
+
+        :param image_name: Image name with extension.
+        :type image_name: :class:`str`
+        :return: Bytes of the image.
+        :rtype: :class:`bytes`
+
+        :Usage example:
+
+         .. code-block:: python
+
+            import supervisely as sly
+            dataset_path = "/path/to/project/lemons_annotated/ds1"
+            dataset = sly.Dataset(dataset_path, sly.OpenMode.READ)
+            image_name = "IMG_0748.jpeg"
+
+            img_bytes = dataset.get_blob_img_bytes(image_name)
+        """
+
+        if self.project_dir is None:
+            raise RuntimeError("Project directory is not set. Cannot get blob image bytes.")
+
+        blob_image_info = None
+
+        for offset in self.blob_offsets:
+            for batch in BlobImageInfo.load_from_pickle_generator(offset):
+                for file in batch:
+                    if file.name == image_name:
+                        blob_image_info = file
+                        blob_file_name = removesuffix(Path(offset).name, OFFSETS_PKL_SUFFIX)
+                        break
+        if blob_image_info is None:
+            logger.debug(
+                f"Image '{image_name}' not found in blob offsets. "
+                f"Make sure that the image is stored in the blob file."
+            )
+            return None
+
+        blob_file_path = os.path.join(self.project_dir, self.blob_dir_name, blob_file_name + ".tar")
+        if file_exists(blob_file_path):
+            with open(blob_file_path, "rb") as f:
+                f.seek(blob_image_info.offset_start)
+                img_bytes = f.read(blob_image_info.offset_end - blob_image_info.offset_start)
+        else:
+            logger.debug(
+                f"Blob file '{blob_file_path}' not found. "
+                f"Make sure that the blob file exists in the specified directory."
+            )
+            img_bytes = None
+        return img_bytes
+
+    def get_blob_img_np(self, image_name: str) -> np.ndarray:
+        """
+        Get image as numpy array from blob file.
+
+        :param image_name: Image name with extension.
+        :type image_name: :class:`str`
+        :return: Numpy array of the image.
+        :rtype: :class:`numpy.ndarray`
+
+        :Usage example:
+
+         .. code-block:: python
+
+            import supervisely as sly
+            dataset_path = "/path/to/project/lemons_annotated/ds1"
+            dataset = sly.Dataset(dataset_path, sly.OpenMode.READ)
+            image_name = "IMG_0748.jpeg"
+
+            img_np = dataset.get_blob_img_np(image_name)
+        """
+        img_bytes = self.get_blob_img_bytes(image_name)
+        if img_bytes is None:
+            return None
+        return sly_image.read_bytes(img_bytes)
+
 
 class Project:
     """
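Note: the core of Dataset.get_blob_img_bytes is a ranged read from the dataset's blob archive: the pickled offsets give the start and end byte positions of one encoded image inside the .tar file, so the image can be sliced out without unpacking the archive. A minimal standalone sketch of that read (path and offsets are hypothetical):

# Hypothetical blob archive and byte range, mirroring the seek/read in get_blob_img_bytes.
blob_file_path = "/path/to/project/blob/images.tar"   # assumed blob archive location
offset_start, offset_end = 1_048_576, 1_203_200       # assumed byte range of one image

with open(blob_file_path, "rb") as f:
    f.seek(offset_start)
    img_bytes = f.read(offset_end - offset_start)      # raw encoded image bytes (e.g. JPEG)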

{supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/RECORD CHANGED

@@ -606,8 +606,8 @@ supervisely/convert/image/pdf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5N
 supervisely/convert/image/pdf/pdf_converter.py,sha256=LKvVng9jPp0cSIjYEjKLOb48wtdOdB7LXS2gjmOdZhE,2442
 supervisely/convert/image/pdf/pdf_helper.py,sha256=IDwLEvsVy8lu-KC1lXvSRkZZ9BCC6ylebnNEtLQU5L4,1288
 supervisely/convert/image/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-supervisely/convert/image/sly/fast_sly_image_converter.py,sha256=
-supervisely/convert/image/sly/sly_image_converter.py,sha256=
+supervisely/convert/image/sly/fast_sly_image_converter.py,sha256=wtiM-Dl4xivT60-79pj8XMdHQc2kPMi7DqGcEngiWRg,5669
+supervisely/convert/image/sly/sly_image_converter.py,sha256=_sTiPTeNwZ1qWdtecmusanzvApHN7TskB7slgI3mibs,14370
 supervisely/convert/image/sly/sly_image_helper.py,sha256=5Ri8fKb5dzh5b3v8AJ5u8xVFOQfAtoWqZ7HktPsCjTI,7373
 supervisely/convert/image/yolo/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 supervisely/convert/image/yolo/yolo_converter.py,sha256=Wn5dR05y4SEPONcaxWr9ofnbvbf-SbRZN0fkksk5Dps,11391
@@ -658,15 +658,15 @@ supervisely/convert/video/mot/mot_converter.py,sha256=wXbv-9Psc2uVnhzHuOt5VnRIvS
 supervisely/convert/video/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 supervisely/convert/video/sly/sly_video_converter.py,sha256=S2qif7JFxqIi9VN_ez_iBtoJXpG9W6Ky2k5Er3-DtUo,4418
 supervisely/convert/video/sly/sly_video_helper.py,sha256=D8PgoXpi0y3z-VEqvBLDf_gSUQ2hTL3irrfJyGhaV0Y,6758
-supervisely/convert/volume/__init__.py,sha256=
+supervisely/convert/volume/__init__.py,sha256=NaACs000WT2iy_g63TiZZ6IlgCjyDXx6i2OHsGpCYOs,391
 supervisely/convert/volume/volume_converter.py,sha256=3jpt2Yn_G4FSP_vHFsJHQfYNQpT7q6ar_sRyr_xrPnA,5335
 supervisely/convert/volume/dicom/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 supervisely/convert/volume/dicom/dicom_converter.py,sha256=Hw4RxU_qvllk6M26udZE6G-m1RWR8-VVPcEPwFlqrVg,3354
 supervisely/convert/volume/dicom/dicom_helper.py,sha256=OrKlyt1hA5BOXKhE1LF1WxBIv3b6t96xRras4OSAuNM,2891
 supervisely/convert/volume/nii/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-supervisely/convert/volume/nii/nii_planes_volume_converter.py,sha256=
-supervisely/convert/volume/nii/nii_volume_converter.py,sha256=
-supervisely/convert/volume/nii/nii_volume_helper.py,sha256=
+supervisely/convert/volume/nii/nii_planes_volume_converter.py,sha256=TrV7Mkczt8w2WpJizmOZwqeG9zlcLy-8p4D22B9nYyo,14344
+supervisely/convert/volume/nii/nii_volume_converter.py,sha256=n8HWRvwXUzugTQt4PKpbSacsuC4EQxoYHAWXcXC5KE8,8526
+supervisely/convert/volume/nii/nii_volume_helper.py,sha256=8cS1LCvDcgGuinBARTmbOm-lLQmJ___3gyemt26W_-Y,11572
 supervisely/convert/volume/sly/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 supervisely/convert/volume/sly/sly_volume_converter.py,sha256=XmSuxnRqxchG87b244f3h0UHvOt6IkajMquL1drWlCM,5595
 supervisely/convert/volume/sly/sly_volume_helper.py,sha256=gUY0GW3zDMlO2y-zQQG36uoXMrKkKz4-ErM1CDxFCxE,5620
@@ -1016,11 +1016,11 @@ supervisely/pointcloud_annotation/pointcloud_tag_collection.py,sha256=j_TAN23GkT
 supervisely/pointcloud_episodes/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 supervisely/pointcloud_episodes/pointcloud_episodes.py,sha256=cRXdtw7bMsbsdVQjxfWxFSESrO-LGiqqsZyyExl2Mbg,3430
 supervisely/project/__init__.py,sha256=hlzdj9Pgy53Q3qdP8LMtGTChvZHQuuShdtui2eRUQeE,2601
-supervisely/project/data_version.py,sha256=
+supervisely/project/data_version.py,sha256=P5Lui6i64pYeJWmAdGJDv8GRXxjfpSSZ8zT_MxIrynE,19553
 supervisely/project/download.py,sha256=4QOEQZnolmOpEO6Wl6DIc73BwIYr9m-6anbarrU6VwQ,24902
 supervisely/project/pointcloud_episode_project.py,sha256=yiWdNBQiI6f1O9sr1pg8JHW6O-w3XUB1rikJNn3Oung,41866
 supervisely/project/pointcloud_project.py,sha256=Kx1Vaes-krwG3BiRRtHRLQxb9G5m5bTHPN9IzRqmNWo,49399
-supervisely/project/project.py,sha256=
+supervisely/project/project.py,sha256=OunVB11sVQSOvkqkjsEEkX1nq9OUXOXpHTdcLDjOFe0,233256
 supervisely/project/project_meta.py,sha256=26s8IiHC5Pg8B1AQi6_CrsWteioJP2in00cRNe8QlW0,51423
 supervisely/project/project_settings.py,sha256=NLThzU_DCynOK6hkHhVdFyezwprn9UqlnrLDe_3qhkY,9347
 supervisely/project/project_type.py,sha256=7mQ7zg6r7Bm2oFn5aR8n_PeLqMmOaPZd6ph7Z8ZISTw,608
@@ -1082,9 +1082,9 @@ supervisely/worker_proto/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZ
 supervisely/worker_proto/worker_api_pb2.py,sha256=VQfi5JRBHs2pFCK1snec3JECgGnua3Xjqw_-b3aFxuM,59142
 supervisely/worker_proto/worker_api_pb2_grpc.py,sha256=3BwQXOaP9qpdi0Dt9EKG--Lm8KGN0C5AgmUfRv77_Jk,28940
 supervisely_lib/__init__.py,sha256=7-3QnN8Zf0wj8NCr2oJmqoQWMKKPKTECvjH9pd2S5vY,159
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
-supervisely-6.73.
+supervisely-6.73.346.dist-info/LICENSE,sha256=xx0jnfkXJvxRnG63LTGOxlggYnIysveWIZ6H3PNdCrQ,11357
+supervisely-6.73.346.dist-info/METADATA,sha256=yUMHQ-WdsvhuLAX9ZHWWh3fgUDTlkNR36oZaRpzRHf8,33596
+supervisely-6.73.346.dist-info/WHEEL,sha256=iAkIy5fosb7FzIOwONchHf19Qu7_1wCWyFNR5gu9nU0,91
+supervisely-6.73.346.dist-info/entry_points.txt,sha256=U96-5Hxrp2ApRjnCoUiUhWMqijqh8zLR03sEhWtAcms,102
+supervisely-6.73.346.dist-info/top_level.txt,sha256=kcFVwb7SXtfqZifrZaSE3owHExX4gcNYe7Q2uoby084,28
+supervisely-6.73.346.dist-info/RECORD,,

{supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/LICENSE: file without changes
{supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/WHEEL: file without changes
{supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/entry_points.txt: file without changes
{supervisely-6.73.344.dist-info → supervisely-6.73.346.dist-info}/top_level.txt: file without changes