datamint 1.3.0__py3-none-any.whl → 1.4.1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datamint/apihandler/annotation_api_handler.py +138 -27
- datamint/apihandler/dto/annotation_dto.py +50 -0
- datamint/apihandler/root_api_handler.py +44 -5
- datamint/client_cmd_tools/datamint_upload.py +121 -7
- datamint/experiment/experiment.py +1 -1
- datamint/utils/io_utils.py +37 -10
- {datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/METADATA +1 -1
- {datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/RECORD +10 -10
- {datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/WHEEL +0 -0
- {datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/entry_points.txt +0 -0
datamint/apihandler/annotation_api_handler.py CHANGED

@@ -11,7 +11,7 @@ import asyncio
 import aiohttp
 from requests.exceptions import HTTPError
 from deprecated.sphinx import deprecated
-from .dto.annotation_dto import CreateAnnotationDto, LineGeometry, CoordinateSystem, AnnotationType
+from .dto.annotation_dto import CreateAnnotationDto, LineGeometry, BoxGeometry, CoordinateSystem, AnnotationType
 import pydicom
 
 _LOGGER = logging.getLogger(__name__)
@@ -353,7 +353,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                         author_email: Optional[str] = None,
                         model_id: Optional[str] = None,
                         project: Optional[str] = None,
-                        ):
+                        ) -> list[str]:
         """
         Add annotations to a resource.
 
@@ -415,6 +415,66 @@ class AnnotationAPIHandler(BaseAPIHandler):
 
         resp = self._run_request(request_params)
         self._check_errors_response_json(resp)
+        return resp.json()
+
+    def _create_geometry_annotation(self,
+                                    geometry: LineGeometry | BoxGeometry,
+                                    resource_id: str,
+                                    identifier: str,
+                                    frame_index: int | None = None,
+                                    project: Optional[str] = None,
+                                    worklist_id: Optional[str] = None,
+                                    imported_from: Optional[str] = None,
+                                    author_email: Optional[str] = None,
+                                    model_id: Optional[str] = None) -> list[str]:
+        """
+        Common method for creating geometry-based annotations.
+
+        Args:
+            geometry: The geometry object (LineGeometry or BoxGeometry)
+            resource_id: The resource unique id
+            identifier: The annotation identifier
+            frame_index: The frame index of the annotation
+            project: The project unique id or name
+            worklist_id: The annotation worklist unique id
+            imported_from: The imported from source value
+            author_email: The email to consider as the author of the annotation
+            model_id: The model unique id
+        """
+        if project is not None and worklist_id is not None:
+            raise ValueError('Only one of project or worklist_id can be provided.')
+
+        if project is not None:
+            proj = self.get_project_by_name(project)
+            if 'error' in proj.keys():
+                raise DatamintException(f"Project {project} not found.")
+            worklist_id = proj['worklist_id']
+
+        anndto = CreateAnnotationDto(
+            type=geometry.type,
+            identifier=identifier,
+            scope='frame',
+            annotation_worklist_id=worklist_id,
+            value=None,
+            imported_from=imported_from,
+            import_author=author_email,
+            frame_index=frame_index,
+            geometry=geometry,
+            model_id=model_id,
+            is_model=model_id is not None,
+        )
+
+        json_data = anndto.to_dict()
+
+        request_params = {
+            'method': 'POST',
+            'url': f'{self.root_url}/annotations/{resource_id}/annotations',
+            'json': [json_data]
+        }
+
+        resp = self._run_request(request_params)
+        self._check_errors_response_json(resp)
+        return resp.json()
 
     def add_line_annotation(self,
                             point1: tuple[int, int] | tuple[float, float, float],
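The new helper centralizes what add_line_annotation previously did inline: resolve a project name to its worklist id, wrap the geometry in a CreateAnnotationDto, and POST a single-element JSON array to /annotations/{resource_id}/annotations. As a rough sketch of that request body, assuming CreateAnnotationDto.to_dict() serializes its constructor arguments one-to-one (all field values below are hypothetical):

    # Hypothetical POST body for a line annotation on frame 2; the exact
    # serialization of 'geometry' is defined by CreateAnnotationDto.to_dict().
    payload = [{
        'type': 'line',                      # geometry.type
        'identifier': 'Femur-axis',          # hypothetical label
        'scope': 'frame',
        'annotation_worklist_id': 'w-123',   # resolved from the project if one was given
        'value': None,
        'imported_from': None,
        'import_author': None,
        'frame_index': 2,
        'geometry': {'points': [[10, 10, 2], [50, 40, 2]]},
        'model_id': None,
        'is_model': False,
    }]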
@@ -428,7 +488,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                             worklist_id: Optional[str] = None,
                             imported_from: Optional[str] = None,
                             author_email: Optional[str] = None,
-                            model_id: Optional[str] = None):
+                            model_id: Optional[str] = None) -> list[str]:
         """
         Add a line annotation to a resource.
 
@@ -466,12 +526,6 @@ class AnnotationAPIHandler(BaseAPIHandler):
         if project is not None and worklist_id is not None:
             raise ValueError('Only one of project or worklist_id can be provided.')
 
-        if project is not None:
-            proj = self.get_project_by_name(project)
-            if 'error' in proj.keys():
-                raise DatamintException(f"Project {project} not found.")
-            worklist_id = proj['worklist_id']
-
         if coords_system == 'pixel':
             if dicom_metadata is None:
                 point1 = (point1[0], point1[1], frame_index)
@@ -486,30 +540,87 @@ class AnnotationAPIHandler(BaseAPIHandler):
         else:
             raise ValueError(f"Unknown coordinate system: {coords_system}")
 
-        anndto = CreateAnnotationDto(
-            type=geom.type,
+        return self._create_geometry_annotation(
+            geometry=geom,
+            resource_id=resource_id,
             identifier=identifier,
-            scope='frame',
-            annotation_worklist_id=worklist_id,
-            value=None,
-            imported_from=imported_from,
-            import_author=author_email,
             frame_index=frame_index,
-            geometry=geom,
-            model_id=model_id,
-            is_model=model_id is not None,
+            project=project,
+            worklist_id=worklist_id,
+            imported_from=imported_from,
+            author_email=author_email,
+            model_id=model_id
         )
 
-        json_data = anndto.to_dict()
+    def add_box_annotation(self,
+                           point1: tuple[int, int] | tuple[float, float, float],
+                           point2: tuple[int, int] | tuple[float, float, float],
+                           resource_id: str,
+                           identifier: str,
+                           frame_index: int | None = None,
+                           dicom_metadata: pydicom.Dataset | str | None = None,
+                           coords_system: CoordinateSystem = 'pixel',
+                           project: Optional[str] = None,
+                           worklist_id: Optional[str] = None,
+                           imported_from: Optional[str] = None,
+                           author_email: Optional[str] = None,
+                           model_id: Optional[str] = None):
+        """
+        Add a box annotation to a resource.
 
-        request_params = {
-            'method': 'POST',
-            'url': f'{self.root_url}/annotations/{resource_id}/annotations',
-            'json': [json_data]
-        }
+        Args:
+            point1: The first corner point of the box. Can be a 2d or 3d point.
+                If `coords_system` is 'pixel', it must be a 2d point representing pixel coordinates.
+                If `coords_system` is 'patient', it must be a 3d point representing patient coordinates.
+            point2: The opposite diagonal corner point of the box. See `point1` for more details.
+            resource_id: The resource unique id.
+            identifier: The annotation identifier, also known as the annotation's label.
+            frame_index: The frame index of the annotation.
+            dicom_metadata: The DICOM metadata of the image. If provided, coordinates will be converted
+                automatically using the DICOM metadata.
+            coords_system: The coordinate system of the points. Can be 'pixel' or 'patient'.
+                If 'pixel', points are in pixel coordinates. If 'patient', points are in patient coordinates.
+            project: The project unique id or name.
+            worklist_id: The annotation worklist unique id. Optional.
+            imported_from: The imported from source value.
+            author_email: The email to consider as the author of the annotation. If None, uses the API key customer.
+            model_id: The model unique id. Optional.
 
-        resp = self._run_request(request_params)
-        self._check_errors_response_json(resp)
+        Example:
+            .. code-block:: python
+
+                res_id = 'aa93813c-cef0-4edd-a45c-85d4a8f1ad0d'
+                api.add_box_annotation([10, 10], (50, 40),
+                                       resource_id=res_id,
+                                       identifier='BoundingBox1',
+                                       frame_index=2,
+                                       project='Example Project')
+        """
+        if coords_system == 'pixel':
+            if dicom_metadata is None:
+                point1 = (point1[0], point1[1], frame_index)
+                point2 = (point2[0], point2[1], frame_index)
+                geom = BoxGeometry(point1, point2)
+            else:
+                if isinstance(dicom_metadata, str):
+                    dicom_metadata = pydicom.dcmread(dicom_metadata)
+                geom = BoxGeometry.from_dicom(dicom_metadata, point1, point2, slice_index=frame_index)
+        elif coords_system == 'patient':
+            geom = BoxGeometry(point1, point2)
+        else:
+            raise ValueError(f"Unknown coordinate system: {coords_system}")
+
+        return self._create_geometry_annotation(
+            geometry=geom,
+            resource_id=resource_id,
+            identifier=identifier,
+            frame_index=frame_index,
+            project=project,
+            worklist_id=worklist_id,
+            imported_from=imported_from,
+            author_email=author_email,
+            model_id=model_id
+        )
 
     @deprecated(version='0.12.1', reason='Use :meth:`~get_annotations` instead with `resource_id` parameter.')
     def get_resource_annotations(self,
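Besides the docstring example, the pixel-to-patient conversion path can be exercised by passing the DICOM metadata. A minimal sketch, assuming `api` is an already-authenticated AnnotationAPIHandler and the file name is hypothetical:

    import pydicom

    ds = pydicom.dcmread('slice_0002.dcm')  # hypothetical DICOM file
    ann = api.add_box_annotation(
        (10, 10), (50, 40),                 # 2d pixel corners
        resource_id='aa93813c-cef0-4edd-a45c-85d4a8f1ad0d',
        identifier='Lesion1',
        frame_index=2,
        dicom_metadata=ds,                  # or a path string; it is dcmread() internally
        coords_system='pixel',
        project='Example Project',
    )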
datamint/apihandler/dto/annotation_dto.py CHANGED

@@ -97,6 +97,56 @@ class LineGeometry(Geometry):
         return LineGeometry(new_point1, new_point2)
 
 
+class BoxGeometry(Geometry):
+    def __init__(self, point1: tuple[float, float, float],
+                 point2: tuple[float, float, float]):
+        """
+        Create a box geometry from two diagonal corner points.
+
+        Args:
+            point1: First corner point (x, y, z) or (x, y, frame_index)
+            point2: Opposite diagonal corner point (x, y, z) or (x, y, frame_index)
+        """
+        super().__init__(AnnotationType.SQUARE)  # Using SQUARE as the box type
+        if isinstance(point1, np.ndarray):
+            point1 = point1.tolist()
+        if isinstance(point2, np.ndarray):
+            point2 = point2.tolist()
+        self.point1 = point1
+        self.point2 = point2
+
+    def to_dict(self) -> dict:
+        return {
+            'points': [self.point1, self.point2],
+        }
+
+    @staticmethod
+    def from_dicom(ds: pydicom.Dataset,
+                   point1: tuple[int, int],
+                   point2: tuple[int, int],
+                   slice_index: int | None = None) -> 'BoxGeometry':
+        """
+        Create a box geometry from DICOM pixel coordinates.
+
+        Args:
+            ds: DICOM dataset containing spatial metadata
+            point1: First corner in pixel coordinates (x, y)
+            point2: Opposite corner in pixel coordinates (x, y)
+            slice_index: The slice/frame index for 3D positioning
+
+        Returns:
+            BoxGeometry with patient coordinate points
+        """
+        pixel_x1, pixel_y1 = point1
+        pixel_x2, pixel_y2 = point2
+
+        new_point1 = pixel_to_patient(ds, pixel_x1, pixel_y1,
+                                      slice_index=slice_index)
+        new_point2 = pixel_to_patient(ds, pixel_x2, pixel_y2,
+                                      slice_index=slice_index)
+        return BoxGeometry(new_point1, new_point2)
+
+
 class CreateAnnotationDto:
     def __init__(self,
                  type: AnnotationType | str,
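In isolation the new class mirrors LineGeometry. A short sketch (file name hypothetical), relying on the module's existing pixel_to_patient() helper for the coordinate conversion:

    import pydicom

    ds = pydicom.dcmread('slice_0002.dcm')   # hypothetical file with spatial tags
    box = BoxGeometry.from_dicom(ds, point1=(10, 10), point2=(50, 40), slice_index=2)
    box.type       # AnnotationType.SQUARE -- boxes reuse the SQUARE annotation type
    box.to_dict()  # {'points': [[x1, y1, z1], [x2, y2, z2]]} in patient coordinates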
datamint/apihandler/root_api_handler.py CHANGED

@@ -63,6 +63,7 @@ class RootAPIHandler(BaseAPIHandler):
                                             session=None,
                                             modality: Optional[str] = None,
                                             publish: bool = False,
+                                            metadata_file: Optional[str] = None,
                                             ) -> str:
         if _is_io_object(file_path):
             name = file_path.name
@@ -97,6 +98,8 @@ class RootAPIHandler(BaseAPIHandler):
         is_a_dicom_file = is_dicom(name) or is_dicom(file_path)
         if is_a_dicom_file:
             mimetype = 'application/dicom'
+        elif name.endswith('.nii') or name.endswith('.nii.gz'):
+            mimetype = 'application/x-nifti'
 
         filename = os.path.basename(name)
         _LOGGER.debug(f"File name '{filename}' mimetype: {mimetype}")
@@ -115,6 +118,25 @@ class RootAPIHandler(BaseAPIHandler):
         f = _open_io(file_path)
 
         try:
+            metadata_content = None
+            metadata_dict = None
+            if metadata_file is not None:
+                try:
+                    with open(metadata_file, 'r') as metadata_f:
+                        metadata_content = metadata_f.read()
+                    metadata_dict = json.loads(metadata_content)
+                    metadata_dict_lower = {k.lower(): v for k, v in metadata_dict.items() if isinstance(k, str)}
+                    try:
+                        if modality is None:
+                            if 'modality' in metadata_dict_lower:
+                                modality = metadata_dict_lower['modality']
+                    except Exception as e:
+                        _LOGGER.debug(f"Failed to extract modality from metadata file {metadata_file}: {e}")
+                    _LOGGER.debug(f"Metadata dict: {metadata_dict}")
+                except Exception as e:
+                    _LOGGER.warning(f"Failed to read metadata file {metadata_file}: {e}")
+
+
             form = aiohttp.FormData()
             url = self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)
             file_key = 'resource'
@@ -134,6 +156,14 @@ class RootAPIHandler(BaseAPIHandler):
                 tags = ','.join([l.strip() for l in tags])
                 form.add_field('tags', tags)
 
+            # Add JSON metadata if provided
+            if metadata_content is not None:
+                try:
+                    _LOGGER.debug(f"Adding metadata from {metadata_file}")
+                    form.add_field('metadata', metadata_content, content_type='application/json')
+                except Exception as e:
+                    _LOGGER.warning(f"Failed to read metadata file {metadata_file}: {e}")
+
             request_params = {
                 'method': 'POST',
                 'url': url,
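So the sidecar is forwarded verbatim as a 'metadata' form field with content type application/json; the only key the client itself interprets is a case-insensitive 'modality', which backfills the modality argument when it is None. A hypothetical sidecar, written here as the dict it parses to:

    # Hypothetical contents of scan.json; 'Modality' backfills the modality
    # argument if the caller did not pass one, all other keys pass through.
    sidecar = {
        "Modality": "MR",
        "SeriesDescription": "T2w axial",
    }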
@@ -170,6 +200,7 @@ class RootAPIHandler(BaseAPIHandler):
                                        publish: bool = False,
                                        segmentation_files: Optional[list[dict]] = None,
                                        transpose_segmentation: bool = False,
+                                       metadata_files: Optional[list[Optional[str]]] = None,
                                        ) -> list[str]:
         if on_error not in ['raise', 'skip']:
             raise ValueError("on_error must be either 'raise' or 'skip'")
@@ -177,8 +208,11 @@ class RootAPIHandler(BaseAPIHandler):
         if segmentation_files is None:
             segmentation_files = _infinite_gen(None)
 
+        if metadata_files is None:
+            metadata_files = _infinite_gen(None)
+
         async with aiohttp.ClientSession() as session:
-            async def __upload_single_resource(file_path, segfiles: dict):
+            async def __upload_single_resource(file_path, segfiles: dict, metadata_file: Optional[str]):
                 async with self.semaphore:
                     rid = await self._upload_single_resource_async(
                         file_path=file_path,
@@ -191,6 +225,7 @@ class RootAPIHandler(BaseAPIHandler):
                         channel=channel,
                        modality=modality,
                        publish=publish,
+                        metadata_file=metadata_file,
                     )
                     if segfiles is not None:
                         fpaths = segfiles['files']
@@ -208,7 +243,8 @@ class RootAPIHandler(BaseAPIHandler):
                                                 transpose_segmentation=transpose_segmentation)
                     return rid
 
-            tasks = [__upload_single_resource(f, segfiles) for f, segfiles in zip(files_path, segmentation_files)]
+            tasks = [__upload_single_resource(f, segfiles, metadata_file)
+                     for f, segfiles, metadata_file in zip(files_path, segmentation_files, metadata_files)]
             return await asyncio.gather(*tasks, return_exceptions=on_error == 'skip')
 
     def _assemble_dicoms(self, files_path: Sequence[str | IO]) -> tuple[Sequence[str | IO], bool]:
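Because omitted segmentation_files/metadata_files are replaced by _infinite_gen(None), the zip above pairs every file with None instead of truncating the task list. A sketch of that pairing, with _infinite_gen assumed to be the module's existing constant generator:

    def _infinite_gen(value):   # assumed shape of the existing helper
        while True:
            yield value

    files = ['a.dcm', 'b.dcm']
    list(zip(files, _infinite_gen(None), _infinite_gen(None)))
    # -> [('a.dcm', None, None), ('b.dcm', None, None)]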
@@ -248,7 +284,8 @@ class RootAPIHandler(BaseAPIHandler):
                          segmentation_files: Optional[list[Union[list[str], dict]]] = None,
                          transpose_segmentation: bool = False,
                          modality: Optional[str] = None,
-                         assemble_dicoms: bool = True
+                         assemble_dicoms: bool = True,
+                         metadata_files: Optional[list[Optional[str]]] = None
                          ) -> list[str | Exception] | str | Exception:
         """
         Upload resources.
@@ -274,6 +311,7 @@ class RootAPIHandler(BaseAPIHandler):
             transpose_segmentation (bool): Whether to transpose the segmentation files or not.
             modality (Optional[str]): The modality of the resources.
             assemble_dicoms (bool): Whether to assemble the dicom files or not based on the SOPInstanceUID and InstanceNumber attributes.
+            metadata_files (Optional[list[Optional[str]]]): JSON metadata files to include with each resource.
 
         Raises:
             ResourceNotFoundError: If `publish_to` is supplied, and the project does not exists.
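A usage sketch of the new parameter, assuming `api` is an authenticated handler and that the files argument is named files_path as elsewhere in this handler (file names hypothetical):

    resource_ids = api.upload_resources(
        files_path=['scan1.nii.gz', 'scan2.nii.gz'],
        metadata_files=['scan1.json', None],  # None -> no sidecar for scan2
    )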
@@ -319,6 +357,7 @@ class RootAPIHandler(BaseAPIHandler):
                 segmentation_files=segmentation_files,
                 transpose_segmentation=transpose_segmentation,
                 modality=modality,
+                metadata_files=metadata_files,
             )
 
         resource_ids = loop.run_until_complete(task)
@@ -690,13 +729,13 @@ class RootAPIHandler(BaseAPIHandler):
                          'url': url}
         try:
             response = self._run_request(request_params)
-
+
             # Get mimetype if needed for auto_convert or add_extension
             mimetype = None
             if auto_convert or add_extension:
                 resource_info = self.get_resources_by_ids(resource_id)
                 mimetype = resource_info['mimetype']
-
+
             if auto_convert:
                 try:
                     resource_file = BaseAPIHandler.convert_format(response.content,
datamint/client_cmd_tools/datamint_upload.py CHANGED

@@ -256,12 +256,91 @@ def _find_segmentation_files(segmentation_root_path: str,
     return segmentation_files
 
 
-def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
+def _find_json_metadata(file_path: str | Path) -> Optional[str]:
+    """
+    Find a JSON file with the same base name as the given file.
+
+    Args:
+        file_path (str): Path to the main file (e.g., NIFTI file)
+
+    Returns:
+        Optional[str]: Path to the JSON metadata file if found, None otherwise
+    """
+    file_path = Path(file_path)
+
+    # Handle .nii.gz files specially - need to remove both extensions
+    if file_path.name.endswith('.nii.gz'):
+        base_name = file_path.name[:-7]  # Remove .nii.gz
+        json_path = file_path.parent / f"{base_name}.json"
+    else:
+        json_path = file_path.with_suffix('.json')
+
+    if json_path.exists() and json_path.is_file():
+        _LOGGER.debug(f"Found JSON metadata file: {json_path}")
+        return str(json_path)
+
+    return None
+
+
+def _collect_metadata_files(files_path: list[str], auto_detect_json: bool) -> tuple[list, list[str]]:
+    """
+    Collect JSON metadata files for the given files and filter them from main files list.
+
+    Args:
+        files_path (list[str]): List of file paths
+        auto_detect_json (bool): Whether to auto-detect JSON metadata files
+
+    Returns:
+        tuple[list[Optional[str]], list[str]]: Tuple of (metadata file paths, filtered files_path)
+            - metadata file paths: List of metadata file paths (None if no metadata found)
+            - filtered files_path: Original files_path with JSON metadata files removed
+    """
+    if not auto_detect_json:
+        return [None] * len(files_path), files_path
+
+    metadata_files = []
+    used_json_files = set()
+    nifti_extensions = ['.nii', '.nii.gz']
+
+    for file_path in files_path:
+        # Check if this is a NIFTI file
+        if any(file_path.endswith(ext) for ext in nifti_extensions):
+            json_file = _find_json_metadata(file_path)
+            metadata_files.append(json_file)
+            if json_file is not None:
+                used_json_files.add(json_file)
+        else:
+            metadata_files.append(None)
+
+    # Filter out JSON files that are being used as metadata from the main files list
+    filtered_files_path = [f for f in files_path if f not in used_json_files]
+
+    # Update metadata_files to match the filtered list
+    if used_json_files:
+        _LOGGER.debug(f"Filtering out {len(used_json_files)} JSON metadata files from main upload list")
+        filtered_metadata_files = []
+
+        for original_file in files_path:
+            if original_file not in used_json_files:
+                original_index = files_path.index(original_file)
+                filtered_metadata_files.append(metadata_files[original_index])
+
+        metadata_files = filtered_metadata_files
+
+    return metadata_files, filtered_files_path
+
+
+def _parse_args() -> tuple[Any, list, Optional[list[dict]], Optional[list[str]]]:
     parser = argparse.ArgumentParser(
         description='DatamintAPI command line tool for uploading DICOM files and other resources')
-
-    parser.add_argument('--path', type=_is_valid_path_argparse, metavar="FILE",
+
+    # Add positional argument for path
+    parser.add_argument('path', nargs='?', type=_is_valid_path_argparse, metavar="PATH",
                         help='Path to the resource file(s) or a directory')
+
+    # Keep the --path option for backward compatibility, but make it optional
+    parser.add_argument('--path', dest='path_flag', type=_is_valid_path_argparse, metavar="FILE",
+                        help='Path to the resource file(s) or a directory (alternative to positional argument)')
     parser.add_argument('-r', '--recursive', nargs='?', const=-1,  # -1 means infinite
                         type=int,
                         help='Recurse folders looking for DICOMs. If a number is passed, recurse that number of levels.')
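Concretely, assuming a.json exists on disk next to a.nii.gz and b.nii.gz has no sidecar:

    files = ['a.nii.gz', 'a.json', 'b.nii.gz', 'c.dcm']
    metadata, files = _collect_metadata_files(files, auto_detect_json=True)
    # metadata -> ['a.json', None, None]              one slot per remaining file
    # files    -> ['a.nii.gz', 'b.nii.gz', 'c.dcm']   a.json is now carried as
    #             metadata only, not uploaded as a standalone resource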
@@ -302,9 +381,28 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
                         help='Automatically answer yes to all prompts')
     parser.add_argument('--transpose-segmentation', action='store_true', default=False,
                         help='Transpose the segmentation dimensions to match the image dimensions')
+    parser.add_argument('--auto-detect-json', action='store_true', default=True,
+                        help='Automatically detect and include JSON metadata files with the same base name as NIFTI files')
+    parser.add_argument('--no-auto-detect-json', dest='auto_detect_json', action='store_false',
+                        help='Disable automatic detection of JSON metadata files (default behavior)')
     parser.add_argument('--version', action='version', version=f'%(prog)s {datamint_version}')
     parser.add_argument('--verbose', action='store_true', help='Print debug messages', default=False)
     args = parser.parse_args()
+
+    # Handle path argument priority: positional takes precedence over --path flag
+    if args.path is not None and args.path_flag is not None:
+        _USER_LOGGER.warning("Both positional path and --path flag provided. Using positional argument.")
+        final_path = args.path
+    elif args.path is not None:
+        final_path = args.path
+    elif args.path_flag is not None:
+        final_path = args.path_flag
+    else:
+        parser.error("Path argument is required. Provide it as a positional argument or use --path flag.")
+
+    # Replace args.path with the final resolved path for consistency
+    args.path = final_path
+
     if args.verbose:
         # Get the console handler and set to debug
         logging.getLogger().handlers[0].setLevel(logging.DEBUG)
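In practice both invocation styles now work, e.g. "datamint-upload /data/study" and the older "datamint-upload --path /data/study"; if both are supplied the positional path wins and a warning is logged, and if neither is supplied the parser exits with an error.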
@@ -319,7 +417,6 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
         raise ValueError("--include-extensions and --exclude-extensions are mutually exclusive.")
 
     try:
-
         if os.path.isfile(args.path):
             file_path = [args.path]
             if args.recursive is not None:
@@ -337,6 +434,12 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
         if len(file_path) == 0:
             raise ValueError(f"No valid file was found in {args.path}")
 
+        # Collect JSON metadata files and filter them from main files list
+        metadata_files, file_path = _collect_metadata_files(file_path, args.auto_detect_json)
+
+        if len(file_path) == 0:
+            raise ValueError(f"No valid non-metadata files found in {args.path}")
+
         if args.segmentation_names is not None:
             with open(args.segmentation_names, 'r') as f:
                 segmentation_names = yaml.safe_load(f)
@@ -360,7 +463,7 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
             raise ValueError("Cannot use both --tag and --label. Use --tag instead. --label is deprecated.")
         args.tag = args.tag if args.tag is not None else args.label
 
-        return args, file_path, segmentation_files
+        return args, file_path, segmentation_files, metadata_files
 
     except Exception as e:
         if args.verbose:
@@ -371,6 +474,7 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
 def print_input_summary(files_path: list[str],
                         args,
                         segfiles: Optional[list[dict]],
+                        metadata_files: Optional[list[str]] = None,
                         include_extensions=None):
     ### Create a summary of the upload ###
     total_files = len(files_path)
@@ -397,6 +501,7 @@ def print_input_summary(files_path: list[str],
         if ext == '':
             ext = 'no extension'
         _USER_LOGGER.info(f"\t{ext}: {count}")
+    # Check for multiple extensions
     if len(ext_counts) > 1 and include_extensions is None:
         _USER_LOGGER.warning("Multiple file extensions found!" +
                              " Make sure you are uploading the correct files.")
@@ -419,6 +524,13 @@ def print_input_summary(files_path: list[str],
     else:
         _USER_LOGGER.info(msg)
 
+    if metadata_files is not None:
+        num_metadata_files = sum([1 if metadata is not None else 0 for metadata in metadata_files])
+        if num_metadata_files > 0:
+            msg = f"Number of files with JSON metadata: {num_metadata_files} ({num_metadata_files / total_files:.0%})"
+            _USER_LOGGER.info(msg)
+            # TODO: Could add validation to ensure JSON metadata files contain valid DICOM metadata structure
+
 
 def print_results_summary(files_path: list[str],
                           results: list[str | Exception]):
@@ -441,7 +553,7 @@ def main():
     load_cmdline_logging_config()
 
     try:
-        args, files_path, segfiles = _parse_args()
+        args, files_path, segfiles, metadata_files = _parse_args()
     except Exception as e:
         _USER_LOGGER.error(f'Error validating arguments. {e}')
         return
@@ -449,6 +561,7 @@ def main():
     print_input_summary(files_path,
                         args=args,
                         segfiles=segfiles,
+                        metadata_files=metadata_files,
                         include_extensions=args.include_extensions)
 
     if not args.yes:
@@ -471,7 +584,8 @@ def main():
             publish=args.publish,
             segmentation_files=segfiles,
             transpose_segmentation=args.transpose_segmentation,
-            assemble_dicoms=True
+            assemble_dicoms=True,
+            metadata_files=metadata_files
         )
         _USER_LOGGER.info('Upload finished!')
         _LOGGER.debug(f"Number of results: {len(results)}")
datamint/experiment/experiment.py CHANGED

@@ -803,7 +803,7 @@ class Experiment:
         Args:
             resource_id: The resource ID of the sample.
             predictions: The predictions of the model. One binary mask for each class. Can be a numpy array of shape (H, W) or (N,H,W);
-                Or a path to a png file; Or a path to a .nii.gz file.
+                Or a path to a png file; Or a path to a .nii/.nii.gz file.
             label_name: The name of the class or a dictionary mapping pixel values to names.
                 Example: ``{1: 'Femur', 2: 'Tibia'}`` means that pixel value 1 is 'Femur' and pixel value 2 is 'Tibia'.
             frame_index: The frame index of the prediction or a list of frame indexes.
datamint/utils/io_utils.py CHANGED
@@ -54,17 +54,32 @@ def read_video(file_path: str, index: int = None) -> np.ndarray:
 
 
 def read_nifti(file_path: str) -> np.ndarray:
-
-
-
-
-
-
-
-
-
+    """
+    Read a NIfTI file and return the image data in standardized format.
+
+    Args:
+        file_path: Path to the NIfTI file (.nii or .nii.gz)
+
+    Returns:
+        np.ndarray: Image data with shape (#frames, C, H, W)
+    """
+    try:
+        nii_img = nib.load(file_path)
+        imgs = nii_img.get_fdata()  # shape: (W, H, #frame) or (W, H)
+
+        if imgs.ndim == 2:
+            imgs = imgs.transpose(1, 0)  # (W, H) -> (H, W)
+            imgs = imgs[np.newaxis, np.newaxis]  # -> (1, 1, H, W)
+        elif imgs.ndim == 3:
+            imgs = imgs.transpose(2, 1, 0)  # (W, H, #frame) -> (#frame, H, W)
+            imgs = imgs[:, np.newaxis]  # -> (#frame, 1, H, W)
+        else:
+            raise ValueError(f"Unsupported number of dimensions in '{file_path}': {imgs.ndim}")
 
-
+        return imgs
+    except Exception as e:
+        _LOGGER.error(f"Failed to read NIfTI file '{file_path}': {e}")
+        raise e
 
 
 def read_image(file_path: str) -> np.ndarray:
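A shape sketch of the new normalization, with hypothetical file names and a volume stored on disk as (W, H, #frames):

    vol = read_nifti('brain.nii.gz')   # e.g. stored as 240 x 240 x 155
    vol.shape                          # -> (155, 1, 240, 240), i.e. (#frames, C, H, W)

    img = read_nifti('xray.nii')       # a single 2d image stored as (W, H)
    img.shape                          # -> (1, 1, H, W)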
@@ -123,6 +138,18 @@ def read_array_normalized(file_path: str,
     else:
         if mime_type == 'image/x.nifti' or file_path.endswith(NII_EXTS):
             imgs = read_nifti(file_path)
+            # For NIfTI files, try to load associated JSON metadata
+            if return_metainfo:
+                json_path = file_path.replace('.nii.gz', '.json').replace('.nii', '.json')
+                if os.path.exists(json_path):
+                    try:
+                        import json
+                        with open(json_path, 'r') as f:
+                            metainfo = json.load(f)
+                        _LOGGER.debug(f"Loaded JSON metadata from {json_path}")
+                    except Exception as e:
+                        _LOGGER.warning(f"Failed to load JSON metadata from {json_path}: {e}")
+                        metainfo = None
         elif mime_type.startswith('image/') or file_path.endswith(IMAGE_EXTS):
             imgs = read_image(file_path)
         elif file_path.endswith('.npy') or mime_type == 'application/x-numpy-data':
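Note the sidecar path is derived by plain string replacement, so scan.nii.gz and scan.nii both resolve to scan.json. A sketch, assuming read_array_normalized returns an (array, metainfo) pair when return_metainfo=True:

    imgs, metainfo = read_array_normalized('scan.nii.gz', return_metainfo=True)
    # metainfo is the parsed scan.json when it exists next to the file, else None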
{datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.3.0
+Version: 1.4.1
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
{datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/RECORD CHANGED

@@ -1,13 +1,13 @@
 datamint/__init__.py,sha256=7rKCCsaa4RBRTIfuHB708rai1xwDHLtkFNFJGKYG5D4,757
-datamint/apihandler/annotation_api_handler.py,sha256=
+datamint/apihandler/annotation_api_handler.py,sha256=uaxtkFCBfRdhXcNzZrqfoV_5FLe8w7MFE-4fVD1Wu8Y,40084
 datamint/apihandler/api_handler.py,sha256=cdVSddrFCKlF_BJ81LO1aJ0OP49rssjpNEFzJ6Q7YyY,384
 datamint/apihandler/base_api_handler.py,sha256=XSxZEQEkbQpuixGDu_P9jbxUQht3Z3JgxaeiFKPkVDM,11690
-datamint/apihandler/dto/annotation_dto.py,sha256=
+datamint/apihandler/dto/annotation_dto.py,sha256=otCIesoqGBlbSOw4ErqFsXp2HwJsPNUQlkynQh_7pHg,7110
 datamint/apihandler/exp_api_handler.py,sha256=hFUgUgBc5rL7odK7gTW3MnrvMY1pVfJUpUdzRNobMQE,6226
-datamint/apihandler/root_api_handler.py,sha256=
+datamint/apihandler/root_api_handler.py,sha256=-dy5IxDP3wJAr2ahhxKKswWfyKSzn6hHrPSYwKKW1pQ,44507
 datamint/client_cmd_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datamint/client_cmd_tools/datamint_config.py,sha256=NNWLBaHXYhY1fqherwg0u3bcu0r95ZJIMTH45X-bJ2Q,6279
-datamint/client_cmd_tools/datamint_upload.py,sha256=
+datamint/client_cmd_tools/datamint_upload.py,sha256=oDNt-91Naz8fQER2sk3I9x9G7C7xzQ0SjBjr0XNht1k,25900
 datamint/configs.py,sha256=Bdp6NydYwyCJ2dk19_gf_o3M2ZyQOmMHpLi8wEWNHUk,1426
 datamint/dataset/__init__.py,sha256=4PlUKSvVhdfQvvuq8jQXrkdqnot-iTTizM3aM1vgSwg,47
 datamint/dataset/base_dataset.py,sha256=EnnIeF3ZaBL2M8qEV39U0ogKptyvezBNoVOvrS12bZ8,38756
@@ -16,14 +16,14 @@ datamint/examples/__init__.py,sha256=zcYnd5nLVme9GCTPYH-1JpGo8xXK2WEYvhzcy_2alZc
 datamint/examples/example_projects.py,sha256=7Nb_EaIdzJTQa9zopqc-WhTBQWQJSoQZ_KjRS4PB4FI,2931
 datamint/experiment/__init__.py,sha256=5qQOMzoG17DEd1YnTF-vS0qiM-DGdbNh42EUo91CRhQ,34
 datamint/experiment/_patcher.py,sha256=ZgbezoevAYhJsbiJTvWPALGTcUiMT371xddcTllt3H4,23296
-datamint/experiment/experiment.py,sha256=
+datamint/experiment/experiment.py,sha256=aHK9dRFdQTi569xgUg1KqlCZLHZpDmSH3g3ndPIZvXw,44546
 datamint/logging.yaml,sha256=a5dsATpul7QHeUHB2TjABFjWaPXBMbO--dgn8GlRqwk,483
 datamint/utils/dicom_utils.py,sha256=HTuEjwXyTSMaTVGb9pFOO76q2KLTr2CxTDoCRElVHRA,26023
-datamint/utils/io_utils.py,sha256=
+datamint/utils/io_utils.py,sha256=ebP1atKkhKEf1mUU1LsVwDq0h_so7kVKkD_7hQYn_kM,6754
 datamint/utils/logging_utils.py,sha256=DvoA35ATYG3JTwfXEXYawDyKRfHeCrH0a9czfkmz8kM,1851
 datamint/utils/torchmetrics.py,sha256=lwU0nOtsSWfebyp7dvjlAggaqXtj5ohSEUXOg3L0hJE,2837
 datamint/utils/visualization.py,sha256=yaUVAOHar59VrGUjpAWv5eVvQSfztFG0eP9p5Vt3l-M,4470
-datamint-1.3.0.dist-info/METADATA,sha256=
-datamint-1.3.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
-datamint-1.3.0.dist-info/entry_points.txt,sha256=mn5H6jPjO-rY0W0CAZ6Z_KKWhMLvyVaSpoqk77jlTI4,145
-datamint-1.3.0.dist-info/RECORD,,
+datamint-1.4.1.dist-info/METADATA,sha256=Av01QDy0f3zl0oud-ebW6vyjk0J0P33MGWMq8wg3nMs,4065
+datamint-1.4.1.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+datamint-1.4.1.dist-info/entry_points.txt,sha256=mn5H6jPjO-rY0W0CAZ6Z_KKWhMLvyVaSpoqk77jlTI4,145
+datamint-1.4.1.dist-info/RECORD,,
{datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/WHEEL: file without changes
{datamint-1.3.0.dist-info → datamint-1.4.1.dist-info}/entry_points.txt: file without changes