datamint 1.5.0__tar.gz → 1.5.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of datamint might be problematic.
- {datamint-1.5.0 → datamint-1.5.2}/PKG-INFO +1 -1
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/annotation_api_handler.py +26 -69
- {datamint-1.5.0 → datamint-1.5.2}/datamint/client_cmd_tools/datamint_upload.py +2 -2
- {datamint-1.5.0 → datamint-1.5.2}/pyproject.toml +2 -2
- {datamint-1.5.0 → datamint-1.5.2}/README.md +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/__init__.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/api_handler.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/base_api_handler.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/dto/annotation_dto.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/exp_api_handler.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/apihandler/root_api_handler.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/client_cmd_tools/__init__.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/client_cmd_tools/datamint_config.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/configs.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/dataset/__init__.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/dataset/base_dataset.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/dataset/dataset.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/examples/__init__.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/examples/example_projects.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/experiment/__init__.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/experiment/_patcher.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/experiment/experiment.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/logging.yaml +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/utils/dicom_utils.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/utils/io_utils.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/utils/logging_utils.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/utils/torchmetrics.py +0 -0
- {datamint-1.5.0 → datamint-1.5.2}/datamint/utils/visualization.py +0 -0
--- datamint-1.5.0/PKG-INFO
+++ datamint-1.5.2/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.5.0
+Version: 1.5.2
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
--- datamint-1.5.0/datamint/apihandler/annotation_api_handler.py
+++ datamint-1.5.2/datamint/apihandler/annotation_api_handler.py
@@ -13,6 +13,7 @@ from requests.exceptions import HTTPError
 from deprecated.sphinx import deprecated
 from .dto.annotation_dto import CreateAnnotationDto, LineGeometry, BoxGeometry, CoordinateSystem, AnnotationType
 import pydicom
+import json
 
 _LOGGER = logging.getLogger(__name__)
 _USER_LOGGER = logging.getLogger('user_logger')
@@ -119,7 +120,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
         try:
             try:
                 img = np.array(Image.open(fio))
-
+
                 # Check that frame is not empty
                 uniq_vals = np.unique(img)
                 if discard_empty_segmentations:
@@ -133,7 +134,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
 
                 segnames = AnnotationAPIHandler._get_segmentation_names(uniq_vals, names=name)
                 segs_generator = AnnotationAPIHandler._split_segmentations(img, uniq_vals, fio)
-
+
                 # Create annotations
                 annotations: list[CreateAnnotationDto] = []
                 for segname in segnames:
@@ -148,7 +149,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                         annotation_worklist_id=worklist_id
                     )
                     annotations.append(ann)
-
+
                 # Validate unique identifiers
                 if len(annotations) != len(set([a.identifier for a in annotations])):
                     raise ValueError(
@@ -161,7 +162,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                 if len(annotids) != len(segnames):
                     _LOGGER.warning(f"Number of uploaded annotations ({len(annotids)})" +
                                     f" does not match the number of annotations ({len(segnames)})")
-
+
                 for annotid, segname, fio_seg in zip(annotids, segnames, segs_generator):
                     form = aiohttp.FormData()
                     form.add_field('file', fio_seg, filename=segname, content_type='image/png')
@@ -173,7 +174,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                     resp = await self._run_request_async(request_params)
                     if 'error' in resp:
                         raise DatamintException(resp['error'])
-
+
                 return annotids
         finally:
             fio.close()
@@ -183,7 +184,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
     async def _upload_volume_segmentation_async(self,
                                                 resource_id: str,
                                                 file_path: str | np.ndarray,
-                                                name:
+                                                name: str | dict[int, str] | None = None,
                                                 imported_from: Optional[str] = None,
                                                 author_email: Optional[str] = None,
                                                 worklist_id: Optional[str] = None,
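The widened signature above means `name` can now be either a single label string or a mapping from pixel value to label. A small illustrative sketch of the two forms (the example labels are made up, not from the package); the next hunk shows the mapping being JSON-encoded into a `segmentation_map` form field:

```python
import json

# Hypothetical label arguments for the new `name` parameter (values are illustrative).
name_single = 'volume_segmentation'        # one label for the whole volume
name_mapping = {1: 'liver', 2: 'lesion'}   # pixel value -> label name

# In 1.5.2 the mapping is serialized with json.dumps before upload (see the hunk below);
# note that JSON serialization turns the integer keys into strings.
print(json.dumps(name_mapping))  # {"1": "liver", "2": "lesion"}
```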
@@ -209,85 +210,41 @@ class AnnotationAPIHandler(BaseAPIHandler):
         Raises:
             ValueError: If name is not a string or file format is unsupported for volume upload.
         """
-        if isinstance(name, dict):
-            raise ValueError("Volume uploads only support string names, not dictionaries.")
-
         if name is None:
             name = 'volume_segmentation'
 
-        # Create volume annotation
-        ann = CreateAnnotationDto(
-            type='segmentation',
-            identifier=name,
-            scope='frame',  # Volume segmentations use image scope
-            imported_from=imported_from,
-            import_author=author_email,
-            model_id=model_id,
-            annotation_worklist_id=worklist_id
-        )
-
-        annotids = await self._upload_annotations_async(resource_id, [ann])
-        _LOGGER.debug(f"Created volume annotation with ID: {annotids}")
-
-        if not annotids:
-            raise DatamintException("Failed to create volume annotation")
-
-        annotid = annotids[0]
-
         # Prepare file for upload
         if isinstance(file_path, str):
             if file_path.endswith('.nii') or file_path.endswith('.nii.gz'):
-                content_type = 'application/x-nifti'
                 # Upload NIfTI file directly
                 with open(file_path, 'rb') as f:
                     filename = os.path.basename(file_path)
                     form = aiohttp.FormData()
                     form.add_field('file', f, filename=filename, content_type='application/x-nifti')
-
+                    model_id = 'c9daf156-5335-4cb3-b374-5b3a776e0025'
+                    if model_id is not None:
+                        form.add_field('model_id', model_id)  # Add model_id if provided
+                    if worklist_id is not None:
+                        form.add_field('annotation_worklist_id', worklist_id)
+                    form.add_field('segmentation_map', json.dumps(name), content_type='application/json')
+
                     request_params = dict(
                         method='POST',
-                        url=f'{self.root_url}/annotations/{resource_id}/
+                        url=f'{self.root_url}/annotations/{resource_id}/segmentations/file',
                         data=form,
                     )
                     resp = await self._run_request_async(request_params)
                     if 'error' in resp:
                         raise DatamintException(resp['error'])
+                    return resp
             else:
                 raise ValueError(f"Volume upload not supported for file format: {file_path}")
         elif isinstance(file_path, np.ndarray):
-
-            # TODO: Consider supporting direct numpy array upload or convert to a supported format
-            if transpose_segmentation:
-                volume_data = file_path.transpose(1, 0, 2) if file_path.ndim == 3 else file_path.transpose(1, 0)
-            else:
-                volume_data = file_path
-
-            # Create temporary NIfTI file
-            import tempfile
-            with tempfile.NamedTemporaryFile(suffix='.nii.gz', delete=False) as tmp_file:
-                nii_img = nib.Nifti1Image(volume_data.astype(np.uint8), np.eye(4))
-                nib.save(nii_img, tmp_file.name)
-
-            try:
-                with open(tmp_file.name, 'rb') as f:
-                    form = aiohttp.FormData()
-                    form.add_field('file', f, filename=f'{name}.nii.gz', content_type='application/x-nifti')
-
-                    request_params = dict(
-                        method='POST',
-                        url=f'{self.root_url}/annotations/{resource_id}/annotations/{annotid}/file',
-                        data=form,
-                    )
-                    resp = await self._run_request_async(request_params)
-                    if 'error' in resp:
-                        raise DatamintException(resp['error'])
-            finally:
-                os.unlink(tmp_file.name)  # Clean up temporary file
+            raise NotImplementedError
         else:
             raise ValueError(f"Unsupported file_path type for volume upload: {type(file_path)}")
 
         _USER_LOGGER.info(f'Volume segmentation uploaded for resource {resource_id}')
-        return annotids
 
     async def _upload_segmentations_async(self,
                                           resource_id: str,
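For reference, a minimal standalone sketch of the request shape introduced above: one multipart POST to `/annotations/{resource_id}/segmentations/file` carrying the NIfTI file, the optional `model_id` and `annotation_worklist_id` fields, and the label map as JSON. The endpoint path and field names come from the hunk; the bare `aiohttp` session, the omitted authentication headers, and the function name are assumptions for illustration only (the package routes this through its internal `_run_request_async`). Note also that the new code assigns a hardcoded `model_id` immediately before the `if model_id is not None:` check, so the caller-supplied value appears to be overridden.

```python
# Illustrative sketch only -- not the package's implementation. Auth headers are omitted
# and the function name is hypothetical; the endpoint path and form fields follow the diff above.
import json
import os
import aiohttp


async def post_volume_segmentation(root_url: str,
                                   resource_id: str,
                                   nifti_path: str,
                                   name: str | dict[int, str] = 'volume_segmentation',
                                   worklist_id: str | None = None,
                                   model_id: str | None = None) -> dict:
    """Send a NIfTI volume segmentation in a single multipart request."""
    with open(nifti_path, 'rb') as f:
        form = aiohttp.FormData()
        form.add_field('file', f,
                       filename=os.path.basename(nifti_path),
                       content_type='application/x-nifti')
        if model_id is not None:
            form.add_field('model_id', model_id)
        if worklist_id is not None:
            form.add_field('annotation_worklist_id', worklist_id)
        # The label (or {pixel value: label} map) travels as a JSON-encoded form field.
        form.add_field('segmentation_map', json.dumps(name), content_type='application/json')

        async with aiohttp.ClientSession() as session:
            async with session.post(f'{root_url}/annotations/{resource_id}/segmentations/file',
                                    data=form) as resp:
                resp.raise_for_status()
                return await resp.json()
```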
@@ -334,25 +291,25 @@ class AnnotationAPIHandler(BaseAPIHandler):
         if upload_volume:
             if frame_index is not None:
                 _LOGGER.warning("frame_index parameter ignored when upload_volume=True")
-
+
             return await self._upload_volume_segmentation_async(
                 resource_id=resource_id,
                 file_path=file_path,
-                name=name
+                name=name,
                 imported_from=imported_from,
                 author_email=author_email,
                 worklist_id=worklist_id,
                 model_id=model_id,
                 transpose_segmentation=transpose_segmentation
             )
-
+
         # Handle frame-by-frame upload (existing logic)
         nframes, fios = AnnotationAPIHandler._generate_segmentations_ios(
             file_path, transpose_segmentation=transpose_segmentation
         )
         if frame_index is None:
             frame_index = list(range(nframes))
-
+
         annotids = []
         for fidx, f in zip(frame_index, fios):
             frame_annotids = await self._upload_single_frame_segmentation_async(
@@ -368,12 +325,12 @@ class AnnotationAPIHandler(BaseAPIHandler):
             )
             annotids.extend(frame_annotids)
         return annotids
-
+
         # Handle single file-like object
         if fio is not None:
             if upload_volume:
                 raise ValueError("upload_volume=True is not supported when providing fio parameter")
-
+
             return await self._upload_single_frame_segmentation_async(
                 resource_id=resource_id,
                 frame_index=frame_index,
@@ -385,7 +342,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                 worklist_id=worklist_id,
                 model_id=model_id
             )
-
+
         raise ValueError("Either file_path or fio must be provided")
 
     def upload_segmentations(self,
@@ -429,7 +386,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
         """
         if isinstance(file_path, str) and not os.path.exists(file_path):
             raise FileNotFoundError(f"File {file_path} not found.")
-
+
         # Handle NIfTI files specially - upload as single volume
         if isinstance(file_path, str) and (file_path.endswith('.nii') or file_path.endswith('.nii.gz')):
             _LOGGER.info(f"Uploading NIfTI segmentation file: {file_path}")
@@ -666,7 +623,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                               model_id: Optional[str] = None) -> list[str]:
         """
         Common method for creating geometry-based annotations.
-
+
         Args:
             geometry: The geometry object (LineGeometry or BoxGeometry)
             resource_id: The resource unique id
--- datamint-1.5.0/datamint/client_cmd_tools/datamint_upload.py
+++ datamint-1.5.2/datamint/client_cmd_tools/datamint_upload.py
@@ -106,8 +106,8 @@ def walk_to_depth(path: str | Path,
 
 
 def filter_files(files_path: Iterable[Path],
-                 include_extensions,
-                 exclude_extensions) -> list[Path]:
+                 include_extensions: Optional[list[str]] = None,
+                 exclude_extensions: Optional[list[str]] = None) -> list[Path]:
     def fix_extension(ext: str) -> str:
         if ext == "" or ext[0] == '.':
             return ext
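The hunk above loosens the `filter_files` signature so both extension filters are optional. A hedged usage sketch follows; the example paths, and the assumption that `None` disables the corresponding filter, are illustrative only and not taken from the package:

```python
# Illustrative only; assumes None means "do not filter" for the corresponding argument.
from pathlib import Path

from datamint.client_cmd_tools.datamint_upload import filter_files

files = [Path('scan_001.dcm'), Path('scan_002.dcm'), Path('notes.txt')]

everything = filter_files(files)                                # both filters omitted (new in 1.5.2)
dicom_only = filter_files(files, include_extensions=['dcm'])    # fix_extension also accepts '.dcm'
no_text = filter_files(files, exclude_extensions=['.txt'])
```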
--- datamint-1.5.0/pyproject.toml
+++ datamint-1.5.2/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "datamint"
 description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
-version = "1.5.0"
+version = "1.5.2"
 dynamic = ["dependencies"]
 requires-python = ">=3.10"
 readme = "README.md"
@@ -66,6 +66,6 @@ docs = ["sphinx", "sphinx_rtd_theme", "sphinx-tabs", "setuptools"]
 dev = ["pytest", "pytest-cov", "responses", "aioresponses"]
 
 [build-system]
-requires = ["poetry-core>=
+requires = ["poetry-core>=1.0.0"]
 build-backend = "poetry.core.masonry.api"
 