datamint 1.4.1__py3-none-any.whl → 1.5.1__py3-none-any.whl
This diff shows the changes between publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
This version of datamint has been flagged as potentially problematic.
- datamint/apihandler/annotation_api_handler.py +246 -66
- datamint/apihandler/root_api_handler.py +227 -101
- datamint/client_cmd_tools/datamint_config.py +102 -59
- datamint/client_cmd_tools/datamint_upload.py +26 -18
- datamint/utils/dicom_utils.py +12 -12
- {datamint-1.4.1.dist-info → datamint-1.5.1.dist-info}/METADATA +1 -1
- {datamint-1.4.1.dist-info → datamint-1.5.1.dist-info}/RECORD +9 -9
- {datamint-1.4.1.dist-info → datamint-1.5.1.dist-info}/WHEEL +0 -0
- {datamint-1.4.1.dist-info → datamint-1.5.1.dist-info}/entry_points.txt +0 -0
datamint/apihandler/root_api_handler.py
@@ -57,13 +57,13 @@ class RootAPIHandler(BaseAPIHandler):
             mimetype: Optional[str] = None,
             anonymize: bool = False,
             anonymize_retain_codes: Sequence[tuple] = [],
-            tags: list[str] =
-            mung_filename: Sequence[int] | Literal['all'] = None,
+            tags: list[str] = [],
+            mung_filename: Sequence[int] | Literal['all'] | None = None,
             channel: Optional[str] = None,
             session=None,
             modality: Optional[str] = None,
             publish: bool = False,
-            metadata_file: Optional[str] = None,
+            metadata_file: Optional[str | dict] = None,
             ) -> str:
         if _is_io_object(file_path):
             name = file_path.name
@@ -121,21 +121,29 @@ class RootAPIHandler(BaseAPIHandler):
         metadata_content = None
         metadata_dict = None
         if metadata_file is not None:
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if isinstance(metadata_file, dict):
+                # Metadata is already a dictionary
+                metadata_dict = metadata_file
+                metadata_content = json.dumps(metadata_dict)
+                _LOGGER.debug("Using provided metadata dictionary")
+            else:
+                # Metadata is a file path
+                try:
+                    with open(metadata_file, 'r') as metadata_f:
+                        metadata_content = metadata_f.read()
+                        metadata_dict = json.loads(metadata_content)
+                except Exception as e:
+                    _LOGGER.warning(f"Failed to read metadata file {metadata_file}: {e}")
+
+        # Extract modality from metadata if available
+        if metadata_dict is not None:
+            metadata_dict_lower = {k.lower(): v for k, v in metadata_dict.items() if isinstance(k, str)}
+            try:
+                if modality is None:
+                    if 'modality' in metadata_dict_lower:
+                        modality = metadata_dict_lower['modality']
+            except Exception as e:
+                _LOGGER.debug(f"Failed to extract modality from metadata: {e}")

         form = aiohttp.FormData()
         url = self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)
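The new branch above accepts either an in-memory dict or a JSON file path, and falls back to the metadata to fill in `modality` via a case-insensitive key lookup. A minimal standalone sketch of that normalization step (the sample dictionary is hypothetical):

    import json

    metadata = {"Modality": "CT", "PatientID": "P001"}   # hypothetical metadata
    metadata_content = json.dumps(metadata)              # content sent in the 'metadata' form field
    # case-insensitive lookup, mirroring the diff above
    metadata_lower = {k.lower(): v for k, v in metadata.items() if isinstance(k, str)}
    modality = metadata_lower.get("modality")            # -> "CT"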
@@ -150,19 +158,18 @@ class RootAPIHandler(BaseAPIHandler):
             form.add_field('channel', channel)
         if modality is not None:
             form.add_field('modality', modality)
-
+        form.add_field('bypass_inbox', 'true' if publish else 'false')
         if tags is not None and len(tags) > 0:
             # comma separated list of tags
-            tags
-            form.add_field('tags', tags)
+            form.add_field('tags', ','.join([l.strip() for l in tags]))

         # Add JSON metadata if provided
         if metadata_content is not None:
             try:
-                _LOGGER.debug(
+                _LOGGER.debug("Adding metadata to form data")
                 form.add_field('metadata', metadata_content, content_type='application/json')
             except Exception as e:
-                _LOGGER.warning(f"Failed to
+                _LOGGER.warning(f"Failed to add metadata to form: {e}")

         request_params = {
             'method': 'POST',
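Tags are now sent as a single comma-separated form field rather than a raw list, with whitespace stripped from each tag. A quick illustration of the join used above:

    tags = [' chest ', 'xray', 'example']
    assert ','.join([t.strip() for t in tags]) == 'chest,xray,example'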
@@ -189,7 +196,6 @@ class RootAPIHandler(BaseAPIHandler):
     async def _upload_resources_async(self,
             files_path: Sequence[str | IO],
             mimetype: Optional[str] = None,
-            batch_id: Optional[str] = None,
             anonymize: bool = False,
             anonymize_retain_codes: Sequence[tuple] = [],
             on_error: Literal['raise', 'skip'] = 'raise',
@@ -200,7 +206,7 @@ class RootAPIHandler(BaseAPIHandler):
             publish: bool = False,
             segmentation_files: Optional[list[dict]] = None,
             transpose_segmentation: bool = False,
-            metadata_files: Optional[list[
+            metadata_files: Optional[list[str | dict | None]] = None,
             ) -> list[str]:
         if on_error not in ['raise', 'skip']:
             raise ValueError("on_error must be either 'raise' or 'skip'")
@@ -212,7 +218,7 @@ class RootAPIHandler(BaseAPIHandler):
             metadata_files = _infinite_gen(None)

         async with aiohttp.ClientSession() as session:
-            async def __upload_single_resource(file_path, segfiles: dict, metadata_file:
+            async def __upload_single_resource(file_path, segfiles: dict[str, list | dict], metadata_file: str | dict | None):
                 async with self.semaphore:
                     rid = await self._upload_single_resource_async(
                         file_path=file_path,
@@ -233,8 +239,9 @@ class RootAPIHandler(BaseAPIHandler):
                     if isinstance(names, dict):
                         names = _infinite_gen(names)
                     frame_indices = segfiles.get('frame_index', _infinite_gen(None))
-
-
+                    for f, name, frame_index in tqdm(zip(fpaths, names, frame_indices),
+                                                     desc=f"Uploading segmentations for {file_path}",
+                                                     total=len(fpaths)):
                         if f is not None:
                             await self._upload_segmentations_async(rid,
                                 file_path=f,
@@ -257,6 +264,9 @@ class RootAPIHandler(BaseAPIHandler):
                 other_files_path.append(f)

         orig_len = len(dicoms_files_path)
+        if orig_len == 0:
+            _LOGGER.debug("No DICOM files found to assemble.")
+            return files_path, False
         dicoms_files_path = dicom_utils.assemble_dicoms(dicoms_files_path, return_as_IO=True)

         new_len = len(dicoms_files_path)
@@ -269,15 +279,100 @@ class RootAPIHandler(BaseAPIHandler):

         return files_path, assembled

+    def upload_resource(self,
+            file_path: str | IO | pydicom.dataset.Dataset,
+            mimetype: Optional[str] = None,
+            anonymize: bool = False,
+            anonymize_retain_codes: Sequence[tuple] = [],
+            on_error: Literal['raise', 'skip'] = 'raise',
+            tags: Optional[Sequence[str]] = None,
+            mung_filename: Sequence[int] | Literal['all'] | None = None,
+            channel: Optional[str] = None,
+            publish: bool = False,
+            publish_to: Optional[str] = None,
+            segmentation_files: list[str] | dict | None = None,
+            transpose_segmentation: bool = False,
+            modality: Optional[str] = None,
+            assemble_dicoms: bool = True,
+            metadata: dict | str | None = None
+            ) -> str | Exception:
+        """
+        Uploads a single resource (examples: DICOM file, NIfTI file, pydicom object)
+
+        Args:
+            file_path: The path to the resource file or a list of paths to resources files.
+            mimetype: The mimetype of the resources. If None, it will be guessed.
+            anonymize: Whether to anonymize the dicoms or not.
+            anonymize_retain_codes: The tags to retain when anonymizing the dicoms.
+            on_error: Whether to raise an exception when an error occurs or to skip the error.
+            tags: The tags to add to the resources.
+            mung_filename: The parts of the filepath to keep when renaming the resource file.
+                ''all'' keeps all parts.
+            channel: The channel to upload the resources to. An arbitrary name to group the resources.
+            publish: Whether to directly publish the resources or not. They will have the 'published' status.
+            publish_to: The project name or id to publish the resources to.
+                They will have the 'published' status and will be added to the project.
+                If this is set, `publish` parameter is ignored.
+            segmentation_files: The segmentation files to upload.
+                If it is a dict, it should have two keys: 'files' and 'names'.
+                - files: A list of paths to the segmentation files. Example: ['seg1.nii.gz', 'seg2.nii.gz'].
+                - names: Can be a list (same size of `files`) of labels for the segmentation files. Example: ['Brain', 'Lung']. Or a dictionary mapping a pixel value to a label. Example: {'1': 'Brain', '2': 'Lung'}.
+            transpose_segmentation: Whether to transpose the segmentation files or not.
+            modality: The modality of the resources.
+            assemble_dicoms: Whether to assemble the dicom files or not based on the SeriesInstanceUID and InstanceNumber attributes.
+            metadata: JSON metadata to include with each resource.
+                Can be a file path (str) or an already loaded dictionary (dict).
+
+        Raises:
+            ResourceNotFoundError: If `publish_to` is supplied, and the project does not exists.
+
+        Returns:
+            str | Exception: A resource ID or an error.
+
+        Example:
+            .. code-block:: python
+
+                file_path = '/path/to/resource.dcm'
+                segfiles = {'files': ['/path/to/seg_brain.nii.gz', '/path/to/seg_lung.nii.gz'],
+                            'names': ['Brain', 'Lung']}
+                result = api_handler.upload_resource(
+                    file_path=file_path,
+                    mimetype='application/dicom',
+                    anonymize=True,
+                    tags=['example', 'dicom'],
+                    segmentation_files=segfiles,
+                    metadata={'PatientID': '12345'}
+                )
+        """
+
+        result = self.upload_resources(
+            files_path=[file_path],
+            mimetype=mimetype,
+            anonymize=anonymize,
+            anonymize_retain_codes=anonymize_retain_codes,
+            on_error=on_error,
+            tags=tags,
+            mung_filename=mung_filename,
+            channel=channel,
+            publish=publish,
+            publish_to=publish_to,
+            segmentation_files=[segmentation_files] if segmentation_files is not None else None,
+            transpose_segmentation=transpose_segmentation,
+            modality=modality,
+            assemble_dicoms=assemble_dicoms,
+            metadata=metadata
+        )
+
+        return result[0]
+
     def upload_resources(self,
             files_path: str | IO | Sequence[str | IO] | pydicom.dataset.Dataset,
             mimetype: Optional[str] = None,
             anonymize: bool = False,
             anonymize_retain_codes: Sequence[tuple] = [],
             on_error: Literal['raise', 'skip'] = 'raise',
-            labels=None,
             tags: Optional[Sequence[str]] = None,
-            mung_filename: Sequence[int] | Literal['all'] = None,
+            mung_filename: Sequence[int] | Literal['all'] | None = None,
             channel: Optional[str] = None,
             publish: bool = False,
             publish_to: Optional[str] = None,
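The new `upload_resource` method is a single-file convenience wrapper: it forwards to `upload_resources` with one-element lists and returns the first result. A hedged usage sketch, assuming `api` is an already-constructed handler and the paths are placeholders:

    # single call returning one resource id (or an Exception when on_error='skip')
    rid = api.upload_resource('/path/to/resource.dcm', tags=['example'], publish=True)

    # roughly equivalent call through the list-based API
    rid = api.upload_resources(files_path=['/path/to/resource.dcm'], tags=['example'], publish=True)[0]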
@@ -285,7 +380,7 @@ class RootAPIHandler(BaseAPIHandler):
             transpose_segmentation: bool = False,
             modality: Optional[str] = None,
             assemble_dicoms: bool = True,
-
+            metadata: list[str | dict | None] | dict | str | None = None
             ) -> list[str | Exception] | str | Exception:
         """
         Upload resources.
@@ -296,9 +391,6 @@ class RootAPIHandler(BaseAPIHandler):
            anonymize (bool): Whether to anonymize the dicoms or not.
            anonymize_retain_codes (Sequence[tuple]): The tags to retain when anonymizing the dicoms.
            on_error (Literal['raise', 'skip']): Whether to raise an exception when an error occurs or to skip the error.
-            labels:
-                .. deprecated:: 0.11.0
-                    Use `tags` instead.
            tags (Optional[Sequence[str]]): The tags to add to the resources.
            mung_filename (Sequence[int] | Literal['all']): The parts of the filepath to keep when renaming the resource file.
                ''all'' keeps all parts.
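With the deprecated `labels` parameter removed from `upload_resources`, callers that still pass `labels=` will now get a TypeError; `tags=` is the replacement. For example (placeholder path, assumed handler instance `api`):

    # 1.4.x (no longer accepted): api.upload_resources(files_path='scan.dcm', labels=['chest'])
    api.upload_resources(files_path='scan.dcm', tags=['chest'])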
@@ -308,24 +400,33 @@ class RootAPIHandler(BaseAPIHandler):
                They will have the 'published' status and will be added to the project.
                If this is set, `publish` parameter is ignored.
            segmentation_files (Optional[list[Union[list[str], dict]]]): The segmentation files to upload.
+                If each element is a dict, it should have two keys: 'files' and 'names'.
+                - files: A list of paths to the segmentation files. Example: ['seg1.nii.gz', 'seg2.nii.gz'].
+                - names: Can be a list (same size of `files`) of labels for the segmentation files. Example: ['Brain', 'Lung'].
            transpose_segmentation (bool): Whether to transpose the segmentation files or not.
            modality (Optional[str]): The modality of the resources.
-            assemble_dicoms (bool): Whether to assemble the dicom files or not based on the
-
+            assemble_dicoms (bool): Whether to assemble the dicom files or not based on the SeriesInstanceUID and InstanceNumber attributes.
+            metadatas (Optional[list[str | dict | None]]): JSON metadata to include with each resource.
+                Must have the same length as `files_path`.
+                Can be file paths (str) or already loaded dictionaries (dict).

        Raises:
            ResourceNotFoundError: If `publish_to` is supplied, and the project does not exists.

        Returns:
-            list[str]:
+            list[str | Exception]: A list of resource IDs or errors.
        """

        if on_error not in ['raise', 'skip']:
            raise ValueError("on_error must be either 'raise' or 'skip'")
-        if labels is not None and tags is None:
-            tags = labels

-        files_path,
+        files_path, is_multiple_resources = RootAPIHandler.__process_files_parameter(files_path)
+        if isinstance(metadata, (str, dict)):
+            _LOGGER.debug("Converting metadatas to a list")
+            metadata = [metadata]
+
+        if metadata is not None and len(metadata) != len(files_path):
+            raise ValueError("The number of metadata files must match the number of resources.")
        if assemble_dicoms:
            files_path, assembled = self._assemble_dicoms(files_path)
            assemble_dicoms = assembled
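The `metadata` argument of `upload_resources` now accepts either a single dict/path (normalized to a one-element list) or a per-file list whose length must match `files_path`. A sketch of the accepted call shapes, with placeholder paths and an assumed handler instance `api`:

    # one metadata entry for a single resource
    api.upload_resources(files_path='scan1.dcm', metadata={'PatientID': 'P001'})

    # one metadata entry per resource; a length mismatch raises ValueError
    api.upload_resources(files_path=['scan1.dcm', 'scan2.dcm'],
                         metadata=['scan1_meta.json', {'PatientID': 'P002'}])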
@@ -333,7 +434,7 @@ class RootAPIHandler(BaseAPIHandler):
        if segmentation_files is not None:
            if assemble_dicoms:
                raise NotImplementedError("Segmentation files cannot be uploaded when assembling dicoms yet.")
-            if
+            if is_multiple_resources:
                if len(segmentation_files) != len(files_path):
                    raise ValueError("The number of segmentation files must match the number of resources.")
            else:
@@ -344,6 +445,15 @@ class RootAPIHandler(BaseAPIHandler):

            segmentation_files = [segfiles if (isinstance(segfiles, dict) or segfiles is None) else {'files': segfiles}
                                  for segfiles in segmentation_files]
+
+            for segfiles in segmentation_files:
+                if 'files' not in segfiles:
+                    raise ValueError("segmentation_files must contain a 'files' key with a list of file paths.")
+                if 'names' in segfiles:
+                    # same length as files
+                    if isinstance(segfiles['names'], (list, tuple)) and len(segfiles['names']) != len(segfiles['files']):
+                        raise ValueError("segmentation_files['names'] must have the same length as segmentation_files['files'].")
+
        loop = asyncio.get_event_loop()
        task = self._upload_resources_async(files_path=files_path,
            mimetype=mimetype,
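The added loop validates each segmentation entry up front: a 'files' key is mandatory, and a list-style 'names' must have the same length as 'files'. A shape that passes the new checks (file names are placeholders):

    segmentation_files = [{
        'files': ['seg_brain.nii.gz', 'seg_lung.nii.gz'],
        'names': ['Brain', 'Lung'],   # same length as 'files'
    }]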
@@ -357,32 +467,32 @@ class RootAPIHandler(BaseAPIHandler):
            segmentation_files=segmentation_files,
            transpose_segmentation=transpose_segmentation,
            modality=modality,
-            metadata_files=
+            metadata_files=metadata,
            )

        resource_ids = loop.run_until_complete(task)
        _LOGGER.info(f"Resources uploaded: {resource_ids}")

-        if publish_to is not None
-            _USER_LOGGER.info('
+        if publish_to is not None:
+            _USER_LOGGER.info('Adding resources to project')
            resource_ids_succ = [rid for rid in resource_ids if not isinstance(rid, Exception)]
            try:
-                self.
+                self.add_to_project(resource_ids_succ, publish_to)
            except Exception as e:
-                _LOGGER.error(f"Error
+                _LOGGER.error(f"Error adding resources to project: {e}")
                if on_error == 'raise':
                    raise e

-        if
+        if is_multiple_resources:
            return resource_ids
        return resource_ids[0]

    def publish_resources(self,
-            resource_ids:
+            resource_ids: str | Sequence[str],
            project_name: Optional[str] = None,
            ) -> None:
        """
-        Publish a resource,
+        Publish a resource, changing its status to 'published'.

        Args:
            resource_ids (str|Sequence[str]): The resource unique id or a list of resource unique ids.
@@ -430,6 +540,39 @@ class RootAPIHandler(BaseAPIHandler):

        self._run_request(params)

+    def add_to_project(self,
+            resource_ids: str | Sequence[str],
+            project_name: str,
+            ) -> None:
+        """
+        Add resources to a project.
+
+        Args:
+            resource_ids (str|Sequence[str]): The resource unique id or a list of resource unique ids.
+            project_name (str): The project name or id to add the resource to.
+
+        Raises:
+            ResourceNotFoundError: If the resource does not exists or the project does not exists.
+
+        """
+        if isinstance(resource_ids, str):
+            resource_ids = [resource_ids]
+
+        # get the project id by its name
+        project = self.get_project_by_name(project_name)
+        if 'error' in project:
+            raise ResourceNotFoundError('project', {'project_name': project_name})
+
+        dataset_id = project['dataset_id']
+
+        params = {
+            'method': 'POST',
+            'url': f'{self.root_url}/datasets/{dataset_id}/resources',
+            'json': {'resource_ids_to_add': resource_ids, 'all_files_selected': False}
+        }
+
+        self._run_request(params)
+
    def get_project_by_id(self, project_id: str) -> dict:
        """
        Get a project by its id.
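The new public `add_to_project` method resolves the project by name (or id), looks up its dataset, and POSTs the resource ids to it; `upload_resources(..., publish_to=...)` now delegates to it. A hedged usage sketch with placeholder ids and an assumed handler instance `api`:

    resource_ids = ['3f2b9c1e-...', '7a1d4e2b-...']   # e.g. ids returned by upload_resources
    api.add_to_project(resource_ids, 'Example Project')

    # a single id is also accepted and wrapped into a list internally
    api.add_to_project('3f2b9c1e-...', 'Example Project')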
@@ -481,31 +624,34 @@ class RootAPIHandler(BaseAPIHandler):

    @staticmethod
    def __process_files_parameter(file_path: str | IO | Sequence[str | IO] | pydicom.dataset.Dataset) -> tuple[Sequence[str | IO], bool]:
+        """
+        Process the file_path parameter to ensure it is a list of file paths or IO objects.
+        """
        if isinstance(file_path, pydicom.dataset.Dataset):
            file_path = to_bytesio(file_path, file_path.filename)

        if isinstance(file_path, str):
            if os.path.isdir(file_path):
                is_list = True
-
+                new_file_path = [f'{file_path}/{f}' for f in os.listdir(file_path)]
            else:
                is_list = False
-
+                new_file_path = [file_path]
        # Check if is an IO object
        elif _is_io_object(file_path):
            is_list = False
-
+            new_file_path = [file_path]
        elif not hasattr(file_path, '__len__'):
            if hasattr(file_path, '__iter__'):
                is_list = True
-
+                new_file_path = list(file_path)
            else:
                is_list = False
-
+                new_file_path = [file_path]
        else:
            is_list = True
-
-        return
+            new_file_path = file_path
+        return new_file_path, is_list

    def get_resources_by_ids(self, ids: str | Sequence[str]) -> dict[str, Any] | Sequence[dict[str, Any]]:
        """
@@ -520,9 +666,6 @@ class RootAPIHandler(BaseAPIHandler):
        Raises:
            ResourceNotFoundError: If the resource does not exists.

-        Example:
-            >>> api_handler.get_resources_by_ids('resource_id')
-            >>> api_handler.get_resources_by_ids(['resource_id1', 'resource_id2'])
        """
        input_is_a_string = isinstance(ids, str)  # used later to return a single object or a list of objects
        if input_is_a_string:
@@ -547,7 +690,6 @@ class RootAPIHandler(BaseAPIHandler):
            status: Optional[ResourceStatus] = None,
            from_date: Optional[date] = None,
            to_date: Optional[date] = None,
-            labels=None,
            tags: Optional[Sequence[str]] = None,
            modality: Optional[str] = None,
            mimetype: Optional[str] = None,
@@ -555,7 +697,6 @@ class RootAPIHandler(BaseAPIHandler):
            order_field: Optional[ResourceFields] = None,
            order_ascending: Optional[bool] = None,
            channel: Optional[str] = None,
-            project_id: Optional[str] = None,
            project_name: Optional[str] = None,
            filename: Optional[str] = None
            ) -> Generator[dict, None, None]:
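`get_resources` likewise drops the `project_id` parameter; filtering is now done by `project_name` only. For example, assuming `api` is a handler instance:

    for resource in api.get_resources(status='published', project_name='Example Project'):
        print(resource)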
@@ -568,9 +709,6 @@ class RootAPIHandler(BaseAPIHandler):
            status (ResourceStatus): The resource status. Possible values: 'inbox', 'published', 'archived' or None. If None, it will return all resources.
            from_date (Optional[date]): The start date.
            to_date (Optional[date]): The end date.
-            labels:
-                .. deprecated:: 0.11.0
-                    Use `tags` instead.
            tags (Optional[list[str]]): The tags to filter the resources.
            modality (Optional[str]): The modality of the resources.
            mimetype (Optional[str]): The mimetype of the resources.
@@ -585,12 +723,6 @@ class RootAPIHandler(BaseAPIHandler):
            >>> for resource in api_handler.get_resources(status='inbox'):
            >>>     print(resource)
        """
-        if labels is not None and tags is None:
-            tags = labels
-
-        if project_id is not None and project_name is not None:
-            _LOGGER.warning("Both project_id and project_name were provided.")
-
        # Convert datetime objects to ISO format
        if from_date:
            from_date = from_date.isoformat()
@@ -608,7 +740,6 @@ class RootAPIHandler(BaseAPIHandler):
            "order_field": order_field,
            "order_by_asc": order_ascending,
            "channel_name": channel,
-            "projectId": project_id,
            "filename": filename,
        }
        if project_name is not None:
@@ -669,17 +800,15 @@ class RootAPIHandler(BaseAPIHandler):
        yield from self._run_pagination_request(request_params,
                                                return_field='data')

-    def set_resource_tags(self,
-
-
+    def set_resource_tags(self,
+            resource_id: str,
+            tags: Sequence[str],
            ):
        url = f"{self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)}/{resource_id}/tags"
        data = {}

        if tags is not None:
            data['tags'] = tags
-        if frame_labels is not None:
-            data['frame_labels'] = frame_labels

        request_params = {'method': 'PUT',
                          'url': url,
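`set_resource_tags` now takes only a resource id and the tag list (the `frame_labels` handling was dropped) and replaces the tags via a PUT request. A short sketch, assuming `api` is a handler instance and the id is a placeholder:

    api.set_resource_tags('3f2b9c1e-...', tags=['reviewed', 'chest'])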
@@ -880,8 +1009,6 @@ class RootAPIHandler(BaseAPIHandler):
        Returns:
            list[dict]: A list of dictionaries with the users information.

-        Example:
-            >>> api_handler.get_users()
        """
        request_params = {
            'method': 'GET',
@@ -905,6 +1032,7 @@ class RootAPIHandler(BaseAPIHandler):
            password (Optional[str]): The user password.
            firstname (Optional[str]): The user first name.
            lastname (Optional[str]): The user last name.
+            roles (Optional[list[str]]): The user roles. If None, the user will be created with the default role.

        Returns:
            dict: The user information.
@@ -932,6 +1060,13 @@ class RootAPIHandler(BaseAPIHandler):

        Example:
            >>> api_handler.get_projects()
+            [{'id': '15ab9105-6e92-48c0-bb21-8e1325ec4305',
+              'name': 'Example Project',
+              'description': 'this is an example project',
+              'created_at': '2025-04-23T14:41:03.475Z',
+              'created_by': 'user@mail.com',
+              (...)}
+            ]
        """
        request_params = {
            'method': 'GET',
@@ -939,36 +1074,22 @@ class RootAPIHandler(BaseAPIHandler):
        }
        return self._run_request(request_params).json()['data']

-    @deprecated(version='0.12.0', reason="Use :meth:`~get_resources` with project_id parameter instead.")
-    def get_resources_by_project(self, project_id: str) -> Generator[dict, None, None]:
-        """
-        Get the resources by project.
-
-        Args:
-            project_id (str): The project id.
-
-        Returns:
-            list[dict]: The list of resources.
-
-        Example:
-            >>> api_handler.get_resources_by_project('project_id')
-        """
-        request_params = {
-            'method': 'GET',
-            'url': f'{self.root_url}/projects/{project_id}/resources'
-        }
-        return self._run_pagination_request(request_params)
-
    def create_project(self,
            name: str,
            description: str,
            resources_ids: list[str],
-            is_active_learning: bool = False
+            is_active_learning: bool = False,
+            two_up_display: bool = False,
+            ) -> dict:
        """
        Create a new project.

        Args:
            name (str): The name of the project.
+            description (str): The description of the project.
+            resources_ids (list[str]): The list of resource ids to be included in the project.
+            is_active_learning (bool): Whether the project is an active learning project or not.
+            two_up_display (bool): allow annotators to display multiple resources for annotation.

        Returns:
            dict: The created project.
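`create_project` gains a `two_up_display` flag that is forwarded into the request payload. A hedged call sketch with placeholder resource ids and an assumed handler instance `api`:

    project = api.create_project(
        name='Example Project',
        description='this is an example project',
        resources_ids=['3f2b9c1e-...'],
        two_up_display=True,
    )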
@@ -976,6 +1097,7 @@ class RootAPIHandler(BaseAPIHandler):
        Raises:
            DatamintException: If the project could not be created.
        """
+
        request_args = {
            'url': self._get_endpoint_url('projects'),
            'method': 'POST',
@@ -989,7 +1111,7 @@ class RootAPIHandler(BaseAPIHandler):
                "frame_labels": [],
                "image_labels": [],
            },
-            "two_up_display":
+            "two_up_display": two_up_display,
            "require_review": False,
            'description': description}
        }
@@ -1012,7 +1134,8 @@ class RootAPIHandler(BaseAPIHandler):
            'url': url
        }
        try:
-
+            resp = self._run_request(request_params)
+            _LOGGER.debug(resp)
        except ResourceNotFoundError as e:
            e.set_params('project', {'project_id': project_id})
            raise e
@@ -1028,6 +1151,9 @@ class RootAPIHandler(BaseAPIHandler):
        Args:
            project_id (str): The project id.
            outpath (str): The path to save the project zip file.
+            all_annotations (bool): Whether to include all annotations in the downloaded dataset,
+                even those not made by the provided project.
+            include_unannotated (bool): Whether to include unannotated resources in the downloaded dataset.

        Example:
            >>> api_handler.download_project('project_id', 'path/to/project.zip')