datamint 1.8.0.tar.gz → 1.9.1.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamint might be problematic.

Files changed (27)
  1. {datamint-1.8.0 → datamint-1.9.1}/PKG-INFO +2 -2
  2. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/root_api_handler.py +70 -21
  3. {datamint-1.8.0 → datamint-1.9.1}/pyproject.toml +2 -2
  4. {datamint-1.8.0 → datamint-1.9.1}/README.md +0 -0
  5. {datamint-1.8.0 → datamint-1.9.1}/datamint/__init__.py +0 -0
  6. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/annotation_api_handler.py +0 -0
  7. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/api_handler.py +0 -0
  8. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/base_api_handler.py +0 -0
  9. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/dto/annotation_dto.py +0 -0
  10. {datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/exp_api_handler.py +0 -0
  11. {datamint-1.8.0 → datamint-1.9.1}/datamint/client_cmd_tools/__init__.py +0 -0
  12. {datamint-1.8.0 → datamint-1.9.1}/datamint/client_cmd_tools/datamint_config.py +0 -0
  13. {datamint-1.8.0 → datamint-1.9.1}/datamint/client_cmd_tools/datamint_upload.py +0 -0
  14. {datamint-1.8.0 → datamint-1.9.1}/datamint/configs.py +0 -0
  15. {datamint-1.8.0 → datamint-1.9.1}/datamint/dataset/__init__.py +0 -0
  16. {datamint-1.8.0 → datamint-1.9.1}/datamint/dataset/annotation.py +0 -0
  17. {datamint-1.8.0 → datamint-1.9.1}/datamint/dataset/base_dataset.py +0 -0
  18. {datamint-1.8.0 → datamint-1.9.1}/datamint/dataset/dataset.py +0 -0
  19. {datamint-1.8.0 → datamint-1.9.1}/datamint/examples/__init__.py +0 -0
  20. {datamint-1.8.0 → datamint-1.9.1}/datamint/examples/example_projects.py +0 -0
  21. {datamint-1.8.0 → datamint-1.9.1}/datamint/experiment/__init__.py +0 -0
  22. {datamint-1.8.0 → datamint-1.9.1}/datamint/experiment/_patcher.py +0 -0
  23. {datamint-1.8.0 → datamint-1.9.1}/datamint/experiment/experiment.py +0 -0
  24. {datamint-1.8.0 → datamint-1.9.1}/datamint/logging.yaml +0 -0
  25. {datamint-1.8.0 → datamint-1.9.1}/datamint/utils/logging_utils.py +0 -0
  26. {datamint-1.8.0 → datamint-1.9.1}/datamint/utils/torchmetrics.py +0 -0
  27. {datamint-1.8.0 → datamint-1.9.1}/datamint/utils/visualization.py +0 -0
{datamint-1.8.0 → datamint-1.9.1}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.8.0
+Version: 1.9.1
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
@@ -19,7 +19,7 @@ Requires-Dist: humanize (>=4.0.0,<5.0.0)
 Requires-Dist: lazy-loader (>=0.3.0)
 Requires-Dist: lightning
 Requires-Dist: matplotlib
-Requires-Dist: medimgkit (>=0.3.1)
+Requires-Dist: medimgkit (>=0.4.4)
 Requires-Dist: nest-asyncio (>=1.0.0,<2.0.0)
 Requires-Dist: nibabel (>=4.0.0)
 Requires-Dist: numpy
{datamint-1.8.0 → datamint-1.9.1}/datamint/apihandler/root_api_handler.py

@@ -8,7 +8,7 @@ import asyncio
 import aiohttp
 from medimgkit.dicom_utils import anonymize_dicom, to_bytesio, is_dicom, is_dicom_report
 from medimgkit import dicom_utils, standardize_mimetype
-from medimgkit.io_utils import is_io_object
+from medimgkit.io_utils import is_io_object, peek
 from medimgkit.format_detection import guess_typez, guess_extension, DEFAULT_MIME_TYPE
 from medimgkit.nifti_utils import DEFAULT_NIFTI_MIME, NIFTI_MIMES
 import pydicom
@@ -103,16 +103,22 @@ class RootAPIHandler(BaseAPIHandler):
         filename = os.path.basename(name)
         _LOGGER.debug(f"File name '{filename}' mimetype: {mimetype}")
 
-        if anonymize:
-            if is_a_dicom_file == True or is_dicom(file_path):
-                ds = pydicom.dcmread(file_path)
+        if is_a_dicom_file == True or is_dicom(file_path):
+            if tags is None:
+                tags = []
+            else:
+                tags = list(tags)
+            ds = pydicom.dcmread(file_path)
+            if anonymize:
                 _LOGGER.info(f"Anonymizing {file_path}")
                 ds = anonymize_dicom(ds, retain_codes=anonymize_retain_codes)
-                # make the dicom `ds` object a file-like object in order to avoid unnecessary disk writes
-                f = to_bytesio(ds, name)
-            else:
-                _LOGGER.warning(f"File {file_path} is not a dicom file. Skipping anonymization.")
-                f = _open_io(file_path)
+            lat = dicom_utils.get_dicom_laterality(ds)
+            if lat == 'L':
+                tags.append("left")
+            elif lat == 'R':
+                tags.append("right")
+            # make the dicom `ds` object a file-like object in order to avoid unnecessary disk writes
+            f = to_bytesio(ds, name)
         else:
             f = _open_io(file_path)
 
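The hunk above moves DICOM parsing out of the anonymization branch so that every DICOM upload gets laterality tags ("left"/"right") derived from the dataset, whether or not anonymization is requested. A minimal sketch of the tagging idea, assuming the standard DICOM Laterality/ImageLaterality attributes (the real code delegates this to medimgkit's dicom_utils.get_dicom_laterality, whose exact behavior is not shown here):

import pydicom

def laterality_tags(ds: pydicom.Dataset, tags: list[str] | None = None) -> list[str]:
    # Copy the incoming tag list so the caller's list is never mutated.
    tags = [] if tags is None else list(tags)
    # Hypothetical stand-in for medimgkit's get_dicom_laterality():
    # fall back from ImageLaterality (0020,0062) to Laterality (0020,0060).
    lat = getattr(ds, "ImageLaterality", None) or getattr(ds, "Laterality", None)
    if lat == "L":
        tags.append("left")
    elif lat == "R":
        tags.append("right")
    return tags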
@@ -423,8 +429,21 @@ class RootAPIHandler(BaseAPIHandler):
 
         # Discard DICOM reports
         if discard_dicom_reports:
-            files_path = [f for f in files_path if not is_dicom_report(f)]
             old_size = len(files_path)
+            # Create filtered lists maintaining index correspondence
+            filtered_files = []
+            filtered_metadata = []
+
+            for i, f in enumerate(files_path):
+                if not is_dicom_report(f):
+                    filtered_files.append(f)
+                    if metadata is not None:
+                        filtered_metadata.append(metadata[i])
+
+            files_path = filtered_files
+            if metadata is not None:
+                metadata = filtered_metadata
+
         if old_size is not None and old_size != len(files_path):
             _LOGGER.info(f"Discarded {old_size - len(files_path)} DICOM report files from upload.")
 
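This hunk replaces the single list comprehension with an index-preserving filter so that the optional metadata list stays aligned with files_path after DICOM reports are discarded. The same pattern in isolation, with illustrative names that are not part of the library's API:

def filter_parallel(items: list, extras: list | None, keep) -> tuple[list, list | None]:
    # Keep items passing the predicate, plus the extras stored at the same indices.
    kept_items, kept_extras = [], []
    for i, item in enumerate(items):
        if keep(item):
            kept_items.append(item)
            if extras is not None:
                kept_extras.append(extras[i])
    return kept_items, (kept_extras if extras is not None else None)

# Example: drop report files while keeping per-file metadata aligned.
files, meta = filter_parallel(["scan.dcm", "report.dcm"],
                              [{"patient": "A"}, {"patient": "B"}],
                              keep=lambda f: "report" not in f)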
@@ -437,7 +456,7 @@ class RootAPIHandler(BaseAPIHandler):
         if assemble_dicoms:
             files_path, assembled = self._assemble_dicoms(files_path)
             assemble_dicoms = assembled
-
+
         if segmentation_files is not None:
             if assemble_dicoms:
                 raise NotImplementedError("Segmentation files cannot be uploaded when assembling dicoms yet.")
@@ -1093,16 +1112,46 @@ class RootAPIHandler(BaseAPIHandler):
         """
         if isinstance(resource_ids, str):
             resource_ids = [resource_ids]
-        for rid in resource_ids:
-            url = f"{self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)}/{rid}"
-            request_params = {'method': 'DELETE',
-                              'url': url
-                              }
-            try:
-                self._run_request(request_params)
-            except ResourceNotFoundError as e:
-                e.set_params('resource', {'resource_id': rid})
-                raise e
+
+        async def _delete_all_resources_async():
+            async with aiohttp.ClientSession() as session:
+                tasks = [
+                    self._delete_resource_async(resource_id, session)
+                    for resource_id in resource_ids
+                ]
+                await asyncio.gather(*tasks)
+
+        loop = asyncio.get_event_loop()
+        loop.run_until_complete(_delete_all_resources_async())
+
+
+    async def _delete_resource_async(self,
+                                     resource_id: str,
+                                     session: aiohttp.ClientSession | None = None) -> None:
+        """
+        Asynchronously delete a resource by its unique id.
+
+        Args:
+            resource_id (str): The resource unique id.
+            session (aiohttp.ClientSession | None): The aiohttp session to use for the request.
+
+        Raises:
+            ResourceNotFoundError: If the resource does not exist.
+        """
+        if session is not None and not isinstance(session, aiohttp.ClientSession):
+            raise ValueError("session must be an aiohttp.ClientSession object.")
+
+        url = f"{self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)}/{resource_id}"
+        request_params = {
+            'method': 'DELETE',
+            'url': url
+        }
+
+        try:
+            await self._run_request_async(request_params, session)
+        except ResourceNotFoundError as e:
+            e.set_params('resource', {'resource_id': resource_id})
+            raise e
 
     def get_datasets(self) -> list[dict]:
         """
{datamint-1.8.0 → datamint-1.9.1}/pyproject.toml

@@ -1,7 +1,7 @@
 [project]
 name = "datamint"
 description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
-version = "1.8.0"
+version = "1.9.1"
 dynamic = ["dependencies"]
 requires-python = ">=3.10"
 readme = "README.md"
@@ -40,7 +40,7 @@ matplotlib = "*"
 lightning = "*"
 albumentations = ">=2.0.0"
 lazy-loader = ">=0.3.0"
-medimgkit = ">=0.3.1"
+medimgkit = ">=0.4.4"
 # For compatibility with the datamintapi package
 datamintapi = "0.0.*"
 # Extra dependencies for docs