datamint 2.1.4__tar.gz → 2.2.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datamint might be problematic.

Files changed (49)
  1. {datamint-2.1.4 → datamint-2.2.1}/PKG-INFO +2 -2
  2. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/resources_api.py +18 -24
  3. {datamint-2.1.4 → datamint-2.2.1}/datamint/client_cmd_tools/datamint_upload.py +2 -3
  4. {datamint-2.1.4 → datamint-2.2.1}/datamint/dataset/base_dataset.py +0 -3
  5. {datamint-2.1.4 → datamint-2.2.1}/datamint/dataset/dataset.py +16 -10
  6. {datamint-2.1.4 → datamint-2.2.1}/pyproject.toml +2 -2
  7. {datamint-2.1.4 → datamint-2.2.1}/README.md +0 -0
  8. {datamint-2.1.4 → datamint-2.2.1}/datamint/__init__.py +0 -0
  9. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/__init__.py +0 -0
  10. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/base_api.py +0 -0
  11. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/client.py +0 -0
  12. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/dto/__init__.py +0 -0
  13. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/__init__.py +0 -0
  14. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/annotations_api.py +0 -0
  15. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/channels_api.py +0 -0
  16. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/datasetsinfo_api.py +0 -0
  17. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/projects_api.py +0 -0
  18. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/endpoints/users_api.py +0 -0
  19. {datamint-2.1.4 → datamint-2.2.1}/datamint/api/entity_base_api.py +0 -0
  20. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/annotation_api_handler.py +0 -0
  21. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/api_handler.py +0 -0
  22. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/base_api_handler.py +0 -0
  23. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/dto/__init__.py +0 -0
  24. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/dto/annotation_dto.py +0 -0
  25. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/exp_api_handler.py +0 -0
  26. {datamint-2.1.4 → datamint-2.2.1}/datamint/apihandler/root_api_handler.py +0 -0
  27. {datamint-2.1.4 → datamint-2.2.1}/datamint/client_cmd_tools/__init__.py +0 -0
  28. {datamint-2.1.4 → datamint-2.2.1}/datamint/client_cmd_tools/datamint_config.py +0 -0
  29. {datamint-2.1.4 → datamint-2.2.1}/datamint/configs.py +0 -0
  30. {datamint-2.1.4 → datamint-2.2.1}/datamint/dataset/__init__.py +0 -0
  31. {datamint-2.1.4 → datamint-2.2.1}/datamint/dataset/annotation.py +0 -0
  32. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/__init__.py +0 -0
  33. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/annotation.py +0 -0
  34. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/base_entity.py +0 -0
  35. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/channel.py +0 -0
  36. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/datasetinfo.py +0 -0
  37. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/project.py +0 -0
  38. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/resource.py +0 -0
  39. {datamint-2.1.4 → datamint-2.2.1}/datamint/entities/user.py +0 -0
  40. {datamint-2.1.4 → datamint-2.2.1}/datamint/examples/__init__.py +0 -0
  41. {datamint-2.1.4 → datamint-2.2.1}/datamint/examples/example_projects.py +0 -0
  42. {datamint-2.1.4 → datamint-2.2.1}/datamint/exceptions.py +0 -0
  43. {datamint-2.1.4 → datamint-2.2.1}/datamint/experiment/__init__.py +0 -0
  44. {datamint-2.1.4 → datamint-2.2.1}/datamint/experiment/_patcher.py +0 -0
  45. {datamint-2.1.4 → datamint-2.2.1}/datamint/experiment/experiment.py +0 -0
  46. {datamint-2.1.4 → datamint-2.2.1}/datamint/logging.yaml +0 -0
  47. {datamint-2.1.4 → datamint-2.2.1}/datamint/utils/logging_utils.py +0 -0
  48. {datamint-2.1.4 → datamint-2.2.1}/datamint/utils/torchmetrics.py +0 -0
  49. {datamint-2.1.4 → datamint-2.2.1}/datamint/utils/visualization.py +0 -0
--- datamint-2.1.4/PKG-INFO
+++ datamint-2.2.1/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datamint
-Version: 2.1.4
+Version: 2.2.1
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
@@ -21,7 +21,7 @@ Requires-Dist: humanize (>=4.0.0,<5.0.0)
 Requires-Dist: lazy-loader (>=0.3.0)
 Requires-Dist: lightning
 Requires-Dist: matplotlib
-Requires-Dist: medimgkit (>=0.6.6)
+Requires-Dist: medimgkit (>=0.7.2)
 Requires-Dist: nest-asyncio (>=1.0.0,<2.0.0)
 Requires-Dist: nibabel (>=4.0.0)
 Requires-Dist: numpy
--- datamint-2.1.4/datamint/api/endpoints/resources_api.py
+++ datamint-2.2.1/datamint/api/endpoints/resources_api.py
@@ -85,8 +85,8 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
 
         Args:
             status: The resource status. Possible values: 'inbox', 'published', 'archived' or None. If None, it will return all resources.
-            from_date : The start date.
-            to_date: The end date.
+            from_date : The start date (inclusive).
+            to_date: The end date (exclusive).
             tags: The tags to filter the resources.
             modality: The modality of the resources.
             mimetype: The mimetype of the resources.
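The docstring fix above pins down the window semantics. A minimal sketch of the documented behaviour in plain Python, independent of the Datamint API (the dates are illustrative):

from datetime import date

# from_date is inclusive, to_date is exclusive: from_date <= d < to_date
from_date, to_date = date(2024, 1, 1), date(2024, 2, 1)

for d in (date(2023, 12, 31), date(2024, 1, 1), date(2024, 1, 31), date(2024, 2, 1)):
    print(d, from_date <= d < to_date)
# 2023-12-31 False
# 2024-01-01 True   (start date is included)
# 2024-01-31 True
# 2024-02-01 False  (end date is excluded)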
@@ -235,30 +235,26 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
                                      metadata_file: Optional[str | dict] = None,
                                      ) -> str:
         if is_io_object(file_path):
-            name = file_path.name
+            source_filepath = os.path.abspath(os.path.expanduser(file_path.name))
+            filename = os.path.basename(source_filepath)
         else:
-            name = file_path
+            source_filepath = os.path.abspath(os.path.expanduser(file_path))
+            filename = os.path.basename(source_filepath)
 
         if session is not None and not isinstance(session, aiohttp.ClientSession):
             raise ValueError("session must be an aiohttp.ClientSession object.")
 
-        name = os.path.expanduser(os.path.normpath(name))
-        if len(Path(name).parts) == 0:
-            raise ValueError(f"File path '{name}' is not valid.")
-        name = os.path.join(*[x if x != '..' else '_' for x in Path(name).parts])
-
         if mung_filename is not None:
-            file_parts = Path(name).parts
+            file_parts = Path(source_filepath).parts
             if file_parts[0] == os.path.sep:
                 file_parts = file_parts[1:]
             if mung_filename == 'all':
-                new_file_path = '_'.join(file_parts)
+                new_filename = '_'.join(file_parts)
             else:
                 folder_parts = file_parts[:-1]
-                new_file_path = '_'.join([folder_parts[i-1] for i in mung_filename if i <= len(folder_parts)])
-                new_file_path += '_' + file_parts[-1]
-            name = new_file_path
-            _LOGGER.debug(f"New file path: {name}")
+                new_filename = '_'.join([folder_parts[i-1] for i in mung_filename if i <= len(folder_parts)])
+                new_filename += '_' + file_parts[-1]
+            filename = new_filename
 
         is_a_dicom_file = None
         if mimetype is None:
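To illustrate the renamed variables: the mung_filename logic flattens path components into the uploaded filename. A standalone sketch of that logic, assuming a POSIX path (the example path is hypothetical):

import os
from pathlib import Path

source_filepath = '/data/studies/patient01/scan.dcm'  # hypothetical input
file_parts = Path(source_filepath).parts
if file_parts[0] == os.path.sep:
    file_parts = file_parts[1:]       # ('data', 'studies', 'patient01', 'scan.dcm')

# mung_filename='all' joins every path component:
print('_'.join(file_parts))           # data_studies_patient01_scan.dcm

# mung_filename=[2, 3] keeps only the chosen folder levels (1-based):
folder_parts = file_parts[:-1]
new_filename = '_'.join(folder_parts[i - 1] for i in [2, 3] if i <= len(folder_parts))
new_filename += '_' + file_parts[-1]
print(new_filename)                   # studies_patient01_scan.dcm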
@@ -268,14 +264,12 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
                     mimetype = DEFAULT_NIFTI_MIME
                     break
             else:
-                if ext == '.nii.gz' or name.lower().endswith('nii.gz'):
+                if ext == '.nii.gz' or filename.lower().endswith('nii.gz'):
                     mimetype = DEFAULT_NIFTI_MIME
                 else:
                     mimetype = mimetype_list[-1] if mimetype_list else DEFAULT_MIME_TYPE
 
         mimetype = standardize_mimetype(mimetype)
-        filename = os.path.basename(name)
-        _LOGGER.debug(f"File name '{filename}' mimetype: {mimetype}")
 
         if is_a_dicom_file == True or is_dicom(file_path):
             if tags is None:
@@ -292,7 +286,7 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
             elif lat == 'R':
                 tags.append("right")
             # make the dicom `ds` object a file-like object in order to avoid unnecessary disk writes
-            f = to_bytesio(ds, name)
+            f = to_bytesio(ds, filename)
         else:
             f = _open_io(file_path)
 
@@ -329,7 +323,7 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
         form.add_field('source', 'api')
 
         form.add_field(file_key, f, filename=filename, content_type=mimetype)
-        form.add_field('source_filepath', name)  # full path to the file
+        form.add_field('source_filepath', source_filepath)  # full path to the file
         if mimetype is not None:
             form.add_field('mimetype', mimetype)
         if channel is not None:
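For context, filename and source_filepath now serve distinct purposes in the multipart form: the former is the (possibly munged) display name, the latter the absolute origin path. A minimal aiohttp sketch; the 'resource' file key and all field values here are assumptions for illustration, not the actual endpoint contract:

import io
import aiohttp

form = aiohttp.FormData()
form.add_field('source', 'api')
f = io.BytesIO(b'<file bytes>')        # stand-in for the opened resource
form.add_field('resource', f,          # 'resource' is an assumed file_key
               filename='scan.dcm', content_type='application/dicom')
form.add_field('source_filepath', '/home/user/data/scan.dcm')  # full origin path
form.add_field('mimetype', 'application/dicom')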
@@ -354,11 +348,11 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
                                            data=form)
             if 'error' in resp_data:
                 raise DatamintException(resp_data['error'])
-            _LOGGER.debug(f"Response on uploading {name}: {resp_data}")
+            _LOGGER.debug(f"Response on uploading {filename}: {resp_data}")
             return resp_data['id']
         except Exception as e:
-            if 'name' in locals():
-                _LOGGER.error(f"Error uploading {name}: {e}")
+            if 'filename' in locals():
+                _LOGGER.error(f"Error uploading {filename}: {e}")
             else:
                 _LOGGER.error(f"Error uploading {file_path}: {e}")
             raise
@@ -676,7 +670,7 @@ class ResourcesApi(CreatableEntityApi[Resource], DeletableEntityApi[Resource]):
                 channel='study_channel',
                 segmentation_files={
                     'files': ['path/to/segmentation.nii.gz'],
-                    'names': {1: 'Bone', 2: 'Tissue'}
+                    'names': {1: 'Brain', 2: 'Lung'}
                 },
                 metadata={'patient_age': 45, 'modality': 'CT'}
             )
--- datamint-2.1.4/datamint/client_cmd_tools/datamint_upload.py
+++ datamint-2.2.1/datamint/client_cmd_tools/datamint_upload.py
@@ -492,7 +492,7 @@ def _get_files_from_path(path: str | Path,
     Returns:
         List of file paths as strings
     """
-    path = Path(path)
+    path = Path(path).resolve()
 
     if path.is_file():
         return [str(path)]
@@ -507,7 +507,7 @@ def _get_files_from_path(path: str | Path,
 
         file_paths = walk_to_depth(path, recursive_depth, exclude_pattern)
         filtered_files = filter_files(file_paths, include_extensions, exclude_extensions)
-        return [str(f) for f in filtered_files]
+        return [str(f.resolve()) for f in filtered_files]
 
     except Exception as e:
         _LOGGER.error(f'Error in recursive search: {e}')
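Both changes route paths through Path.resolve(), which makes relative inputs absolute and strips '..' segments, so the source_filepath reported at upload time no longer depends on the caller's working directory. A quick sketch (the resolved output is machine-dependent):

from pathlib import Path

p = Path('data/../data/scan.dcm')
print(p)            # data/../data/scan.dcm
print(p.resolve())  # e.g. /home/user/project/data/scan.dcm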
@@ -786,7 +786,6 @@ def main():
         _USER_LOGGER.error(f'❌ Connection failed: {e}')
         return
     try:
-        print('>>>', segfiles)
         results = api.resources.upload_resources(channel=args.channel,
                                                  files_path=files_path,
                                                  tags=args.tag,
--- datamint-2.1.4/datamint/dataset/base_dataset.py
+++ datamint-2.2.1/datamint/dataset/base_dataset.py
@@ -1,15 +1,12 @@
 import os
 import requests
-from tqdm.auto import tqdm
 from typing import Optional, Callable, Any, Literal, Sequence
 import logging
 import shutil
 import json
-import yaml
 import pydicom
 from pydicom.dataset import FileDataset
 import numpy as np
-from datamint import configs
 from torch.utils.data import DataLoader
 import torch
 from torch import Tensor
--- datamint-2.1.4/datamint/dataset/dataset.py
+++ datamint-2.2.1/datamint/dataset/dataset.py
@@ -1,5 +1,5 @@
 from .base_dataset import DatamintBaseDataset
-from typing import List, Optional, Callable, Any, Dict, Literal, Sequence
+from typing import Optional, Callable, Any, Literal, Sequence
 import torch
 from torch import Tensor
 import os
@@ -120,7 +120,7 @@ class DatamintDataset(DatamintBaseDataset):
 
     def _load_segmentations(self,
                             annotations: Sequence[Annotation],
-                            img_shape) -> tuple[dict[str, list], dict[str, list]]:
+                            img_shape) -> tuple[dict[str, list], dict[str, list], dict[str, Any]]:
         """
         Load segmentations from annotations.
 
@@ -129,12 +129,13 @@ class DatamintDataset(DatamintBaseDataset):
             img_shape: shape of the image (#frames, C, H, W)
 
         Returns:
-            tuple[dict[str, list], dict[str, list]]: a tuple of two dictionaries.
+            tuple[dict[str, list], dict[str, list], dict[str, Any]]: a tuple of two dictionaries and additional metadata.
                 The first dictionary is author -> list of #frames tensors, each tensor has shape (#instances_i, H, W).
                 The second dictionary is author -> list of #frames segmentation labels (tensors).
         """
         segmentations = {}
         seg_labels = {}
+        seg_metainfos = {}
 
         if self.return_frame_by_frame:
             assert len(img_shape) == 3, f"img_shape must have 3 dimensions, got {img_shape}"
@@ -155,11 +156,15 @@ class DatamintDataset(DatamintBaseDataset):
 
             segfilepath = ann.file  # png file
             segfilepath = os.path.join(self.dataset_dir, segfilepath)
-            seg = read_array_normalized(segfilepath)  # (frames, C, H, W)
+            seg, seg_metainfo = read_array_normalized(segfilepath, return_metainfo=True)  # (frames, C, H, W)
             if seg.shape[1] != 1:
                 raise ValueError(f"Segmentation file must have 1 channel, got {seg.shape} in {segfilepath}")
             seg = seg[:, 0, :, :]  # (frames, H, W)
-
+
+            if seg_metainfo is None:
+                raise Exception
+            seg_metainfos[author] = seg_metainfo
+
             # # FIXME: avoid enforcing resizing the mask
             # seg = (Image.open(segfilepath)
             #        .convert('L')
@@ -217,7 +222,7 @@ class DatamintDataset(DatamintBaseDataset):
                 author_segs[i] = torch.zeros((0, h, w), dtype=torch.bool)
                 author_labels[i] = torch.zeros(0, dtype=torch.int32)
 
-        return segmentations, seg_labels
+        return segmentations, seg_labels, seg_metainfos
 
     def _instanceseg2semanticseg(self,
                                  segmentations: Sequence[Tensor],
@@ -273,19 +278,19 @@ class DatamintDataset(DatamintBaseDataset):
             raise ValueError(f"Unknown semantic_seg_merge_strategy: {self.semantic_seg_merge_strategy}")
         return merged_segs.to(torch.get_default_dtype())
 
-    def _apply_semantic_seg_merge_strategy_union(self, segmentations: Dict[str, torch.Tensor]) -> torch.Tensor:
+    def _apply_semantic_seg_merge_strategy_union(self, segmentations: dict[str, torch.Tensor]) -> torch.Tensor:
         new_segmentations = torch.zeros_like(list(segmentations.values())[0])
         for seg in segmentations.values():
             new_segmentations += seg
         return new_segmentations.bool()
 
-    def _apply_semantic_seg_merge_strategy_intersection(self, segmentations: Dict[str, torch.Tensor]) -> torch.Tensor:
+    def _apply_semantic_seg_merge_strategy_intersection(self, segmentations: dict[str, torch.Tensor]) -> torch.Tensor:
         new_segmentations = torch.ones_like(list(segmentations.values())[0])
         for seg in segmentations.values():
             new_segmentations += seg
         return new_segmentations.bool()
 
-    def _apply_semantic_seg_merge_strategy_mode(self, segmentations: Dict[str, torch.Tensor]) -> torch.Tensor:
+    def _apply_semantic_seg_merge_strategy_mode(self, segmentations: dict[str, torch.Tensor]) -> torch.Tensor:
         new_segmentations = torch.zeros_like(list(segmentations.values())[0])
         for seg in segmentations.values():
             new_segmentations += seg
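Only the annotations changed here (typing.Dict replaced by the builtin dict generic), but the union strategy is easy to sanity-check on toy masks: summing per-author binary masks and casting to bool marks a pixel as foreground if any author marked it.

import torch

seg_a = torch.tensor([[1, 0], [0, 0]])
seg_b = torch.tensor([[0, 1], [0, 0]])

merged = torch.zeros_like(seg_a)
for seg in (seg_a, seg_b):
    merged += seg
print(merged.bool())  # tensor([[ True,  True], [False, False]])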
@@ -428,7 +433,7 @@ class DatamintDataset(DatamintBaseDataset):
 
         try:
             if self.return_segmentations:
-                segmentations, seg_labels = self._load_segmentations(annotations, img.shape)
+                segmentations, seg_labels, seg_metainfos = self._load_segmentations(annotations, img.shape)
                 # seg_labels can be dict[str, list[Tensor]]
                 # apply mask transform
                 if self.mask_transform is not None:
@@ -475,6 +480,7 @@ class DatamintDataset(DatamintBaseDataset):
                 new_item['seg_labels'] = seg_labels
                 # process seg_labels to convert from code to label names
                 new_item['seg_labels_names'] = self._seg_labels_to_names(seg_labels)
+                new_item['seg_metainfo'] = {'file_metainfo': seg_metainfos}
 
         except Exception:
             _LOGGER.error(f'Error in loading/processing segmentations of {metainfo}')
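A sketch of the shape a consumer would see on a loaded item, mimicking what __getitem__ attaches per the diff above; the inner metainfo contents depend on medimgkit's read_array_normalized and the 'affine' key here is purely illustrative:

# item stands in for dataset[i] from a DatamintDataset with return_segmentations enabled
item = {'seg_metainfo': {'file_metainfo': {'author1': {'affine': '...'}}}}
for author, metainfo in item['seg_metainfo']['file_metainfo'].items():
    print(author, metainfo)  # per-author metadata of the segmentation file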
--- datamint-2.1.4/pyproject.toml
+++ datamint-2.2.1/pyproject.toml
@@ -1,7 +1,7 @@
 [project]
 name = "datamint"
 description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
-version = "2.1.4"
+version = "2.2.1"
 dynamic = ["dependencies"]
 requires-python = ">=3.10"
 readme = "README.md"
@@ -40,7 +40,7 @@ matplotlib = "*"
 lightning = "*"
 albumentations = ">=2.0.0"
 lazy-loader = ">=0.3.0"
-medimgkit = ">=0.6.6"
+medimgkit = ">=0.7.2"
 typing_extensions = ">=4.0.0"
 pydantic = ">=2.6.4"
 httpx = "*"
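To pick up this release (sketch; assumes the usual PyPI availability), pinning the exact version also pulls in the raised medimgkit floor as a dependency:

pip install --upgrade "datamint==2.2.1"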