datamint 1.4.0__tar.gz → 1.4.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamint might be problematic; consult the registry's advisory page for more details.

Files changed (28)
  1. {datamint-1.4.0 → datamint-1.4.1}/PKG-INFO +1 -1
  2. {datamint-1.4.0 → datamint-1.4.1}/datamint/client_cmd_tools/datamint_upload.py +10 -5
  3. {datamint-1.4.0 → datamint-1.4.1}/datamint/experiment/experiment.py +1 -1
  4. {datamint-1.4.0 → datamint-1.4.1}/datamint/utils/io_utils.py +37 -10
  5. {datamint-1.4.0 → datamint-1.4.1}/pyproject.toml +1 -1
  6. {datamint-1.4.0 → datamint-1.4.1}/README.md +0 -0
  7. {datamint-1.4.0 → datamint-1.4.1}/datamint/__init__.py +0 -0
  8. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/annotation_api_handler.py +0 -0
  9. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/api_handler.py +0 -0
  10. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/base_api_handler.py +0 -0
  11. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/dto/annotation_dto.py +0 -0
  12. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/exp_api_handler.py +0 -0
  13. {datamint-1.4.0 → datamint-1.4.1}/datamint/apihandler/root_api_handler.py +0 -0
  14. {datamint-1.4.0 → datamint-1.4.1}/datamint/client_cmd_tools/__init__.py +0 -0
  15. {datamint-1.4.0 → datamint-1.4.1}/datamint/client_cmd_tools/datamint_config.py +0 -0
  16. {datamint-1.4.0 → datamint-1.4.1}/datamint/configs.py +0 -0
  17. {datamint-1.4.0 → datamint-1.4.1}/datamint/dataset/__init__.py +0 -0
  18. {datamint-1.4.0 → datamint-1.4.1}/datamint/dataset/base_dataset.py +0 -0
  19. {datamint-1.4.0 → datamint-1.4.1}/datamint/dataset/dataset.py +0 -0
  20. {datamint-1.4.0 → datamint-1.4.1}/datamint/examples/__init__.py +0 -0
  21. {datamint-1.4.0 → datamint-1.4.1}/datamint/examples/example_projects.py +0 -0
  22. {datamint-1.4.0 → datamint-1.4.1}/datamint/experiment/__init__.py +0 -0
  23. {datamint-1.4.0 → datamint-1.4.1}/datamint/experiment/_patcher.py +0 -0
  24. {datamint-1.4.0 → datamint-1.4.1}/datamint/logging.yaml +0 -0
  25. {datamint-1.4.0 → datamint-1.4.1}/datamint/utils/dicom_utils.py +0 -0
  26. {datamint-1.4.0 → datamint-1.4.1}/datamint/utils/logging_utils.py +0 -0
  27. {datamint-1.4.0 → datamint-1.4.1}/datamint/utils/torchmetrics.py +0 -0
  28. {datamint-1.4.0 → datamint-1.4.1}/datamint/utils/visualization.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.3
2
2
  Name: datamint
3
- Version: 1.4.0
3
+ Version: 1.4.1
4
4
  Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
5
5
  Requires-Python: >=3.10
6
6
  Classifier: Programming Language :: Python :: 3
@@ -267,7 +267,13 @@ def _find_json_metadata(file_path: str | Path) -> Optional[str]:
267
267
  Optional[str]: Path to the JSON metadata file if found, None otherwise
268
268
  """
269
269
  file_path = Path(file_path)
270
- json_path = file_path.with_suffix('.json')
270
+
271
+ # Handle .nii.gz files specially - need to remove both extensions
272
+ if file_path.name.endswith('.nii.gz'):
273
+ base_name = file_path.name[:-7] # Remove .nii.gz
274
+ json_path = file_path.parent / f"{base_name}.json"
275
+ else:
276
+ json_path = file_path.with_suffix('.json')
271
277
 
272
278
  if json_path.exists() and json_path.is_file():
273
279
  _LOGGER.debug(f"Found JSON metadata file: {json_path}")
@@ -313,12 +319,11 @@ def _collect_metadata_files(files_path: list[str], auto_detect_json: bool) -> tu
313
319
  if used_json_files:
314
320
  _LOGGER.debug(f"Filtering out {len(used_json_files)} JSON metadata files from main upload list")
315
321
  filtered_metadata_files = []
316
- filtered_file_index = 0
317
-
322
+
318
323
  for original_file in files_path:
319
324
  if original_file not in used_json_files:
320
- filtered_metadata_files.append(metadata_files[files_path.index(original_file)])
321
- filtered_file_index += 1
325
+ original_index = files_path.index(original_file)
326
+ filtered_metadata_files.append(metadata_files[original_index])
322
327
 
323
328
  metadata_files = filtered_metadata_files
324
329
 
@@ -803,7 +803,7 @@ class Experiment:
803
803
  Args:
804
804
  resource_id: The resource ID of the sample.
805
805
  predictions: The predictions of the model. One binary mask for each class. Can be a numpy array of shape (H, W) or (N,H,W);
806
- Or a path to a png file; Or a path to a .nii.gz file.
806
+ Or a path to a png file; Or a path to a .nii/.nii.gz file.
807
807
  label_name: The name of the class or a dictionary mapping pixel values to names.
808
808
  Example: ``{1: 'Femur', 2: 'Tibia'}`` means that pixel value 1 is 'Femur' and pixel value 2 is 'Tibia'.
809
809
  frame_index: The frame index of the prediction or a list of frame indexes.
@@ -54,17 +54,32 @@ def read_video(file_path: str, index: int = None) -> np.ndarray:
54
54
 
55
55
 
56
56
  def read_nifti(file_path: str) -> np.ndarray:
57
- imgs = nib.load(file_path).get_fdata() # shape: (W, H, #frame) or (W, H)
58
- if imgs.ndim == 2:
59
- imgs = imgs.transpose(1, 0)
60
- imgs = imgs[np.newaxis, np.newaxis]
61
- elif imgs.ndim == 3:
62
- imgs = imgs.transpose(2, 1, 0)
63
- imgs = imgs[:, np.newaxis]
64
- else:
65
- raise ValueError(f"Unsupported number of dimensions in '{file_path}': {imgs.ndim}")
57
+ """
58
+ Read a NIfTI file and return the image data in standardized format.
59
+
60
+ Args:
61
+ file_path: Path to the NIfTI file (.nii or .nii.gz)
62
+
63
+ Returns:
64
+ np.ndarray: Image data with shape (#frames, C, H, W)
65
+ """
66
+ try:
67
+ nii_img = nib.load(file_path)
68
+ imgs = nii_img.get_fdata() # shape: (W, H, #frame) or (W, H)
69
+
70
+ if imgs.ndim == 2:
71
+ imgs = imgs.transpose(1, 0) # (W, H) -> (H, W)
72
+ imgs = imgs[np.newaxis, np.newaxis] # -> (1, 1, H, W)
73
+ elif imgs.ndim == 3:
74
+ imgs = imgs.transpose(2, 1, 0) # (W, H, #frame) -> (#frame, H, W)
75
+ imgs = imgs[:, np.newaxis] # -> (#frame, 1, H, W)
76
+ else:
77
+ raise ValueError(f"Unsupported number of dimensions in '{file_path}': {imgs.ndim}")
66
78
 
67
- return imgs
79
+ return imgs
80
+ except Exception as e:
81
+ _LOGGER.error(f"Failed to read NIfTI file '{file_path}': {e}")
82
+ raise e
68
83
 
69
84
 
70
85
  def read_image(file_path: str) -> np.ndarray:
@@ -123,6 +138,18 @@ def read_array_normalized(file_path: str,
123
138
  else:
124
139
  if mime_type == 'image/x.nifti' or file_path.endswith(NII_EXTS):
125
140
  imgs = read_nifti(file_path)
141
+ # For NIfTI files, try to load associated JSON metadata
142
+ if return_metainfo:
143
+ json_path = file_path.replace('.nii.gz', '.json').replace('.nii', '.json')
144
+ if os.path.exists(json_path):
145
+ try:
146
+ import json
147
+ with open(json_path, 'r') as f:
148
+ metainfo = json.load(f)
149
+ _LOGGER.debug(f"Loaded JSON metadata from {json_path}")
150
+ except Exception as e:
151
+ _LOGGER.warning(f"Failed to load JSON metadata from {json_path}: {e}")
152
+ metainfo = None
126
153
  elif mime_type.startswith('image/') or file_path.endswith(IMAGE_EXTS):
127
154
  imgs = read_image(file_path)
128
155
  elif file_path.endswith('.npy') or mime_type == 'application/x-numpy-data':
@@ -1,7 +1,7 @@
1
1
  [project]
2
2
  name = "datamint"
3
3
  description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
4
- version = "1.4.0"
4
+ version = "1.4.1"
5
5
  dynamic = ["dependencies"]
6
6
  requires-python = ">=3.10"
7
7
  readme = "README.md"
File without changes
File without changes
File without changes
File without changes