datamint 1.2.4__tar.gz → 1.4.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

Files changed (29)
  1. {datamint-1.2.4 → datamint-1.4.0}/PKG-INFO +5 -5
  2. {datamint-1.2.4 → datamint-1.4.0}/README.md +3 -4
  3. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/__init__.py +4 -3
  4. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/annotation_api_handler.py +139 -28
  5. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/api_handler.py +1 -1
  6. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/base_api_handler.py +1 -1
  7. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/dto/annotation_dto.py +51 -1
  8. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/exp_api_handler.py +1 -1
  9. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/root_api_handler.py +47 -8
  10. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/client_cmd_tools/datamint_config.py +3 -3
  11. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/client_cmd_tools/datamint_upload.py +123 -14
  12. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/dataset/base_dataset.py +5 -5
  13. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/dataset/dataset.py +1 -1
  14. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/examples/example_projects.py +1 -1
  15. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/experiment/_patcher.py +1 -1
  16. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/experiment/experiment.py +18 -18
  17. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/logging.yaml +2 -2
  18. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/logging_utils.py +1 -1
  19. {datamint-1.2.4 → datamint-1.4.0}/pyproject.toml +7 -5
  20. datamint-1.2.4/datamint/__init__.py +0 -11
  21. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/client_cmd_tools/__init__.py +0 -0
  22. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/configs.py +0 -0
  23. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/dataset/__init__.py +0 -0
  24. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/examples/__init__.py +0 -0
  25. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/experiment/__init__.py +0 -0
  26. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/dicom_utils.py +0 -0
  27. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/io_utils.py +0 -0
  28. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/torchmetrics.py +0 -0
  29. {datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/visualization.py +0 -0

{datamint-1.2.4 → datamint-1.4.0}/PKG-INFO

@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: datamint
- Version: 1.2.4
+ Version: 1.4.0
  Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
  Requires-Python: >=3.10
  Classifier: Programming Language :: Python :: 3
@@ -14,6 +14,7 @@ Requires-Dist: Deprecated (>=1.2.0)
  Requires-Dist: aiohttp (>=3.0.0,<4.0.0)
  Requires-Dist: aioresponses (>=0.7.8,<0.8.0) ; extra == "dev"
  Requires-Dist: albumentations (>=2.0.0)
+ Requires-Dist: datamintapi (==0.0.*)
  Requires-Dist: humanize (>=4.0.0,<5.0.0)
  Requires-Dist: lazy-loader (>=0.3.0)
  Requires-Dist: lightning
@@ -52,13 +53,12 @@ See the full documentation at https://sonanceai.github.io/datamint-python-api/
  ## Installation

  Datamint requires Python 3.10+.
- You can install Datamint and its dependencies using pip
+ You can install/update Datamint and its dependencies using pip

  ```bash
- pip install git+https://github.com/SonanceAI/datamint-python-api
+ pip install -U datamint
  ```

- Soon we will be releasing the package on PyPi.
  We recommend that you install Datamint in a dedicated virtual environment, to avoid conflicting with your system packages.
  Create the enviroment once with `python3 -m venv datamint-env` and then activate it whenever you need it with:
  - `source datamint-env/bin/activate` (Linux/MAC)
@@ -95,7 +95,7 @@ os.environ["DATAMINT_API_KEY"] = "my_api_key"
  Specify API key in the |APIHandlerClass| constructor:

  ```python
- from datamintapi import APIHandler
+ from datamint import APIHandler
  api = APIHandler(api_key='my_api_key')
  ```


{datamint-1.2.4 → datamint-1.4.0}/README.md

@@ -8,13 +8,12 @@ See the full documentation at https://sonanceai.github.io/datamint-python-api/
  ## Installation

  Datamint requires Python 3.10+.
- You can install Datamint and its dependencies using pip
+ You can install/update Datamint and its dependencies using pip

  ```bash
- pip install git+https://github.com/SonanceAI/datamint-python-api
+ pip install -U datamint
  ```

- Soon we will be releasing the package on PyPi.
  We recommend that you install Datamint in a dedicated virtual environment, to avoid conflicting with your system packages.
  Create the enviroment once with `python3 -m venv datamint-env` and then activate it whenever you need it with:
  - `source datamint-env/bin/activate` (Linux/MAC)
@@ -51,7 +50,7 @@ os.environ["DATAMINT_API_KEY"] = "my_api_key"
  Specify API key in the |APIHandlerClass| constructor:

  ```python
- from datamintapi import APIHandler
+ from datamint import APIHandler
  api = APIHandler(api_key='my_api_key')
  ```


{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/__init__.py

@@ -1,6 +1,7 @@
  """
- Datamint API is a Python package that provides a simple interface to the Datamint API.
+ Datamint API package alias.
  """
+
  import importlib.metadata
  from typing import TYPE_CHECKING
  if TYPE_CHECKING:
@@ -21,5 +22,5 @@ else:
          },
      )

-     __name__ = "datamintapi"
-     __version__ = importlib.metadata.version('datamint')
+     __name__ = "datamint"
+     __version__ = importlib.metadata.version(__name__)
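
With this release the canonical import path is `datamint`; the old `datamintapi` name survives only through the `datamintapi (==0.0.*)` compatibility dependency declared in PKG-INFO above. A minimal sanity-check sketch, assuming a clean `pip install -U datamint`:

```python
# Hypothetical interactive session against datamint 1.4.0.
import datamint

print(datamint.__version__)  # resolved via importlib.metadata, e.g. '1.4.0'

# Canonical import, replacing `from datamintapi import APIHandler`:
from datamint import APIHandler
```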

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/annotation_api_handler.py

@@ -11,7 +11,7 @@ import asyncio
  import aiohttp
  from requests.exceptions import HTTPError
  from deprecated.sphinx import deprecated
- from .dto.annotation_dto import CreateAnnotationDto, LineGeometry, CoordinateSystem, AnnotationType
+ from .dto.annotation_dto import CreateAnnotationDto, LineGeometry, BoxGeometry, CoordinateSystem, AnnotationType
  import pydicom

  _LOGGER = logging.getLogger(__name__)
@@ -353,7 +353,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                          author_email: Optional[str] = None,
                          model_id: Optional[str] = None,
                          project: Optional[str] = None,
-                         ):
+                         ) -> list[str]:
          """
          Add annotations to a resource.

@@ -415,6 +415,66 @@ class AnnotationAPIHandler(BaseAPIHandler):

          resp = self._run_request(request_params)
          self._check_errors_response_json(resp)
+         return resp.json()
+
+     def _create_geometry_annotation(self,
+                                     geometry: LineGeometry | BoxGeometry,
+                                     resource_id: str,
+                                     identifier: str,
+                                     frame_index: int | None = None,
+                                     project: Optional[str] = None,
+                                     worklist_id: Optional[str] = None,
+                                     imported_from: Optional[str] = None,
+                                     author_email: Optional[str] = None,
+                                     model_id: Optional[str] = None) -> list[str]:
+         """
+         Common method for creating geometry-based annotations.
+
+         Args:
+             geometry: The geometry object (LineGeometry or BoxGeometry)
+             resource_id: The resource unique id
+             identifier: The annotation identifier
+             frame_index: The frame index of the annotation
+             project: The project unique id or name
+             worklist_id: The annotation worklist unique id
+             imported_from: The imported from source value
+             author_email: The email to consider as the author of the annotation
+             model_id: The model unique id
+         """
+         if project is not None and worklist_id is not None:
+             raise ValueError('Only one of project or worklist_id can be provided.')
+
+         if project is not None:
+             proj = self.get_project_by_name(project)
+             if 'error' in proj.keys():
+                 raise DatamintException(f"Project {project} not found.")
+             worklist_id = proj['worklist_id']
+
+         anndto = CreateAnnotationDto(
+             type=geometry.type,
+             identifier=identifier,
+             scope='frame',
+             annotation_worklist_id=worklist_id,
+             value=None,
+             imported_from=imported_from,
+             import_author=author_email,
+             frame_index=frame_index,
+             geometry=geometry,
+             model_id=model_id,
+             is_model=model_id is not None,
+         )
+
+         json_data = anndto.to_dict()
+
+         request_params = {
+             'method': 'POST',
+             'url': f'{self.root_url}/annotations/{resource_id}/annotations',
+             'json': [json_data]
+         }
+
+         resp = self._run_request(request_params)
+         self._check_errors_response_json(resp)
+         return resp.json()

      def add_line_annotation(self,
                              point1: tuple[int, int] | tuple[float, float, float],
@@ -428,7 +488,7 @@ class AnnotationAPIHandler(BaseAPIHandler):
                              worklist_id: Optional[str] = None,
                              imported_from: Optional[str] = None,
                              author_email: Optional[str] = None,
-                             model_id: Optional[str] = None):
+                             model_id: Optional[str] = None) -> list[str]:
          """
          Add a line annotation to a resource.

@@ -466,12 +526,6 @@ class AnnotationAPIHandler(BaseAPIHandler):
          if project is not None and worklist_id is not None:
              raise ValueError('Only one of project or worklist_id can be provided.')

-         if project is not None:
-             proj = self.get_project_by_name(project)
-             if 'error' in proj.keys():
-                 raise DatamintException(f"Project {project} not found.")
-             worklist_id = proj['worklist_id']
-
          if coords_system == 'pixel':
              if dicom_metadata is None:
                  point1 = (point1[0], point1[1], frame_index)
@@ -486,30 +540,87 @@ class AnnotationAPIHandler(BaseAPIHandler):
          else:
              raise ValueError(f"Unknown coordinate system: {coords_system}")

-         anndto = CreateAnnotationDto(
-             type=AnnotationType.LINE,
+         return self._create_geometry_annotation(
+             geometry=geom,
+             resource_id=resource_id,
              identifier=identifier,
-             scope='frame',
-             annotation_worklist_id=worklist_id,
-             value=None,
-             imported_from=imported_from,
-             import_author=author_email,
              frame_index=frame_index,
-             geometry=geom,
-             model_id=model_id,
-             is_model=model_id is not None,
+             project=project,
+             worklist_id=worklist_id,
+             imported_from=imported_from,
+             author_email=author_email,
+             model_id=model_id
          )

-         json_data = anndto.to_dict()
+     def add_box_annotation(self,
+                            point1: tuple[int, int] | tuple[float, float, float],
+                            point2: tuple[int, int] | tuple[float, float, float],
+                            resource_id: str,
+                            identifier: str,
+                            frame_index: int | None = None,
+                            dicom_metadata: pydicom.Dataset | str | None = None,
+                            coords_system: CoordinateSystem = 'pixel',
+                            project: Optional[str] = None,
+                            worklist_id: Optional[str] = None,
+                            imported_from: Optional[str] = None,
+                            author_email: Optional[str] = None,
+                            model_id: Optional[str] = None):
+         """
+         Add a box annotation to a resource.

-         request_params = {
-             'method': 'POST',
-             'url': f'{self.root_url}/annotations/{resource_id}/annotations',
-             'json': [json_data]
-         }
+         Args:
+             point1: The first corner point of the box. Can be a 2d or 3d point.
+                 If `coords_system` is 'pixel', it must be a 2d point representing pixel coordinates.
+                 If `coords_system` is 'patient', it must be a 3d point representing patient coordinates.
+             point2: The opposite diagonal corner point of the box. See `point1` for more details.
+             resource_id: The resource unique id.
+             identifier: The annotation identifier, also known as the annotation's label.
+             frame_index: The frame index of the annotation.
+             dicom_metadata: The DICOM metadata of the image. If provided, coordinates will be converted
+                 automatically using the DICOM metadata.
+             coords_system: The coordinate system of the points. Can be 'pixel' or 'patient'.
+                 If 'pixel', points are in pixel coordinates. If 'patient', points are in patient coordinates.
+             project: The project unique id or name.
+             worklist_id: The annotation worklist unique id. Optional.
+             imported_from: The imported from source value.
+             author_email: The email to consider as the author of the annotation. If None, uses the API key customer.
+             model_id: The model unique id. Optional.

-         resp = self._run_request(request_params)
-         self._check_errors_response_json(resp)
+         Example:
+             .. code-block:: python
+
+                 res_id = 'aa93813c-cef0-4edd-a45c-85d4a8f1ad0d'
+                 api.add_box_annotation([10, 10], (50, 40),
+                                        resource_id=res_id,
+                                        identifier='BoundingBox1',
+                                        frame_index=2,
+                                        project='Example Project')
+         """
+         if coords_system == 'pixel':
+             if dicom_metadata is None:
+                 point1 = (point1[0], point1[1], frame_index)
+                 point2 = (point2[0], point2[1], frame_index)
+                 geom = BoxGeometry(point1, point2)
+             else:
+                 if isinstance(dicom_metadata, str):
+                     dicom_metadata = pydicom.dcmread(dicom_metadata)
+                 geom = BoxGeometry.from_dicom(dicom_metadata, point1, point2, slice_index=frame_index)
+         elif coords_system == 'patient':
+             geom = BoxGeometry(point1, point2)
+         else:
+             raise ValueError(f"Unknown coordinate system: {coords_system}")
+
+         return self._create_geometry_annotation(
+             geometry=geom,
+             resource_id=resource_id,
+             identifier=identifier,
+             frame_index=frame_index,
+             project=project,
+             worklist_id=worklist_id,
+             imported_from=imported_from,
+             author_email=author_email,
+             model_id=model_id
+         )

      @deprecated(version='0.12.1', reason='Use :meth:`~get_annotations` instead with `resource_id` parameter.')
      def get_resource_annotations(self,
@@ -541,7 +652,7 @@ class AnnotationAPIHandler(BaseAPIHandler):

          Args:
              resource_id (Optional[str]): The resource unique id.
-             annotation_type (Optional[str]): The annotation type. See :class:`~datamintapi.dto.annotation_dto.AnnotationType`.
+             annotation_type (Optional[str]): The annotation type. See :class:`~datamint.dto.annotation_dto.AnnotationType`.
              annotator_email (Optional[str]): The annotator email.
              date_from (Optional[date]): The start date.
              date_to (Optional[date]): The end date.
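
Both geometry endpoints now funnel through `_create_geometry_annotation` and return the created annotation ids as `list[str]`. A hedged usage sketch of the two public methods; the resource id and project name are placeholders, and `add_line_annotation` is assumed to accept its two end points the same way `add_box_annotation` accepts its corners:

```python
from datamint import APIHandler

api = APIHandler()  # API key taken from the DATAMINT_API_KEY environment variable
res_id = 'aa93813c-cef0-4edd-a45c-85d4a8f1ad0d'  # placeholder resource id

# Pixel coordinates on frame 2; both calls return the created annotation ids.
line_ids = api.add_line_annotation((10, 10), (50, 40),
                                   resource_id=res_id,
                                   identifier='Line1',
                                   frame_index=2,
                                   project='Example Project')
box_ids = api.add_box_annotation((10, 10), (50, 40),
                                 resource_id=res_id,
                                 identifier='BoundingBox1',
                                 frame_index=2,
                                 project='Example Project')
```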

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/api_handler.py

@@ -9,7 +9,7 @@ class APIHandler(RootAPIHandler, ExperimentAPIHandler, AnnotationAPIHandler):

      .. code-block:: python

-         from datamintapi import APIHandler
+         from datamint import APIHandler
          api = APIHandler()
      """
      pass

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/base_api_handler.py

@@ -13,7 +13,7 @@ from io import BytesIO
  import cv2
  import nibabel as nib
  from nibabel.filebasedimages import FileBasedImage as nib_FileBasedImage
- from datamintapi import configs
+ from datamint import configs
  from functools import wraps

  _LOGGER = logging.getLogger(__name__)

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/dto/annotation_dto.py

@@ -18,7 +18,7 @@ import json
  from typing import Any, TypeAlias, Literal
  import logging
  from enum import Enum
- from datamintapi.utils.dicom_utils import pixel_to_patient
+ from datamint.utils.dicom_utils import pixel_to_patient
  import pydicom
  import numpy as np

@@ -97,6 +97,56 @@ class LineGeometry(Geometry):
          return LineGeometry(new_point1, new_point2)


+ class BoxGeometry(Geometry):
+     def __init__(self, point1: tuple[float, float, float],
+                  point2: tuple[float, float, float]):
+         """
+         Create a box geometry from two diagonal corner points.
+
+         Args:
+             point1: First corner point (x, y, z) or (x, y, frame_index)
+             point2: Opposite diagonal corner point (x, y, z) or (x, y, frame_index)
+         """
+         super().__init__(AnnotationType.SQUARE)  # Using SQUARE as the box type
+         if isinstance(point1, np.ndarray):
+             point1 = point1.tolist()
+         if isinstance(point2, np.ndarray):
+             point2 = point2.tolist()
+         self.point1 = point1
+         self.point2 = point2
+
+     def to_dict(self) -> dict:
+         return {
+             'points': [self.point1, self.point2],
+         }
+
+     @staticmethod
+     def from_dicom(ds: pydicom.Dataset,
+                    point1: tuple[int, int],
+                    point2: tuple[int, int],
+                    slice_index: int | None = None) -> 'BoxGeometry':
+         """
+         Create a box geometry from DICOM pixel coordinates.
+
+         Args:
+             ds: DICOM dataset containing spatial metadata
+             point1: First corner in pixel coordinates (x, y)
+             point2: Opposite corner in pixel coordinates (x, y)
+             slice_index: The slice/frame index for 3D positioning
+
+         Returns:
+             BoxGeometry with patient coordinate points
+         """
+         pixel_x1, pixel_y1 = point1
+         pixel_x2, pixel_y2 = point2
+
+         new_point1 = pixel_to_patient(ds, pixel_x1, pixel_y1,
+                                       slice_index=slice_index)
+         new_point2 = pixel_to_patient(ds, pixel_x2, pixel_y2,
+                                       slice_index=slice_index)
+         return BoxGeometry(new_point1, new_point2)
+
+
  class CreateAnnotationDto:
      def __init__(self,
                   type: AnnotationType | str,
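
`BoxGeometry` mirrors `LineGeometry`: two corner points serialized under a `points` key, optionally lifted from pixel to patient coordinates through `pixel_to_patient`. A small sketch; the DICOM path is a placeholder:

```python
import pydicom
from datamint.apihandler.dto.annotation_dto import BoxGeometry

# Direct construction with 3D points, (x, y, z) or (x, y, frame_index):
geom = BoxGeometry((10.0, 10.0, 2.0), (50.0, 40.0, 2.0))
print(geom.to_dict())  # {'points': [point1, point2]}

# Or lift 2D pixel corners into patient coordinates using DICOM metadata:
ds = pydicom.dcmread('scan.dcm')  # placeholder path
geom3d = BoxGeometry.from_dicom(ds, point1=(10, 10), point2=(50, 40), slice_index=2)
```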

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/exp_api_handler.py

@@ -1,4 +1,4 @@
- from datamintapi.apihandler.base_api_handler import BaseAPIHandler
+ from datamint.apihandler.base_api_handler import BaseAPIHandler
  from typing import Optional, Dict, List, Union, Any
  import json
  import logging

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/apihandler/root_api_handler.py

@@ -6,8 +6,8 @@ from requests.exceptions import HTTPError
  import logging
  import asyncio
  import aiohttp
- from datamintapi.utils.dicom_utils import anonymize_dicom, to_bytesio, is_dicom
- from datamintapi.utils import dicom_utils
+ from datamint.utils.dicom_utils import anonymize_dicom, to_bytesio, is_dicom
+ from datamint.utils import dicom_utils
  import pydicom
  from pathlib import Path
  from datetime import date
@@ -15,7 +15,7 @@ import mimetypes
  from PIL import Image
  import cv2
  from nibabel.filebasedimages import FileBasedImage as nib_FileBasedImage
- from datamintapi import configs
+ from datamint import configs
  from .base_api_handler import BaseAPIHandler, DatamintException, ResourceNotFoundError, ResourceFields, ResourceStatus
  from deprecated.sphinx import deprecated
  import json
@@ -63,6 +63,7 @@ class RootAPIHandler(BaseAPIHandler):
                          session=None,
                          modality: Optional[str] = None,
                          publish: bool = False,
+                         metadata_file: Optional[str] = None,
                          ) -> str:
          if _is_io_object(file_path):
              name = file_path.name
@@ -97,6 +98,8 @@ class RootAPIHandler(BaseAPIHandler):
          is_a_dicom_file = is_dicom(name) or is_dicom(file_path)
          if is_a_dicom_file:
              mimetype = 'application/dicom'
+         elif name.endswith('.nii') or name.endswith('.nii.gz'):
+             mimetype = 'application/x-nifti'

          filename = os.path.basename(name)
          _LOGGER.debug(f"File name '{filename}' mimetype: {mimetype}")
@@ -115,6 +118,25 @@ class RootAPIHandler(BaseAPIHandler):
          f = _open_io(file_path)

          try:
+             metadata_content = None
+             metadata_dict = None
+             if metadata_file is not None:
+                 try:
+                     with open(metadata_file, 'r') as metadata_f:
+                         metadata_content = metadata_f.read()
+                     metadata_dict = json.loads(metadata_content)
+                     metadata_dict_lower = {k.lower(): v for k, v in metadata_dict.items() if isinstance(k, str)}
+                     try:
+                         if modality is None:
+                             if 'modality' in metadata_dict_lower:
+                                 modality = metadata_dict_lower['modality']
+                     except Exception as e:
+                         _LOGGER.debug(f"Failed to extract modality from metadata file {metadata_file}: {e}")
+                     _LOGGER.debug(f"Metadata dict: {metadata_dict}")
+                 except Exception as e:
+                     _LOGGER.warning(f"Failed to read metadata file {metadata_file}: {e}")
+
+
              form = aiohttp.FormData()
              url = self._get_endpoint_url(RootAPIHandler.ENDPOINT_RESOURCES)
              file_key = 'resource'
@@ -134,6 +156,14 @@ class RootAPIHandler(BaseAPIHandler):
                  tags = ','.join([l.strip() for l in tags])
                  form.add_field('tags', tags)

+             # Add JSON metadata if provided
+             if metadata_content is not None:
+                 try:
+                     _LOGGER.debug(f"Adding metadata from {metadata_file}")
+                     form.add_field('metadata', metadata_content, content_type='application/json')
+                 except Exception as e:
+                     _LOGGER.warning(f"Failed to read metadata file {metadata_file}: {e}")
+
              request_params = {
                  'method': 'POST',
                  'url': url,
@@ -170,6 +200,7 @@ class RootAPIHandler(BaseAPIHandler):
                          publish: bool = False,
                          segmentation_files: Optional[list[dict]] = None,
                          transpose_segmentation: bool = False,
+                         metadata_files: Optional[list[Optional[str]]] = None,
                          ) -> list[str]:
          if on_error not in ['raise', 'skip']:
              raise ValueError("on_error must be either 'raise' or 'skip'")
@@ -177,8 +208,11 @@ class RootAPIHandler(BaseAPIHandler):
          if segmentation_files is None:
              segmentation_files = _infinite_gen(None)

+         if metadata_files is None:
+             metadata_files = _infinite_gen(None)
+
          async with aiohttp.ClientSession() as session:
-             async def __upload_single_resource(file_path, segfiles: dict):
+             async def __upload_single_resource(file_path, segfiles: dict, metadata_file: Optional[str]):
                  async with self.semaphore:
                      rid = await self._upload_single_resource_async(
                          file_path=file_path,
@@ -191,6 +225,7 @@ class RootAPIHandler(BaseAPIHandler):
                          channel=channel,
                          modality=modality,
                          publish=publish,
+                         metadata_file=metadata_file,
                      )
                      if segfiles is not None:
                          fpaths = segfiles['files']
@@ -208,7 +243,8 @@ class RootAPIHandler(BaseAPIHandler):
                              transpose_segmentation=transpose_segmentation)
                      return rid

-             tasks = [__upload_single_resource(f, segfiles) for f, segfiles in zip(files_path, segmentation_files)]
+             tasks = [__upload_single_resource(f, segfiles, metadata_file)
+                      for f, segfiles, metadata_file in zip(files_path, segmentation_files, metadata_files)]
              return await asyncio.gather(*tasks, return_exceptions=on_error == 'skip')

      def _assemble_dicoms(self, files_path: Sequence[str | IO]) -> tuple[Sequence[str | IO], bool]:
@@ -248,7 +284,8 @@ class RootAPIHandler(BaseAPIHandler):
                          segmentation_files: Optional[list[Union[list[str], dict]]] = None,
                          transpose_segmentation: bool = False,
                          modality: Optional[str] = None,
-                         assemble_dicoms: bool = True
+                         assemble_dicoms: bool = True,
+                         metadata_files: Optional[list[Optional[str]]] = None
                          ) -> list[str | Exception] | str | Exception:
          """
          Upload resources.
@@ -274,6 +311,7 @@ class RootAPIHandler(BaseAPIHandler):
              transpose_segmentation (bool): Whether to transpose the segmentation files or not.
              modality (Optional[str]): The modality of the resources.
              assemble_dicoms (bool): Whether to assemble the dicom files or not based on the SOPInstanceUID and InstanceNumber attributes.
+             metadata_files (Optional[list[Optional[str]]]): JSON metadata files to include with each resource.

          Raises:
              ResourceNotFoundError: If `publish_to` is supplied, and the project does not exists.
@@ -319,6 +357,7 @@ class RootAPIHandler(BaseAPIHandler):
              segmentation_files=segmentation_files,
              transpose_segmentation=transpose_segmentation,
              modality=modality,
+             metadata_files=metadata_files,
          )

          resource_ids = loop.run_until_complete(task)
@@ -690,13 +729,13 @@ class RootAPIHandler(BaseAPIHandler):
                            'url': url}
          try:
              response = self._run_request(request_params)
- 
+
              # Get mimetype if needed for auto_convert or add_extension
              mimetype = None
              if auto_convert or add_extension:
                  resource_info = self.get_resources_by_ids(resource_id)
                  mimetype = resource_info['mimetype']
- 
+
              if auto_convert:
                  try:
                      resource_file = BaseAPIHandler.convert_format(response.content,
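
The upload path now threads one optional JSON sidecar per resource: the file is read, a top-level `modality` key can backfill a missing modality, and the raw JSON is attached as a `metadata` form field. A sketch of the new keyword, assuming the public method diffed above is `upload_resources` and that it takes the file list as its first argument (paths are placeholders):

```python
from datamint import APIHandler

api = APIHandler()
# One metadata sidecar per resource; use None for resources without one.
resource_ids = api.upload_resources(
    ['scan_a.nii.gz', 'scan_b.nii.gz'],    # placeholder file paths
    metadata_files=['scan_a.json', None],  # forwarded as the 'metadata' form field
)
print(resource_ids)  # ids of the created resources
```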

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/client_cmd_tools/datamint_config.py

@@ -1,7 +1,7 @@
  import argparse
  import logging
- from datamintapi import configs
- from datamintapi.utils.logging_utils import load_cmdline_logging_config
+ from datamint import configs
+ from datamint.utils.logging_utils import load_cmdline_logging_config

  # Create two loggings: one for the user and one for the developer
  _LOGGER = logging.getLogger(__name__)
@@ -79,7 +79,7 @@ def configure_api_key():
  def test_connection():
      """Test the API connection with current settings."""
      try:
-         from datamintapi import APIHandler
+         from datamint import APIHandler
          _USER_LOGGER.info("🔄 Testing connection...")
          api = APIHandler()
          # Simple test - try to get projects

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/client_cmd_tools/datamint_upload.py

@@ -1,18 +1,18 @@
  import argparse
- from datamintapi.apihandler.api_handler import APIHandler
+ from datamint.apihandler.api_handler import APIHandler
  import os
  from humanize import naturalsize
  import logging
  from pathlib import Path
  import sys
- from datamintapi.utils.dicom_utils import is_dicom
+ from datamint.utils.dicom_utils import is_dicom
  import fnmatch
  from typing import Sequence, Generator, Optional, Any
  from collections import defaultdict
- from datamintapi import __version__ as datamintapi_version
- from datamintapi import configs
- from datamintapi.client_cmd_tools.datamint_config import ask_api_key
- from datamintapi.utils.logging_utils import load_cmdline_logging_config
+ from datamint import __version__ as datamint_version
+ from datamint import configs
+ from datamint.client_cmd_tools.datamint_config import ask_api_key
+ from datamint.utils.logging_utils import load_cmdline_logging_config
  import yaml

  # Create two loggings: one for the user and one for the developer
@@ -256,12 +256,86 @@ def _find_segmentation_files(segmentation_root_path: str,
      return segmentation_files


- def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
+ def _find_json_metadata(file_path: str | Path) -> Optional[str]:
+     """
+     Find a JSON file with the same base name as the given file.
+
+     Args:
+         file_path (str): Path to the main file (e.g., NIFTI file)
+
+     Returns:
+         Optional[str]: Path to the JSON metadata file if found, None otherwise
+     """
+     file_path = Path(file_path)
+     json_path = file_path.with_suffix('.json')
+
+     if json_path.exists() and json_path.is_file():
+         _LOGGER.debug(f"Found JSON metadata file: {json_path}")
+         return str(json_path)
+
+     return None
+
+
+ def _collect_metadata_files(files_path: list[str], auto_detect_json: bool) -> tuple[list, list[str]]:
+     """
+     Collect JSON metadata files for the given files and filter them from main files list.
+
+     Args:
+         files_path (list[str]): List of file paths
+         auto_detect_json (bool): Whether to auto-detect JSON metadata files
+
+     Returns:
+         tuple[list[Optional[str]], list[str]]: Tuple of (metadata file paths, filtered files_path)
+             - metadata file paths: List of metadata file paths (None if no metadata found)
+             - filtered files_path: Original files_path with JSON metadata files removed
+     """
+     if not auto_detect_json:
+         return [None] * len(files_path), files_path
+
+     metadata_files = []
+     used_json_files = set()
+     nifti_extensions = ['.nii', '.nii.gz']
+
+     for file_path in files_path:
+         # Check if this is a NIFTI file
+         if any(file_path.endswith(ext) for ext in nifti_extensions):
+             json_file = _find_json_metadata(file_path)
+             metadata_files.append(json_file)
+             if json_file is not None:
+                 used_json_files.add(json_file)
+         else:
+             metadata_files.append(None)
+
+     # Filter out JSON files that are being used as metadata from the main files list
+     filtered_files_path = [f for f in files_path if f not in used_json_files]
+
+     # Update metadata_files to match the filtered list
+     if used_json_files:
+         _LOGGER.debug(f"Filtering out {len(used_json_files)} JSON metadata files from main upload list")
+         filtered_metadata_files = []
+         filtered_file_index = 0
+
+         for original_file in files_path:
+             if original_file not in used_json_files:
+                 filtered_metadata_files.append(metadata_files[files_path.index(original_file)])
+                 filtered_file_index += 1
+
+         metadata_files = filtered_metadata_files
+
+     return metadata_files, filtered_files_path
+
+
+ def _parse_args() -> tuple[Any, list, Optional[list[dict]], Optional[list[str]]]:
      parser = argparse.ArgumentParser(
          description='DatamintAPI command line tool for uploading DICOM files and other resources')
-     parser.add_argument('--path', type=_is_valid_path_argparse, metavar="FILE",
-                         required=True,
+
+     # Add positional argument for path
+     parser.add_argument('path', nargs='?', type=_is_valid_path_argparse, metavar="PATH",
                          help='Path to the resource file(s) or a directory')
+
+     # Keep the --path option for backward compatibility, but make it optional
+     parser.add_argument('--path', dest='path_flag', type=_is_valid_path_argparse, metavar="FILE",
+                         help='Path to the resource file(s) or a directory (alternative to positional argument)')
      parser.add_argument('-r', '--recursive', nargs='?', const=-1,  # -1 means infinite
                          type=int,
                          help='Recurse folders looking for DICOMs. If a number is passed, recurse that number of levels.')
@@ -302,9 +376,28 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
                          help='Automatically answer yes to all prompts')
      parser.add_argument('--transpose-segmentation', action='store_true', default=False,
                          help='Transpose the segmentation dimensions to match the image dimensions')
-     parser.add_argument('--version', action='version', version=f'%(prog)s {datamintapi_version}')
+     parser.add_argument('--auto-detect-json', action='store_true', default=True,
+                         help='Automatically detect and include JSON metadata files with the same base name as NIFTI files')
+     parser.add_argument('--no-auto-detect-json', dest='auto_detect_json', action='store_false',
+                         help='Disable automatic detection of JSON metadata files (default behavior)')
+     parser.add_argument('--version', action='version', version=f'%(prog)s {datamint_version}')
      parser.add_argument('--verbose', action='store_true', help='Print debug messages', default=False)
      args = parser.parse_args()
+
+     # Handle path argument priority: positional takes precedence over --path flag
+     if args.path is not None and args.path_flag is not None:
+         _USER_LOGGER.warning("Both positional path and --path flag provided. Using positional argument.")
+         final_path = args.path
+     elif args.path is not None:
+         final_path = args.path
+     elif args.path_flag is not None:
+         final_path = args.path_flag
+     else:
+         parser.error("Path argument is required. Provide it as a positional argument or use --path flag.")
+
+     # Replace args.path with the final resolved path for consistency
+     args.path = final_path
+
      if args.verbose:
          # Get the console handler and set to debug
          logging.getLogger().handlers[0].setLevel(logging.DEBUG)
@@ -319,7 +412,6 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
          raise ValueError("--include-extensions and --exclude-extensions are mutually exclusive.")

      try:
-
          if os.path.isfile(args.path):
              file_path = [args.path]
              if args.recursive is not None:
@@ -337,6 +429,12 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
          if len(file_path) == 0:
              raise ValueError(f"No valid file was found in {args.path}")

+         # Collect JSON metadata files and filter them from main files list
+         metadata_files, file_path = _collect_metadata_files(file_path, args.auto_detect_json)
+
+         if len(file_path) == 0:
+             raise ValueError(f"No valid non-metadata files found in {args.path}")
+
          if args.segmentation_names is not None:
              with open(args.segmentation_names, 'r') as f:
                  segmentation_names = yaml.safe_load(f)
@@ -360,7 +458,7 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
              raise ValueError("Cannot use both --tag and --label. Use --tag instead. --label is deprecated.")
          args.tag = args.tag if args.tag is not None else args.label

-         return args, file_path, segmentation_files
+         return args, file_path, segmentation_files, metadata_files

      except Exception as e:
          if args.verbose:
@@ -371,6 +469,7 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]]]:
  def print_input_summary(files_path: list[str],
                          args,
                          segfiles: Optional[list[dict]],
+                         metadata_files: Optional[list[str]] = None,
                          include_extensions=None):
      ### Create a summary of the upload ###
      total_files = len(files_path)
@@ -397,6 +496,7 @@ def print_input_summary(files_path: list[str],
          if ext == '':
              ext = 'no extension'
          _USER_LOGGER.info(f"\t{ext}: {count}")
+     # Check for multiple extensions
      if len(ext_counts) > 1 and include_extensions is None:
          _USER_LOGGER.warning("Multiple file extensions found!" +
                               " Make sure you are uploading the correct files.")
@@ -419,6 +519,13 @@ def print_input_summary(files_path: list[str],
      else:
          _USER_LOGGER.info(msg)

+     if metadata_files is not None:
+         num_metadata_files = sum([1 if metadata is not None else 0 for metadata in metadata_files])
+         if num_metadata_files > 0:
+             msg = f"Number of files with JSON metadata: {num_metadata_files} ({num_metadata_files / total_files:.0%})"
+             _USER_LOGGER.info(msg)
+             # TODO: Could add validation to ensure JSON metadata files contain valid DICOM metadata structure
+

  def print_results_summary(files_path: list[str],
                            results: list[str | Exception]):
@@ -441,7 +548,7 @@ def main():
      load_cmdline_logging_config()

      try:
-         args, files_path, segfiles = _parse_args()
+         args, files_path, segfiles, metadata_files = _parse_args()
      except Exception as e:
          _USER_LOGGER.error(f'Error validating arguments. {e}')
          return
@@ -449,6 +556,7 @@ def main():
      print_input_summary(files_path,
                          args=args,
                          segfiles=segfiles,
+                         metadata_files=metadata_files,
                          include_extensions=args.include_extensions)

      if not args.yes:
@@ -471,7 +579,8 @@ def main():
          publish=args.publish,
          segmentation_files=segfiles,
          transpose_segmentation=args.transpose_segmentation,
-         assemble_dicoms=True
+         assemble_dicoms=True,
+         metadata_files=metadata_files
      )
      _USER_LOGGER.info('Upload finished!')
      _LOGGER.debug(f"Number of results: {len(results)}")
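
On the CLI side, the path can now be given positionally (`--path` is kept for backward compatibility) and JSON sidecars are auto-detected for NIfTI files unless `--no-auto-detect-json` is passed. The pairing rule implemented by `_find_json_metadata` above is a plain suffix swap; a minimal sketch with placeholder file names:

```python
from pathlib import Path

# --auto-detect-json pairs a NIfTI scan with a sidecar whose name is the
# scan's name with its final suffix replaced by '.json'.
scan = Path('brain_scan.nii')        # placeholder file name
sidecar = scan.with_suffix('.json')  # -> Path('brain_scan.json')
if sidecar.is_file():
    print(f'{scan} will be uploaded with metadata from {sidecar}')
```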

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/dataset/base_dataset.py

@@ -8,13 +8,13 @@ import json
  import yaml
  import pydicom
  import numpy as np
- from datamintapi import configs
+ from datamint import configs
  from torch.utils.data import DataLoader
  import torch
- from datamintapi.apihandler.base_api_handler import DatamintException
- from datamintapi.utils.dicom_utils import is_dicom
+ from datamint.apihandler.base_api_handler import DatamintException
+ from datamint.utils.dicom_utils import is_dicom
  import cv2
- from datamintapi.utils.io_utils import read_array_normalized
+ from datamint.utils.io_utils import read_array_normalized
  from deprecated import deprecated
  from datetime import datetime

@@ -79,7 +79,7 @@ class DatamintBaseDataset:
                   include_frame_label_names: Optional[list[str]] = None,
                   exclude_frame_label_names: Optional[list[str]] = None
                   ):
-         from datamintapi.apihandler.api_handler import APIHandler
+         from datamint.apihandler.api_handler import APIHandler

          if project_name is None:
              raise ValueError("project_name is required.")

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/dataset/dataset.py

@@ -17,7 +17,7 @@ class DatamintDataset(DatamintBaseDataset):
      In addition to that, it has functionality to better process annotations and segmentations.

      .. note::
-         Import using ``from datamintapi import Dataset``.
+         Import using ``from datamint import Dataset``.

      Args:
          root: Root directory of dataset where data already exists or will be downloaded.

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/examples/example_projects.py

@@ -1,6 +1,6 @@
  import requests
  import io
- from datamintapi import APIHandler
+ from datamint import APIHandler
  import logging
  from PIL import Image
  import numpy as np

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/experiment/_patcher.py

@@ -486,7 +486,7 @@ def initialize_automatic_logging(enable_rich_logging: bool = True):
      # check if RichHandler is already in the handlers
      if enable_rich_logging and not any(isinstance(h, RichHandler) for h in logging.getLogger().handlers):
          logging.getLogger().handlers.append(RichHandler())  # set rich logging handler for the root logger
-     # logging.getLogger("datamintapi").setLevel(logging.INFO)
+     # logging.getLogger("datamint").setLevel(logging.INFO)

      pytorch_patcher = PytorchPatcher()


{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/experiment/experiment.py

@@ -1,14 +1,14 @@
  import logging
- from datamintapi.apihandler.api_handler import APIHandler
- from datamintapi.apihandler.base_api_handler import DatamintException
+ from datamint.apihandler.api_handler import APIHandler
+ from datamint.apihandler.base_api_handler import DatamintException
  from datetime import datetime, timezone
  from typing import List, Dict, Optional, Union, Any, Tuple, IO, Literal
  from collections import defaultdict
- from datamintapi.dataset.dataset import DatamintDataset
+ from datamint.dataset.dataset import DatamintDataset
  import os
  import numpy as np
  import heapq
- from datamintapi.utils import io_utils
+ from datamint.utils import io_utils

  _LOGGER = logging.getLogger(__name__)

@@ -570,7 +570,7 @@ class Experiment:

          Args:
              split (str): The split of the dataset to get. Can be one of ['all', 'train', 'test', 'val'].
-             **kwargs: Additional arguments to pass to the :py:class:`~datamintapi.dataset.dataset.DatamintDataset` class.
+             **kwargs: Additional arguments to pass to the :py:class:`~datamint.dataset.dataset.DatamintDataset` class.

          Returns:
              DatamintDataset: The dataset object.
@@ -815,22 +815,22 @@ class Experiment:
          Example:
              .. code-block:: python

-                     resource_id = '123'
-                     predictions = np.array([[0.1, 0.4], [0.9, 0.2]])
-                     label_name = 'fracture'
-                     exp.log_segmentation_predictions(resource_id, predictions, label_name, threshold=0.5)
+                 resource_id = '123'
+                 predictions = np.array([[0.1, 0.4], [0.9, 0.2]])
+                 label_name = 'fracture'
+                 exp.log_segmentation_predictions(resource_id, predictions, label_name, threshold=0.5)

              .. code-block:: python

-                     resource_id = '456'
-                     predictions = np.array([[0, 1, 2], [1, 2, 0]])  # Multi-class mask with values 0, 1, 2
-                     label_name = {1: 'Femur', 2: 'Tibia'}  # Mapping of pixel values to class names
-                     exp.log_segmentation_predictions(
-                         resource_id,
-                         predictions,
-                         label_name,
-                         predictions_format='multi-class'
-                     )
+                 resource_id = '456'
+                 predictions = np.array([[0, 1, 2], [1, 2, 0]])  # Multi-class mask with values 0, 1, 2
+                 label_name = {1: 'Femur', 2: 'Tibia'}  # Mapping of pixel values to class names
+                 exp.log_segmentation_predictions(
+                     resource_id,
+                     predictions,
+                     label_name,
+                     predictions_format='multi-class'
+                 )
          """

          if predictions_format not in ['multi-class', 'probability']:

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/logging.yaml

@@ -7,13 +7,13 @@ handlers:
      level: WARNING
      show_time: False
    console_user:
-     class: datamintapi.utils.logging_utils.ConditionalRichHandler
+     class: datamint.utils.logging_utils.ConditionalRichHandler
      level: INFO
      show_path: False
      show_time: False

  loggers:
-   datamintapi:
+   datamint:
      level: ERROR
      handlers: [console]
      propagate: no

{datamint-1.2.4/datamintapi → datamint-1.4.0/datamint}/utils/logging_utils.py

@@ -45,7 +45,7 @@ def load_cmdline_logging_config():
          with open('logging_dev.yaml', 'r') as f:
              config = yaml.safe_load(f)
      except:
-         with importlib.resources.open_text('datamintapi', 'logging.yaml') as f:
+         with importlib.resources.open_text('datamint', 'logging.yaml') as f:
              config = yaml.safe_load(f.read())

      logging.config.dictConfig(config)

{datamint-1.2.4 → datamint-1.4.0}/pyproject.toml

@@ -1,19 +1,19 @@
  [project]
  name = "datamint"
  description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
- version = "1.2.4"
+ version = "1.4.0"
  dynamic = ["dependencies"]
  requires-python = ">=3.10"
  readme = "README.md"

  [project.scripts]
- datamint-upload = 'datamintapi.client_cmd_tools.datamint_upload:main'
- datamint-config = 'datamintapi.client_cmd_tools.datamint_config:main'
+ datamint-upload = 'datamint.client_cmd_tools.datamint_upload:main'
+ datamint-config = 'datamint.client_cmd_tools.datamint_config:main'

  [tool.poetry]
  # license = "Proprietary" # https://python-poetry.org/docs/pyproject/
- include = [{ path = "datamintapi/logging.yaml", format = ["sdist", "wheel"] }]
- packages = [ { include = "datamint" }, { include = "datamintapi" }]
+ include = [{ path = "datamint/logging.yaml", format = ["sdist", "wheel"] }]
+ packages = [ { include = "datamint" }]

  # https://python-poetry.org/docs/dependency-specification/
  [tool.poetry.dependencies]
@@ -40,6 +40,8 @@ matplotlib = "*"
  lightning = "*"
  albumentations = ">=2.0.0"
  lazy-loader = ">=0.3.0"
+ # For compatibility with the datamintapi package
+ datamintapi = "0.0.*"
  # Extra dependencies for docs
  sphinx = { version = ">=5.0", optional = true }
  sphinx_rtd_theme = { version = ">=2.0.0", optional = true }

datamint-1.2.4/datamint/__init__.py (deleted)

@@ -1,11 +0,0 @@
- """
- Datamint API package alias.
-
- This module serves as an alias for the datamintapi package.
-
- """
- from datamintapi import *
- import importlib.metadata
-
- __name__ = "datamint"
- __version__ = importlib.metadata.version(__name__)