datamint 1.9.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registries.

Potentially problematic release.


This version of datamint might be problematic.

Files changed (40)
  1. datamint/__init__.py +2 -0
  2. datamint/api/__init__.py +3 -0
  3. datamint/api/base_api.py +430 -0
  4. datamint/api/client.py +91 -0
  5. datamint/api/dto/__init__.py +10 -0
  6. datamint/api/endpoints/__init__.py +17 -0
  7. datamint/api/endpoints/annotations_api.py +984 -0
  8. datamint/api/endpoints/channels_api.py +28 -0
  9. datamint/api/endpoints/datasetsinfo_api.py +16 -0
  10. datamint/api/endpoints/projects_api.py +203 -0
  11. datamint/api/endpoints/resources_api.py +1013 -0
  12. datamint/api/endpoints/users_api.py +38 -0
  13. datamint/api/entity_base_api.py +347 -0
  14. datamint/apihandler/annotation_api_handler.py +5 -5
  15. datamint/apihandler/api_handler.py +3 -6
  16. datamint/apihandler/base_api_handler.py +6 -28
  17. datamint/apihandler/dto/__init__.py +0 -0
  18. datamint/apihandler/dto/annotation_dto.py +1 -1
  19. datamint/apihandler/root_api_handler.py +53 -28
  20. datamint/client_cmd_tools/datamint_config.py +6 -37
  21. datamint/client_cmd_tools/datamint_upload.py +84 -58
  22. datamint/dataset/base_dataset.py +65 -75
  23. datamint/dataset/dataset.py +2 -2
  24. datamint/entities/__init__.py +20 -0
  25. datamint/entities/annotation.py +178 -0
  26. datamint/entities/base_entity.py +51 -0
  27. datamint/entities/channel.py +46 -0
  28. datamint/entities/datasetinfo.py +22 -0
  29. datamint/entities/project.py +64 -0
  30. datamint/entities/resource.py +130 -0
  31. datamint/entities/user.py +21 -0
  32. datamint/examples/example_projects.py +41 -44
  33. datamint/exceptions.py +27 -1
  34. datamint/logging.yaml +1 -1
  35. datamint/utils/logging_utils.py +75 -0
  36. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/METADATA +13 -9
  37. datamint-2.0.0.dist-info/RECORD +50 -0
  38. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/WHEEL +1 -1
  39. datamint-1.9.2.dist-info/RECORD +0 -29
  40. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/entry_points.txt +0 -0

datamint/dataset/dataset.py
@@ -7,7 +7,7 @@ import numpy as np
 import logging
 from PIL import Image
 import albumentations
-from datamint.dataset.annotation import Annotation
+from datamint.entities.annotation import Annotation
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -155,7 +155,7 @@ class DatamintDataset(DatamintBaseDataset):
         # FIXME: avoid enforcing resizing the mask
         seg = (Image.open(segfilepath)
                .convert('L')
-               .resize((w, h), Image.NEAREST)
+               .resize((w, h), Image.Resampling.NEAREST)
                )
         seg = np.array(seg)
 
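
The second hunk tracks Pillow's API: the resampling filters have lived on the Image.Resampling enum since Pillow 9.1, so the enum spelling is the forward-compatible form of the same call. A minimal sketch of the equivalent resize, assuming Pillow 9.1 or newer is installed:

from PIL import Image

img = Image.new('L', (64, 64))  # placeholder image; any PIL image works
# 2.0.0 spelling: the filter comes from the Image.Resampling enum.
small = img.resize((32, 32), Image.Resampling.NEAREST)
print(small.size)  # (32, 32)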

datamint/entities/__init__.py
@@ -0,0 +1,20 @@
+"""DataMint entities package."""
+
+from .annotation import Annotation
+from .base_entity import BaseEntity
+from .channel import Channel, ChannelResourceData
+from .project import Project
+from .resource import Resource
+from .user import User  # new export
+from .datasetinfo import DatasetInfo
+
+__all__ = [
+    'Annotation',
+    'BaseEntity',
+    'Channel',
+    'ChannelResourceData',
+    'Project',
+    'Resource',
+    "User",
+    'DatasetInfo',
+]
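
The new datamint.entities package re-exports the entity models added in 2.0.0 and replaces the datamint.dataset.annotation import path used in 1.9.2 (see the dataset.py hunk above). A minimal import sketch, assuming datamint 2.0.0 is installed:

# Import sketch for datamint 2.0.0 (assumed installed).
from datamint.entities import Annotation, Project, Resource, User

# 1.9.2 code that imported from datamint.dataset.annotation now pulls the
# same class from the entities package instead:
from datamint.entities.annotation import Annotation as AnnotationModel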

datamint/entities/annotation.py
@@ -0,0 +1,178 @@
+# filepath: datamint/entities/annotation.py
+"""Annotation entity module for DataMint API.
+
+This module defines the Annotation model used to represent annotation
+records returned by the DataMint API.
+"""
+
+from typing import Any
+import logging
+from .base_entity import BaseEntity, MISSING_FIELD
+from pydantic import Field
+from datetime import datetime
+
+logger = logging.getLogger(__name__)
+
+# Map API field names to class attributes
+_FIELD_MAPPING = {
+    'type': 'annotation_type',
+    'name': 'identifier',
+    'added_by': 'created_by',
+    'index': 'frame_index',
+}
+
+
+class Annotation(BaseEntity):
+    """Pydantic Model representing a DataMint annotation.
+
+    Attributes:
+        id: Unique identifier for the annotation.
+        identifier: User-friendly identifier or label for the annotation.
+        scope: Scope of the annotation (e.g., "frame", "image").
+        frame_index: Index of the frame if scope is frame-based.
+        annotation_type: Type of annotation (e.g., "segmentation", "bbox", "label").
+        text_value: Optional text value associated with the annotation.
+        numeric_value: Optional numeric value associated with the annotation.
+        units: Optional units for numeric_value.
+        geometry: Optional geometry payload (e.g., polygons, masks) as a list.
+        created_at: ISO timestamp for when the annotation was created.
+        created_by: Email or identifier of the creating user.
+        annotation_worklist_id: Optional worklist ID associated with the annotation.
+        status: Lifecycle status of the annotation (e.g., "new", "approved").
+        approved_at: Optional ISO timestamp for approval time.
+        approved_by: Optional identifier of the approver.
+        resource_id: ID of the resource this annotation belongs to.
+        associated_file: Path or identifier of any associated file artifact.
+        deleted: Whether the annotation is marked as deleted.
+        deleted_at: Optional ISO timestamp for deletion time.
+        deleted_by: Optional identifier of the user who deleted the annotation.
+        created_by_model: Optional identifier of the model that created this annotation.
+        old_geometry: Optional previous geometry payload for change tracking.
+        set_name: Optional set name this annotation belongs to.
+        resource_filename: Optional filename of the resource.
+        resource_modality: Optional modality of the resource (e.g., CT, MR).
+        annotation_worklist_name: Optional worklist name associated with the annotation.
+        user_info: Optional user information with keys like firstname and lastname.
+        values: Optional extra values payload for flexible schemas.
+    """
+
+    id: str
+    identifier: str
+    scope: str
+    frame_index: int | None
+    annotation_type: str
+    text_value: str | None
+    numeric_value: float | int | None
+    units: str | None
+    geometry: list | dict | None
+    created_at: str  # ISO timestamp string
+    created_by: str
+    annotation_worklist_id: str | None
+    status: str
+    approved_at: str | None  # ISO timestamp string
+    approved_by: str | None
+    resource_id: str
+    associated_file: str | None
+    deleted: bool
+    deleted_at: str | None  # ISO timestamp string
+    deleted_by: str | None
+    created_by_model: str | None
+    set_name: str | None
+    resource_filename: str | None
+    resource_modality: str | None
+    annotation_worklist_name: str | None
+    user_info: dict | None
+    values: list | None = MISSING_FIELD
+    file: str | None = None  # Add file field for segmentations
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> 'Annotation':
+        """Create an Annotation instance from a dictionary.
+
+        Args:
+            data: Dictionary containing annotation data from API
+
+        Returns:
+            Annotation instance
+        """
+        # Convert field names and filter valid fields
+        converted_data = {}
+        for key, value in data.items():
+            # Map field names if needed
+            mapped_key = _FIELD_MAPPING.get(key, key)
+            converted_data[mapped_key] = value
+
+        if 'scope' not in converted_data:
+            converted_data['scope'] = 'image' if converted_data.get('frame_index') is None else 'frame'
+
+        if converted_data['annotation_type'] in ['segmentation']:
+            if converted_data.get('file') is None:
+                raise ValueError(f"Segmentation annotations must have an associated file. {data}")
+
+        # Create instance with only valid fields
+        valid_fields = {f for f in cls.model_fields.keys()}
+        filtered_data = {k: v for k, v in converted_data.items() if k in valid_fields}
+
+        return cls(**filtered_data)
+
+    @property
+    def type(self) -> str:
+        """Alias for :attr:`annotation_type`."""
+        return self.annotation_type
+
+    @property
+    def name(self) -> str:
+        """Get the annotation name (alias for identifier)."""
+        return self.identifier
+
+    @property
+    def index(self) -> int | None:
+        """Get the frame index (alias for frame_index)."""
+        return self.frame_index
+
+    @property
+    def value(self) -> str | None:
+        """Get the annotation value (for category annotations)."""
+        return self.text_value
+
+    @property
+    def added_by(self) -> str:
+        """Get the creator email (alias for created_by)."""
+        return self.created_by
+
+    def is_segmentation(self) -> bool:
+        """Check if this is a segmentation annotation."""
+        return self.annotation_type == 'segmentation'
+
+    def is_label(self) -> bool:
+        """Check if this is a label annotation."""
+        return self.annotation_type == 'label'
+
+    def is_category(self) -> bool:
+        """Check if this is a category annotation."""
+        return self.annotation_type == 'category'
+
+    def is_frame_scoped(self) -> bool:
+        """Check if this annotation is frame-scoped."""
+        return self.scope == 'frame'
+
+    def is_image_scoped(self) -> bool:
+        """Check if this annotation is image-scoped."""
+        return self.scope == 'image'
+
+    def get_created_datetime(self) -> datetime | None:
+        """
+        Get the creation datetime as a datetime object.
+
+        Returns:
+            datetime object or None if created_at is not set
+        """
+        if isinstance(self.created_at, datetime):
+            return self.created_at
+
+        if self.created_at:
+            try:
+                return datetime.fromisoformat(self.created_at.replace('Z', '+00:00'))
+            except ValueError:
+                logger.warning(f"Could not parse created_at datetime: {self.created_at}")
+        return None
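
As a rough illustration of how from_dict maps the API spellings (type, name, added_by, index) onto the model attributes, and of the alias properties, here is a sketch built on a hypothetical payload. Every value below is invented; the nullable fields are filled with None only because the model declares them without defaults.

from datamint.entities import Annotation

# Hypothetical API payload using the field names _FIELD_MAPPING translates.
payload = {
    'id': 'ann-001',
    'name': 'lesion',                # -> identifier
    'type': 'label',                 # -> annotation_type
    'added_by': 'user@example.com',  # -> created_by
    'index': 3,                      # -> frame_index
    'status': 'new',
    'resource_id': 'res-123',
    'deleted': False,
    'created_at': '2024-01-01T00:00:00Z',
}
# The remaining nullable fields have no defaults in the model, so this
# sketch supplies an explicit None for each of them.
for key in ('text_value', 'numeric_value', 'units', 'geometry',
            'annotation_worklist_id', 'approved_at', 'approved_by',
            'associated_file', 'deleted_at', 'deleted_by', 'created_by_model',
            'set_name', 'resource_filename', 'resource_modality',
            'annotation_worklist_name', 'user_info'):
    payload.setdefault(key, None)

ann = Annotation.from_dict(payload)
print(ann.type, ann.name, ann.index)          # aliases: label lesion 3
print(ann.is_label(), ann.is_frame_scoped())  # True True (scope inferred as 'frame')
print(ann.get_created_datetime())             # 2024-01-01 00:00:00+00:00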

datamint/entities/base_entity.py
@@ -0,0 +1,51 @@
+import logging
+import sys
+from typing import Any
+from pydantic import ConfigDict, BaseModel
+
+if sys.version_info >= (3, 11):
+    from typing import Self
+else:
+    from typing_extensions import Self
+_LOGGER = logging.getLogger(__name__)
+
+MISSING_FIELD = 'MISSING_FIELD'  # Used when a field is sometimes missing for one endpoint but not on another endpoint
+
+# Track logged warnings to avoid duplicates
+_LOGGED_WARNINGS: set[tuple[str, str]] = set()
+
+
+class BaseEntity(BaseModel):
+    """
+    Base class for all entities in the Datamint system.
+
+    This class provides common functionality for all entities, such as
+    serialization and deserialization from dictionaries, as well as
+    handling unknown fields gracefully.
+    """
+
+    model_config = ConfigDict(extra='allow')  # Allow extra fields not defined in the model
+
+    def asdict(self) -> dict[str, Any]:
+        """Convert the entity to a dictionary, including unknown fields."""
+        return self.model_dump(warnings='none')
+
+    def asjson(self) -> str:
+        """Convert the entity to a JSON string, including unknown fields."""
+        return self.model_dump_json(warnings='none')
+
+    def model_post_init(self, __context: Any) -> None:
+        """Handle unknown fields by logging a warning once per class/field combination in debug mode."""
+        if self.__pydantic_extra__ and _LOGGER.isEnabledFor(logging.DEBUG):
+            class_name = self.__class__.__name__
+
+            have_to_log = False
+            for key in self.__pydantic_extra__.keys():
+                warning_key = (class_name, key)
+
+                if warning_key not in _LOGGED_WARNINGS:
+                    _LOGGED_WARNINGS.add(warning_key)
+                    have_to_log = True
+
+            if have_to_log:
+                _LOGGER.warning(f"Unknown fields {list(self.__pydantic_extra__.keys())} found in {class_name}")
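
To make the extra-field behaviour concrete, a small sketch with a hypothetical subclass (Thing is not part of the package): with extra='allow', unrecognised keys are kept, appear in asdict(), and are reported once per class/field pair when DEBUG logging is enabled.

import logging
from datamint.entities.base_entity import BaseEntity

logging.basicConfig(level=logging.DEBUG)  # the warning only fires in debug mode

class Thing(BaseEntity):  # hypothetical subclass for illustration only
    id: str
    name: str

t = Thing(id='t-1', name='demo', colour='red')  # 'colour' is not declared
print(t.asdict())   # {'id': 't-1', 'name': 'demo', 'colour': 'red'}
print(t.asjson())   # same content serialised as JSON

Thing(id='t-2', name='again', colour='blue')  # no second warning for Thing.colour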

datamint/entities/channel.py
@@ -0,0 +1,46 @@
+from pydantic import ConfigDict, BaseModel
+from datetime import datetime
+from datamint.entities.base_entity import BaseEntity
+
+
+class ChannelResourceData(BaseModel):
+    """Represents resource data within a channel.
+
+    Attributes:
+        created_by: Email of the user who created the resource.
+        customer_id: UUID of the customer.
+        resource_id: UUID of the resource.
+        resource_file_name: Original filename of the resource.
+        resource_mimetype: MIME type of the resource.
+    """
+    model_config = ConfigDict(extra='allow')
+
+    created_by: str
+    customer_id: str
+    resource_id: str
+    resource_file_name: str
+    resource_mimetype: str
+
+
+class Channel(BaseEntity):
+    """Represents a channel containing multiple resources.
+
+    A channel is a collection of resources grouped together,
+    typically for batch processing or organization purposes.
+
+    Attributes:
+        channel_name: Name identifier for the channel.
+        resource_data: List of resources contained in this channel.
+        deleted: Whether the channel has been marked as deleted.
+        created_at: Timestamp when the channel was created.
+        updated_at: Timestamp when the channel was last updated.
+    """
+    channel_name: str
+    resource_data: list[ChannelResourceData]
+    deleted: bool = False
+    created_at: str | None = None
+    updated_at: str | None = None
+
+    def get_resource_ids(self) -> list[str]:
+        """Get list of all resource IDs in this channel."""
+        return [resource.resource_id for resource in self.resource_data] if self.resource_data else []
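
A construction sketch for the channel models (all identifiers below are invented placeholders), showing how get_resource_ids walks the nested resource_data entries:

from datamint.entities.channel import Channel, ChannelResourceData

item = ChannelResourceData(created_by='uploader@example.com',
                           customer_id='cust-0001',
                           resource_id='res-0001',
                           resource_file_name='scan.dcm',
                           resource_mimetype='application/dicom')
channel = Channel(channel_name='batch-2024-01', resource_data=[item])

print(channel.get_resource_ids())  # ['res-0001']
print(channel.deleted)             # False (default)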

datamint/entities/datasetinfo.py
@@ -0,0 +1,22 @@
+"""Project entity module for DataMint API."""
+
+from datetime import datetime
+import logging
+from .base_entity import BaseEntity, MISSING_FIELD
+
+logger = logging.getLogger(__name__)
+
+
+class DatasetInfo(BaseEntity):
+    """Pydantic Model representing a DataMint dataset.
+    """
+
+    id: str
+    name: str
+    created_at: str  # ISO timestamp string
+    created_by: str
+    description: str
+    customer_id: str
+    updated_at: str | None
+    total_resource: int
+    resource_ids: list[str]

datamint/entities/project.py
@@ -0,0 +1,64 @@
+"""Project entity module for DataMint API."""
+
+from datetime import datetime
+import logging
+from .base_entity import BaseEntity, MISSING_FIELD
+
+logger = logging.getLogger(__name__)
+
+
+class Project(BaseEntity):
+    """Pydantic Model representing a DataMint project.
+
+    This class models a project entity from the DataMint API, containing
+    information about the project, its dataset, worklist, AI model, and
+    annotation statistics.
+
+    Attributes:
+        id: Unique identifier for the project
+        name: Human-readable name of the project
+        description: Optional description of the project
+        created_at: ISO timestamp when the project was created
+        created_by: Email of the user who created the project
+        dataset_id: ID of the associated dataset
+        worklist_id: ID of the associated worklist
+        ai_model_id: Optional ID of the associated AI model
+        viewable_ai_segs: Optional configuration for viewable AI segments
+        editable_ai_segs: Optional configuration for editable AI segments
+        archived: Whether the project is archived
+        resource_count: Total number of resources in the project
+        annotated_resource_count: Number of resources that have been annotated
+        most_recent_experiment: Optional information about the most recent experiment
+        closed_resources_count: Number of resources marked as closed/completed
+        resources_to_annotate_count: Number of resources still needing annotation
+        annotators: List of annotators assigned to this project
+    """
+    id: str
+    name: str
+    created_at: str  # ISO timestamp string
+    created_by: str
+    dataset_id: str
+    worklist_id: str
+    archived: bool
+    resource_count: int
+    annotated_resource_count: int
+    description: str | None
+    ai_model_id: str | None
+    viewable_ai_segs: list | None
+    editable_ai_segs: list | None
+    closed_resources_count: int = MISSING_FIELD
+    resources_to_annotate_count: int = MISSING_FIELD
+    most_recent_experiment: str | None = MISSING_FIELD  # ISO timestamp string
+    annotators: list[dict] = MISSING_FIELD
+    customer_id: str | None = MISSING_FIELD
+    archived_on: str | None = MISSING_FIELD
+    archived_by: str | None = MISSING_FIELD
+    is_active_learning: bool = MISSING_FIELD
+    two_up_display: bool = MISSING_FIELD
+    require_review: bool = MISSING_FIELD
+
+    @property
+    def url(self) -> str:
+        """Get the URL to access this project in the DataMint web application."""
+        base_url = "https://app.datamint.io/projects/edit"
+        return f"{base_url}/{self.id}"
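
A sketch constructing a Project directly from placeholder values (in practice instances come from the API). Fields declared with a MISSING_FIELD default can be omitted, and the url property simply joins the web-app base URL with the project id:

from datamint.entities import Project

# Placeholder values for every field that has no default.
proj = Project(id='11111111-2222-3333-4444-555555555555',
               name='Example Project MR',
               created_at='2024-01-01T00:00:00Z',
               created_by='owner@example.com',
               dataset_id='ds-001',
               worklist_id='wl-001',
               archived=False,
               resource_count=1,
               annotated_resource_count=0,
               description='This is an example project',
               ai_model_id=None,
               viewable_ai_segs=None,
               editable_ai_segs=None)

print(proj.url)  # https://app.datamint.io/projects/edit/11111111-2222-3333-4444-555555555555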

datamint/entities/resource.py
@@ -0,0 +1,130 @@
+"""Resource entity module for DataMint API."""
+
+from datetime import datetime
+from typing import Optional, Any
+import logging
+from .base_entity import BaseEntity, MISSING_FIELD
+from pydantic import Field
+
+logger = logging.getLogger(__name__)
+
+class Resource(BaseEntity):
+    """Represents a DataMint resource with all its properties and metadata.
+
+    This class models a resource entity from the DataMint API, containing
+    information about uploaded files, their metadata, and associated projects.
+
+    Attributes:
+        id: Unique identifier for the resource
+        resource_uri: URI path to access the resource file
+        storage: Storage type (e.g., 'DicomResource')
+        location: Storage location path
+        upload_channel: Channel used for upload (e.g., 'tmp')
+        filename: Original filename of the resource
+        modality: Medical imaging modality
+        mimetype: MIME type of the file
+        size: File size in bytes
+        upload_mechanism: Mechanism used for upload (e.g., 'api')
+        customer_id: Customer/organization identifier
+        status: Current status of the resource
+        created_at: ISO timestamp when resource was created
+        created_by: Email of the user who created the resource
+        published: Whether the resource is published
+        published_on: ISO timestamp when resource was published
+        published_by: Email of the user who published the resource
+        publish_transforms: Optional publication transforms
+        deleted: Whether the resource is deleted
+        deleted_at: Optional ISO timestamp when resource was deleted
+        deleted_by: Optional email of the user who deleted the resource
+        metadata: Resource metadata with DICOM information
+        source_filepath: Original source file path
+        tags: List of tags associated with the resource
+        instance_uid: DICOM SOP Instance UID (top-level)
+        series_uid: DICOM Series Instance UID (top-level)
+        study_uid: DICOM Study Instance UID (top-level)
+        patient_id: Patient identifier (top-level)
+        segmentations: Optional segmentation data
+        measurements: Optional measurement data
+        categories: Optional category data
+        labels: List of labels associated with the resource
+        user_info: Information about the user who created the resource
+        projects: List of projects this resource belongs to
+    """
+    id: str
+    resource_uri: str
+    storage: str
+    location: str
+    upload_channel: str
+    filename: str
+    modality: str
+    mimetype: str
+    size: int
+    upload_mechanism: str
+    customer_id: str
+    status: str
+    created_at: str
+    created_by: str
+    published: bool
+    deleted: bool
+    source_filepath: str | None
+    metadata: dict
+    projects: list[dict] = MISSING_FIELD
+    published_on: str | None
+    published_by: str | None
+    tags: list[str] | None = None
+    publish_transforms: Optional[Any] = None
+    deleted_at: Optional[str] = None
+    deleted_by: Optional[str] = None
+    instance_uid: Optional[str] = None
+    series_uid: Optional[str] = None
+    study_uid: Optional[str] = None
+    patient_id: Optional[str] = None
+    segmentations: Optional[Any] = None  # TODO: Define proper type when spec available
+    measurements: Optional[Any] = None  # TODO: Define proper type when spec available
+    categories: Optional[Any] = None  # TODO: Define proper type when spec available
+    user_info: Optional[dict] = None
+
+    @property
+    def size_mb(self) -> float:
+        """Get file size in megabytes.
+
+        Returns:
+            File size in MB rounded to 2 decimal places
+        """
+        return round(self.size / (1024 * 1024), 2)
+
+    def is_dicom(self) -> bool:
+        """Check if the resource is a DICOM file.
+
+        Returns:
+            True if the resource is a DICOM file, False otherwise
+        """
+        return self.mimetype == 'application/dicom' or self.storage == 'DicomResource'
+
+    def get_project_names(self) -> list[str]:
+        """Get list of project names this resource belongs to.
+
+        Returns:
+            List of project names
+        """
+        return [proj['name'] for proj in self.projects]
+
+    def __str__(self) -> str:
+        """String representation of the resource.
+
+        Returns:
+            Human-readable string describing the resource
+        """
+        return f"Resource(id='{self.id}', filename='{self.filename}', size={self.size_mb}MB)"
+
+    def __repr__(self) -> str:
+        """Detailed string representation of the resource.
+
+        Returns:
+            Detailed string representation for debugging
+        """
+        return (
+            f"Resource(id='{self.id}', filename='{self.filename}', "
+            f"modality='{self.modality}', status='{self.status}', "
+            f"published={self.published})"
+        )
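
A placeholder-filled sketch of the Resource helpers; a real object would be returned by the resources endpoint, and every value below is invented:

from datamint.entities import Resource

res = Resource(id='res-0001',
               resource_uri='/resources/res-0001/file',
               storage='DicomResource',
               location='customers/cust-0001/res-0001.dcm',
               upload_channel='tmp',
               filename='emri_small.dcm',
               modality='MR',
               mimetype='application/dicom',
               size=5 * 1024 * 1024,
               upload_mechanism='api',
               customer_id='cust-0001',
               status='published',
               created_at='2024-01-01T00:00:00Z',
               created_by='uploader@example.com',
               published=True,
               published_on='2024-01-02T00:00:00Z',
               published_by='owner@example.com',
               deleted=False,
               source_filepath=None,
               metadata={},
               projects=[{'id': 'proj-001', 'name': 'Example Project MR'}])

print(res.size_mb)              # 5.0
print(res.is_dicom())           # True
print(res.get_project_names())  # ['Example Project MR']
print(res)                      # Resource(id='res-0001', filename='emri_small.dcm', size=5.0MB)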

datamint/entities/user.py
@@ -0,0 +1,21 @@
+from .base_entity import BaseEntity
+
+class User(BaseEntity):
+    """User entity model.
+
+    Attributes:
+        email: User email address (unique identifier in most cases).
+        firstname: First name.
+        lastname: Last name.
+        roles: List of role strings assigned to the user.
+        customer_id: UUID of the owning customer/tenant.
+        created_at: ISO 8601 timestamp of creation.
+    """
+    email: str
+    firstname: str | None
+    lastname: str | None
+    roles: list[str]
+    customer_id: str
+    created_at: str
+
+# Potential improvement: convert created_at to datetime for easier comparisons.

datamint/examples/example_projects.py
@@ -1,75 +1,72 @@
 import requests
 import io
-from datamint import APIHandler
+from datamint import Api
 import logging
 from PIL import Image
 import numpy as np
+from datamint.entities import Project, Resource
+from pydicom.data import get_testdata_file
 
 _LOGGER = logging.getLogger(__name__)
 
 
-def _download_pydicom_test_file(filename: str) -> io.BytesIO:
-    """Download a pydicom test file from GitHub and return its content as a BytesIO object."""
-    url = f'https://raw.githubusercontent.com/pydicom/pydicom/master/tests/data/{filename}'
-    response = requests.get(url)
-    response.raise_for_status()
-    content = io.BytesIO(response.content)
-    content.name = filename
-    return content
-
-
 class ProjectMR:
     @staticmethod
-    def upload_resource_emri_small(api: APIHandler = None) -> str:
+    def upload_resource_emri_small(api: Api | None = None) -> Resource:
         if api is None:
-            api = APIHandler()
+            api = Api()
 
-        searched_res = api.get_resources(status='published', tags=['example'], filename='emri_small.dcm')
+        searched_res = api.resources.get_list(status='published',
+                                              tags=['example'],
+                                              filename='emri_small.dcm')
         for res in searched_res:
             _LOGGER.info('Resource already exists.')
-            return res['id']
+            return res
 
-        dcm_content = _download_pydicom_test_file('emri_small.dcm')
+        dcm_path = get_testdata_file("emri_small.dcm",
                                     read=False)
 
-        _LOGGER.info(f'Uploading resource {dcm_content.name}...')
-        return api.upload_resources(dcm_content,
-                                    anonymize=True,
-                                    publish=True,
-                                    tags=['example'])
+        _LOGGER.info('Uploading resource emri_small.dcm...')
+        resid = api.resources.upload_resource(dcm_path,
+                                              anonymize=False,
+                                              publish=True,
+                                              tags=['example'])
+        return api.resources.get_by_id(resid)
 
     @staticmethod
-    def _upload_annotations(api: APIHandler,
-                            resid: str,
-                            proj) -> None:
+    def _upload_annotations(api: Api,
+                            res: Resource,
+                            proj: Project) -> None:
         _LOGGER.info('Uploading annotations...')
-        proj_id = proj['id']
-        proj_info = api.get_project_by_id(proj_id)
         segurl = 'https://github.com/user-attachments/assets/8c5d7dfe-1b5a-497d-b76e-fe790f09bb90'
         resp = requests.get(segurl, stream=True)
         resp.raise_for_status()
         img = Image.open(io.BytesIO(resp.content)).convert('L')
-        api.upload_segmentations(resid, np.array(img),
-                                 name='object1', frame_index=1,
-                                 worklist_id=proj_info['worklist_id'])
-        api.set_annotation_status(project_id=proj_id,
-                                  resource_id=resid,
-                                  status='closed')
+        api.annotations.upload_segmentations(res, np.array(img),
+                                             name='object1', frame_index=1,
+                                             worklist_id=proj.worklist_id)
+        api.projects.set_work_status(resource=res,
+                                     project=proj,
+                                     status='closed')
 
     @staticmethod
     def create(project_name: str = 'Example Project MR',
-               with_annotations=True) -> str:
-        api = APIHandler()
+               with_annotations=True) -> Project:
+        api = Api()
+
+        res = ProjectMR.upload_resource_emri_small(api)
+        proj = api.projects.get_by_name(name=project_name)
+        if proj:
+            _LOGGER.warning(f'Project {project_name} already exists. Returning it without modifications...')
+            return proj
 
-        resid = ProjectMR.upload_resource_emri_small(api)
-        proj = api.get_project_by_name(project_name)
-        if 'id' in proj:
-            msg = f'Project {project_name} already exists. Delete it first or choose another name.'
-            raise ValueError(msg)
         _LOGGER.info(f'Creating project {project_name}...')
-        proj = api.create_project(name=project_name,
-                                  description='This is an example project',
-                                  resources_ids=[resid])
+        projid = api.projects.create(name=project_name,
+                                     description='This is an example project',
+                                     resources_ids=[res.id])
+        proj = api.projects.get_by_id(projid)
+
         if with_annotations:
-            ProjectMR._upload_annotations(api, resid, proj)
+            ProjectMR._upload_annotations(api, res, proj)
 
-        return proj['id']
+        return proj
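
Taken together, the example now targets the new Api entry point and its namespaced endpoints (api.resources, api.projects, api.annotations) instead of APIHandler, and it returns entity objects rather than raw dicts. A minimal usage sketch built only from calls that appear in this diff; it assumes a configured DataMint API key and a reachable server, so it is not runnable offline:

from datamint import Api
from datamint.examples.example_projects import ProjectMR

# Create (or fetch) the example project and inspect the returned entities.
proj = ProjectMR.create(project_name='Example Project MR', with_annotations=True)
print(proj.name, proj.url)

api = Api()  # assumes credentials are already configured
for res in api.resources.get_list(status='published',
                                  tags=['example'],
                                  filename='emri_small.dcm'):
    print(res)  # Resource.__str__: id, filename and size in MB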