datamint 1.9.3__tar.gz → 2.0.1__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datamint might be problematic. Click here for more details.

Files changed (51) hide show
  1. {datamint-1.9.3 → datamint-2.0.1}/PKG-INFO +13 -9
  2. {datamint-1.9.3 → datamint-2.0.1}/README.md +7 -6
  3. {datamint-1.9.3 → datamint-2.0.1}/datamint/__init__.py +2 -0
  4. datamint-2.0.1/datamint/api/__init__.py +3 -0
  5. datamint-2.0.1/datamint/api/base_api.py +430 -0
  6. datamint-2.0.1/datamint/api/client.py +91 -0
  7. datamint-2.0.1/datamint/api/dto/__init__.py +10 -0
  8. datamint-2.0.1/datamint/api/endpoints/__init__.py +17 -0
  9. datamint-2.0.1/datamint/api/endpoints/annotations_api.py +984 -0
  10. datamint-2.0.1/datamint/api/endpoints/channels_api.py +28 -0
  11. datamint-2.0.1/datamint/api/endpoints/datasetsinfo_api.py +16 -0
  12. datamint-2.0.1/datamint/api/endpoints/projects_api.py +203 -0
  13. datamint-2.0.1/datamint/api/endpoints/resources_api.py +1013 -0
  14. datamint-2.0.1/datamint/api/endpoints/users_api.py +38 -0
  15. datamint-2.0.1/datamint/api/entity_base_api.py +347 -0
  16. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/api_handler.py +3 -6
  17. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/base_api_handler.py +6 -28
  18. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/dto/annotation_dto.py +1 -1
  19. datamint-2.0.1/datamint/client_cmd_tools/__init__.py +0 -0
  20. {datamint-1.9.3 → datamint-2.0.1}/datamint/client_cmd_tools/datamint_upload.py +19 -30
  21. {datamint-1.9.3 → datamint-2.0.1}/datamint/dataset/base_dataset.py +83 -86
  22. {datamint-1.9.3 → datamint-2.0.1}/datamint/dataset/dataset.py +2 -2
  23. datamint-2.0.1/datamint/entities/__init__.py +20 -0
  24. datamint-2.0.1/datamint/entities/annotation.py +178 -0
  25. datamint-2.0.1/datamint/entities/base_entity.py +51 -0
  26. datamint-2.0.1/datamint/entities/channel.py +46 -0
  27. datamint-2.0.1/datamint/entities/datasetinfo.py +22 -0
  28. datamint-2.0.1/datamint/entities/project.py +64 -0
  29. datamint-2.0.1/datamint/entities/resource.py +130 -0
  30. datamint-2.0.1/datamint/entities/user.py +21 -0
  31. datamint-2.0.1/datamint/examples/example_projects.py +72 -0
  32. datamint-2.0.1/datamint/exceptions.py +31 -0
  33. {datamint-1.9.3 → datamint-2.0.1}/pyproject.toml +4 -2
  34. datamint-1.9.3/datamint/examples/example_projects.py +0 -75
  35. datamint-1.9.3/datamint/exceptions.py +0 -5
  36. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/annotation_api_handler.py +0 -0
  37. {datamint-1.9.3/datamint/client_cmd_tools → datamint-2.0.1/datamint/apihandler/dto}/__init__.py +0 -0
  38. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/exp_api_handler.py +0 -0
  39. {datamint-1.9.3 → datamint-2.0.1}/datamint/apihandler/root_api_handler.py +0 -0
  40. {datamint-1.9.3 → datamint-2.0.1}/datamint/client_cmd_tools/datamint_config.py +0 -0
  41. {datamint-1.9.3 → datamint-2.0.1}/datamint/configs.py +0 -0
  42. {datamint-1.9.3 → datamint-2.0.1}/datamint/dataset/__init__.py +0 -0
  43. {datamint-1.9.3 → datamint-2.0.1}/datamint/dataset/annotation.py +0 -0
  44. {datamint-1.9.3 → datamint-2.0.1}/datamint/examples/__init__.py +0 -0
  45. {datamint-1.9.3 → datamint-2.0.1}/datamint/experiment/__init__.py +0 -0
  46. {datamint-1.9.3 → datamint-2.0.1}/datamint/experiment/_patcher.py +0 -0
  47. {datamint-1.9.3 → datamint-2.0.1}/datamint/experiment/experiment.py +0 -0
  48. {datamint-1.9.3 → datamint-2.0.1}/datamint/logging.yaml +0 -0
  49. {datamint-1.9.3 → datamint-2.0.1}/datamint/utils/logging_utils.py +0 -0
  50. {datamint-1.9.3 → datamint-2.0.1}/datamint/utils/torchmetrics.py +0 -0
  51. {datamint-1.9.3 → datamint-2.0.1}/datamint/utils/visualization.py +0 -0
@@ -1,6 +1,6 @@
1
- Metadata-Version: 2.3
1
+ Metadata-Version: 2.4
2
2
  Name: datamint
3
- Version: 1.9.3
3
+ Version: 2.0.1
4
4
  Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
5
5
  Requires-Python: >=3.10
6
6
  Classifier: Programming Language :: Python :: 3
@@ -8,6 +8,7 @@ Classifier: Programming Language :: Python :: 3.10
8
8
  Classifier: Programming Language :: Python :: 3.11
9
9
  Classifier: Programming Language :: Python :: 3.12
10
10
  Classifier: Programming Language :: Python :: 3.13
11
+ Classifier: Programming Language :: Python :: 3.14
11
12
  Provides-Extra: dev
12
13
  Provides-Extra: docs
13
14
  Requires-Dist: Deprecated (>=1.2.0)
@@ -19,13 +20,14 @@ Requires-Dist: humanize (>=4.0.0,<5.0.0)
19
20
  Requires-Dist: lazy-loader (>=0.3.0)
20
21
  Requires-Dist: lightning
21
22
  Requires-Dist: matplotlib
22
- Requires-Dist: medimgkit (>=0.5.0)
23
+ Requires-Dist: medimgkit (>=0.6.0)
23
24
  Requires-Dist: nest-asyncio (>=1.0.0,<2.0.0)
24
25
  Requires-Dist: nibabel (>=4.0.0)
25
26
  Requires-Dist: numpy
26
27
  Requires-Dist: opencv-python (>=4.0.0)
27
28
  Requires-Dist: pandas (>=2.0.0)
28
29
  Requires-Dist: platformdirs (>=4.0.0,<5.0.0)
30
+ Requires-Dist: pydantic (>=2.6.4)
29
31
  Requires-Dist: pydicom (>=3.0.0,<4.0.0)
30
32
  Requires-Dist: pylibjpeg (>=2.0.0,<3.0.0)
31
33
  Requires-Dist: pylibjpeg-libjpeg (>=2.0.0,<3.0.0)
@@ -42,6 +44,7 @@ Requires-Dist: sphinx_rtd_theme (>=2.0.0) ; extra == "docs"
42
44
  Requires-Dist: torch (>=1.2.0,!=2.3.0)
43
45
  Requires-Dist: torchvision (>=0.18.0)
44
46
  Requires-Dist: tqdm (>=4.0.0,<5.0.0)
47
+ Requires-Dist: typing_extensions (>=4.0.0)
45
48
  Description-Content-Type: text/markdown
46
49
 
47
50
 
@@ -91,13 +94,13 @@ import os
91
94
  os.environ["DATAMINT_API_KEY"] = "my_api_key"
92
95
  ```
93
96
 
94
- ### Method 3: APIHandler constructor
97
+ ### Method 3: Api constructor
95
98
 
96
- Specify API key in the |APIHandlerClass| constructor:
99
+ Specify API key in the Api constructor:
97
100
 
98
101
  ```python
99
- from datamint import APIHandler
100
- api = APIHandler(api_key='my_api_key')
102
+ from datamint import Api
103
+ api = Api(api_key='my_api_key')
101
104
  ```
102
105
 
103
106
  ## Tutorials
@@ -110,8 +113,9 @@ You can find example notebooks in the `notebooks` folder:
110
113
 
111
114
  and example scripts in [examples](examples) folder:
112
115
 
113
- - [Running an experiment for classification](examples/experiment_traintest_classifier.py)
114
- - [Running an experiment for segmentation](examples/experiment_traintest_segmentation.py)
116
+ - [API usage examples](examples/api_usage.ipynb)
117
+ - [Project and entity usage](examples/project_entity_usage.ipynb)
118
+ - [Channels example](examples/channels_example.ipynb)
115
119
 
116
120
  ## Full documentation
117
121
 
@@ -45,13 +45,13 @@ import os
45
45
  os.environ["DATAMINT_API_KEY"] = "my_api_key"
46
46
  ```
47
47
 
48
- ### Method 3: APIHandler constructor
48
+ ### Method 3: Api constructor
49
49
 
50
- Specify API key in the |APIHandlerClass| constructor:
50
+ Specify API key in the Api constructor:
51
51
 
52
52
  ```python
53
- from datamint import APIHandler
54
- api = APIHandler(api_key='my_api_key')
53
+ from datamint import Api
54
+ api = Api(api_key='my_api_key')
55
55
  ```
56
56
 
57
57
  ## Tutorials
@@ -64,8 +64,9 @@ You can find example notebooks in the `notebooks` folder:
64
64
 
65
65
  and example scripts in [examples](examples) folder:
66
66
 
67
- - [Running an experiment for classification](examples/experiment_traintest_classifier.py)
68
- - [Running an experiment for segmentation](examples/experiment_traintest_segmentation.py)
67
+ - [API usage examples](examples/api_usage.ipynb)
68
+ - [Project and entity usage](examples/project_entity_usage.ipynb)
69
+ - [Channels example](examples/channels_example.ipynb)
69
70
 
70
71
  ## Full documentation
71
72
 
@@ -8,6 +8,7 @@ if TYPE_CHECKING:
8
8
  from .dataset.dataset import DatamintDataset as Dataset
9
9
  from .apihandler.api_handler import APIHandler
10
10
  from .experiment import Experiment
11
+ from .api.client import Api
11
12
  else:
12
13
  import lazy_loader as lazy
13
14
 
@@ -19,6 +20,7 @@ else:
19
20
  "dataset": ['Dataset'],
20
21
  "apihandler.api_handler": ["APIHandler"],
21
22
  "experiment": ["Experiment"],
23
+ "api.client": ["Api"],
22
24
  },
23
25
  )
24
26
 
@@ -0,0 +1,3 @@
1
+ from .client import Api
2
+
3
+ __all__ = ['Api']
@@ -0,0 +1,430 @@
1
+ import logging
2
+ from typing import Any, Generator, AsyncGenerator, Sequence
3
+ import httpx
4
+ from dataclasses import dataclass
5
+ from datamint.exceptions import DatamintException, ResourceNotFoundError
6
+ import aiohttp
7
+ import json
8
+ import pydicom.dataset
9
+ from PIL import Image
10
+ import cv2
11
+ import nibabel as nib
12
+ from nibabel.filebasedimages import FileBasedImage as nib_FileBasedImage
13
+ from io import BytesIO
14
+ import gzip
15
+ import contextlib
16
+ import asyncio
17
+
18
+ logger = logging.getLogger(__name__)
19
+
20
+ # Generic type for entities
21
+ _PAGE_LIMIT = 5000
22
+
23
+
24
+ @dataclass
25
+ class ApiConfig:
26
+ """Configuration for API client.
27
+
28
+ Attributes:
29
+ server_url: Base URL for the API.
30
+ api_key: Optional API key for authentication.
31
+ timeout: Request timeout in seconds.
32
+ max_retries: Maximum number of retries for requests.
33
+ """
34
+ server_url: str
35
+ api_key: str | None = None
36
+ timeout: float = 30.0
37
+ max_retries: int = 3
38
+
39
+
40
class BaseApi:
    """Base class for all API endpoint handlers.

    Provides synchronous (httpx) and asynchronous (aiohttp) request helpers with
    shared error handling, pagination, curl-debugging and payload-conversion
    utilities. Subclasses implement concrete endpoints on top of these.
    """

    def __init__(self,
                 config: ApiConfig,
                 client: httpx.Client | None = None) -> None:
        """Initialize the base API handler.

        Args:
            config: API configuration containing base URL, API key, etc.
            client: Optional HTTP client instance. If None, a new one will be created.
        """
        self.config = config
        self.client = client or self._create_client()
        # Caps concurrent async requests issued through _make_request_async.
        self.semaphore = asyncio.Semaphore(20)

    def _create_client(self) -> httpx.Client:
        """Create and configure an HTTP client with authentication and timeouts."""
        headers = None
        if self.config.api_key:
            headers = {"apikey": self.config.api_key}

        return httpx.Client(
            base_url=self.config.server_url,
            headers=headers,
            timeout=self.config.timeout
        )

    def _stream_request(self, method: str, endpoint: str, **kwargs):
        """Make streaming HTTP request with error handling.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE)
            endpoint: API endpoint path
            **kwargs: Additional arguments for the request

        Returns:
            HTTP response object configured for streaming

        Raises:
            httpx.HTTPStatusError: If the request fails

        Example:
            with api._stream_request('GET', '/large-file') as response:
                for chunk in response.iter_bytes():
                    process_chunk(chunk)
        """
        url = endpoint.lstrip('/')  # Remove leading slash for httpx

        try:
            return self.client.stream(method, url, **kwargs)
        except httpx.RequestError as e:
            logger.error(f"Request error for streaming {method} {endpoint}: {e}")
            raise

    def _make_request(self, method: str, endpoint: str, **kwargs) -> httpx.Response:
        """Make HTTP request with error handling and retries.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE)
            endpoint: API endpoint path
            **kwargs: Additional arguments for the request

        Returns:
            HTTP response object

        Raises:
            httpx.HTTPStatusError: If the request fails
        """
        url = endpoint.lstrip('/')  # Remove leading slash for httpx

        try:
            # Log a reproducible curl command first so failed requests can be replayed.
            curl_command = self._generate_curl_command({"method": method,
                                                        "url": url,
                                                        "headers": self.client.headers,
                                                        **kwargs}, fail_silently=True)
            logger.debug(f'Equivalent curl command: "{curl_command}"')
            response = self.client.request(method, url, **kwargs)
            response.raise_for_status()
            return response
        except httpx.HTTPStatusError as e:
            logger.error(f"HTTP error {e.response.status_code} for {method} {endpoint}: {e.response.text}")
            raise
        except httpx.RequestError as e:
            logger.error(f"Request error for {method} {endpoint}: {e}")
            raise

    def _generate_curl_command(self,
                               request_args: dict,
                               fail_silently: bool = False) -> str:
        """
        Generate a curl command for debugging purposes.

        Args:
            request_args (dict): Request arguments dictionary containing method, url, headers, etc.
            fail_silently (bool): When True, return a placeholder string instead of
                raising if the command cannot be generated.

        Returns:
            str: Equivalent curl command (the API key is masked).
        """
        try:
            method = request_args.get('method', 'GET').upper()
            url = request_args['url']
            headers = request_args.get('headers', {})
            data = request_args.get('json') or request_args.get('data')
            params = request_args.get('params')

            curl_command = ['curl']

            # Add method if not GET
            if method != 'GET':
                curl_command.extend(['-X', method])

            # Add headers
            for key, value in headers.items():
                if key.lower() == 'apikey':
                    value = '<YOUR-API-KEY>'  # Mask API key for security
                curl_command.extend(['-H', f"'{key}: {value}'"])

            # Add query parameters
            if params:
                param_str = '&'.join([f"{k}={v}" for k, v in params.items()])
                url = f"{url}?{param_str}"
            # Add URL
            curl_command.append(f"'{url}'")

            # Add data
            if data:
                if isinstance(data, aiohttp.FormData):  # Check if it's aiohttp.FormData
                    # Handle FormData by extracting fields.
                    # NOTE: _fields is a private aiohttp attribute; its layout is
                    # (type_options, part_headers, value) per field.
                    form_parts = []
                    for options, part_headers, value in data._fields:
                        # get the name from options
                        name = options.get('name', 'file')
                        if hasattr(value, 'read'):  # File-like object
                            # Fixed: previously the computed filename was never used and
                            # the literal placeholder was emitted instead.
                            filename = getattr(value, 'name', 'file')
                            form_parts.extend(['-F', f"'{name}=@{filename}'"])
                        else:
                            form_parts.extend(['-F', f"'{name}={value}'"])
                    curl_command.extend(form_parts)
                elif isinstance(data, dict):
                    curl_command.extend(['-d', f"'{json.dumps(data)}'"])
                else:
                    curl_command.extend(['-d', f"'{data}'"])

            return ' '.join(curl_command)
        except Exception as e:
            if fail_silently:
                logger.debug(f"Error generating curl command: {e}")
                return "<error generating curl command>"
            raise

    @staticmethod
    def get_status_code(e: httpx.HTTPStatusError | aiohttp.ClientResponseError) -> int:
        """Extract the HTTP status code from either an httpx or aiohttp exception.

        Returns -1 when no status code can be determined.
        """
        if hasattr(e, 'response') and e.response is not None:
            # httpx.HTTPStatusError
            return e.response.status_code
        if hasattr(e, 'status'):
            # aiohttp.ClientResponseError
            return e.status
        if hasattr(e, 'status_code'):
            return e.status_code
        logger.debug(f"Unable to get status code from exception of type {type(e)}")
        return -1

    @staticmethod
    def _has_status_code(e: httpx.HTTPError | aiohttp.ClientResponseError,
                         status_code: int) -> bool:
        """Return True if the exception carries the given HTTP status code."""
        return BaseApi.get_status_code(e) == status_code

    def _check_errors_response(self,
                               response: httpx.Response | aiohttp.ClientResponse,
                               url: str):
        """Raise on HTTP error responses, translating 4xx 'not found' errors.

        Raises:
            ResourceNotFoundError: When a 4xx response message contains ' not found'.
                Raised with placeholder arguments; callers re-initialize it properly.
            httpx.HTTPStatusError | aiohttp.ClientResponseError: For other failures.
        """
        try:
            response.raise_for_status()
        except (httpx.HTTPStatusError, aiohttp.ClientResponseError) as e:
            logger.error(f"HTTP error occurred: {e}")
            status_code = BaseApi.get_status_code(e)
            if status_code >= 500 and status_code < 600:
                logger.error(f"Error in request to {url}: {e}")
            if status_code >= 400 and status_code < 500:
                if isinstance(e, aiohttp.ClientResponseError):
                    # aiohttp.ClientResponse does not have .text or .json() methods directly
                    error_msg = e.message
                else:
                    error_msg = e.response.text
                logger.info(f"Error response: {error_msg}")
                if ' not found' in error_msg.lower():
                    # Will be caught by the caller and properly initialized:
                    raise ResourceNotFoundError('unknown', {})
            raise

    @contextlib.asynccontextmanager
    async def _make_request_async(self,
                                  method: str,
                                  endpoint: str,
                                  session: aiohttp.ClientSession | None = None,
                                  **kwargs) -> AsyncGenerator[aiohttp.ClientResponse, None]:
        """Make asynchronous HTTP request with error handling as an async context manager.

        Args:
            method: HTTP method (GET, POST, PUT, DELETE)
            endpoint: API endpoint path
            session: Optional aiohttp session. If None, a new one will be created.
            **kwargs: Additional arguments for the request

        Yields:
            An aiohttp.ClientResponse object.

        Raises:
            aiohttp.ClientError: If the request fails

        Example:
            .. code-block:: python

                async with api._make_request_async('GET', '/data') as response:
                    data = await response.json()
        """

        if session is None:
            # No session supplied: create a temporary one and recurse so the
            # session's lifetime wraps the whole request/response cycle.
            async with aiohttp.ClientSession() as temp_session:
                async with self._make_request_async(method, endpoint, temp_session, **kwargs) as resp:
                    yield resp
            return

        url = f"{self.config.server_url.rstrip('/')}/{endpoint.lstrip('/')}"

        headers = kwargs.pop('headers', {})
        if self.config.api_key:
            headers['apikey'] = self.config.api_key

        timeout = aiohttp.ClientTimeout(total=self.config.timeout)

        response = None
        curl_cmd = self._generate_curl_command(
            {"method": method, "url": url, "headers": headers, **kwargs},
            fail_silently=True
        )
        logger.debug(f'Equivalent curl command: "{curl_cmd}"')
        # Semaphore bounds the number of in-flight async requests (20).
        async with self.semaphore:
            try:
                response = await session.request(
                    method=method,
                    url=url,
                    headers=headers,
                    timeout=timeout,
                    **kwargs
                )
                self._check_errors_response(response, url=url)
                yield response
            except aiohttp.ClientError as e:
                logger.error(f"Request error for {method} {endpoint}: {e}")
                raise
            finally:
                if response is not None:
                    response.release()

    async def _make_request_async_json(self,
                                       method: str,
                                       endpoint: str,
                                       session: aiohttp.ClientSession | None = None,
                                       **kwargs):
        """Make asynchronous HTTP request and parse JSON response.

        Args:
            method: HTTP method (GET, POST, etc.)
            endpoint: API endpoint path
            session: Optional aiohttp session. If None, a new one will be created.
            **kwargs: Additional arguments for the request

        Returns:
            Parsed JSON response or error information.
        """
        async with self._make_request_async(method, endpoint, session=session, **kwargs) as resp:
            return await resp.json()

    def _make_request_with_pagination(self,
                                      method: str,
                                      endpoint: str,
                                      return_field: str | None = None,
                                      limit: int | None = None,
                                      **kwargs
                                      ) -> Generator[tuple[httpx.Response, list | dict | str], None, None]:
        """Make paginated HTTP requests, yielding each page of results.

        Args:
            method: HTTP method (GET, POST, etc.)
            endpoint: API endpoint path
            return_field: Optional field name to extract from each item in the response
            limit: Optional maximum number of items to retrieve
            **kwargs: Additional arguments for the request (e.g., params, json)

        Yields:
            Tuples of (HTTP response, items from the current page `response.json()`, for convenience)
        """
        offset = 0
        total_fetched = 0
        params = dict(kwargs.get('params', {}))
        # Ensure kwargs carries our params reference so mutations below take effect
        kwargs['params'] = params

        while True:
            if limit is not None and total_fetched >= limit:
                break

            # Request at most _PAGE_LIMIT items, or fewer when approaching `limit`.
            page_limit = _PAGE_LIMIT
            if limit is not None:
                remaining = limit - total_fetched
                page_limit = min(_PAGE_LIMIT, remaining)

            params['offset'] = offset
            params['limit'] = page_limit

            response = self._make_request(method=method,
                                          endpoint=endpoint,
                                          **kwargs)
            items = self._convert_array_response(response.json(), return_field=return_field)

            if not items:
                break

            items_to_yield = items
            if limit is not None:
                # This ensures we don't yield more than the limit if the API returns more than requested in the last page
                items_to_yield = items[:limit - total_fetched]

            yield response, items_to_yield
            total_fetched += len(items_to_yield)

            # A short page means the server has no more data.
            if len(items) < _PAGE_LIMIT:
                break

            offset += len(items)

    def _convert_array_response(self,
                                data: dict | list,
                                return_field: str | None = None) -> list | dict | str:
        """Normalize array-like responses into a list when possible.

        Args:
            data: Parsed JSON response.
            return_field: Preferred top-level field to extract when present.

        Returns:
            A list of items when identifiable, otherwise the original data.
        """
        if isinstance(data, list):
            items = data
        else:
            if 'data' in data:
                items = data['data']
            elif 'items' in data:
                items = data['items']
            else:
                # Unrecognized envelope: hand back the raw payload unchanged.
                return data
        if return_field is not None:
            # Some endpoints wrap the real array one level deeper, alongside a totalCount.
            if 'totalCount' in data and len(items) == 1 and return_field in items[0]:
                items = items[0][return_field]
        return items

    @staticmethod
    def convert_format(bytes_array: bytes,
                       mimetype: str,
                       file_path: str | None = None
                       ) -> pydicom.dataset.Dataset | Image.Image | cv2.VideoCapture | bytes | nib_FileBasedImage:
        """Convert the bytes array to the appropriate format based on the mimetype.

        Args:
            bytes_array: Raw payload bytes.
            mimetype: MIME type describing the payload.
            file_path: Optional on-disk path; required for video payloads and used
                as a fallback for NIfTI files that fail to parse from memory.

        Raises:
            NotImplementedError: For video mimetypes when file_path is None.
            ValueError: When the mimetype is not supported.
        """
        content_io = BytesIO(bytes_array)
        if mimetype.endswith('/dicom'):
            return pydicom.dcmread(content_io)
        elif mimetype.startswith('image/'):
            return Image.open(content_io)
        elif mimetype.startswith('video/'):
            if file_path is None:
                raise NotImplementedError("file_path=None is not implemented yet for video/* mimetypes.")
            return cv2.VideoCapture(file_path)
        elif mimetype == 'application/json':
            return json.loads(bytes_array)
        elif mimetype == 'application/octet-stream':
            return bytes_array
        elif mimetype.endswith('nifti'):
            try:
                return nib.Nifti1Image.from_stream(content_io)
            except Exception as e:
                if file_path is not None:
                    return nib.load(file_path)
                raise e
        elif mimetype == 'application/gzip':
            # let's hope it's a .nii.gz
            with gzip.open(content_io, 'rb') as f:
                return nib.Nifti1Image.from_stream(f)

        raise ValueError(f"Unsupported mimetype: {mimetype}")
@@ -0,0 +1,91 @@
1
+ from typing import Optional
2
+ import httpx
3
+ from .base_api import ApiConfig
4
+ from .endpoints import ProjectsApi, ResourcesApi, AnnotationsApi, ChannelsApi, UsersApi, DatasetsInfoApi
5
+ import datamint.configs
6
+ from datamint.exceptions import DatamintException
7
+ import asyncio
8
+
9
+
10
class Api:
    """Main API client that provides access to all endpoint handlers.

    Endpoint handlers (projects, resources, annotations, ...) are created
    lazily on first access and cached for the lifetime of this client.
    """
    DEFAULT_SERVER_URL = 'https://api.datamint.io'
    # Name of the environment variable holding the API key.
    DATAMINT_API_VENV_NAME = datamint.configs.ENV_VARS[datamint.configs.APIKEY_KEY]

    # Maps endpoint attribute names to their handler classes.
    _API_MAP = {
        'projects': ProjectsApi,
        'resources': ResourcesApi,
        'annotations': AnnotationsApi,
        'channels': ChannelsApi,
        'users': UsersApi,
        'datasets': DatasetsInfoApi
    }

    def __init__(self,
                 server_url: str | None = None,
                 api_key: Optional[str] = None,
                 timeout: float = 60.0, max_retries: int = 2,
                 check_connection: bool = True) -> None:
        """Initialize the API client.

        Args:
            server_url: Base URL of the Datamint server. Falls back to the
                configured value, then to :attr:`DEFAULT_SERVER_URL`.
            api_key: API key for authentication. Falls back to the configured
                value (e.g. the environment variable named by
                :attr:`DATAMINT_API_VENV_NAME`).
            timeout: Request timeout in seconds.
            max_retries: Maximum number of retry attempts.
            check_connection: When True, issue a lightweight request to verify
                the credentials and server reachability.

        Raises:
            DatamintException: If no API key can be found, or if the connection
                check fails.
        """
        if server_url is None:
            server_url = datamint.configs.get_value(datamint.configs.APIURL_KEY)
            if server_url is None:
                server_url = Api.DEFAULT_SERVER_URL
        server_url = server_url.rstrip('/')
        if api_key is None:
            api_key = datamint.configs.get_value(datamint.configs.APIKEY_KEY)
            if api_key is None:
                msg = f"API key not provided! Use the environment variable " + \
                    f"{Api.DATAMINT_API_VENV_NAME} or pass it as an argument."
                raise DatamintException(msg)
        self.config = ApiConfig(
            server_url=server_url,
            api_key=api_key,
            timeout=timeout,
            max_retries=max_retries
        )
        self._client = None  # Each endpoint handler creates its own httpx client.
        self._endpoints = {}  # Lazily-built cache of endpoint handler instances.
        if check_connection:
            self.check_connection()

    def check_connection(self):
        """Verify connectivity and credentials with a minimal API request.

        Raises:
            DatamintException: If the request fails for any reason; the original
                exception is kept as the cause.
        """
        try:
            self.projects.get_list(limit=1)
        except Exception as e:
            # Chain the original exception so the root cause stays visible.
            raise DatamintException("Error connecting to the Datamint API." +
                                    f" Please check your api_key and/or other configurations. {e}") from e

    def _get_endpoint(self, name: str):
        """Return the cached endpoint handler for `name`, creating it on first use."""
        if name not in self._endpoints:
            api_class = self._API_MAP[name]
            self._endpoints[name] = api_class(self.config, self._client)
        return self._endpoints[name]

    @property
    def projects(self) -> ProjectsApi:
        """Endpoint handler for project operations."""
        return self._get_endpoint('projects')

    @property
    def resources(self) -> ResourcesApi:
        """Endpoint handler for resource operations."""
        return self._get_endpoint('resources')

    @property
    def annotations(self) -> AnnotationsApi:
        """Endpoint handler for annotation operations."""
        return self._get_endpoint('annotations')

    @property
    def channels(self) -> ChannelsApi:
        """Endpoint handler for channel operations."""
        return self._get_endpoint('channels')

    @property
    def users(self) -> UsersApi:
        """Endpoint handler for user operations."""
        return self._get_endpoint('users')

    @property
    def _datasetsinfo(self) -> DatasetsInfoApi:
        """Internal property to access DatasetsInfoApi."""
        return self._get_endpoint('datasets')
@@ -0,0 +1,10 @@
1
+ from datamint.apihandler.dto import annotation_dto
2
+ from datamint.apihandler.dto.annotation_dto import AnnotationType, CreateAnnotationDto, Geometry, BoxGeometry
3
+
4
+ __all__ = [
5
+ "annotation_dto",
6
+ "AnnotationType",
7
+ "CreateAnnotationDto",
8
+ "Geometry",
9
+ "BoxGeometry",
10
+ ]
@@ -0,0 +1,17 @@
1
+ """API endpoint handlers."""
2
+
3
+ from .annotations_api import AnnotationsApi
4
+ from .channels_api import ChannelsApi
5
+ from .projects_api import ProjectsApi
6
+ from .resources_api import ResourcesApi
7
+ from .users_api import UsersApi
8
+ from .datasetsinfo_api import DatasetsInfoApi
9
+
10
+ __all__ = [
11
+ 'AnnotationsApi',
12
+ 'ChannelsApi',
13
+ 'ProjectsApi',
14
+ 'ResourcesApi',
15
+ 'UsersApi',
16
+ 'DatasetsInfoApi'
17
+ ]