datamint 1.9.2__py3-none-any.whl → 2.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of datamint has been flagged as potentially problematic.

Files changed (40)
  1. datamint/__init__.py +2 -0
  2. datamint/api/__init__.py +3 -0
  3. datamint/api/base_api.py +430 -0
  4. datamint/api/client.py +91 -0
  5. datamint/api/dto/__init__.py +10 -0
  6. datamint/api/endpoints/__init__.py +17 -0
  7. datamint/api/endpoints/annotations_api.py +984 -0
  8. datamint/api/endpoints/channels_api.py +28 -0
  9. datamint/api/endpoints/datasetsinfo_api.py +16 -0
  10. datamint/api/endpoints/projects_api.py +203 -0
  11. datamint/api/endpoints/resources_api.py +1013 -0
  12. datamint/api/endpoints/users_api.py +38 -0
  13. datamint/api/entity_base_api.py +347 -0
  14. datamint/apihandler/annotation_api_handler.py +5 -5
  15. datamint/apihandler/api_handler.py +3 -6
  16. datamint/apihandler/base_api_handler.py +6 -28
  17. datamint/apihandler/dto/__init__.py +0 -0
  18. datamint/apihandler/dto/annotation_dto.py +1 -1
  19. datamint/apihandler/root_api_handler.py +53 -28
  20. datamint/client_cmd_tools/datamint_config.py +6 -37
  21. datamint/client_cmd_tools/datamint_upload.py +84 -58
  22. datamint/dataset/base_dataset.py +65 -75
  23. datamint/dataset/dataset.py +2 -2
  24. datamint/entities/__init__.py +20 -0
  25. datamint/entities/annotation.py +178 -0
  26. datamint/entities/base_entity.py +51 -0
  27. datamint/entities/channel.py +46 -0
  28. datamint/entities/datasetinfo.py +22 -0
  29. datamint/entities/project.py +64 -0
  30. datamint/entities/resource.py +130 -0
  31. datamint/entities/user.py +21 -0
  32. datamint/examples/example_projects.py +41 -44
  33. datamint/exceptions.py +27 -1
  34. datamint/logging.yaml +1 -1
  35. datamint/utils/logging_utils.py +75 -0
  36. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/METADATA +13 -9
  37. datamint-2.0.0.dist-info/RECORD +50 -0
  38. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/WHEEL +1 -1
  39. datamint-1.9.2.dist-info/RECORD +0 -29
  40. {datamint-1.9.2.dist-info → datamint-2.0.0.dist-info}/entry_points.txt +0 -0
datamint/api/endpoints/users_api.py
@@ -0,0 +1,38 @@
+ from ..entity_base_api import CreatableEntityApi, ApiConfig
+ from datamint.entities import User
+ import httpx
+
+
+ class UsersApi(CreatableEntityApi[User]):
+     def __init__(self,
+                  config: ApiConfig,
+                  client: httpx.Client | None = None) -> None:
+         super().__init__(config, User, 'users', client)
+
+     def create(self,
+                email: str,
+                password: str | None = None,
+                firstname: str | None = None,
+                lastname: str | None = None,
+                roles: list[str] | None = None
+                ) -> str:
+         """Create a new user.
+
+         Args:
+             email: The user's email address.
+             password: The user's password. If None, a random password will be generated.
+             firstname: The user's first name.
+             lastname: The user's last name.
+             roles: List of roles to assign to the user.
+
+         Returns:
+             The id of the created user.
+         """
+         data = dict(
+             email=email,
+             password=password,
+             firstname=firstname,
+             lastname=lastname,
+             roles=roles
+         )
+         return self._create(data)
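
The new UsersApi is a thin CreatableEntityApi subclass: construct it from an ApiConfig and call create() with keyword arguments. A minimal usage sketch follows; the import paths are inferred from the file list above, and the ApiConfig construction is a placeholder, since base_api.py is not shown in this diff:

    from datamint.api.endpoints import UsersApi  # assumed re-export; the class lives in datamint/api/endpoints/users_api.py
    from datamint.api.base_api import ApiConfig

    config = ApiConfig(...)  # placeholder: ApiConfig's fields are defined in base_api.py, not shown here
    users = UsersApi(config)

    # create() returns the id of the new user; leaving password=None lets the server generate one
    user_id = users.create(
        email="jane.doe@example.com",
        firstname="Jane",
        lastname="Doe",
        roles=["annotator"],
    )
    print(user_id)
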
datamint/api/entity_base_api.py
@@ -0,0 +1,347 @@
+ from typing import Any, TypeVar, Generic, Type, Sequence
+ import logging
+ import httpx
+ from dataclasses import dataclass
+ from datamint.entities.base_entity import BaseEntity
+ from datamint.exceptions import DatamintException, ResourceNotFoundError
+ import aiohttp
+ import asyncio
+ from .base_api import ApiConfig, BaseApi
+ import contextlib
+ from typing import AsyncGenerator
+
+ logger = logging.getLogger(__name__)
+ T = TypeVar('T', bound=BaseEntity)
+
+
+ class EntityBaseApi(BaseApi, Generic[T]):
+     """Base API handler for entity-related endpoints with CRUD operations.
+
+     This class provides a template for API handlers that work with specific
+     entity types, offering common CRUD operations with proper typing.
+
+     Type Parameters:
+         T: The entity type this API handler manages (must extend BaseEntity)
+     """
+
+     def __init__(self, config: ApiConfig,
+                  entity_class: Type[T],
+                  endpoint_base: str,
+                  client: httpx.Client | None = None) -> None:
+         """Initialize the entity API handler.
+
+         Args:
+             config: API configuration containing base URL, API key, etc.
+             entity_class: The entity class this handler manages
+             endpoint_base: Base endpoint path (e.g., 'projects', 'annotations')
+             client: Optional HTTP client instance. If None, a new one will be created.
+         """
+         super().__init__(config, client)
+         self.entity_class = entity_class
+         self.endpoint_base = endpoint_base.strip('/')
+
+     @staticmethod
+     def _entid(entity: BaseEntity | str) -> str:
+         return entity if isinstance(entity, str) else entity.id
+
+     def _make_entity_request(self,
+                              method: str,
+                              entity_id: str | BaseEntity,
+                              add_path: str = '',
+                              **kwargs) -> httpx.Response:
+         try:
+             entity_id = self._entid(entity_id)
+             add_path = '/'.join(add_path.strip().strip('/').split('/'))
+             return self._make_request(method, f'/{self.endpoint_base}/{entity_id}/{add_path}', **kwargs)
+         except httpx.HTTPStatusError as e:
+             if e.response.status_code == 404:
+                 raise ResourceNotFoundError(self.endpoint_base, {'id': entity_id}) from e
+             raise
+
+     @contextlib.asynccontextmanager
+     async def _make_entity_request_async(self,
+                                          method: str,
+                                          entity_id: str | BaseEntity,
+                                          add_path: str = '',
+                                          session: aiohttp.ClientSession | None = None,
+                                          **kwargs) -> AsyncGenerator[aiohttp.ClientResponse, None]:
+         try:
+             entity_id = self._entid(entity_id)
+             add_path = '/'.join(add_path.strip().strip('/').split('/'))
+             async with self._make_request_async(method,
+                                                 f'/{self.endpoint_base}/{entity_id}/{add_path}',
+                                                 session=session,
+                                                 **kwargs) as resp:
+                 yield resp
+         except aiohttp.ClientResponseError as e:
+             if e.status == 404:
+                 raise ResourceNotFoundError(self.endpoint_base, {'id': entity_id}) from e
+             raise
+
+     def _stream_entity_request(self,
+                                method: str,
+                                entity_id: str,
+                                add_path: str = '',
+                                **kwargs):
+         try:
+             add_path = '/'.join(add_path.strip().strip('/').split('/'))
+             return self._stream_request(method, f'/{self.endpoint_base}/{entity_id}/{add_path}', **kwargs)
+         except httpx.HTTPStatusError as e:
+             if e.response.status_code == 404:
+                 raise ResourceNotFoundError(self.endpoint_base, {'id': entity_id}) from e
+             raise
+
+     def get_list(self, limit: int | None = None,
+                  **kwargs) -> Sequence[T]:
+         """Get entities with optional filtering.
+
+         Returns:
+             List of entity instances.
+
+         Raises:
+             httpx.HTTPStatusError: If the request fails.
+         """
+         new_kwargs = dict(kwargs)
+
+         # Remove None values from the payload.
+         for k in list(new_kwargs.keys()):
+             if new_kwargs[k] is None:
+                 del new_kwargs[k]
+
+         items_gen = self._make_request_with_pagination('GET', f'/{self.endpoint_base}',
+                                                        return_field=self.endpoint_base,
+                                                        limit=limit,
+                                                        **new_kwargs)
+
+         all_items = []
+         for resp, items in items_gen:
+             all_items.extend(items)
+
+         return [self.entity_class(**item) for item in all_items]
+
+     def get_all(self, limit: int | None = None) -> Sequence[T]:
+         """Get all entities with optional pagination and filtering.
+
+         Returns:
+             List of entity instances
+
+         Raises:
+             httpx.HTTPStatusError: If the request fails
+         """
+         return self.get_list(limit=limit)
+
+     def get_by_id(self, entity_id: str) -> T:
+         """Get a specific entity by its ID.
+
+         Args:
+             entity_id: Unique identifier for the entity.
+
+         Returns:
+             Entity instance.
+
+         Raises:
+             httpx.HTTPStatusError: If the entity is not found or request fails.
+         """
+         response = self._make_entity_request('GET', entity_id)
+         return self.entity_class(**response.json())
+
+     async def _create_async(self, entity_data: dict[str, Any]) -> str | Sequence[str | dict]:
+         """Create a new entity.
+
+         Args:
+             entity_data: Dictionary containing entity data for creation.
+
+         Returns:
+             The id of the created entity.
+
+         Raises:
+             httpx.HTTPStatusError: If creation fails.
+         """
+         respdata = await self._make_request_async_json('POST',
+                                                        f'/{self.endpoint_base}',
+                                                        json=entity_data)
+         if 'error' in respdata:
+             raise DatamintException(respdata['error'])
+         if isinstance(respdata, str):
+             return respdata
+         if isinstance(respdata, list):
+             return respdata
+         if isinstance(respdata, dict):
+             return respdata.get('id')
+         return respdata
+
+     def _get_child_entities(self,
+                             parent_entity: BaseEntity | str,
+                             child_entity_name: str) -> httpx.Response:
+         response = self._make_entity_request('GET', parent_entity,
+                                              add_path=child_entity_name)
+         return response
+
+     # def bulk_create(self, entities_data: list[dict[str, Any]]) -> list[T]:
+     #     """Create multiple entities in a single request.
+
+     #     Args:
+     #         entities_data: List of dictionaries containing entity data
+
+     #     Returns:
+     #         List of created entity instances
+
+     #     Raises:
+     #         httpx.HTTPStatusError: If bulk creation fails
+     #     """
+     #     payload = {'items': entities_data}  # Common bulk API format
+     #     response = self._make_request('POST', f'/{self.endpoint_base}/bulk', json=payload)
+     #     data = response.json()
+
+     #     # Handle response format - may be direct list or wrapped
+     #     items = data if isinstance(data, list) else data.get('items', [])
+     #     return [self.entity_class(**item) for item in items]
+
+     # def count(self, **params: Any) -> int:
+     #     """Get the total count of entities matching the given filters.
+
+     #     Args:
+     #         **params: Query parameters for filtering
+
+     #     Returns:
+     #         Total count of matching entities
+
+     #     Raises:
+     #         httpx.HTTPStatusError: If the request fails
+     #     """
+     #     response = self._make_request('GET', f'/{self.endpoint_base}/count', params=params)
+     #     data = response.json()
+     #     return data.get('count', 0) if isinstance(data, dict) else data
+
+
+ class DeletableEntityApi(EntityBaseApi[T]):
+     """Extension of EntityBaseApi for entities that support soft deletion.
+
+     This class adds methods to handle soft-deleted entities, allowing
+     retrieval and restoration of such entities.
+     """
+
+     def delete(self, entity: str | BaseEntity) -> None:
+         """Delete an entity.
+
+         Args:
+             entity: Unique identifier for the entity to delete or the entity instance itself.
+
+         Raises:
+             httpx.HTTPStatusError: If deletion fails or entity not found
+         """
+         self._make_entity_request('DELETE', entity)
+
+     def bulk_delete(self, entities: Sequence[str | BaseEntity]) -> None:
+         """Delete multiple entities.
+
+         Args:
+             entities: Sequence of unique identifiers for the entities to delete or the entity instances themselves.
+
+         Raises:
+             httpx.HTTPStatusError: If deletion fails or any entity not found
+         """
+         async def _delete_all_async():
+             async with aiohttp.ClientSession() as session:
+                 tasks = [
+                     self._delete_async(entity, session)
+                     for entity in entities
+                 ]
+                 await asyncio.gather(*tasks)
+
+         loop = asyncio.get_event_loop()
+         loop.run_until_complete(_delete_all_async())
+
+     async def _delete_async(self,
+                             entity: str | BaseEntity,
+                             session: aiohttp.ClientSession | None = None) -> None:
+         """Asynchronously delete an entity by its ID.
+
+         Args:
+             entity: Unique identifier for the entity to delete or the entity instance itself.
+
+         Raises:
+             httpx.HTTPStatusError: If deletion fails or entity not found
+         """
+         async with self._make_entity_request_async('DELETE', entity,
+                                                    session=session) as resp:
+             await resp.text()  # Consume response to complete request
+
+     # def get_deleted(self, **kwargs) -> Sequence[T]:
+     #     pass
+
+     # def restore(self, entity_id: str | BaseEntity) -> T:
+     #     pass
+
+
+ class CreatableEntityApi(EntityBaseApi[T]):
+     """Extension of EntityBaseApi for entities that support creation.
+
+     This class adds methods to handle creation of new entities.
+     """
+
+     def _create(self, entity_data: dict[str, Any]) -> str | list[str | dict]:
+         """Create a new entity.
+
+         Args:
+             entity_data: Dictionary containing entity data for creation.
+
+         Returns:
+             The id of the created entity.
+
+         Raises:
+             httpx.HTTPStatusError: If creation fails.
+         """
+         response = self._make_request('POST', f'/{self.endpoint_base}', json=entity_data)
+         respdata = response.json()
+         if isinstance(respdata, str):
+             return respdata
+         if isinstance(respdata, list):
+             return respdata
+         if isinstance(respdata, dict):
+             return respdata.get('id')
+         return respdata
+
+     def create(self, *args, **kwargs) -> str | T:
+         raise NotImplementedError("Subclasses must implement the create method with their own custom parameters")
+
+
+ class UpdatableEntityApi(EntityBaseApi[T]):
+     # def update(self, entity_id: str, entity_data: dict[str, Any]):
+     #     """Update an existing entity.
+
+     #     Args:
+     #         entity_id: Unique identifier for the entity.
+     #         entity_data: Dictionary containing updated entity data.
+
+     #     Returns:
+     #         Updated entity instance.
+
+     #     Raises:
+     #         httpx.HTTPStatusError: If update fails or entity not found.
+     #     """
+     #     self._make_entity_request('PUT', entity_id, json=entity_data)
+
+     def patch(self, entity: str | T, entity_data: dict[str, Any]):
+         """Partially update an existing entity.
+
+         Args:
+             entity: Unique identifier for the entity or the entity instance.
+             entity_data: Dictionary containing fields to update. Only provided fields will be updated.
+
+         Returns:
+             Updated entity instance.
+
+         Raises:
+             httpx.HTTPStatusError: If update fails or entity not found.
+         """
+         self._make_entity_request('PATCH', entity, json=entity_data)
+
+     def partial_update(self, entity: str | T, entity_data: dict[str, Any]):
+         """Alias for :py:meth:`patch` to partially update an entity."""
+         return self.patch(entity, entity_data)
+
+
+ class CRUDEntityApi(CreatableEntityApi[T], UpdatableEntityApi[T], DeletableEntityApi[T]):
+     """Full CRUD API handler for entities supporting create, read, update, delete operations."""
+     pass
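
entity_base_api.py is the backbone of the new datamint.api package: a concrete endpoint only supplies its entity class and endpoint path, and inherits pagination (get_list), 404-to-ResourceNotFoundError translation, PATCH support and deletion from the mixins above. A hedged sketch of what a concrete subclass looks like, modeled on the UsersApi hunk above; the Channel entity and the 'channels' path come from the file list, but this is an illustration, not the actual channels_api.py shipped in 2.0.0:

    import httpx

    from datamint.api.entity_base_api import ApiConfig, CRUDEntityApi  # ApiConfig is re-exported here via base_api
    from datamint.entities import Channel


    class ChannelsApi(CRUDEntityApi[Channel]):
        """Illustrative CRUD handler for the 'channels' endpoint."""

        def __init__(self, config: ApiConfig, client: httpx.Client | None = None) -> None:
            # the entity class and the endpoint path are all the generic base classes need
            super().__init__(config, Channel, 'channels', client)

        def create(self, name: str) -> str:
            # the base create() raises NotImplementedError; subclasses define their own parameters.
            # The 'name' field is an assumption for illustration only.
            return self._create({'name': name})

Reading, patching and deleting then come for free: get_by_id(), get_list(), patch(), delete() and bulk_delete() are inherited unchanged.
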
datamint/apihandler/annotation_api_handler.py
@@ -995,11 +995,11 @@ class AnnotationAPIHandler(BaseAPIHandler):

      def update_annotation_worklist(self,
                                     worklist_id: str,
-                                    frame_labels: list[str] = None,
-                                    image_labels: list[str] = None,
-                                    annotations: list[dict] = None,
-                                    status: Literal['new', 'updating', 'active', 'completed'] = None,
-                                    name: str = None,
+                                    frame_labels: list[str] | None = None,
+                                    image_labels: list[str] | None = None,
+                                    annotations: list[dict] | None = None,
+                                    status: Literal['new', 'updating', 'active', 'completed'] | None = None,
+                                    name: str | None = None,
                                     ):
          """
          Update the status of an annotation worklist.
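
Only the typing changes here: the optional parameters of update_annotation_worklist are now annotated as `... | None` instead of relying on a bare None default, so call sites are unchanged. A hedged call sketch, assuming an AnnotationAPIHandler (or the deprecated APIHandler) instance named api and an existing worklist id:

    # Pass only the fields you want to change; the rest default to None
    # (whether None fields are left untouched is an assumption, the method body is not shown in this diff).
    api.update_annotation_worklist(
        worklist_id="<worklist-id>",
        status="completed",
        name="Chest X-ray batch 3",
    )
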
datamint/apihandler/api_handler.py
@@ -1,15 +1,12 @@
  from .root_api_handler import RootAPIHandler
  from .annotation_api_handler import AnnotationAPIHandler
  from .exp_api_handler import ExperimentAPIHandler
+ from deprecated.sphinx import deprecated


+ @deprecated(reason="Please use `from datamint import Api` instead.", version="2.0.0")
  class APIHandler(RootAPIHandler, ExperimentAPIHandler, AnnotationAPIHandler):
      """
-     Import using this code:
-
-     .. code-block:: python
-
-         from datamint import APIHandler
-         api = APIHandler()
+     Deprecated. Use `from datamint import Api` instead.
      """
      pass
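
APIHandler survives only as a deprecated shell: the @deprecated decorator and the rewritten docstring both point at the new entry point named in the reason string. A hedged migration sketch; only the `from datamint import Api` import is taken from this diff, while the constructor arguments of Api are an assumption (its implementation lives in datamint/api/client.py, not shown here):

    # 1.x style: still works in 2.0.0 but emits a deprecation warning
    from datamint import APIHandler
    api = APIHandler()

    # 2.0.0 style: the replacement named in the deprecation message
    from datamint import Api
    api = Api()  # exact constructor parameters are not shown in this diff
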
datamint/apihandler/base_api_handler.py
@@ -15,7 +15,8 @@ import nibabel as nib
  from nibabel.filebasedimages import FileBasedImage as nib_FileBasedImage
  from datamint import configs
  import gzip
- from datamint.exceptions import DatamintException
+ from datamint.exceptions import DatamintException, ResourceNotFoundError
+ from deprecated.sphinx import deprecated

  _LOGGER = logging.getLogger(__name__)

@@ -30,33 +31,7 @@ ResourceFields: TypeAlias = Literal['modality', 'created_by', 'published_by', 'p
  _PAGE_LIMIT = 5000


- class ResourceNotFoundError(DatamintException):
-     """
-     Exception raised when a resource is not found.
-     For instance, when trying to get a resource by a non-existing id.
-     """
-
-     def __init__(self,
-                  resource_type: str,
-                  params: dict):
-         """ Constructor.
-
-         Args:
-             resource_type (str): A resource type.
-             params (dict): Dict of params identifying the sought resource.
-         """
-         super().__init__()
-         self.resource_type = resource_type
-         self.params = params
-
-     def set_params(self, resource_type: str, params: dict):
-         self.resource_type = resource_type
-         self.params = params
-
-     def __str__(self):
-         return f"Resource '{self.resource_type}' not found for parameters: {self.params}"
-
-
+ @deprecated(reason="Please use `from datamint import Api` instead.", version="2.0.0")
  class BaseAPIHandler:
      """
      Class to handle the API requests to the Datamint API
@@ -68,6 +43,9 @@ class BaseAPIHandler:
                   root_url: Optional[str] = None,
                   api_key: Optional[str] = None,
                   check_connection: bool = True):
+         # deprecated
+         _LOGGER.warning("The class APIHandler is deprecated and will be removed in future versions. "
+                         "Please use `from datamint import Api` instead.")
          nest_asyncio.apply()  # For running asyncio in jupyter notebooks
          self.root_url = root_url if root_url is not None else configs.get_value(configs.APIURL_KEY)
          if self.root_url is None:
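
ResourceNotFoundError no longer lives in base_api_handler.py; the module now imports it from datamint.exceptions (which grows by 27 lines in this release), so code that imported the exception from the old location must update its import. A hedged sketch of the new-style handling; only the import path, the 404 behaviour of the new entity APIs and the message format of the removed class are taken from this diff, and the projects_api object is illustrative:

    from datamint.exceptions import DatamintException, ResourceNotFoundError

    try:
        # get_by_id() on the new EntityBaseApi-style handlers raises ResourceNotFoundError on a 404
        project = projects_api.get_by_id("non-existing-id")
    except ResourceNotFoundError as err:
        # the removed class rendered this as: Resource '<type>' not found for parameters: {...}
        print(f"Not found: {err}")
    except DatamintException as err:
        print(f"Datamint API error: {err}")
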
datamint/apihandler/dto/__init__.py (file without changes)
datamint/apihandler/dto/annotation_dto.py
@@ -152,7 +152,7 @@ class CreateAnnotationDto:
                   type: AnnotationType | str,
                   identifier: str,
                   scope: str,
-                  annotation_worklist_id: str,
+                  annotation_worklist_id: str | None = None,
                   value=None,
                   imported_from: str | None = None,
                   import_author: str | None = None,
datamint/apihandler/root_api_handler.py
@@ -6,7 +6,7 @@ from requests.exceptions import HTTPError
  import logging
  import asyncio
  import aiohttp
- from medimgkit.dicom_utils import anonymize_dicom, to_bytesio, is_dicom, is_dicom_report
+ from medimgkit.dicom_utils import anonymize_dicom, to_bytesio, is_dicom, is_dicom_report, GeneratorWithLength
  from medimgkit import dicom_utils, standardize_mimetype
  from medimgkit.io_utils import is_io_object, peek
  from medimgkit.format_detection import guess_typez, guess_extension, DEFAULT_MIME_TYPE
@@ -185,9 +185,7 @@ class RootAPIHandler(BaseAPIHandler):
              resp_data = await self._run_request_async(request_params, session)
              if 'error' in resp_data:
                  raise DatamintException(resp_data['error'])
-             _LOGGER.info(f"Response on uploading {name}: {resp_data}")
-
-             _USER_LOGGER.info(f'"{name}" uploaded')
+             _LOGGER.debug(f"Response on uploading {name}: {resp_data}")
              return resp_data['id']
          except Exception as e:
              if 'name' in locals():
@@ -212,6 +210,7 @@
                                        segmentation_files: Optional[list[dict]] = None,
                                        transpose_segmentation: bool = False,
                                        metadata_files: Optional[list[str | dict | None]] = None,
+                                       progress_bar: tqdm | None = None,
                                        ) -> list[str]:
          if on_error not in ['raise', 'skip']:
              raise ValueError("on_error must be either 'raise' or 'skip'")
@@ -225,6 +224,8 @@
          async with aiohttp.ClientSession() as session:
              async def __upload_single_resource(file_path, segfiles: dict[str, list | dict],
                                                 metadata_file: str | dict | None):
+                 name = file_path.name if is_io_object(file_path) else file_path
+                 name = os.path.basename(name)
                  rid = await self._upload_single_resource_async(
                      file_path=file_path,
                      mimetype=mimetype,
@@ -238,6 +239,12 @@
                      publish=publish,
                      metadata_file=metadata_file,
                  )
+                 if progress_bar:
+                     progress_bar.update(1)
+                     progress_bar.set_postfix(file=name)
+                 else:
+                     _USER_LOGGER.info(f'"{name}" uploaded')
+
                  if segfiles is not None:
                      fpaths = segfiles['files']
                      names = segfiles.get('names', _infinite_gen(None))
@@ -295,18 +302,17 @@
          if new_len != orig_len:
              _LOGGER.info(f"Assembled {new_len} dicom files out of {orig_len} files.")
              mapping_idx = [None] * len(files_path)
-             files_path = itertools.chain(dicoms_files_path, other_files_path)
+
+             files_path = GeneratorWithLength(itertools.chain(dicoms_files_path, other_files_path),
+                                              length=new_len + len(other_files_path))
              assembled = True
              for orig_idx, value in zip(dicom_original_idxs, dicoms_files_path.inverse_mapping_idx):
                  mapping_idx[orig_idx] = value
              for i, orig_idx in enumerate(others_original_idxs):
                  mapping_idx[orig_idx] = new_len + i
-             # mapping_idx = [[dicom_original_idxs[i] for i in idxlist]
-             #                for idxlist in dicoms_files_path.mapping_idx]
-             # mapping_idx += [[i] for i in others_original_idxs]
          else:
              assembled = False
-             # mapping_idx = [[i] for i in range(len(files_path))]
+             mapping_idx = [i for i in range(len(files_path))]

          return files_path, assembled, mapping_idx

@@ -391,7 +397,8 @@
              transpose_segmentation=transpose_segmentation,
              modality=modality,
              assemble_dicoms=assemble_dicoms,
-             metadata=metadata
+             metadata=metadata,
+             progress_bar=False
          )

          return result[0]
@@ -412,7 +419,8 @@
                           modality: Optional[str] = None,
                           assemble_dicoms: bool = True,
                           metadata: list[str | dict | None] | dict | str | None = None,
-                          discard_dicom_reports: bool = True
+                          discard_dicom_reports: bool = True,
+                          progress_bar: bool = False
                           ) -> list[str | Exception] | str | Exception:
          """
          Upload resources.
@@ -485,6 +493,11 @@
              assemble_dicoms = assembled
          else:
              mapping_idx = [i for i in range(len(files_path))]
+         n_files = len(files_path)
+
+         if n_files <= 1:
+             # Disable progress bar for single file uploads
+             progress_bar = False

          if segmentation_files is not None:
              if assemble_dicoms:
@@ -513,22 +526,32 @@
                  "segmentation_files['names'] must have the same length as segmentation_files['files'].")

          loop = asyncio.get_event_loop()
-         task = self._upload_resources_async(files_path=files_path,
-                                             mimetype=mimetype,
-                                             anonymize=anonymize,
-                                             anonymize_retain_codes=anonymize_retain_codes,
-                                             on_error=on_error,
-                                             tags=tags,
-                                             mung_filename=mung_filename,
-                                             channel=channel,
-                                             publish=publish,
-                                             segmentation_files=segmentation_files,
-                                             transpose_segmentation=transpose_segmentation,
-                                             modality=modality,
-                                             metadata_files=metadata,
-                                             )
-
-         resource_ids = loop.run_until_complete(task)
+         pbar = None
+         try:
+             if progress_bar:
+                 pbar = tqdm(total=n_files, desc="Uploading resources", unit="file")
+
+             task = self._upload_resources_async(files_path=files_path,
+                                                 mimetype=mimetype,
+                                                 anonymize=anonymize,
+                                                 anonymize_retain_codes=anonymize_retain_codes,
+                                                 on_error=on_error,
+                                                 tags=tags,
+                                                 mung_filename=mung_filename,
+                                                 channel=channel,
+                                                 publish=publish,
+                                                 segmentation_files=segmentation_files,
+                                                 transpose_segmentation=transpose_segmentation,
+                                                 modality=modality,
+                                                 metadata_files=metadata,
+                                                 progress_bar=pbar
+                                                 )
+
+             resource_ids = loop.run_until_complete(task)
+         finally:
+             if pbar:
+                 pbar.close()
+
          _LOGGER.info(f"Resources uploaded: {resource_ids}")

          if publish_to is not None:
@@ -623,7 +646,9 @@
          # get the project id by its name
          project = self.get_project_by_name(project_name)
          if 'error' in project:
-             raise ResourceNotFoundError('project', {'project_name': project_name})
+             project = self.get_project_by_id(project_name)
+             if 'error' in project:
+                 raise ResourceNotFoundError('project', {'project_name': project_name})

          dataset_id = project['dataset_id']
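
Taken together, these hunks thread an optional tqdm progress bar through the upload path: upload_resources() gains a progress_bar flag, every finished file advances the bar with its filename as a postfix, single-file uploads force the flag off, and the old per-file _USER_LOGGER message is only emitted when no bar is active. A hedged usage sketch, assuming the new Api client exposes the same upload_resources() shown here for RootAPIHandler; keyword names other than progress_bar are inferred from the internal calls above:

    from datamint import Api

    api = Api()  # constructor arguments are not shown in this diff
    resource_ids = api.upload_resources(
        files_path=["scan_001.dcm", "scan_002.dcm", "scan_003.dcm"],
        channel="ct-uploads",      # mirrors the channel= forwarded internally (assumed keyword)
        progress_bar=True,         # new in 2.0.0; forced off when only one file is uploaded
    )
    print(resource_ids)
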