datamint 1.9.1__tar.gz → 1.9.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datamint might be problematic.
- {datamint-1.9.1 → datamint-1.9.2}/PKG-INFO +2 -2
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/base_api_handler.py +4 -10
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/root_api_handler.py +48 -15
- {datamint-1.9.1 → datamint-1.9.2}/datamint/client_cmd_tools/datamint_config.py +1 -1
- {datamint-1.9.1 → datamint-1.9.2}/datamint/client_cmd_tools/datamint_upload.py +99 -22
- datamint-1.9.2/datamint/exceptions.py +5 -0
- {datamint-1.9.1 → datamint-1.9.2}/pyproject.toml +2 -2
- {datamint-1.9.1 → datamint-1.9.2}/README.md +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/__init__.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/annotation_api_handler.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/api_handler.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/dto/annotation_dto.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/exp_api_handler.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/client_cmd_tools/__init__.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/configs.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/dataset/__init__.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/dataset/annotation.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/dataset/base_dataset.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/dataset/dataset.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/examples/__init__.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/examples/example_projects.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/experiment/__init__.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/experiment/_patcher.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/experiment/experiment.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/logging.yaml +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/utils/logging_utils.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/utils/torchmetrics.py +0 -0
- {datamint-1.9.1 → datamint-1.9.2}/datamint/utils/visualization.py +0 -0
{datamint-1.9.1 → datamint-1.9.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.9.1
+Version: 1.9.2
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
@@ -19,7 +19,7 @@ Requires-Dist: humanize (>=4.0.0,<5.0.0)
 Requires-Dist: lazy-loader (>=0.3.0)
 Requires-Dist: lightning
 Requires-Dist: matplotlib
-Requires-Dist: medimgkit (>=0.
+Requires-Dist: medimgkit (>=0.5.0)
 Requires-Dist: nest-asyncio (>=1.0.0,<2.0.0)
 Requires-Dist: nibabel (>=4.0.0)
 Requires-Dist: numpy
{datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/base_api_handler.py

@@ -1,4 +1,4 @@
-from typing import Optional, Literal, Generator, TypeAlias
+from typing import Optional, Literal, Generator, TypeAlias
 import pydicom.dataset
 from requests import Session
 from requests.exceptions import HTTPError
@@ -15,6 +15,7 @@ import nibabel as nib
 from nibabel.filebasedimages import FileBasedImage as nib_FileBasedImage
 from datamint import configs
 import gzip
+from datamint.exceptions import DatamintException
 
 _LOGGER = logging.getLogger(__name__)
 
@@ -29,13 +30,6 @@ ResourceFields: TypeAlias = Literal['modality', 'created_by', 'published_by', 'p
 _PAGE_LIMIT = 5000
 
 
-class DatamintException(Exception):
-    """
-    Base class for exceptions in this module.
-    """
-    pass
-
-
 class ResourceNotFoundError(DatamintException):
     """
     Exception raised when a resource is not found.
@@ -196,10 +190,10 @@ class BaseAPIHandler:
             _LOGGER.error(f"Error in request to {request_args['url']}: {e}")
             if status_code >= 400 and status_code < 500:
                 try:
-                    _LOGGER.
+                    _LOGGER.info(f"Error response: {response.text}")
                     error_data = response.json()
                 except Exception as e2:
-                    _LOGGER.
+                    _LOGGER.info(f"Error parsing the response. {e2}")
                 else:
                     if isinstance(error_data['message'], str) and ' not found' in error_data['message'].lower():
                         # Will be caught by the caller and properly initialized:
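The new datamint/exceptions.py (+5 lines in the file list above) is not expanded in this diff. Given the import added above and the class removed from base_api_handler.py, it presumably holds the relocated base exception, roughly:

    # datamint/exceptions.py -- assumed content, reconstructed from the class removed above
    class DatamintException(Exception):
        """
        Base class for exceptions in this module.
        """
        pass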
{datamint-1.9.1 → datamint-1.9.2}/datamint/apihandler/root_api_handler.py

@@ -223,7 +223,8 @@ class RootAPIHandler(BaseAPIHandler):
            metadata_files = _infinite_gen(None)
 
        async with aiohttp.ClientSession() as session:
-            async def __upload_single_resource(file_path, segfiles: dict[str, list | dict],
+            async def __upload_single_resource(file_path, segfiles: dict[str, list | dict],
+                                               metadata_file: str | dict | None):
                rid = await self._upload_single_resource_async(
                    file_path=file_path,
                    mimetype=mimetype,
@@ -258,30 +259,56 @@
                 for f, segfiles, metadata_file in zip(files_path, segmentation_files, metadata_files)]
        return await asyncio.gather(*tasks, return_exceptions=on_error == 'skip')
 
-    def _assemble_dicoms(self, files_path: Sequence[str | IO]
+    def _assemble_dicoms(self, files_path: Sequence[str | IO]
+                         ) -> tuple[Sequence[str | IO], bool, Sequence[int]]:
+        """
+        Assembles DICOM files into a single file.
+
+        Args:
+            files_path: The paths to the DICOM files to assemble.
+
+        Returns:
+            A tuple containing:
+            - The paths to the assembled DICOM files.
+            - A boolean indicating whether the assembly was successful.
+            - same length as the output assembled DICOMs, mapping assembled DICOM to original DICOMs.
+        """
        dicoms_files_path = []
        other_files_path = []
-        for f in files_path:
+        dicom_original_idxs = []
+        others_original_idxs = []
+        for i, f in enumerate(files_path):
            if is_dicom(f):
                dicoms_files_path.append(f)
+                dicom_original_idxs.append(i)
            else:
                other_files_path.append(f)
+                others_original_idxs.append(i)
 
        orig_len = len(dicoms_files_path)
        if orig_len == 0:
            _LOGGER.debug("No DICOM files found to assemble.")
-            return files_path, False
+            return files_path, False, []
        dicoms_files_path = dicom_utils.assemble_dicoms(dicoms_files_path, return_as_IO=True)
 
        new_len = len(dicoms_files_path)
        if new_len != orig_len:
            _LOGGER.info(f"Assembled {new_len} dicom files out of {orig_len} files.")
+            mapping_idx = [None] * len(files_path)
            files_path = itertools.chain(dicoms_files_path, other_files_path)
            assembled = True
+            for orig_idx, value in zip(dicom_original_idxs, dicoms_files_path.inverse_mapping_idx):
+                mapping_idx[orig_idx] = value
+            for i, orig_idx in enumerate(others_original_idxs):
+                mapping_idx[orig_idx] = new_len + i
+            # mapping_idx = [[dicom_original_idxs[i] for i in idxlist]
+            #               for idxlist in dicoms_files_path.mapping_idx]
+            # mapping_idx += [[i] for i in others_original_idxs]
        else:
            assembled = False
+            # mapping_idx = [[i] for i in range(len(files_path))]
 
-        return files_path, assembled
+        return files_path, assembled, mapping_idx
 
    def upload_resource(self,
                        file_path: str | IO | pydicom.dataset.Dataset,
@@ -433,17 +460,17 @@
        # Create filtered lists maintaining index correspondence
        filtered_files = []
        filtered_metadata = []
-
+
        for i, f in enumerate(files_path):
            if not is_dicom_report(f):
                filtered_files.append(f)
                if metadata is not None:
                    filtered_metadata.append(metadata[i])
-
+
        files_path = filtered_files
        if metadata is not None:
            metadata = filtered_metadata
-
+
        if old_size is not None and old_size != len(files_path):
            _LOGGER.info(f"Discarded {old_size - len(files_path)} DICOM report files from upload.")
 
@@ -454,9 +481,11 @@
        if metadata is not None and len(metadata) != len(files_path):
            raise ValueError("The number of metadata files must match the number of resources.")
        if assemble_dicoms:
-            files_path, assembled = self._assemble_dicoms(files_path)
+            files_path, assembled, mapping_idx = self._assemble_dicoms(files_path)
            assemble_dicoms = assembled
-
+        else:
+            mapping_idx = [i for i in range(len(files_path))]
+
        if segmentation_files is not None:
            if assemble_dicoms:
                raise NotImplementedError("Segmentation files cannot be uploaded when assembling dicoms yet.")
@@ -512,6 +541,10 @@
        if on_error == 'raise':
            raise e
 
+        if mapping_idx:
+            _LOGGER.debug(f"Mapping indices for DICOM files: {mapping_idx}")
+            resource_ids = [resource_ids[idx] for idx in mapping_idx]
+
        if is_multiple_resources:
            return resource_ids
        return resource_ids[0]
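A short illustration of why the new mapping indices matter (the names and values below are made up, not taken from the library): when several DICOM slices are assembled into one resource, the upload returns fewer ids than inputs, and mapping_idx lets the caller recover one id per original input, in the original order.

    # Hypothetical example of the reordering step shown above.
    # slice_a.dcm and slice_b.dcm were assembled into one resource; report.pdf stayed separate.
    resource_ids = ["res-dicom", "res-pdf"]   # ids in assembled order
    mapping_idx = [0, 0, 1]                   # original input position -> assembled index
    per_input_ids = [resource_ids[idx] for idx in mapping_idx]
    print(per_input_ids)                      # ['res-dicom', 'res-dicom', 'res-pdf']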
@@ -652,7 +685,8 @@
        raise e
 
    @staticmethod
-    def __process_files_parameter(file_path: str | IO | Sequence[str | IO] | pydicom.dataset.Dataset
+    def __process_files_parameter(file_path: str | IO | Sequence[str | IO] | pydicom.dataset.Dataset
+                                  ) -> tuple[Sequence[str | IO], bool]:
        """
        Process the file_path parameter to ensure it is a list of file paths or IO objects.
        """
@@ -1123,11 +1157,10 @@
 
        loop = asyncio.get_event_loop()
        loop.run_until_complete(_delete_all_resources_async())
-
-
+
    async def _delete_resource_async(self,
-
-
+                                     resource_id: str,
+                                     session: aiohttp.ClientSession | None = None) -> None:
        """
        Asynchronously delete a resource by its unique id.
 
{datamint-1.9.1 → datamint-1.9.2}/datamint/client_cmd_tools/datamint_config.py

@@ -125,7 +125,7 @@ def test_connection():
        projects = api.get_projects()
        console.print(f"[success]✅ Connection successful! Found {len(projects)} projects.[/success]")
    except ImportError:
-        console.print("[error]❌ Full API not available. Install with: pip install datamint
+        console.print("[error]❌ Full API not available. Install with: pip install datamint[/error]")
    except Exception as e:
        console.print(f"[error]❌ Connection failed: {e}[/error]")
 
{datamint-1.9.1 → datamint-1.9.2}/datamint/client_cmd_tools/datamint_upload.py

@@ -1,3 +1,4 @@
+from datamint.exceptions import DatamintException
 import argparse
 from datamint.apihandler.api_handler import APIHandler
 import os
@@ -16,6 +17,7 @@ from datamint.utils.logging_utils import load_cmdline_logging_config
 import yaml
 from collections.abc import Iterable
 import pandas as pd
+import pydicom.errors
 
 # Create two loggings: one for the user and one for the developer
 _LOGGER = logging.getLogger(__name__)
@@ -24,6 +26,66 @@ _USER_LOGGER = logging.getLogger('user_logger')
 MAX_RECURSION_LIMIT = 1000
 
 
+def _get_minimal_distinguishing_paths(file_paths: list[str]) -> dict[str, str]:
+    """
+    Generate minimal distinguishing paths for files to avoid ambiguity when multiple files have the same name.
+
+    Args:
+        file_paths: List of file paths
+
+    Returns:
+        Dictionary mapping full path to minimal distinguishing path
+    """
+    if not file_paths:
+        return {}
+
+    # Convert to Path objects and get absolute paths
+    paths = [Path(fp).resolve() for fp in file_paths]
+    result = {}
+
+    # Group files by basename
+    basename_groups = defaultdict(list)
+    for i, path in enumerate(paths):
+        basename_groups[path.name].append((i, path))
+
+    for basename, path_list in basename_groups.items():
+        if len(path_list) == 1:
+            # Only one file with this name, use just the basename
+            idx, path = path_list[0]
+            result[file_paths[idx]] = basename
+        else:
+            # Multiple files with same name, need to distinguish them
+            path_parts_list = [path.parts for _, path in path_list]
+
+            # Find the minimum number of parent directories needed to distinguish
+            max_depth_needed = 1
+            for depth in range(1, max(len(parts) for parts in path_parts_list) + 1):
+                # Check if this depth is enough to distinguish all files
+                suffixes = []
+                for parts in path_parts_list:
+                    if depth >= len(parts):
+                        suffixes.append('/'.join(parts))
+                    else:
+                        suffixes.append('/'.join(parts[-depth:]))
+
+                if len(set(suffixes)) == len(suffixes):
+                    # All suffixes are unique at this depth
+                    max_depth_needed = depth
+                    break
+
+            # Apply the minimal distinguishing paths
+            for (idx, path), parts in zip(path_list, path_parts_list):
+                if max_depth_needed >= len(parts):
+                    distinguishing_path = '/'.join(parts)
+                else:
+                    distinguishing_path = '/'.join(parts[-max_depth_needed:])
+                result[file_paths[idx]] = distinguishing_path
+
+    return result
+
+
+
+
 def _read_segmentation_names(segmentation_names_path: str | Path) -> dict:
     """
     Read a segmentation names file (yaml or csv) and return its content as a dictionary.
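To see what the new helper produces, here is a short hypothetical usage sketch (the paths are invented; only _get_minimal_distinguishing_paths comes from the diff above):

    # Two files share the basename "image.dcm", so one parent directory is kept to tell them apart.
    paths = [
        "/data/patient1/image.dcm",
        "/data/patient2/image.dcm",
        "/data/patient1/report.pdf",
    ]
    print(_get_minimal_distinguishing_paths(paths))
    # Expected result on a POSIX system:
    # {'/data/patient1/image.dcm': 'patient1/image.dcm',
    #  '/data/patient2/image.dcm': 'patient2/image.dcm',
    #  '/data/patient1/report.pdf': 'report.pdf'}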
@@ -124,7 +186,7 @@ def walk_to_depth(path: str | Path,
                   depth: int,
                   exclude_pattern: str | None = None) -> Generator[Path, None, None]:
     path = Path(path)
-
+
     # Check for DICOMDIR first at current directory level
     dicomdir_path = detect_dicomdir(path)
     if dicomdir_path is not None:
@@ -138,7 +200,7 @@ def walk_to_depth(path: str | Path,
         except Exception as e:
             _USER_LOGGER.warning(f"Failed to parse DICOMDIR at {path}: {e}. Falling back to directory scan.")
             # Continue with regular directory scanning below
-
+
     # Regular directory scanning
     for child in path.iterdir():
         if _is_system_file(child):
@@ -606,12 +668,15 @@ def print_input_summary(files_path: list[str],
     ext_counts = [(ext, count) for ext, count in ext_dict.items()]
     ext_counts.sort(key=lambda x: x[1], reverse=True)
 
+    # Get distinguishing paths for better display
+    distinguishing_paths = _get_minimal_distinguishing_paths(files_path)
+
     _USER_LOGGER.info(f"Number of files to be uploaded: {total_files}")
-    _USER_LOGGER.info(f"\t{files_path[0]}")
+    _USER_LOGGER.info(f"\t{distinguishing_paths[files_path[0]]}")
     if total_files >= 2:
         if total_files >= 3:
             _USER_LOGGER.info("\t(...)")
-        _USER_LOGGER.info(f"\t{files_path[-1]}")
+        _USER_LOGGER.info(f"\t{distinguishing_paths[files_path[-1]]}")
     _USER_LOGGER.info(f"Total size of the upload: {naturalsize(total_size)}")
     _USER_LOGGER.info(f"Number of files per extension:")
     for ext, count in ext_counts:
@@ -653,17 +718,21 @@ def print_results_summary(files_path: list[str],
                           results: list[str | Exception]) -> int:
     # Check for failed uploads
     failure_files = [f for f, r in zip(files_path, results) if isinstance(r, Exception)]
+    # Get distinguishing paths for better error reporting
+    distinguishing_paths = _get_minimal_distinguishing_paths(files_path)
+
     _USER_LOGGER.info(f"\nUpload summary:")
     _USER_LOGGER.info(f"\tTotal files: {len(files_path)}")
     _USER_LOGGER.info(f"\tSuccessful uploads: {len(files_path) - len(failure_files)}")
-    _USER_LOGGER.info(f"\tFailed uploads: {len(failure_files)}")
     if len(failure_files) > 0:
-        _USER_LOGGER.
+        _USER_LOGGER.info(f"\t❌ Failed uploads: {len(failure_files)}")
+        _USER_LOGGER.warning(f"\tFailed files: {[distinguishing_paths[f] for f in failure_files]}")
         _USER_LOGGER.warning(f"\nFailures:")
         for f, r in zip(files_path, results):
-            _LOGGER.debug(f"Failure: {f} - {r}")
             if isinstance(r, Exception):
-                _USER_LOGGER.warning(f"\t{
+                _USER_LOGGER.warning(f"\t{distinguishing_paths[f]}: {r}")
+    else:
+        _USER_LOGGER.info(f'✅ All uploads successful!')
     return len(failure_files)
 
 
@@ -691,20 +760,28 @@ def main():
 
     has_a_dicom_file = any(is_dicom(f) for f in files_path)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+    try:
+        api_handler = APIHandler(check_connection=True)
+    except DatamintException as e:
+        _USER_LOGGER.error(f'❌ Connection failed: {e}')
+        return
+    try:
+        results = api_handler.upload_resources(channel=args.channel,
+                                               files_path=files_path,
+                                               tags=args.tag,
+                                               on_error='skip',
+                                               anonymize=args.retain_pii == False and has_a_dicom_file,
+                                               anonymize_retain_codes=args.retain_attribute,
+                                               mung_filename=args.mungfilename,
+                                               publish=args.publish,
+                                               segmentation_files=segfiles,
+                                               transpose_segmentation=args.transpose_segmentation,
+                                               assemble_dicoms=True,
+                                               metadata=metadata_files
+                                               )
+    except pydicom.errors.InvalidDicomError as e:
+        _USER_LOGGER.error(f'❌ Invalid DICOM file: {e}')
+        return
     _USER_LOGGER.info('Upload finished!')
     _LOGGER.debug(f"Number of results: {len(results)}")
 
{datamint-1.9.1 → datamint-1.9.2}/pyproject.toml

@@ -1,7 +1,7 @@
 [project]
 name = "datamint"
 description = "A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows."
-version = "1.9.1"
+version = "1.9.2"
 dynamic = ["dependencies"]
 requires-python = ">=3.10"
 readme = "README.md"
@@ -40,7 +40,7 @@ matplotlib = "*"
 lightning = "*"
 albumentations = ">=2.0.0"
 lazy-loader = ">=0.3.0"
-medimgkit = ">=0.
+medimgkit = ">=0.5.0"
 # For compatibility with the datamintapi package
 datamintapi = "0.0.*"
 # Extra dependencies for docs
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|