datamint 1.9.2__py3-none-any.whl → 1.9.3__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of datamint might be problematic.
- datamint/apihandler/annotation_api_handler.py +5 -5
- datamint/apihandler/root_api_handler.py +53 -28
- datamint/client_cmd_tools/datamint_config.py +6 -37
- datamint/client_cmd_tools/datamint_upload.py +94 -57
- datamint/logging.yaml +1 -1
- datamint/utils/logging_utils.py +75 -0
- {datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/METADATA +1 -1
- {datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/RECORD +10 -10
- {datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/WHEEL +0 -0
- {datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/entry_points.txt +0 -0
datamint/apihandler/annotation_api_handler.py CHANGED
@@ -995,11 +995,11 @@ class AnnotationAPIHandler(BaseAPIHandler):
 
     def update_annotation_worklist(self,
                                    worklist_id: str,
-                                   frame_labels: list[str] = None,
-                                   image_labels: list[str] = None,
-                                   annotations: list[dict] = None,
-                                   status: Literal['new', 'updating', 'active', 'completed'] = None,
-                                   name: str = None,
+                                   frame_labels: list[str] | None = None,
+                                   image_labels: list[str] | None = None,
+                                   annotations: list[dict] | None = None,
+                                   status: Literal['new', 'updating', 'active', 'completed'] | None = None,
+                                   name: str | None = None,
                                    ):
         """
         Update the status of an annotation worklist.
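The change above is typing-only: parameters that default to None now advertise it as `X | None` (PEP 604) instead of a bare `X`. A minimal sketch of the pattern with a hypothetical function, not code from the package:

    from typing import Literal

    # Before: `labels: list[str] = None` claims list[str] but defaults to None,
    # which strict type checkers reject.
    def update(labels: list[str] | None = None,
               status: Literal['new', 'active'] | None = None) -> None:
        # The possibility of None is now explicit in the signature.
        if labels is not None:
            print(f"updating {len(labels)} labels, status={status}")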
datamint/apihandler/root_api_handler.py CHANGED
@@ -6,7 +6,7 @@ from requests.exceptions import HTTPError
 import logging
 import asyncio
 import aiohttp
-from medimgkit.dicom_utils import anonymize_dicom, to_bytesio, is_dicom, is_dicom_report
+from medimgkit.dicom_utils import anonymize_dicom, to_bytesio, is_dicom, is_dicom_report, GeneratorWithLength
 from medimgkit import dicom_utils, standardize_mimetype
 from medimgkit.io_utils import is_io_object, peek
 from medimgkit.format_detection import guess_typez, guess_extension, DEFAULT_MIME_TYPE
@@ -185,9 +185,7 @@ class RootAPIHandler(BaseAPIHandler):
             resp_data = await self._run_request_async(request_params, session)
             if 'error' in resp_data:
                 raise DatamintException(resp_data['error'])
-            _LOGGER.
-
-            _USER_LOGGER.info(f'"{name}" uploaded')
+            _LOGGER.debug(f"Response on uploading {name}: {resp_data}")
             return resp_data['id']
         except Exception as e:
             if 'name' in locals():
@@ -212,6 +210,7 @@ class RootAPIHandler(BaseAPIHandler):
                                        segmentation_files: Optional[list[dict]] = None,
                                        transpose_segmentation: bool = False,
                                        metadata_files: Optional[list[str | dict | None]] = None,
+                                       progress_bar: tqdm | None = None,
                                        ) -> list[str]:
         if on_error not in ['raise', 'skip']:
             raise ValueError("on_error must be either 'raise' or 'skip'")
@@ -225,6 +224,8 @@ class RootAPIHandler(BaseAPIHandler):
         async with aiohttp.ClientSession() as session:
             async def __upload_single_resource(file_path, segfiles: dict[str, list | dict],
                                                metadata_file: str | dict | None):
+                name = file_path.name if is_io_object(file_path) else file_path
+                name = os.path.basename(name)
                 rid = await self._upload_single_resource_async(
                     file_path=file_path,
                     mimetype=mimetype,
@@ -238,6 +239,12 @@ class RootAPIHandler(BaseAPIHandler):
                     publish=publish,
                     metadata_file=metadata_file,
                 )
+                if progress_bar:
+                    progress_bar.update(1)
+                    progress_bar.set_postfix(file=name)
+                else:
+                    _USER_LOGGER.info(f'"{name}" uploaded')
+
                 if segfiles is not None:
                     fpaths = segfiles['files']
                     names = segfiles.get('names', _infinite_gen(None))
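With the new progress_bar parameter, per-file feedback moves from log lines to a shared tqdm bar. A standalone sketch of the tqdm calls used above (fake file list; sleep stands in for the real async upload):

    import time
    from tqdm import tqdm

    files = ["a.dcm", "b.dcm", "c.dcm"]
    pbar = tqdm(total=len(files), desc="Uploading resources", unit="file")
    for name in files:
        time.sleep(0.1)              # stand-in for the actual upload
        pbar.update(1)               # advance the bar by one file
        pbar.set_postfix(file=name)  # show the file that just finished
    pbar.close()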
@@ -295,18 +302,17 @@ class RootAPIHandler(BaseAPIHandler):
             if new_len != orig_len:
                 _LOGGER.info(f"Assembled {new_len} dicom files out of {orig_len} files.")
             mapping_idx = [None] * len(files_path)
-
+
+            files_path = GeneratorWithLength(itertools.chain(dicoms_files_path, other_files_path),
+                                             length=new_len + len(other_files_path))
             assembled = True
             for orig_idx, value in zip(dicom_original_idxs, dicoms_files_path.inverse_mapping_idx):
                 mapping_idx[orig_idx] = value
             for i, orig_idx in enumerate(others_original_idxs):
                 mapping_idx[orig_idx] = new_len + i
-            # mapping_idx = [[dicom_original_idxs[i] for i in idxlist]
-            #                for idxlist in dicoms_files_path.mapping_idx]
-            # mapping_idx += [[i] for i in others_original_idxs]
         else:
             assembled = False
-
+            mapping_idx = [i for i in range(len(files_path))]
 
         return files_path, assembled, mapping_idx
 
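`GeneratorWithLength` (newly imported from medimgkit.dicom_utils) keeps the assembled-DICOM stream lazy while still answering `len()`, which the progress-bar sizing below depends on. The real class lives in medimgkit; a rough, hypothetical equivalent for illustration only:

    import itertools
    from collections.abc import Iterable

    class SizedGenerator:
        # Sketch only, not the real medimgkit class: wraps any iterable
        # and reports a precomputed length.
        def __init__(self, iterable: Iterable, length: int):
            self._it = iter(iterable)
            self._length = length

        def __iter__(self):
            return self._it

        def __len__(self) -> int:
            return self._length

    gen = SizedGenerator(itertools.chain([1, 2], [3]), length=3)
    assert len(gen) == 3
    assert list(gen) == [1, 2, 3]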
@@ -391,7 +397,8 @@ class RootAPIHandler(BaseAPIHandler):
             transpose_segmentation=transpose_segmentation,
             modality=modality,
             assemble_dicoms=assemble_dicoms,
-            metadata=metadata
+            metadata=metadata,
+            progress_bar=False
         )
 
         return result[0]
@@ -412,7 +419,8 @@ class RootAPIHandler(BaseAPIHandler):
                          modality: Optional[str] = None,
                          assemble_dicoms: bool = True,
                          metadata: list[str | dict | None] | dict | str | None = None,
-                         discard_dicom_reports: bool = True
+                         discard_dicom_reports: bool = True,
+                         progress_bar: bool = False
                          ) -> list[str | Exception] | str | Exception:
         """
         Upload resources.
@@ -485,6 +493,11 @@ class RootAPIHandler(BaseAPIHandler):
             assemble_dicoms = assembled
         else:
             mapping_idx = [i for i in range(len(files_path))]
+        n_files = len(files_path)
+
+        if n_files <= 1:
+            # Disable progress bar for single file uploads
+            progress_bar = False
 
         if segmentation_files is not None:
             if assemble_dicoms:
@@ -513,22 +526,32 @@ class RootAPIHandler(BaseAPIHandler):
                     "segmentation_files['names'] must have the same length as segmentation_files['files'].")
 
         loop = asyncio.get_event_loop()
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        pbar = None
+        try:
+            if progress_bar:
+                pbar = tqdm(total=n_files, desc="Uploading resources", unit="file")
+
+            task = self._upload_resources_async(files_path=files_path,
+                                                mimetype=mimetype,
+                                                anonymize=anonymize,
+                                                anonymize_retain_codes=anonymize_retain_codes,
+                                                on_error=on_error,
+                                                tags=tags,
+                                                mung_filename=mung_filename,
+                                                channel=channel,
+                                                publish=publish,
+                                                segmentation_files=segmentation_files,
+                                                transpose_segmentation=transpose_segmentation,
+                                                modality=modality,
+                                                metadata_files=metadata,
+                                                progress_bar=pbar
+                                                )
+
+            resource_ids = loop.run_until_complete(task)
+        finally:
+            if pbar:
+                pbar.close()
+
         _LOGGER.info(f"Resources uploaded: {resource_ids}")
 
         if publish_to is not None:
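From the caller's side the bar is opt-in. A hedged usage sketch, assuming `APIHandler` is importable from the package root and an API key is already configured:

    from datamint import APIHandler  # import path assumed

    api = APIHandler()
    resource_ids = api.upload_resources(
        files_path=["scan1.dcm", "scan2.dcm"],
        on_error="skip",
        progress_bar=True,  # new in 1.9.3: one tqdm bar instead of per-file log lines
    )
    print(resource_ids)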
@@ -623,7 +646,9 @@ class RootAPIHandler(BaseAPIHandler):
         # get the project id by its name
         project = self.get_project_by_name(project_name)
         if 'error' in project:
-
+            project = self.get_project_by_id(project_name)
+            if 'error' in project:
+                raise ResourceNotFoundError('project', {'project_name': project_name})
 
         dataset_id = project['dataset_id']
 
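The lookup now tolerates being handed either a project name or a project id. The same fallback pattern in isolation, using a hypothetical `resolve_project` helper that is not part of the package:

    def resolve_project(handler, name_or_id: str) -> dict:
        # Try the human-readable name first, then retry treating the value as an id.
        project = handler.get_project_by_name(name_or_id)
        if 'error' in project:
            project = handler.get_project_by_id(name_or_id)
            if 'error' in project:
                raise LookupError(f"project not found: {name_or_id}")
        return project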
datamint/client_cmd_tools/datamint_config.py CHANGED
@@ -1,46 +1,13 @@
 import argparse
 import logging
-import os
-import platform
 from datamint import configs
-from datamint.utils.logging_utils import load_cmdline_logging_config
+from datamint.utils.logging_utils import load_cmdline_logging_config, ConsoleWrapperHandler
 from rich.prompt import Prompt, Confirm
 from rich.console import Console
-
-
-# Create a custom theme that works well on both dark and blue backgrounds
-def _create_console_theme() -> Theme:
-    """Create a custom Rich theme optimized for cross-platform terminals."""
-    # Detect if we're likely on PowerShell (Windows + PowerShell)
-    is_powershell = (
-        platform.system() == "Windows" and
-        os.environ.get("PSModulePath") is not None
-    )
-
-    if is_powershell:
-        # PowerShell blue background - use high contrast colors
-        return Theme({
-            "warning": "bright_yellow",
-            "error": "bright_red on white",
-            "success": "bright_green",
-            "key": "bright_cyan",
-            "accent": "bright_cyan",
-            "title": "bold"
-        })
-    else:
-        # Linux/Unix terminals - standard colors
-        return Theme({
-            "warning": "yellow",
-            "error": "red",
-            "success": "green",
-            "key": "cyan",
-            "accent": "bright_blue",
-            "title": "bold"
-        })
-
-# Create console with custom theme
-console = Console(theme=_create_console_theme())
+
 _LOGGER = logging.getLogger(__name__)
+_USER_LOGGER = logging.getLogger('user_logger')
+console: Console
 
 
 def configure_default_url():
@@ -170,7 +137,9 @@ def interactive_mode():
 
 def main():
     """Main entry point for the configuration tool."""
+    global console
     load_cmdline_logging_config()
+    console = [h for h in _USER_LOGGER.handlers if isinstance(h, ConsoleWrapperHandler)][0].console
     parser = argparse.ArgumentParser(
         description='🔧 Datamint API Configuration Tool',
         epilog="""
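The themed Console is no longer built in this module; it is created by the logging setup and retrieved from the user logger's handler. The same lookup as a standalone sketch, written defensively with `next()` where the shipped code indexes `[0]`:

    import logging
    from rich.console import Console
    from datamint.utils.logging_utils import ConsoleWrapperHandler, load_cmdline_logging_config

    load_cmdline_logging_config()
    user_logger = logging.getLogger('user_logger')
    console = next(
        (h.console for h in user_logger.handlers if isinstance(h, ConsoleWrapperHandler)),
        Console(),  # fallback is an assumption; the real code would raise IndexError instead
    )
    console.print("datamint-config ready")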
datamint/client_cmd_tools/datamint_upload.py CHANGED
@@ -12,8 +12,8 @@ from typing import Generator, Optional, Any
 from collections import defaultdict
 from datamint import __version__ as datamint_version
 from datamint import configs
-from datamint.
-from
+from datamint.utils.logging_utils import load_cmdline_logging_config, ConsoleWrapperHandler
+from rich.console import Console
 import yaml
 from collections.abc import Iterable
 import pandas as pd
@@ -22,32 +22,42 @@ import pydicom.errors
 # Create two loggings: one for the user and one for the developer
 _LOGGER = logging.getLogger(__name__)
 _USER_LOGGER = logging.getLogger('user_logger')
+logging.getLogger('pydicom').setLevel(logging.ERROR)
+CONSOLE: Console
 
 MAX_RECURSION_LIMIT = 1000
 
+# Default extensions to exclude when --include-extensions is not specified
+DEFAULT_EXCLUDED_EXTENSIONS = [
+    '.txt', '.json', '.xml', '.docx', '.doc', '.pdf', '.xlsx', '.xls', '.csv', '.tsv',
+    '.log', '.ini', '.cfg', '.conf', '.yaml', '.yml', '.md', '.rst', '.html', '.htm',
+    '.exe', '.bat', '.sh', '.py', '.js', '.css',
+    '.sql', '.bak', '.tmp', '.temp', '.lock', '.DS_Store', '.gitignore'
+]
+
 
 def _get_minimal_distinguishing_paths(file_paths: list[str]) -> dict[str, str]:
     """
     Generate minimal distinguishing paths for files to avoid ambiguity when multiple files have the same name.
-
+
     Args:
         file_paths: List of file paths
-
+
     Returns:
         Dictionary mapping full path to minimal distinguishing path
     """
     if not file_paths:
         return {}
-
+
     # Convert to Path objects and get absolute paths
     paths = [Path(fp).resolve() for fp in file_paths]
     result = {}
-
+
     # Group files by basename
     basename_groups = defaultdict(list)
     for i, path in enumerate(paths):
         basename_groups[path.name].append((i, path))
-
+
     for basename, path_list in basename_groups.items():
         if len(path_list) == 1:
             # Only one file with this name, use just the basename
@@ -56,7 +66,7 @@ def _get_minimal_distinguishing_paths(file_paths: list[str]) -> dict[str, str]:
         else:
             # Multiple files with same name, need to distinguish them
             path_parts_list = [path.parts for _, path in path_list]
-
+
             # Find the minimum number of parent directories needed to distinguish
             max_depth_needed = 1
             for depth in range(1, max(len(parts) for parts in path_parts_list) + 1):
@@ -67,12 +77,12 @@ def _get_minimal_distinguishing_paths(file_paths: list[str]) -> dict[str, str]:
                         suffixes.append('/'.join(parts))
                     else:
                         suffixes.append('/'.join(parts[-depth:]))
-
+
                 if len(set(suffixes)) == len(suffixes):
                     # All suffixes are unique at this depth
                     max_depth_needed = depth
                     break
-
+
             # Apply the minimal distinguishing paths
             for (idx, path), parts in zip(path_list, path_parts_list):
                 if max_depth_needed >= len(parts):
@@ -80,10 +90,8 @@ def _get_minimal_distinguishing_paths(file_paths: list[str]) -> dict[str, str]:
                 else:
                     distinguishing_path = '/'.join(parts[-max_depth_needed:])
                 result[file_paths[idx]] = distinguishing_path
-
-    return result
-
 
+    return result
 
 
 def _read_segmentation_names(segmentation_names_path: str | Path) -> dict:
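Expected behavior of `_get_minimal_distinguishing_paths` on a basename collision, as an illustrative check (paths invented for the example):

    paths = ['/data/study_a/slice.dcm',
             '/data/study_b/slice.dcm',
             '/data/other/notes.dcm']
    # Colliding names grow just enough parent context to become unique;
    # unique names stay as bare basenames:
    # {'/data/study_a/slice.dcm': 'study_a/slice.dcm',
    #  '/data/study_b/slice.dcm': 'study_b/slice.dcm',
    #  '/data/other/notes.dcm': 'notes.dcm'}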
@@ -257,6 +265,7 @@ def handle_api_key() -> str | None:
     If it does not exist, it asks the user to input it.
     Then, it asks the user if he wants to save the API key at a proper location in the machine
     """
+    from datamint.client_cmd_tools.datamint_config import ask_api_key
     api_key = configs.get_value(configs.APIKEY_KEY)
     if api_key is None:
         _USER_LOGGER.info("API key not found. Please provide it:")
@@ -524,6 +533,8 @@ def _parse_args() -> tuple[Any, list[str], Optional[list[dict]], Optional[list[s
     parser.add_argument('--channel', '--name', type=str, required=False,
                         help='Channel name (arbritary) to upload the resources to. \
                         Useful for organizing the resources in the platform.')
+    parser.add_argument('--project', type=str, required=False,
+                        help='Project name to add the uploaded resources to after successful upload.')
     parser.add_argument('--retain-pii', action='store_true', help='Do not anonymize DICOMs')
     parser.add_argument('--retain-attribute', type=_tuple_int_type, action='append',
                         default=[],
@@ -541,7 +552,8 @@ def _parse_args() -> tuple[Any, list[str], Optional[list[dict]], Optional[list[s
                         help='File extensions to be considered for uploading. Default: all file extensions.' +
                         ' Example: --include-extensions dcm jpg png')
     parser.add_argument('--exclude-extensions', type=str, nargs='+',
-                        help='File extensions to be excluded from uploading.
+                        help='File extensions to be excluded from uploading. ' +
+                        'Default: common non-medical file extensions (.txt, .json, .xml, .docx, etc.) when --include-extensions is not specified.' +
                         ' Example: --exclude-extensions txt csv'
                         )
     parser.add_argument('--segmentation_path', type=_is_valid_path_argparse, metavar="FILE",
@@ -581,7 +593,6 @@ def _parse_args() -> tuple[Any, list[str], Optional[list[dict]], Optional[list[s
 
     if args.verbose:
         # Get the console handler and set to debug
-        print(logging.getLogger().handlers)
         logging.getLogger().handlers[0].setLevel(logging.DEBUG)
         logging.getLogger('datamint').setLevel(logging.DEBUG)
         _LOGGER.setLevel(logging.DEBUG)
@@ -594,6 +605,11 @@ def _parse_args() -> tuple[Any, list[str], Optional[list[dict]], Optional[list[s
     if args.include_extensions is not None and args.exclude_extensions is not None:
         raise ValueError("--include-extensions and --exclude-extensions are mutually exclusive.")
 
+    # Apply default excluded extensions if neither include nor exclude extensions are specified
+    if args.include_extensions is None and args.exclude_extensions is None:
+        args.exclude_extensions = DEFAULT_EXCLUDED_EXTENSIONS
+        _LOGGER.debug(f"Applied default excluded extensions: {args.exclude_extensions}")
+
     try:
         if os.path.isfile(args.path):
             file_path = [args.path]
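How the new default interacts with the two flags, as command-line examples (assuming the console script is named datamint-upload; paths are illustrative):

    # No extension flags: DEFAULT_EXCLUDED_EXTENSIONS applies, so stray
    # .txt/.json/.py files in the folder are skipped automatically.
    datamint-upload --path ./study_folder

    # Explicit include list: only these extensions are considered.
    datamint-upload --path ./study_folder --include-extensions dcm jpg png

    # Explicit exclude list replaces the default one.
    datamint-upload --path ./study_folder --exclude-extensions txt csv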
@@ -720,24 +736,26 @@ def print_results_summary(files_path: list[str],
     failure_files = [f for f, r in zip(files_path, results) if isinstance(r, Exception)]
     # Get distinguishing paths for better error reporting
     distinguishing_paths = _get_minimal_distinguishing_paths(files_path)
-
+
     _USER_LOGGER.info(f"\nUpload summary:")
     _USER_LOGGER.info(f"\tTotal files: {len(files_path)}")
     _USER_LOGGER.info(f"\tSuccessful uploads: {len(files_path) - len(failure_files)}")
     if len(failure_files) > 0:
-        _USER_LOGGER.
+        _USER_LOGGER.warning(f"\tFailed uploads: {len(failure_files)}")
         _USER_LOGGER.warning(f"\tFailed files: {[distinguishing_paths[f] for f in failure_files]}")
         _USER_LOGGER.warning(f"\nFailures:")
         for f, r in zip(files_path, results):
             if isinstance(r, Exception):
                 _USER_LOGGER.warning(f"\t{distinguishing_paths[f]}: {r}")
     else:
-
+        CONSOLE.print(f'✅ All uploads successful!', style='success')
     return len(failure_files)
 
 
 def main():
+    global CONSOLE
     load_cmdline_logging_config()
+    CONSOLE = [h for h in _USER_LOGGER.handlers if isinstance(h, ConsoleWrapperHandler)][0].console
 
     try:
         args, files_path, segfiles, metadata_files = _parse_args()
@@ -745,48 +763,67 @@ def main():
         _USER_LOGGER.error(f'Error validating arguments. {e}')
         sys.exit(1)
 
-
-
-
-
-
+    try:
+        print_input_summary(files_path,
+                            args=args,
+                            segfiles=segfiles,
+                            metadata_files=metadata_files,
+                            include_extensions=args.include_extensions)
 
-
-
-
-
-
-
+        if not args.yes:
+            confirmation = input("Do you want to proceed with the upload? (y/n): ")
+            if confirmation.lower() != "y":
+                _USER_LOGGER.info("Upload cancelled.")
+                return
+        #######################################
 
-
+        has_a_dicom_file = any(is_dicom(f) for f in files_path)
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            api_handler = APIHandler(check_connection=True)
+        except DatamintException as e:
+            _USER_LOGGER.error(f'❌ Connection failed: {e}')
+            return
+        try:
+            results = api_handler.upload_resources(channel=args.channel,
+                                                   files_path=files_path,
+                                                   tags=args.tag,
+                                                   on_error='skip',
+                                                   anonymize=args.retain_pii == False and has_a_dicom_file,
+                                                   anonymize_retain_codes=args.retain_attribute,
+                                                   mung_filename=args.mungfilename,
+                                                   publish=args.publish,
+                                                   segmentation_files=segfiles,
+                                                   transpose_segmentation=args.transpose_segmentation,
+                                                   assemble_dicoms=True,
+                                                   metadata=metadata_files,
+                                                   progress_bar=True
+                                                   )
+        except pydicom.errors.InvalidDicomError as e:
+            _USER_LOGGER.error(f'❌ Invalid DICOM file: {e}')
+            return
+        _USER_LOGGER.info('Upload finished!')
+        _LOGGER.debug(f"Number of results: {len(results)}")
+
+        # Add resources to project if specified
+        if args.project is not None:
+            _USER_LOGGER.info(f"Adding uploaded resources to project '{args.project}'...")
+            try:
+                # Filter successful uploads to get resource IDs
+                successful_resource_ids = [r for r in results if not isinstance(r, Exception)]
+                if successful_resource_ids:
+                    api_handler.add_to_project(project_name=args.project, resource_ids=successful_resource_ids)
+                    _USER_LOGGER.info(f"✅ Successfully added {len(successful_resource_ids)} resources to project '{args.project}'")
+                else:
+                    _USER_LOGGER.warning("No successful uploads to add to project")
+            except Exception as e:
+                _USER_LOGGER.error(f"❌ Failed to add resources to project '{args.project}': {e}")
+
+        num_failures = print_results_summary(files_path, results)
+        if num_failures > 0:
+            sys.exit(1)
+    except KeyboardInterrupt:
+        CONSOLE.print("\nUpload cancelled by user.", style='warning')
         sys.exit(1)
 
 
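The new `--project` flag chains `add_to_project` after the upload, passing only the IDs that uploaded successfully. Assuming the same `datamint-upload` script name as above:

    # Upload a folder, then attach every successful resource to an existing project.
    datamint-upload --path ./study_folder --project "My Study" --yes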
datamint/logging.yaml CHANGED
datamint/utils/logging_utils.py CHANGED
@@ -1,3 +1,8 @@
+from rich.theme import Theme
+from logging import Logger, DEBUG, INFO, WARNING, ERROR, CRITICAL
+from rich.console import Console
+import platform
+import os
 import logging
 import logging.config
 from rich.console import ConsoleRenderable
@@ -53,3 +58,73 @@ def load_cmdline_logging_config():
         print(f"Warning: Error loading logging configuration file: {e}")
         _LOGGER.exception(e)
         logging.basicConfig(level=logging.INFO)
+
+
+LEVELS_MAPPING = {
+    DEBUG: None,
+    INFO: None,
+    WARNING: "warning",
+    ERROR: "error",
+    CRITICAL: "error"
+}
+
+
+def _create_console_theme() -> Theme:
+    """Create a custom Rich theme optimized for cross-platform terminals."""
+    # Detect if we're likely on PowerShell (Windows + PowerShell)
+    is_powershell = (
+        platform.system() == "Windows" and
+        os.environ.get("PSModulePath") is not None
+    )
+
+    if is_powershell:
+        # PowerShell blue background - use high contrast colors
+        return Theme({
+            "warning": "bright_yellow",
+            "error": "bright_red on white",
+            "success": "bright_green",
+            "key": "bright_cyan",
+            "accent": "bright_cyan",
+            "title": "bold"
+        })
+    else:
+        # Linux/Unix terminals - standard colors
+        return Theme({
+            "warning": "yellow",
+            "error": "red",
+            "success": "green",
+            "key": "cyan",
+            "accent": "bright_blue",
+            "title": "bold"
+        })
+
+
+class ConsoleWrapperHandler(ConditionalRichHandler):
+    """
+    A logging handler that uses a rich.console.Console to print log messages.
+    """
+    def __init__(self, *args, console: Console | None = None, **kwargs):
+        """
+        Initializes the ConsoleWrapperHandler.
+
+        Args:
+            console (Console | None): A rich Console instance. If None, a new one is created.
+        """
+        super().__init__(*args, **kwargs)
+        if console is None:
+            console = Console(theme=_create_console_theme())
+        self.console = console
+
+    def emit(self, record: logging.LogRecord) -> None:
+        """
+        Emits a log record.
+
+        Args:
+            record (logging.LogRecord): The log record to emit.
+        """
+        try:
+            msg = self.format(record)
+            style = LEVELS_MAPPING.get(record.levelno)
+            self.console.print(msg, style=style)
+        except Exception:
+            self.handleError(record)
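`ConsoleWrapperHandler` extends the module's existing ConditionalRichHandler, so it attaches like any stdlib handler; the style passed to `Console.print` comes from LEVELS_MAPPING. A minimal direct-wiring sketch (the shipped logging.yaml presumably does the equivalent declaratively):

    import logging
    from datamint.utils.logging_utils import ConsoleWrapperHandler

    user_logger = logging.getLogger('user_logger')
    user_logger.setLevel(logging.INFO)
    user_logger.addHandler(ConsoleWrapperHandler())  # builds a themed Console internally

    user_logger.info("plain message")      # INFO maps to style None
    user_logger.warning("styled message")  # WARNING maps to the 'warning' theme style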
{datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/METADATA CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.9.2
+Version: 1.9.3
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
{datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/RECORD CHANGED
@@ -1,13 +1,13 @@
 datamint/__init__.py,sha256=7rKCCsaa4RBRTIfuHB708rai1xwDHLtkFNFJGKYG5D4,757
-datamint/apihandler/annotation_api_handler.py,sha256=
+datamint/apihandler/annotation_api_handler.py,sha256=W3vV4z3BqX1OQe1r7zr8dI-IVu4zUDxED4QttdiWV-E,57098
 datamint/apihandler/api_handler.py,sha256=cdVSddrFCKlF_BJ81LO1aJ0OP49rssjpNEFzJ6Q7YyY,384
 datamint/apihandler/base_api_handler.py,sha256=An9chkUcq_v2_Tkr9TbwI_lnsXCyNYgugxK9nRu4oG8,12126
 datamint/apihandler/dto/annotation_dto.py,sha256=qId1RK1VO7dXrvGJ7dqJ31jBQB7Z8yy5x0tLSiMxTB4,7105
 datamint/apihandler/exp_api_handler.py,sha256=hFUgUgBc5rL7odK7gTW3MnrvMY1pVfJUpUdzRNobMQE,6226
-datamint/apihandler/root_api_handler.py,sha256=
+datamint/apihandler/root_api_handler.py,sha256=jBof_XPTeq4o41CW-l-I5GHQKVa76kaX75RovS_qAM4,63384
 datamint/client_cmd_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamint/client_cmd_tools/datamint_config.py,sha256=
-datamint/client_cmd_tools/datamint_upload.py,sha256=
+datamint/client_cmd_tools/datamint_config.py,sha256=5S9sgS64F141ZcvvjCHEEgfNKwhGt4g2oJQaJqeiugA,7228
+datamint/client_cmd_tools/datamint_upload.py,sha256=CKd3gnKUVSRn0eC3BfC0S0P_vB9fQNmiJ2NTCJgg8c4,36804
 datamint/configs.py,sha256=Bdp6NydYwyCJ2dk19_gf_o3M2ZyQOmMHpLi8wEWNHUk,1426
 datamint/dataset/__init__.py,sha256=4PlUKSvVhdfQvvuq8jQXrkdqnot-iTTizM3aM1vgSwg,47
 datamint/dataset/annotation.py,sha256=qN1IMjdfLD2ceQ6va3l76jOXA8Vb_c-eBk1oWQu6hW0,7994
@@ -19,11 +19,11 @@ datamint/exceptions.py,sha256=AdpAC528xrml7LfWt04zQK8pONoDBx8WmXSvzRGi52o,106
 datamint/experiment/__init__.py,sha256=5qQOMzoG17DEd1YnTF-vS0qiM-DGdbNh42EUo91CRhQ,34
 datamint/experiment/_patcher.py,sha256=ZgbezoevAYhJsbiJTvWPALGTcUiMT371xddcTllt3H4,23296
 datamint/experiment/experiment.py,sha256=aHK9dRFdQTi569xgUg1KqlCZLHZpDmSH3g3ndPIZvXw,44546
-datamint/logging.yaml,sha256=
-datamint/utils/logging_utils.py,sha256=
+datamint/logging.yaml,sha256=tOMxtc2UmwlIMTK6ljtnBwTco1PNrPeq3mx2iMuSbiw,482
+datamint/utils/logging_utils.py,sha256=9pRoaPrWu2jOdDCiAoUsjEdP5ZwaealWL3hjUqFvx9g,4022
 datamint/utils/torchmetrics.py,sha256=lwU0nOtsSWfebyp7dvjlAggaqXtj5ohSEUXOg3L0hJE,2837
 datamint/utils/visualization.py,sha256=yaUVAOHar59VrGUjpAWv5eVvQSfztFG0eP9p5Vt3l-M,4470
-datamint-1.9.
-datamint-1.9.
-datamint-1.9.
-datamint-1.9.
+datamint-1.9.3.dist-info/METADATA,sha256=gAyuwZSWmgg7tzzD8RcfNM_TV5N9ec-phDRB1W3gO1Y,4100
+datamint-1.9.3.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+datamint-1.9.3.dist-info/entry_points.txt,sha256=mn5H6jPjO-rY0W0CAZ6Z_KKWhMLvyVaSpoqk77jlTI4,145
+datamint-1.9.3.dist-info/RECORD,,
{datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/WHEEL: file without changes
{datamint-1.9.2.dist-info → datamint-1.9.3.dist-info}/entry_points.txt: file without changes