datamint 1.4.1__py3-none-any.whl → 1.5.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release. This version of datamint might be problematic.
- datamint/apihandler/annotation_api_handler.py +288 -65
- datamint/apihandler/root_api_handler.py +227 -101
- datamint/client_cmd_tools/datamint_config.py +102 -59
- datamint/client_cmd_tools/datamint_upload.py +24 -16
- datamint/utils/dicom_utils.py +12 -12
- {datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/METADATA +1 -1
- {datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/RECORD +9 -9
- {datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/WHEEL +0 -0
- {datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/entry_points.txt +0 -0
datamint/client_cmd_tools/datamint_config.py CHANGED

@@ -1,45 +1,82 @@
 import argparse
 import logging
+import os
+import platform
 from datamint import configs
 from datamint.utils.logging_utils import load_cmdline_logging_config
-
-
+from rich.prompt import Prompt, Confirm
+from rich.console import Console
+from rich.theme import Theme
+
+# Create a custom theme that works well on both dark and blue backgrounds
+def _create_console_theme() -> Theme:
+    """Create a custom Rich theme optimized for cross-platform terminals."""
+    # Detect if we're likely on PowerShell (Windows + PowerShell)
+    is_powershell = (
+        platform.system() == "Windows" and
+        os.environ.get("PSModulePath") is not None
+    )
+
+    if is_powershell:
+        # PowerShell blue background - use high contrast colors
+        return Theme({
+            "warning": "bright_yellow",
+            "error": "bright_red on white",
+            "success": "bright_green",
+            "key": "bright_cyan",
+            "accent": "bright_cyan",
+            "title": "bold"
+        })
+    else:
+        # Linux/Unix terminals - standard colors
+        return Theme({
+            "warning": "yellow",
+            "error": "red",
+            "success": "green",
+            "key": "cyan",
+            "accent": "bright_blue",
+            "title": "bold"
+        })
+
+# Create console with custom theme
+console = Console(theme=_create_console_theme())
 _LOGGER = logging.getLogger(__name__)
-_USER_LOGGER = logging.getLogger('user_logger')


 def configure_default_url():
     """Configure the default API URL interactively."""
-
-
+    current_url = configs.get_value(configs.APIURL_KEY, 'Not set')
+    console.print(f"Current default URL: [key]{current_url}[/key]")
+    url = Prompt.ask("Enter the default API URL (leave empty to abort)", console=console).strip()
     if url == '':
         return

     # Basic URL validation
     if not (url.startswith('http://') or url.startswith('https://')):
-
+        console.print("[warning]⚠️ URL should start with http:// or https://[/warning]")
         return

     configs.set_value(configs.APIURL_KEY, url)
-
+    console.print("[success]✅ Default API URL set successfully.[/success]")


 def ask_api_key(ask_to_save: bool) -> str | None:
     """Ask user for API key with improved guidance."""
-
+    console.print("[info]💡 Get your API key from your Datamint administrator or the web app (https://app.datamint.io/team)[/info]")

-    api_key =
+    api_key = Prompt.ask('API key (leave empty to abort)', console=console).strip()
     if api_key == '':
         return None

     if ask_to_save:
-        ans =
+        ans = Confirm.ask("Save the API key so it automatically loads next time? (y/n): ",
+                          default=True, console=console)
         try:
-            if ans
+            if ans:
                 configs.set_value(configs.APIKEY_KEY, api_key)
-
+                console.print("[success]✅ API key saved.[/success]")
         except Exception as e:
-
+            console.print("[error]❌ Error saving API key.[/error]")
             _LOGGER.exception(e)
     return api_key

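The module-level `console` created above is what all of the new user-facing messages go through, so markup such as `[success]…[/success]` or `[key]…[/key]` resolves against the custom `Theme` rather than hard-coded colors, and the PowerShell probe (Windows plus a `PSModulePath` environment variable) swaps in higher-contrast variants. A minimal standalone sketch of that Rich mechanism, with illustrative style names rather than the package's exact theme:

```python
# Not datamint code: a small sketch of how Console + Theme resolve markup styles.
from rich.console import Console
from rich.theme import Theme

console = Console(theme=Theme({"success": "green", "error": "red", "key": "cyan"}))
console.print("[key]api_url[/key]: https://example.invalid")  # "key" -> cyan
console.print("[success]✅ Saved.[/success]")                  # "success" -> green
console.print("[error]❌ Something went wrong.[/error]")       # "error" -> red
```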
@@ -48,85 +85,91 @@ def show_all_configurations():
     """Display all current configurations in a user-friendly format."""
     config = configs.read_config()
     if config is not None and len(config) > 0:
-
+        console.print("[title]📋 Current configurations:[/title]")
         for key, value in config.items():
             # Mask API key for security
             if key == configs.APIKEY_KEY and value:
                 masked_value = f"{value[:3]}...{value[-3:]}" if len(value) > 6 else value
-
+                console.print(f" [key]{key}[/key]: [dim]{masked_value}[/dim]")
             else:
-
+                console.print(f" [key]{key}[/key]: {value}")
     else:
-
+        console.print("[dim]No configurations found.[/dim]")


 def clear_all_configurations():
     """Clear all configurations with confirmation."""
-    yesno =
-
+    yesno = Confirm.ask('Are you sure you want to clear all configurations?',
+                        default=True, console=console)
+    if yesno:
         configs.clear_all_configurations()
-
+        console.print("[success]✅ All configurations cleared.[/success]")


 def configure_api_key():
+    """Configure API key interactively."""
     api_key = ask_api_key(ask_to_save=False)
     if api_key is None:
         return
     configs.set_value(configs.APIKEY_KEY, api_key)
-
+    console.print("[success]✅ API key saved.[/success]")


 def test_connection():
     """Test the API connection with current settings."""
     try:
         from datamint import APIHandler
-
+        console.print("[accent]🔄 Testing connection...[/accent]")
         api = APIHandler()
         # Simple test - try to get projects
         projects = api.get_projects()
-
+        console.print(f"[success]✅ Connection successful! Found {len(projects)} projects.[/success]")
     except ImportError:
-
+        console.print("[error]❌ Full API not available. Install with: pip install datamint-python-api[full][/error]")
     except Exception as e:
-
-        _USER_LOGGER.info("💡 Check your API key and URL settings")
+        console.print(f"[error]❌ Connection failed: {e}[/error]")


 def interactive_mode():
-
-
-
-
-
-
-    while True:
-        _USER_LOGGER.info("\n📋 Select the action you want to perform:")
-        _USER_LOGGER.info(" (1) Configure the API key")
-        _USER_LOGGER.info(" (2) Configure the default URL")
-        _USER_LOGGER.info(" (3) Show all configuration settings")
-        _USER_LOGGER.info(" (4) Clear all configuration settings")
-        _USER_LOGGER.info(" (5) Test connection")
-        _USER_LOGGER.info(" (q) Exit")
-        choice = input("Enter your choice: ").lower().strip()
-
-        if choice == '1':
+    """Run the interactive configuration mode."""
+    console.print("[title]🔧 Datamint Configuration Tool[/title]")
+
+    try:
+        if len(configs.read_config()) == 0:
+            console.print("[warning]👋 Welcome! Let's set up your API key first.[/warning]")
             configure_api_key()
-        elif choice == '2':
-            configure_default_url()
-        elif choice == '3':
-            show_all_configurations()
-        elif choice == '4':
-            clear_all_configurations()
-        elif choice == '5':
-            test_connection()
-        elif choice in ('q', 'exit', 'quit'):
-            _USER_LOGGER.info("👋 Goodbye!")
-            break
-        else:
-            _USER_LOGGER.info("❌ Invalid choice. Please enter a number between 1 and 5 or 'q' to quit.")

+        while True:
+            console.print("\n[title]📋 Select the action you want to perform:[/title]")
+            console.print(" [accent](1)[/accent] Configure the API key")
+            console.print(" [accent](2)[/accent] Configure the default URL")
+            console.print(" [accent](3)[/accent] Show all configuration settings")
+            console.print(" [accent](4)[/accent] Clear all configuration settings")
+            console.print(" [accent](5)[/accent] Test connection")
+            console.print(" [accent](q)[/accent] Exit")
+            choice = Prompt.ask("Enter your choice", console=console).lower().strip()
+
+            if choice == '1':
+                configure_api_key()
+            elif choice == '2':
+                configure_default_url()
+            elif choice == '3':
+                show_all_configurations()
+            elif choice == '4':
+                clear_all_configurations()
+            elif choice == '5':
+                test_connection()
+            elif choice in ('q', 'exit', 'quit'):
+                break
+            else:
+                console.print("[error]❌ Invalid choice. Please enter a number between 1 and 5 or 'q' to quit.[/error]")
+    except KeyboardInterrupt:
+        console.print('')
+
+    console.print("[success]👋 Goodbye![/success]")

 def main():
+    """Main entry point for the configuration tool."""
     load_cmdline_logging_config()
     parser = argparse.ArgumentParser(
         description='🔧 Datamint API Configuration Tool',
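The reworked `interactive_mode` above swaps raw `input()` calls for `rich.prompt` and wraps the menu loop in a `KeyboardInterrupt` guard, so Ctrl+C exits cleanly instead of printing a traceback. A self-contained sketch of that pattern (the prompt texts are placeholders, not the tool's exact wording):

```python
# Not datamint code: the Prompt/Confirm + KeyboardInterrupt pattern in isolation.
from rich.console import Console
from rich.prompt import Confirm, Prompt

console = Console()
try:
    while True:
        choice = Prompt.ask("Enter your choice", console=console).lower().strip()
        if choice in ("q", "exit", "quit"):
            break
        if Confirm.ask(f"Run action '{choice}'?", default=True, console=console):
            console.print(f"[green]Running {choice}...[/green]")
except KeyboardInterrupt:
    console.print("")  # keep the terminal tidy when the user presses Ctrl+C
console.print("👋 Goodbye!")
```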
@@ -148,15 +191,15 @@ More Documentation: https://sonanceai.github.io/datamint-python-api/command_line

     if args.api_key is not None:
         configs.set_value(configs.APIKEY_KEY, args.api_key)
-
+        console.print("[success]✅ API key saved.[/success]")

     if args.default_url is not None:
         # Basic URL validation
         if not (args.default_url.startswith('http://') or args.default_url.startswith('https://')):
-
+            console.print("[error]❌ URL must start with http:// or https://[/error]")
             return
         configs.set_value(configs.APIURL_KEY, args.default_url)
-
+        console.print("[success]✅ Default URL saved.[/success]")

     no_arguments_provided = args.api_key is None and args.default_url is None

datamint/client_cmd_tools/datamint_upload.py CHANGED

@@ -7,13 +7,14 @@ from pathlib import Path
 import sys
 from datamint.utils.dicom_utils import is_dicom
 import fnmatch
-from typing import
+from typing import Generator, Optional, Any
 from collections import defaultdict
 from datamint import __version__ as datamint_version
 from datamint import configs
 from datamint.client_cmd_tools.datamint_config import ask_api_key
 from datamint.utils.logging_utils import load_cmdline_logging_config
 import yaml
+from collections.abc import Iterable

 # Create two loggings: one for the user and one for the developer
 _LOGGER = logging.getLogger(__name__)
@@ -86,9 +87,9 @@ def _is_system_file(path: Path) -> bool:
     return any(fnmatch.fnmatch(path.name, pattern) for pattern in ignored_patterns)


-def walk_to_depth(path: str,
+def walk_to_depth(path: str | Path,
                   depth: int,
-                  exclude_pattern: str = None) -> Generator[Path, None, None]:
+                  exclude_pattern: str | None = None) -> Generator[Path, None, None]:
     path = Path(path)
     for child in path.iterdir():
         if _is_system_file(child):
@@ -104,7 +105,7 @@ def walk_to_depth(path: str,
             yield child


-def filter_files(files_path:
+def filter_files(files_path: Iterable[Path],
                  include_extensions,
                  exclude_extensions) -> list[Path]:
     def fix_extension(ext: str) -> str:
@@ -112,7 +113,7 @@ def filter_files(files_path: Sequence[Path],
             return ext
         return '.' + ext

-    def normalize_extensions(exts_list:
+    def normalize_extensions(exts_list: Iterable[str]) -> list[str]:
         # explodes the extensions if they are separated by commas
         exts_list = [ext.split(',') for ext in exts_list]
         exts_list = [item for sublist in exts_list for item in sublist]
@@ -140,7 +141,7 @@ def filter_files(files_path: Sequence[Path],
     return files_path


-def handle_api_key() -> str:
+def handle_api_key() -> str | None:
     """
     Checks for API keys.
     If it does not exist, it asks the user to input it.
@@ -330,7 +331,7 @@ def _collect_metadata_files(files_path: list[str], auto_detect_json: bool) -> tu
     return metadata_files, filtered_files_path


-def _parse_args() -> tuple[Any, list, Optional[list[dict]], Optional[list[str]]]:
+def _parse_args() -> tuple[Any, list[str], Optional[list[dict]], Optional[list[str]]]:
     parser = argparse.ArgumentParser(
         description='DatamintAPI command line tool for uploading DICOM files and other resources')

@@ -391,11 +392,11 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]], Optional[list[str]]]

     # Handle path argument priority: positional takes precedence over --path flag
     if args.path is not None and args.path_flag is not None:
-        _USER_LOGGER.warning("Both positional path and --path flag provided.
+        _USER_LOGGER.warning("Both positional path and --path flag provided.")
+        raise ValueError("Both positional path and --path flag provided.")
+    elif args.path is not None and isinstance(args.path, (str, Path)):
         final_path = args.path
-    elif args.
-        final_path = args.path
-    elif args.path_flag is not None:
+    elif args.path_flag is not None and isinstance(args.path_flag, (str, Path)):
         final_path = args.path_flag
     else:
         parser.error("Path argument is required. Provide it as a positional argument or use --path flag.")
@@ -424,6 +425,10 @@ def _parse_args() -> tuple[Any, list, Optional[list[dict]], Optional[list[str]]]
     else:
         try:
             recursive_depth = 0 if args.recursive is None else args.recursive
+            if recursive_depth < 0:
+                recursive_depth = MAX_RECURSION_LIMIT
+            else:
+                recursive_depth = min(MAX_RECURSION_LIMIT, recursive_depth)
             file_path = walk_to_depth(args.path, recursive_depth, args.exclude)
             file_path = filter_files(file_path, args.include_extensions, args.exclude_extensions)
             file_path = list(map(str, file_path))  # from Path to str
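The clamping block above gives the recursion depth a defined range: `None` still means no recursion, negative values now mean "no practical limit", and everything is capped at `MAX_RECURSION_LIMIT` (referenced but not defined in this hunk; presumably a module-level constant). A pure-Python restatement of the rule, with 1000 as a placeholder for the real constant:

```python
# Sketch of the new depth-clamping rule; 1000 is a placeholder, not the real constant.
MAX_RECURSION_LIMIT = 1000

def clamp_depth(recursive: int | None) -> int:
    depth = 0 if recursive is None else recursive
    # Negative values request unlimited recursion, which is capped at the limit.
    return MAX_RECURSION_LIMIT if depth < 0 else min(MAX_RECURSION_LIMIT, depth)

assert clamp_depth(None) == 0
assert clamp_depth(3) == 3
assert clamp_depth(-1) == MAX_RECURSION_LIMIT
```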
@@ -533,7 +538,7 @@ def print_input_summary(files_path: list[str],


 def print_results_summary(files_path: list[str],
-                          results: list[str | Exception]):
+                          results: list[str | Exception]) -> int:
     # Check for failed uploads
     failure_files = [f for f, r in zip(files_path, results) if isinstance(r, Exception)]
     _USER_LOGGER.info(f"\nUpload summary:")
@@ -541,12 +546,13 @@ def print_results_summary(files_path: list[str],
     _USER_LOGGER.info(f"\tSuccessful uploads: {len(files_path) - len(failure_files)}")
     _USER_LOGGER.info(f"\tFailed uploads: {len(failure_files)}")
     if len(failure_files) > 0:
-        _USER_LOGGER.warning(f"\tFailed files: {failure_files}")
+        _USER_LOGGER.warning(f"\tFailed files: {[os.path.basename(f) for f in failure_files]}")
         _USER_LOGGER.warning(f"\nFailures:")
         for f, r in zip(files_path, results):
             _LOGGER.debug(f"Failure: {f} - {r}")
             if isinstance(r, Exception):
                 _USER_LOGGER.warning(f"\t{os.path.basename(f)}: {r}")
+    return len(failure_files)


 def main():
@@ -556,7 +562,7 @@ def main():
         args, files_path, segfiles, metadata_files = _parse_args()
     except Exception as e:
         _USER_LOGGER.error(f'Error validating arguments. {e}')
-
+        sys.exit(1)

     print_input_summary(files_path,
                         args=args,
@@ -585,12 +591,14 @@ def main():
         segmentation_files=segfiles,
         transpose_segmentation=args.transpose_segmentation,
         assemble_dicoms=True,
-
+        metadata=metadata_files
     )
     _USER_LOGGER.info('Upload finished!')
     _LOGGER.debug(f"Number of results: {len(results)}")

-    print_results_summary(files_path, results)
+    num_failures = print_results_summary(files_path, results)
+    if num_failures > 0:
+        sys.exit(1)


 if __name__ == '__main__':
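With `sys.exit(1)` now raised both on argument-validation errors and when `print_results_summary` reports failed uploads, the command's exit status can be checked from scripts. A hypothetical check of that contract; the console-script name `datamint-upload` is an assumption (the `--path` flag does appear in the diff above, but the value here is illustrative):

```python
# Hypothetical: invoke the upload CLI and inspect the new non-zero exit status.
import subprocess

proc = subprocess.run(["datamint-upload", "--path", "./dicoms"], check=False)
if proc.returncode != 0:
    print("Upload reported failures (or argument validation failed).")
```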
datamint/utils/dicom_utils.py CHANGED

@@ -243,20 +243,20 @@ def assemble_dicoms(files_path: list[str | IO],

     for file_path in tqdm(files_path, desc="Reading DICOMs metadata", unit="file"):
         dicom = pydicom.dcmread(file_path,
-                                specific_tags=['
-
-        if
+                                specific_tags=['SeriesInstanceUID', 'InstanceNumber', 'Rows', 'Columns'])
+        series_uid = dicom.get('SeriesInstanceUID', None)
+        if series_uid is None:
             # generate a random uid
-
+            series_uid = pydicom.uid.generate_uid()
         instance_number = dicom.get('InstanceNumber', 0)
         rows = dicom.get('Rows', None)
         columns = dicom.get('Columns', None)
-        dicoms_map[
+        dicoms_map[series_uid].append((instance_number, file_path, rows, columns))
         if hasattr(file_path, "seek"):
             file_path.seek(0)

-    # Validate that all DICOMs with the same
-    for
+    # Validate that all DICOMs with the same SeriesInstanceUID have matching dimensions
+    for series_uid, dicom_list in dicoms_map.items():
         if len(dicom_list) <= 1:
             continue

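For context, the rewritten loop reads only a handful of tags per file and buckets inputs by `SeriesInstanceUID`, generating a random UID when the tag is absent. A compact, self-contained approximation of that grouping step (not the package's exact implementation; it omits the `Rows`/`Columns` bookkeeping used for the dimension check below):

```python
# Approximation of the grouping step above, using pydicom directly.
from collections import defaultdict
import pydicom
from pydicom.uid import generate_uid

def group_by_series(paths: list[str]) -> dict[str, list[tuple[int, str]]]:
    groups: dict[str, list[tuple[int, str]]] = defaultdict(list)
    for path in paths:
        # Read only the tags needed to group and order the slices.
        ds = pydicom.dcmread(path, specific_tags=['SeriesInstanceUID', 'InstanceNumber'])
        uid = ds.get('SeriesInstanceUID', None) or generate_uid()  # fall back to a random UID
        groups[uid].append((int(ds.get('InstanceNumber', 0) or 0), path))
    return groups
```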
@@ -268,7 +268,7 @@ def assemble_dicoms(files_path: list[str | IO],
         for instance_number, file_path, rows, columns in dicom_list:
             if rows != first_rows or columns != first_columns:
                 msg = (
-                    f"Dimension mismatch in
+                    f"Dimension mismatch in SeriesInstanceUID {series_uid}: "
                     f"Expected {first_rows}x{first_columns}, got {rows}x{columns} "
                     f"for file {file_path} and {dicom_list[0][1]}"
                 )
@@ -360,11 +360,11 @@ def _generate_dicom_name(ds: pydicom.Dataset) -> str:
     # components.append(os.path.basename(ds.filename))
     if hasattr(ds, 'SeriesDescription'):
         components.append(ds.SeriesDescription)
-    if hasattr(ds, 'SeriesNumber'):
+    if len(components) == 0 and hasattr(ds, 'SeriesNumber'):
         components.append(f"ser{ds.SeriesNumber}")
     if hasattr(ds, 'StudyDescription'):
         components.append(ds.StudyDescription)
-
+    elif hasattr(ds, 'StudyID'):
         components.append(ds.StudyID)

     # Join components and add extension
@@ -375,8 +375,8 @@ def _generate_dicom_name(ds: pydicom.Dataset) -> str:
     if len(description) > 0:
         return description

-    if hasattr(ds, '
-        return ds.
+    if hasattr(ds, 'SeriesInstanceUID'):
+        return ds.SeriesInstanceUID + ".dcm"

     # Fallback to generic name if no attributes found
     return ds.filename if hasattr(ds, 'filename') else f"merged_dicom_{uuid.uuid4()}.dcm"
{datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/METADATA CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.3
 Name: datamint
-Version: 1.4.1
+Version: 1.5.0
 Summary: A library for interacting with the Datamint API, designed for efficient data management, processing and Deep Learning workflows.
 Requires-Python: >=3.10
 Classifier: Programming Language :: Python :: 3
{datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/RECORD CHANGED

@@ -1,13 +1,13 @@
 datamint/__init__.py,sha256=7rKCCsaa4RBRTIfuHB708rai1xwDHLtkFNFJGKYG5D4,757
-datamint/apihandler/annotation_api_handler.py,sha256=
+datamint/apihandler/annotation_api_handler.py,sha256=NMS4SPcp1QAyee8TqLp2sw9htjGcdrEiW6_Qv4h0JlE,49905
 datamint/apihandler/api_handler.py,sha256=cdVSddrFCKlF_BJ81LO1aJ0OP49rssjpNEFzJ6Q7YyY,384
 datamint/apihandler/base_api_handler.py,sha256=XSxZEQEkbQpuixGDu_P9jbxUQht3Z3JgxaeiFKPkVDM,11690
 datamint/apihandler/dto/annotation_dto.py,sha256=otCIesoqGBlbSOw4ErqFsXp2HwJsPNUQlkynQh_7pHg,7110
 datamint/apihandler/exp_api_handler.py,sha256=hFUgUgBc5rL7odK7gTW3MnrvMY1pVfJUpUdzRNobMQE,6226
-datamint/apihandler/root_api_handler.py,sha256
+datamint/apihandler/root_api_handler.py,sha256=OIGq6aHX64B94MmAikcFzF0rdekRH4l1S59x2Pa_DJA,51739
 datamint/client_cmd_tools/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datamint/client_cmd_tools/datamint_config.py,sha256=
-datamint/client_cmd_tools/datamint_upload.py,sha256=
+datamint/client_cmd_tools/datamint_config.py,sha256=md7dnWrbl10lPtXKbmD9yo6onLJsajeG8Vz0ZWH1v4M,8181
+datamint/client_cmd_tools/datamint_upload.py,sha256=yGwEsg5mrAH92UIMl1d5Vv5t0HWUAam4PW-SLJHbYE0,26380
 datamint/configs.py,sha256=Bdp6NydYwyCJ2dk19_gf_o3M2ZyQOmMHpLi8wEWNHUk,1426
 datamint/dataset/__init__.py,sha256=4PlUKSvVhdfQvvuq8jQXrkdqnot-iTTizM3aM1vgSwg,47
 datamint/dataset/base_dataset.py,sha256=EnnIeF3ZaBL2M8qEV39U0ogKptyvezBNoVOvrS12bZ8,38756
@@ -18,12 +18,12 @@ datamint/experiment/__init__.py,sha256=5qQOMzoG17DEd1YnTF-vS0qiM-DGdbNh42EUo91CR
 datamint/experiment/_patcher.py,sha256=ZgbezoevAYhJsbiJTvWPALGTcUiMT371xddcTllt3H4,23296
 datamint/experiment/experiment.py,sha256=aHK9dRFdQTi569xgUg1KqlCZLHZpDmSH3g3ndPIZvXw,44546
 datamint/logging.yaml,sha256=a5dsATpul7QHeUHB2TjABFjWaPXBMbO--dgn8GlRqwk,483
-datamint/utils/dicom_utils.py,sha256=
+datamint/utils/dicom_utils.py,sha256=n1CrYg1AgnlbgIktDfVXQ1Logh8lwCqYbjqHu5GElUE,26062
 datamint/utils/io_utils.py,sha256=ebP1atKkhKEf1mUU1LsVwDq0h_so7kVKkD_7hQYn_kM,6754
 datamint/utils/logging_utils.py,sha256=DvoA35ATYG3JTwfXEXYawDyKRfHeCrH0a9czfkmz8kM,1851
 datamint/utils/torchmetrics.py,sha256=lwU0nOtsSWfebyp7dvjlAggaqXtj5ohSEUXOg3L0hJE,2837
 datamint/utils/visualization.py,sha256=yaUVAOHar59VrGUjpAWv5eVvQSfztFG0eP9p5Vt3l-M,4470
-datamint-1.
-datamint-1.
-datamint-1.
-datamint-1.
+datamint-1.5.0.dist-info/METADATA,sha256=_JkJij-WrsGHzHO3k39TAFKcX-0WQbvVwi-6POUgetE,4065
+datamint-1.5.0.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+datamint-1.5.0.dist-info/entry_points.txt,sha256=mn5H6jPjO-rY0W0CAZ6Z_KKWhMLvyVaSpoqk77jlTI4,145
+datamint-1.5.0.dist-info/RECORD,,
{datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/WHEEL: File without changes

{datamint-1.4.1.dist-info → datamint-1.5.0.dist-info}/entry_points.txt: File without changes