brkraw 0.3.11__py3-none-any.whl → 0.5.0__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- brkraw/__init__.py +9 -3
- brkraw/apps/__init__.py +12 -0
- brkraw/apps/addon/__init__.py +30 -0
- brkraw/apps/addon/core.py +35 -0
- brkraw/apps/addon/dependencies.py +402 -0
- brkraw/apps/addon/installation.py +500 -0
- brkraw/apps/addon/io.py +21 -0
- brkraw/apps/hook/__init__.py +25 -0
- brkraw/apps/hook/core.py +636 -0
- brkraw/apps/loader/__init__.py +10 -0
- brkraw/apps/loader/core.py +622 -0
- brkraw/apps/loader/formatter.py +288 -0
- brkraw/apps/loader/helper.py +797 -0
- brkraw/apps/loader/info/__init__.py +11 -0
- brkraw/apps/loader/info/scan.py +85 -0
- brkraw/apps/loader/info/scan.yaml +90 -0
- brkraw/apps/loader/info/study.py +69 -0
- brkraw/apps/loader/info/study.yaml +156 -0
- brkraw/apps/loader/info/transform.py +92 -0
- brkraw/apps/loader/types.py +220 -0
- brkraw/cli/__init__.py +5 -0
- brkraw/cli/commands/__init__.py +2 -0
- brkraw/cli/commands/addon.py +327 -0
- brkraw/cli/commands/config.py +205 -0
- brkraw/cli/commands/convert.py +903 -0
- brkraw/cli/commands/hook.py +348 -0
- brkraw/cli/commands/info.py +74 -0
- brkraw/cli/commands/init.py +214 -0
- brkraw/cli/commands/params.py +106 -0
- brkraw/cli/commands/prune.py +288 -0
- brkraw/cli/commands/session.py +371 -0
- brkraw/cli/hook_args.py +80 -0
- brkraw/cli/main.py +83 -0
- brkraw/cli/utils.py +60 -0
- brkraw/core/__init__.py +13 -0
- brkraw/core/config.py +380 -0
- brkraw/core/entrypoints.py +25 -0
- brkraw/core/formatter.py +367 -0
- brkraw/core/fs.py +495 -0
- brkraw/core/jcamp.py +600 -0
- brkraw/core/layout.py +451 -0
- brkraw/core/parameters.py +781 -0
- brkraw/core/zip.py +1121 -0
- brkraw/dataclasses/__init__.py +14 -0
- brkraw/dataclasses/node.py +139 -0
- brkraw/dataclasses/reco.py +33 -0
- brkraw/dataclasses/scan.py +61 -0
- brkraw/dataclasses/study.py +131 -0
- brkraw/default/__init__.py +3 -0
- brkraw/default/pruner_specs/deid4share.yaml +42 -0
- brkraw/default/rules/00_default.yaml +4 -0
- brkraw/default/specs/metadata_dicom.yaml +236 -0
- brkraw/default/specs/metadata_transforms.py +92 -0
- brkraw/resolver/__init__.py +7 -0
- brkraw/resolver/affine.py +539 -0
- brkraw/resolver/datatype.py +69 -0
- brkraw/resolver/fid.py +90 -0
- brkraw/resolver/helpers.py +36 -0
- brkraw/resolver/image.py +188 -0
- brkraw/resolver/nifti.py +370 -0
- brkraw/resolver/shape.py +235 -0
- brkraw/schema/__init__.py +3 -0
- brkraw/schema/context_map.yaml +62 -0
- brkraw/schema/meta.yaml +57 -0
- brkraw/schema/niftiheader.yaml +95 -0
- brkraw/schema/pruner.yaml +55 -0
- brkraw/schema/remapper.yaml +128 -0
- brkraw/schema/rules.yaml +154 -0
- brkraw/specs/__init__.py +10 -0
- brkraw/specs/hook/__init__.py +12 -0
- brkraw/specs/hook/logic.py +31 -0
- brkraw/specs/hook/validator.py +22 -0
- brkraw/specs/meta/__init__.py +5 -0
- brkraw/specs/meta/validator.py +156 -0
- brkraw/specs/pruner/__init__.py +15 -0
- brkraw/specs/pruner/logic.py +361 -0
- brkraw/specs/pruner/validator.py +119 -0
- brkraw/specs/remapper/__init__.py +27 -0
- brkraw/specs/remapper/logic.py +924 -0
- brkraw/specs/remapper/validator.py +314 -0
- brkraw/specs/rules/__init__.py +6 -0
- brkraw/specs/rules/logic.py +263 -0
- brkraw/specs/rules/validator.py +103 -0
- brkraw-0.5.0.dist-info/METADATA +81 -0
- brkraw-0.5.0.dist-info/RECORD +88 -0
- {brkraw-0.3.11.dist-info → brkraw-0.5.0.dist-info}/WHEEL +1 -2
- brkraw-0.5.0.dist-info/entry_points.txt +13 -0
- brkraw/lib/__init__.py +0 -4
- brkraw/lib/backup.py +0 -641
- brkraw/lib/bids.py +0 -0
- brkraw/lib/errors.py +0 -125
- brkraw/lib/loader.py +0 -1220
- brkraw/lib/orient.py +0 -194
- brkraw/lib/parser.py +0 -48
- brkraw/lib/pvobj.py +0 -301
- brkraw/lib/reference.py +0 -245
- brkraw/lib/utils.py +0 -471
- brkraw/scripts/__init__.py +0 -0
- brkraw/scripts/brk_backup.py +0 -106
- brkraw/scripts/brkraw.py +0 -744
- brkraw/ui/__init__.py +0 -0
- brkraw/ui/config.py +0 -17
- brkraw/ui/main_win.py +0 -214
- brkraw/ui/previewer.py +0 -225
- brkraw/ui/scan_info.py +0 -72
- brkraw/ui/scan_list.py +0 -73
- brkraw/ui/subj_info.py +0 -128
- brkraw-0.3.11.dist-info/METADATA +0 -25
- brkraw-0.3.11.dist-info/RECORD +0 -28
- brkraw-0.3.11.dist-info/entry_points.txt +0 -3
- brkraw-0.3.11.dist-info/top_level.txt +0 -2
- tests/__init__.py +0 -0
- {brkraw-0.3.11.dist-info → brkraw-0.5.0.dist-info/licenses}/LICENSE +0 -0
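The listing shows that 0.5.0 removes the legacy brkraw.lib, brkraw.scripts, and brkraw.ui modules and replaces them with the new brkraw.apps, brkraw.cli, brkraw.core, brkraw.resolver, brkraw.specs, and brkraw.dataclasses packages. As a minimal, illustrative way to check which layout an installed environment actually provides (module names are taken from the file listing above; brkraw itself ships no such check):

    # Illustrative sketch only: probes which brkraw package layout is importable.
    # Package names come from the wheel file listing; this is not a brkraw API.
    from importlib import metadata, util

    print("brkraw version:", metadata.version("brkraw"))
    print("0.5.0-style CLI package:", util.find_spec("brkraw.cli") is not None)
    print("0.3.x-style lib package:", util.find_spec("brkraw.lib") is not None)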
brkraw/cli/commands/convert.py (new file, +903 lines)

@@ -0,0 +1,903 @@
+from __future__ import annotations
+
+"""Convert a scan/reco to NIfTI with optional metadata sidecar.
+
+Last updated: 2026-01-06
+"""
+
+import argparse
+import inspect
+import json
+import logging
+import os
+import re
+import sys
+from pathlib import Path
+from typing import Any, Mapping, Optional, Dict, List, Tuple, Literal, cast, get_args
+
+import numpy as np
+from brkraw.cli.utils import load
+from brkraw.cli.hook_args import load_hook_args_yaml, merge_hook_args
+from brkraw.core import config as config_core
+from brkraw.core import layout as layout_core
+from brkraw.resolver import nifti as nifti_resolver
+from brkraw.specs import remapper as remapper_core
+from brkraw.resolver.nifti import XYZUNIT, TUNIT, Nifti1HeaderContents
+from brkraw.resolver.affine import SubjectPose, SubjectType
+from brkraw.apps.loader.types import AffineSpace
+
+
+logger = logging.getLogger("brkraw")
+
+_INVALID_CHARS = re.compile(r"[^A-Za-z0-9._-]+")
+
+_COUNTER_TAG = re.compile(r"\{(?:Counter|counter)\}")
+
+
+def cmd_convert(args: argparse.Namespace) -> int:
+    """Convert a scan/reco to NIfTI with optional metadata sidecars.
+
+    Args:
+        args: Parsed CLI arguments for the convert subcommand.
+
+    Returns:
+        Exit status code (0 on success, non-zero on failure).
+    """
+    # resolve core paths
+    if args.path is None:
+        args.path = os.environ.get("BRKRAW_PATH")
+    if args.path is None:
+        args.parser.print_help()
+        return 2
+    if not Path(args.path).exists():
+        logger.error("Path not found: %s", args.path)
+        return 2
+
+    if args.output is None:
+        args.output = os.environ.get("BRKRAW_CONVERT_OUTPUT")
+    if args.prefix is None:
+        args.prefix = os.environ.get("BRKRAW_CONVERT_PREFIX")
+
+    # resolve scan/reco ids
+    id_sources = (
+        ("scan_id", "BRKRAW_SCAN_ID", True),
+        ("scan_id", "BRKRAW_CONVERT_SCAN_ID", False),
+        ("reco_id", "BRKRAW_RECO_ID", False),
+        ("reco_id", "BRKRAW_CONVERT_RECO_ID", False),
+    )
+    for attr, env_key, split_comma in id_sources:
+        if getattr(args, attr) is not None:
+            continue
+        value = os.environ.get(env_key)
+        if not value:
+            continue
+        text = value.split(",")[0] if split_comma else value
+        try:
+            setattr(args, attr, int(text))
+        except ValueError:
+            logger.error("Invalid %s: %s", env_key, value)
+            return 2
+
+    # resolve flags + spaces
+    if not args.sidecar:
+        args.sidecar = _env_flag("BRKRAW_CONVERT_SIDECAR")
+    if args.no_convert and not args.sidecar:
+        logger.error("--no-convert requires --sidecar.")
+        return 2
+    if not args.flip_x:
+        args.flip_x = _env_flag("BRKRAW_CONVERT_FLIP_X")
+    if not args.flatten_fg:
+        args.flatten_fg = _env_flag("BRKRAW_CONVERT_FLATTEN_FG")
+    if args.space is None:
+        args.space = os.environ.get("BRKRAW_CONVERT_SPACE")
+    if args.override_subject_type is None:
+        args.override_subject_type = _coerce_choice(
+            "BRKRAW_CONVERT_OVERRIDE_SUBJECT_TYPE",
+            os.environ.get("BRKRAW_CONVERT_OVERRIDE_SUBJECT_TYPE"),
+            get_args(SubjectType),
+        )
+    if args.override_subject_pose is None:
+        args.override_subject_pose = _coerce_choice(
+            "BRKRAW_CONVERT_OVERRIDE_SUBJECT_POSE",
+            os.environ.get("BRKRAW_CONVERT_OVERRIDE_SUBJECT_POSE"),
+            get_args(SubjectPose),
+        )
+    if args.xyz_units == "mm":
+        args.xyz_units = _coerce_choice(
+            "BRKRAW_CONVERT_XYZ_UNITS",
+            os.environ.get("BRKRAW_CONVERT_XYZ_UNITS"),
+            get_args(XYZUNIT),
+            default=args.xyz_units,
+        )
+    if args.t_units == "sec":
+        args.t_units = _coerce_choice(
+            "BRKRAW_CONVERT_T_UNITS",
+            os.environ.get("BRKRAW_CONVERT_T_UNITS"),
+            get_args(TUNIT),
+            default=args.t_units,
+        )
+    if args.space is None:
+        args.space = "subject_ras"
+    for attr, env_key in (
+        ("format", "BRKRAW_CONVERT_FORMAT"),
+        ("header", "BRKRAW_CONVERT_HEADER"),
+        ("context_map", "BRKRAW_CONVERT_CONTEXT_MAP"),
+    ):
+        if getattr(args, attr) is None:
+            setattr(args, attr, os.environ.get(env_key))
+    if args.compress is None:
+        if "BRKRAW_CONVERT_COMPRESS" in os.environ:
+            args.compress = _env_flag("BRKRAW_CONVERT_COMPRESS")
+        else:
+            args.compress = True
+
+    output_is_file = False
+    if args.output:
+        out_path = Path(args.output)
+        output_is_file = out_path.suffix in {".nii", ".gz"} or out_path.name.endswith(".nii.gz")
+    if output_is_file and args.prefix:
+        logger.error("Cannot use --prefix when --output is a file path.")
+        return 2
+
+    args.format = _coerce_choice(
+        "BRKRAW_CONVERT_FORMAT",
+        args.format or "nifti",
+        ("nifti", "nifti1"),
+        default="nifti",
+    )
+
+    try:
+        render_layout_supports_counter = "counter" in inspect.signature(layout_core.render_layout).parameters
+    except (TypeError, ValueError):
+        render_layout_supports_counter = True
+    try:
+        slicepack_supports_counter = (
+            "counter" in inspect.signature(layout_core.render_slicepack_suffixes).parameters
+        )
+    except (TypeError, ValueError):
+        slicepack_supports_counter = True
+
+    hook_args_by_name: Dict[str, Dict[str, Any]] = {}
+    hook_args_yaml_sources: List[str] = []
+    for env_key in ("BRKRAW_CONVERT_HOOK_ARGS_YAML", "BRKRAW_HOOK_ARGS_YAML"):
+        value = os.environ.get(env_key)
+        if value:
+            hook_args_yaml_sources.extend([part.strip() for part in value.split(",") if part.strip()])
+    hook_args_yaml_sources.extend(args.hook_args_yaml or [])
+    if hook_args_yaml_sources:
+        try:
+            hook_args_by_name = load_hook_args_yaml(hook_args_yaml_sources)
+        except ValueError as exc:
+            logger.error("%s", exc)
+            return 2
+
+    try:
+        hook_args_cli = _parse_hook_args(args.hook_arg or [])
+    except ValueError as exc:
+        logger.error("%s", exc)
+        return 2
+    hook_args_by_name = merge_hook_args(hook_args_by_name, hook_args_cli)
+
+    loader = load(args.path, prefix="Loading")
+    try:
+        override_header = nifti_resolver.load_header_overrides(args.header)
+    except ValueError:
+        return 2
+
+    batch_all = args.scan_id is None
+    if batch_all and args.output and not output_is_file and not args.output.endswith(os.sep):
+        args.output = f"{args.output}{os.sep}"
+    if batch_all and output_is_file:
+        logger.error("When omitting --scan-id, --output must be a directory.")
+        return 2
+
+    scan_ids = list(loader.avail.keys()) if batch_all else [args.scan_id]
+    if not scan_ids:
+        logger.error("No scans available for conversion.")
+        return 2
+
+    root = None
+    layout_entries = config_core.layout_entries(root=root)
+    layout_template = config_core.layout_template(root=root)
+    layout_meta = {}
+
+    selector_map = None
+    if args.context_map:
+        # resolve selector
+        try:
+            selector_map = remapper_core.load_context_map(args.context_map)
+        except Exception as exc:
+            logger.error("%s", exc)
+            return 2
+
+        # resolve layout
+        layout_meta = layout_core.load_layout_meta(args.context_map)
+        if isinstance(layout_meta, dict):
+            meta_entries = layout_meta.get("layout_entries")
+            if meta_entries is None:
+                meta_entries = layout_meta.get("layout_fields")
+            if isinstance(meta_entries, list):
+                layout_entries = meta_entries
+            meta_template = layout_meta.get("layout_template")
+            if isinstance(meta_template, str) and meta_template.strip():
+                layout_template = meta_template
+
+    slicepack_suffix = config_core.output_slicepack_suffix(root=root)
+    if isinstance(layout_meta, dict):
+        meta_suffix = layout_meta.get("slicepack_suffix")
+        if isinstance(meta_suffix, str) and meta_suffix.strip():
+            slicepack_suffix = meta_suffix
+
+    total_written = 0
+    reserved_paths: set = set()
+    for scan_id in scan_ids:
+        if scan_id is None:
+            continue
+        scan = loader.get_scan(scan_id)
+        reco_ids = [args.reco_id] if args.reco_id is not None else list(scan.avail.keys())
+        if not reco_ids:
+            if getattr(scan, "_converter_hook", None):
+                reco_ids = [None]
+            else:
+                continue
+        for reco_id in reco_ids:
+            if selector_map is not None:
+                # convert selection by context_map
+                selector_info, selector_meta = layout_core.load_layout_info_parts(
+                    loader,
+                    scan_id,
+                    context_map=args.context_map,
+                    reco_id=reco_id,
+                )
+                if not selector_info and not selector_meta:
+                    logger.debug("Skipping scan %s reco %s (no metadata).", scan_id, reco_id)
+                    continue
+                if not remapper_core.matches_context_map_selectors(
+                    (selector_info, selector_meta),
+                    selector_map,
+                ):
+                    logger.debug("Skipping scan %s reco %s (selector mismatch).", scan_id, reco_id)
+                    continue
+            if args.no_convert:
+                nii_list: List[Any] = []
+                output_count = 1
+            else:
+                nii = loader.convert(
+                    scan_id,
+                    reco_id=reco_id,
+                    format=cast(Literal["nifti", "nifti1"], args.format),
+                    space=cast(AffineSpace, args.space),
+                    override_header=cast(Nifti1HeaderContents, override_header) if override_header else None,
+                    override_subject_type=cast(Optional[SubjectType], args.override_subject_type),
+                    override_subject_pose=cast(Optional[SubjectPose], args.override_subject_pose),
+                    flip_x=args.flip_x,
+                    flatten_fg=args.flatten_fg,
+                    xyz_units=cast(XYZUNIT, args.xyz_units),
+                    t_units=cast(TUNIT, args.t_units),
+                    hook_args_by_name=hook_args_by_name,
+                )
+                if nii is None:
+                    if not batch_all and args.reco_id is not None:
+                        logger.error("No NIfTI output generated for scan %s reco %s.", scan_id, reco_id)
+                        return 2
+                    continue
+                nii_list = list(nii) if isinstance(nii, tuple) else [nii]
+                output_count = len(nii_list)
+
+            slicepack_suffixes: Optional[List[str]] = None
+            output_paths: Optional[List[Path]] = None
+            uses_counter_tag = _uses_counter_tag(
+                layout_template=layout_template,
+                layout_entries=layout_entries,
+                prefix_template=args.prefix,
+            )
+            counter_enabled = bool(uses_counter_tag and render_layout_supports_counter)
+
+            for counter in range(1, 1000):
+                layout_kwargs: Dict[str, Any] = {"counter": counter} if counter_enabled else {}
+                try:
+                    candidate_base_name = layout_core.render_layout(
+                        loader,
+                        scan_id,
+                        layout_entries=layout_entries,
+                        layout_template=layout_template,
+                        context_map=args.context_map,
+                        reco_id=reco_id,
+                        **layout_kwargs,
+                    )
+                except Exception as exc:
+                    logger.error("%s", exc)
+                    return 2
+                if args.prefix:
+                    candidate_base_name = layout_core.render_layout(
+                        loader,
+                        scan_id,
+                        layout_entries=None,
+                        layout_template=args.prefix,
+                        context_map=args.context_map,
+                        reco_id=reco_id,
+                        **layout_kwargs,
+                    )
+                if batch_all and args.prefix:
+                    candidate_base_name = f"{candidate_base_name}_scan-{scan_id}"
+                    if args.reco_id is None and len(reco_ids) > 1:
+                        candidate_base_name = f"{candidate_base_name}_reco-{reco_id}"
+                candidate_base_name = _sanitize_filename(candidate_base_name)
+
+                if not counter_enabled and counter > 1:
+                    candidate_base_name = f"{candidate_base_name}_{counter}"
+
+                slicepack_suffixes = None
+                if not args.no_convert and output_count > 1:
+                    info = layout_core.load_layout_info(
+                        loader,
+                        scan_id,
+                        context_map=args.context_map,
+                        reco_id=reco_id,
+                    )
+                    slicepack_suffixes = layout_core.render_slicepack_suffixes(
+                        info,
+                        count=len(nii_list),
+                        template=slicepack_suffix,
+                        **({"counter": counter} if slicepack_supports_counter and counter_enabled else {}),
+                    )
+                output_paths = _resolve_output_paths(
+                    args.output,
+                    candidate_base_name,
+                    count=output_count,
+                    compress=bool(args.compress),
+                    slicepack_suffix=slicepack_suffix,
+                    slicepack_suffixes=slicepack_suffixes,
+                )
+                if output_paths is None:
+                    return 2
+                if len(output_paths) != output_count:
+                    logger.error("Output path count does not match NIfTI outputs.")
+                    return 2
+                if _paths_collide(output_paths, reserved_paths):
+                    continue
+                break
+            else:
+                logger.error("Could not resolve unique output name after many attempts.")
+                return 2
+
+            if output_paths is None:
+                logger.error("Output paths could not be resolved.")
+                return 2
+            for path in output_paths:
+                reserved_paths.add(path)
+
+            sidecar_meta = None
+            if args.sidecar:
+                sidecar_meta = loader.get_metadata(
+                    scan_id,
+                    reco_id=reco_id,
+                    context_map=args.context_map,
+                )
+
+            if args.no_convert:
+                for path in output_paths:
+                    path.parent.mkdir(parents=True, exist_ok=True)
+                    _write_sidecar(path, sidecar_meta)
+                    total_written += 1
+            else:
+                for path, obj in zip(output_paths, nii_list):
+                    path.parent.mkdir(parents=True, exist_ok=True)
+                    obj.to_filename(str(path))
+                    logger.info("Wrote NIfTI: %s", path)
+                    total_written += 1
+                    if args.sidecar:
+                        _write_sidecar(path, sidecar_meta)
+    if total_written == 0:
+        if args.no_convert:
+            logger.error("No sidecar outputs generated.")
+        else:
+            logger.error("No NIfTI outputs generated.")
+        return 2
+    return 0
+
+
+def cmd_convert_batch(args: argparse.Namespace) -> int:
+    """Convert all datasets under a root folder.
+
+    Args:
+        args: Parsed CLI arguments for the convert-batch subcommand.
+
+    Returns:
+        Exit status code (0 on success, non-zero on failure).
+    """
+    if args.path is None:
+        args.path = os.environ.get("BRKRAW_PATH")
+    if args.path is None:
+        args.parser.print_help()
+        return 2
+    root = Path(args.path).expanduser()
+    if not root.exists():
+        logger.error("Path not found: %s", root)
+        return 2
+    if args.output:
+        out_path = Path(args.output)
+        if out_path.suffix in {".nii", ".gz"} or out_path.name.endswith(".nii.gz"):
+            logger.error("When using convert batch, --output must be a directory.")
+            return 2
+        if not args.output.endswith(os.sep):
+            args.output = f"{args.output}{os.sep}"
+    args.scan_id = None
+    args.reco_id = None
+    candidates = _iter_dataset_paths(root)
+    if not candidates:
+        logger.error("No datasets found under %s", root)
+        return 2
+    failures = 0
+    successes = 0
+    for dataset_path in candidates:
+        logger.info("Converting dataset: %s", dataset_path)
+        dataset_args = argparse.Namespace(**vars(args))
+        dataset_args.path = str(dataset_path)
+        try:
+            rc = cmd_convert(dataset_args)
+        except Exception as exc:
+            logger.error("Failed to convert %s: %s", dataset_path, exc)
+            failures += 1
+            continue
+        if rc != 0:
+            failures += 1
+        else:
+            successes += 1
+    if successes == 0:
+        logger.error("No datasets were converted.")
+        return 2
+    if failures:
+        logger.info("Converted %d dataset(s); %d failed.", successes, failures)
+    return 0
+
+
+def _sanitize_filename(name: str) -> str:
+    """Return a filesystem-safe name by replacing invalid characters.
+
+    Args:
+        name: Input filename or prefix.
+
+    Returns:
+        Sanitized filename string.
+    """
+    parts = []
+    for raw in re.split(r"[\\/]+", name.strip()):
+        if not raw:
+            continue
+        cleaned = _INVALID_CHARS.sub("_", raw)
+        cleaned = re.sub(r"_+", "_", cleaned).strip("._-")
+        if cleaned:
+            parts.append(cleaned)
+    return os.sep.join(parts) or "scan"
+
+
+def _iter_dataset_paths(root: Path) -> List[Path]:
+    """Enumerate dataset roots under a folder or file input.
+
+    Args:
+        root: Root folder or dataset path.
+
+    Returns:
+        List of dataset paths.
+    """
+    if root.is_file():
+        return [root]
+    candidates: List[Path] = []
+    try:
+        for entry in root.iterdir():
+            if entry.is_dir():
+                candidates.append(entry)
+                continue
+            if entry.is_file() and _is_zip_file(entry):
+                candidates.append(entry)
+    except PermissionError:
+        logger.error("Permission denied while reading %s", root)
+    return candidates
+
+
+def _is_zip_file(path: Path) -> bool:
+    """Return True when a path looks like a zip archive.
+
+    Args:
+        path: Filesystem path to inspect.
+
+    Returns:
+        True if the file has a zip signature.
+    """
+    try:
+        with path.open("rb") as handle:
+            sig = handle.read(4)
+    except OSError:
+        return False
+    return sig in {b"PK\x03\x04", b"PK\x05\x06", b"PK\x07\x08"}
+
+
+def _resolve_output_paths(
+    output: Optional[str],
+    base_name: str,
+    *,
+    count: int,
+    compress: bool,
+    slicepack_suffix: str,
+    slicepack_suffixes: Optional[List[str]],
+) -> Optional[List[Path]]:
+    """Resolve output file paths based on CLI inputs.
+
+    Args:
+        output: Output path from CLI.
+        base_name: Base filename without extension.
+        count: Number of slice packs to write.
+        compress: Whether to use .nii.gz.
+        slicepack_suffix: Default suffix template for slice packs.
+        slicepack_suffixes: Optional explicit suffix list.
+
+    Returns:
+        List of output paths or None when invalid.
+    """
+    if output is None:
+        base_dir = Path.cwd()
+        base = base_name
+        ext = ".nii.gz" if compress else ".nii"
+        return _expand_output_paths(
+            base_dir,
+            base,
+            ext,
+            count=count,
+            slicepack_suffix=slicepack_suffix,
+            slicepack_suffixes=slicepack_suffixes,
+        )
+    else:
+        out_path = Path(output).expanduser()
+        if output.endswith(os.sep) or (out_path.exists() and out_path.is_dir()):
+            base_dir = out_path
+            base = base_name
+            ext = ".nii.gz" if compress else ".nii"
+            return _expand_output_paths(
+                base_dir,
+                base,
+                ext,
+                count=count,
+                slicepack_suffix=slicepack_suffix,
+                slicepack_suffixes=slicepack_suffixes,
+            )
+        if out_path.suffix in {".nii", ".gz"} or out_path.name.endswith(".nii.gz"):
+            base_dir = out_path.parent
+            name = out_path.name
+            if name.endswith(".nii.gz"):
+                base, ext = name[:-7], ".nii.gz"
+            elif name.endswith(".nii"):
+                base, ext = name[:-4], ".nii"
+            else:
+                base, ext = name, ".nii.gz"
+            return _expand_output_paths(
+                base_dir,
+                base,
+                ext,
+                count=count,
+                slicepack_suffix=slicepack_suffix,
+                slicepack_suffixes=slicepack_suffixes,
+            )
+        base_dir = out_path
+        base = base_name
+        ext = ".nii.gz" if compress else ".nii"
+        return _expand_output_paths(
+            base_dir,
+            base,
+            ext,
+            count=count,
+            slicepack_suffix=slicepack_suffix,
+            slicepack_suffixes=slicepack_suffixes,
+        )
+
+
+def _expand_output_paths(
+    base_dir: Path,
+    base: str,
+    ext: str,
+    *,
+    count: int,
+    slicepack_suffix: str,
+    slicepack_suffixes: Optional[List[str]],
+) -> List[Path]:
+    """Expand output filenames for slice packs.
+
+    Args:
+        base_dir: Output directory.
+        base: Base filename.
+        ext: File extension.
+        count: Number of slice packs to write.
+        slicepack_suffix: Default suffix template for slice packs.
+        slicepack_suffixes: Optional explicit suffix list.
+
+    Returns:
+        List of output paths.
+    """
+    base_dir.mkdir(parents=True, exist_ok=True)
+    if count <= 1:
+        return [base_dir / f"{base}{ext}"]
+    if slicepack_suffixes:
+        return [
+            base_dir / f"{base}{slicepack_suffixes[i]}{ext}"
+            for i in range(min(count, len(slicepack_suffixes)))
+        ]
+    suffix = slicepack_suffix or "_slpack{index}"
+    if "{index}" not in suffix:
+        suffix = f"{suffix}{{index}}"
+    return [base_dir / f"{base}{suffix.format(index=i + 1)}{ext}" for i in range(count)]
+
+
+def _paths_collide(paths: List[Path], reserved: set) -> bool:
+    if len(set(paths)) != len(paths):
+        return True
+    for path in paths:
+        if path in reserved or path.exists():
+            return True
+    return False
+
+
+def _env_flag(name: str) -> bool:
+    """Return True when an env var is set to a truthy value.
+
+    Args:
+        name: Environment variable name.
+
+    Returns:
+        True if the env var is truthy.
+    """
+    value = os.environ.get(name)
+    if value is None:
+        return False
+    return str(value).strip().lower() in {"1", "true", "yes", "y", "on"}
+
+
+def _coerce_choice(name: str, value: Optional[str], choices: Tuple[str, ...], *, default=None):
+    """Validate a value against allowed choices.
+
+    Args:
+        name: Label used for error reporting.
+        value: Input value to validate.
+        choices: Allowed string values.
+        default: Default when value is None.
+
+    Returns:
+        The validated value or default.
+
+    Raises:
+        ValueError: If value is not in choices.
+    """
+    if value is None:
+        return default
+    value = value.strip()
+    if value in choices:
+        return value
+    logger.error("Invalid %s: %s", name, value)
+    raise ValueError(f"Invalid {name}: {value}")
+
+
+def _parse_hook_args(values: List[str]) -> Dict[str, Dict[str, Any]]:
+    parsed: Dict[str, Dict[str, Any]] = {}
+    for raw in values:
+        if ":" not in raw or "=" not in raw:
+            raise ValueError("Hook args must be in HOOK:KEY=VALUE format.")
+        hook_name, rest = raw.split(":", 1)
+        key, value = rest.split("=", 1)
+        hook_name = hook_name.strip()
+        key = key.strip()
+        if not hook_name or not key:
+            raise ValueError("Hook args must include hook name and key.")
+        parsed.setdefault(hook_name, {})[key] = _coerce_scalar(value.strip())
+    return parsed
+
+
+def _uses_counter_tag(
+    *,
+    layout_template: Optional[str],
+    layout_entries: List[Any],
+    prefix_template: Optional[str],
+) -> bool:
+    if isinstance(layout_template, str) and _COUNTER_TAG.search(layout_template):
+        return True
+    if isinstance(prefix_template, str) and _COUNTER_TAG.search(prefix_template):
+        return True
+    for field in layout_entries or []:
+        if not isinstance(field, Mapping):
+            continue
+        key = field.get("key")
+        if isinstance(key, str) and key.strip() in {"Counter", "counter"}:
+            return True
+    return False
+
+
+def _coerce_scalar(value: str) -> Any:
+    if value.lower() in {"true", "false"}:
+        return value.lower() == "true"
+    try:
+        return int(value)
+    except ValueError:
+        pass
+    try:
+        return float(value)
+    except ValueError:
+        return value
+
+
+def _to_json_safe(value: Any) -> Any:
+    """Convert values to JSON-serializable types.
+
+    Args:
+        value: Input value to normalize.
+
+    Returns:
+        JSON-serializable value.
+    """
+    if isinstance(value, Mapping):
+        return {str(k): _to_json_safe(v) for k, v in value.items()}
+    if isinstance(value, (list, tuple)):
+        return [_to_json_safe(v) for v in value]
+    if isinstance(value, np.ndarray):
+        return value.tolist()
+    return value
+
+
+def _write_sidecar(path: Path, meta: Any) -> None:
+    """Write sidecar JSON metadata next to a NIfTI path.
+
+    Args:
+        path: NIfTI file path.
+        meta: Metadata to serialize.
+    """
+    sidecar = path.with_suffix(".json")
+    if path.name.endswith(".nii.gz"):
+        sidecar = path.with_name(path.name[:-7] + ".json")
+    payload = _to_json_safe(meta or {})
+    sidecar.write_text(json.dumps(payload, indent=2, sort_keys=False), encoding="utf-8")
+    logger.info("Wrote sidecar: %s", sidecar)
+
+
+def _add_convert_args(
+    parser: argparse.ArgumentParser,
+    *,
+    output_help: str,
+    include_scan_reco: bool = True,
+) -> None:
+    """Register convert-related CLI arguments on a parser.
+
+    Args:
+        parser: Target argument parser.
+        output_help: Help text for the output argument.
+        include_scan_reco: Whether to add scan/reco options.
+    """
+    if include_scan_reco:
+        parser.add_argument(
+            "-s",
+            "--scan-id",
+            type=int,
+            help="Scan id to convert.",
+        )
+        parser.add_argument(
+            "-r",
+            "--reco-id",
+            type=int,
+            help="Reco id to convert (defaults to all recos when omitted).",
+        )
+    parser.add_argument(
+        "--flip-x",
+        action="store_true",
+        help="Flip x-axis in NIfTI header.",
+    )
+    parser.add_argument(
+        "--xyz-units",
+        choices=list(get_args(XYZUNIT)),
+        default="mm",
+        help="Spatial units for NIfTI header (default: mm).",
+    )
+    parser.add_argument(
+        "--t-units",
+        choices=list(get_args(TUNIT)),
+        default="sec",
+        help="Temporal units for NIfTI header (default: sec).",
+    )
+    parser.add_argument(
+        "--header",
+        help="Path to a YAML file containing NIfTI header overrides.",
+    )
+
+    parser.add_argument(
+        "-o",
+        "--output",
+        help=output_help,
+    )
+    parser.add_argument(
+        "--prefix",
+        help="Filename prefix (supports {Key} tags from layout info).",
+    )
+    parser.add_argument(
+        "--sidecar",
+        action="store_true",
+        help="Write a JSON sidecar using metadata rules.",
+    )
+    parser.add_argument(
+        "--no-convert",
+        action="store_true",
+        help="Skip NIfTI conversion and only write sidecar metadata (requires --sidecar).",
+    )
+    parser.add_argument(
+        "--context-map",
+        dest="context_map",
+        help="Context map YAML for metadata and output mapping.",
+    )
+    parser.add_argument(
+        "--hook-arg",
+        action="append",
+        default=[],
+        help="Hook argument in HOOK:KEY=VALUE format (repeatable).",
+    )
+    parser.add_argument(
+        "--hook-args-yaml",
+        action="append",
+        default=[],
+        help="YAML file containing hook args mapping (repeatable).",
+    )
+    parser.add_argument(
+        "--space",
+        choices=list(get_args(AffineSpace)),
+        help="Affine space for conversion (default: subject_ras).",
+    )
+    parser.add_argument(
+        "--override-subject-type",
+        choices=list(get_args(SubjectType)),
+        help="Override subject type for subject-view affines (space=subject_ras).",
+    )
+    parser.add_argument(
+        "--override-subject-pose",
+        choices=list(get_args(SubjectPose)),
+        help="Override subject pose for subject-view affines (space=subject_ras).",
+    )
+    parser.add_argument(
+        "--format",
+        choices=["nifti", "nifti1"],
+        help="Output format (default: nifti).",
+    )
+    parser.add_argument(
+        "--flatten-fg",
+        action="store_true",
+        help="Flatten frame-group dimensions to 4D when data is 5D or higher.",
+    )
+    parser.add_argument(
+        "--no-compress",
+        dest="compress",
+        action="store_false",
+        help="Write .nii instead of .nii.gz (default: compressed).",
+    )
+
+
+def register(subparsers: argparse._SubParsersAction) -> None:  # type: ignore[name-defined]
+    """Register convert subcommands on the main CLI parser.
+
+    Args:
+        subparsers: Subparser collection from argparse.
+    """
+    convert_parser = subparsers.add_parser(
+        "convert",
+        help="Convert a scan/reco to NIfTI.",
+    )
+    convert_parser.add_argument(
+        "path",
+        nargs="?",
+        help="Path to the Bruker study.",
+    )
+    _add_convert_args(convert_parser, output_help="Output directory or .nii/.nii.gz file path.")
+    convert_parser.set_defaults(func=cmd_convert, parser=convert_parser)
+
+    batch_parser = subparsers.add_parser(
+        "convert-batch",
+        help="Convert all datasets under a root folder.",
+    )
+    batch_parser.add_argument("path", help="Root folder containing datasets.")
+    _add_convert_args(
+        batch_parser,
+        output_help="Output directory.",
+        include_scan_reco=False,
+    )
+    batch_parser.set_defaults(func=cmd_convert_batch, parser=batch_parser)
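The convert command above relies on a handful of small private helpers for hook arguments, environment flags, and output naming. The sketch below is illustrative only: it imports the underscore-prefixed helpers from brkraw.cli.commands.convert exactly as shown in this diff (they are internal and may change), and the hook name "myhook" is made up for demonstration.

    # Illustrative only: exercises private helpers introduced in the diff above.
    # _parse_hook_args, _coerce_scalar, _env_flag and _sanitize_filename are internal
    # to brkraw 0.5.0 and not a stable API; "myhook" is a hypothetical hook name.
    import os
    from brkraw.cli.commands.convert import (
        _coerce_scalar,
        _env_flag,
        _parse_hook_args,
        _sanitize_filename,
    )

    # --hook-arg values use HOOK:KEY=VALUE; scalars are coerced to bool/int/float.
    assert _parse_hook_args(["myhook:threshold=0.5", "myhook:enabled=true"]) == {
        "myhook": {"threshold": 0.5, "enabled": True}
    }
    assert _coerce_scalar("42") == 42

    # Environment toggles such as BRKRAW_CONVERT_SIDECAR accept common truthy spellings.
    os.environ["BRKRAW_CONVERT_SIDECAR"] = "yes"
    assert _env_flag("BRKRAW_CONVERT_SIDECAR") is True

    # Output base names are sanitized per path component before files are written.
    assert _sanitize_filename("sub 01/T2w image!") == os.sep.join(["sub_01", "T2w_image"])

Hook arguments can also be supplied through --hook-args-yaml files or the BRKRAW_HOOK_ARGS_YAML and BRKRAW_CONVERT_HOOK_ARGS_YAML environment variables, which cmd_convert merges with any --hook-arg values before passing them to loader.convert.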