brainseg-containers 1.0.0.dev0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- brainseg_containers-1.0.0.dev0/PKG-INFO +126 -0
- brainseg_containers-1.0.0.dev0/README.md +99 -0
- brainseg_containers-1.0.0.dev0/pyproject.toml +77 -0
- brainseg_containers-1.0.0.dev0/setup.cfg +4 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/__init__.py +0 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/clients/__init__.py +0 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/clients/compare_segs.py +155 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/clients/remap_labels.py +27 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/clients/runner.py +265 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/__init__.py +0 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/freesurfer-label-list-full-lut.txt +1433 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/freesurfer-label-list-lut.txt +40 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/freesurfer-label-list-reduced-lut.txt +111 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/gouhfi-label-list-lut.txt +40 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/data/simnibs-label-list-lut.txt +15 -0
- brainseg_containers-1.0.0.dev0/src/brainseg/remap.py +48 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/PKG-INFO +126 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/SOURCES.txt +20 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/dependency_links.txt +1 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/entry_points.txt +3 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/requires.txt +25 -0
- brainseg_containers-1.0.0.dev0/src/brainseg_containers.egg-info/top_level.txt +1 -0
|
@@ -0,0 +1,126 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: brainseg-containers
|
|
3
|
+
Version: 1.0.0.dev0
|
|
4
|
+
Summary: wrapper for brain segmentation tools
|
|
5
|
+
Author-email: Marius Causemann <mariusca@simula.no>
|
|
6
|
+
Description-Content-Type: text/markdown
|
|
7
|
+
Requires-Dist: nibabel
|
|
8
|
+
Requires-Dist: fastremap
|
|
9
|
+
Provides-Extra: test
|
|
10
|
+
Provides-Extra: dev
|
|
11
|
+
Requires-Dist: pdbpp; extra == "dev"
|
|
12
|
+
Requires-Dist: ipython; extra == "dev"
|
|
13
|
+
Requires-Dist: mypy; extra == "dev"
|
|
14
|
+
Requires-Dist: ruff; extra == "dev"
|
|
15
|
+
Provides-Extra: plot
|
|
16
|
+
Requires-Dist: matplotlib; extra == "plot"
|
|
17
|
+
Requires-Dist: nilearn; extra == "plot"
|
|
18
|
+
Requires-Dist: numpy; extra == "plot"
|
|
19
|
+
Requires-Dist: pandas; extra == "plot"
|
|
20
|
+
Provides-Extra: docs
|
|
21
|
+
Requires-Dist: jupyter-book<2.0.0; extra == "docs"
|
|
22
|
+
Requires-Dist: jupytext; extra == "docs"
|
|
23
|
+
Requires-Dist: ipykernel<7.0.0; extra == "docs"
|
|
24
|
+
Requires-Dist: sphinx-codeautolink; extra == "docs"
|
|
25
|
+
Provides-Extra: all
|
|
26
|
+
Requires-Dist: brainseg-containers[dev,docs,plot,test]; extra == "all"
|
|
27
|
+
|
|
28
|
+
# BrainSeg-container
|
|
29
|
+
|
|
30
|
+
This repository provides a streamlined Python wrapper and CLI tool to automatically download, run, and standardize outputs from state-of-the-art brain segmentation tools using Apptainer / Singularity containers.
|
|
31
|
+
|
|
32
|
+
Brain segmentation tools often have conflicting dependencies, complex installation steps, or require specific versions of system libraries. This package solves that problem by containerizing the tools and handling the execution, file binding, and label standardization for you.
|
|
33
|
+
|
|
34
|
+

|
|
35
|
+
## The Tools
|
|
36
|
+
|
|
37
|
+
This pipeline currently supports the following deep-learning-based segmentation tools. We may add more in the future.
|
|
38
|
+
|
|
39
|
+
1. [**GOUHFI**](https://github.com/mafortin/GOUHFI)
|
|
40
|
+
This tool was designed to handle the challenges of Ultra-High Field MRI (7T+). It utilizes "domain randomization" during training, which allows it to remain robust across different MRI contrasts and resolutions, including standard clinical scans.
|
|
41
|
+
|
|
42
|
+
* **Resolution:** Native (preserves input resolution).
|
|
43
|
+
|
|
44
|
+
* **CSF Availability:** Yes (segments ventricles and subarachnoid space CSF).
|
|
45
|
+
|
|
46
|
+
2. [**SynthSeg**](https://github.com/BBillot/SynthSeg)
|
|
47
|
+
Developed by the FreeSurfer team, this tool is famous for working "out of the box" on almost any kind of MRI scan (different contrasts, resolutions, or messy clinical data) without needing retraining.
|
|
48
|
+
|
|
49
|
+
* **Resolution:** Fixed 1mm isotropic (always resamples input to 1mm).
|
|
50
|
+
|
|
51
|
+
* **CSF Availability:** Yes.
|
|
52
|
+
|
|
53
|
+
3. [**FastSurfer**](https://github.com/Deep-MI/FastSurfer)
|
|
54
|
+
A rapid deep-learning-based segmentation tool that is optimized for speed, finishing in minutes rather than hours.
|
|
55
|
+
|
|
56
|
+
* **Resolution:** Native (but experimental below 0.7mm).
|
|
57
|
+
|
|
58
|
+
* **CSF Availability:** No (segments ventricles, but ignores subarachnoid space CSF).
|
|
59
|
+
|
|
60
|
+
4. [**SimNIBS (Charm)**](https://github.com/simnibs/simnibs)
|
|
61
|
+
The "Complete Head Anatomy Reconstruction Method" from the SimNIBS suite. While designed for modeling brain stimulation (TMS/TES), it produces high-quality segmentation of extra-cerebral tissues (skull, scalp, etc.) in addition to the brain.
|
|
62
|
+
|
|
63
|
+
* **Resolution:** Native (pipeline uses the upsampled output to match input).
|
|
64
|
+
|
|
65
|
+
* **CSF Availability:** Yes.
|
|
66
|
+
* **Segmented regions:** Charm provides the following segmentation labels: White-Matter, Gray-Matter, CSF, Bone, Scalp, Eye_balls, Compact_bone, Spongy_bone, Blood, Muscle, Cartilage, Fat, Electrode, Saline_or_gel
|
|
67
|
+
|
|
68
|
+
## Comparison Output
|
|
69
|
+
|
|
70
|
+
The pipeline can automatically generate a comparison grid so you can quickly inspect the differences between the tools.
|
|
71
|
+
|
|
72
|
+
## Getting Started
|
|
73
|
+
|
|
74
|
+
### Prerequisites
|
|
75
|
+
|
|
76
|
+
You must have Apptainer (or Singularity) installed on your system to run the containers.
|
|
77
|
+
|
|
78
|
+
* Ubuntu/Debian: `sudo apt install apptainer`
|
|
79
|
+
* Conda/Mamba: `conda install -c conda-forge apptainer`
|
|
80
|
+
|
|
81
|
+
|
|
82
|
+
### Installation
|
|
83
|
+
|
|
84
|
+
You can install the package directly via pip:
|
|
85
|
+
|
|
86
|
+
```bash
|
|
87
|
+
pip install brainseg-containers
|
|
88
|
+
```
|
|
89
|
+
|
|
90
|
+
*(Optional) If you want to use the plotting and comparison features, install with the `plot` extras:*
|
|
91
|
+
```bash
|
|
92
|
+
pip install brainseg-containers[plot]
|
|
93
|
+
```
|
|
94
|
+
|
|
95
|
+
---
|
|
96
|
+
|
|
97
|
+
## Usage
|
|
98
|
+
|
|
99
|
+
The package provides a simple command-line interface. The first time you run a specific tool, the wrapper will automatically download the corresponding container from the GitHub Container Registry and store it in `~/.brainseg_containers/`.
|
|
100
|
+
|
|
101
|
+
### Basic Command
|
|
102
|
+
|
|
103
|
+
```bash
|
|
104
|
+
brainseg-containers -t <tool_name> -i <input_file.nii.gz> -o <output_file.nii.gz>
|
|
105
|
+
```
|
|
106
|
+
|
|
107
|
+
**Available Tools:** `synthseg`, `gouhfi`, `fastsurfer`, `simnibs`
|
|
108
|
+
|
|
109
|
+
### Examples
|
|
110
|
+
|
|
111
|
+
**Run GOUHFI on a single subject:**
|
|
112
|
+
```bash
|
|
113
|
+
brainseg-containers -t gouhfi -i inputs/sub-01_T1w.nii.gz -o results/sub-01_gouhfi.nii.gz
|
|
114
|
+
```
|
|
115
|
+
|
|
116
|
+
**Run SynthSeg on the same subject:**
|
|
117
|
+
```bash
|
|
118
|
+
brainseg-containers -t synthseg -i inputs/sub-01_T1w.nii.gz -o results/sub-01_synthseg.nii.gz
|
|
119
|
+
```
|
|
120
|
+
|
|
121
|
+
*Note: You can optionally provide a custom path to a pre-downloaded `.sif` image using the `--container` flag.*
|
|
122
|
+
|
|
123
|
+
|
|
124
|
+
### Note on Labels
|
|
125
|
+
|
|
126
|
+
Different tools use different numbers to represent brain regions. To make comparison easier, this pipeline automatically **remaps** the output labels of FastSurfer and GOUHFI to match the standard FreeSurfer lookup table.
|
|
@@ -0,0 +1,99 @@
|
|
|
1
|
+
# BrainSeg-container
|
|
2
|
+
|
|
3
|
+
This repository provides a streamlined Python wrapper and CLI tool to automatically download, run, and standardize outputs from state-of-the-art brain segmentation tools using Apptainer / Singularity containers.
|
|
4
|
+
|
|
5
|
+
Brain segmentation tools often have conflicting dependencies, complex installation steps, or require specific versions of system libraries. This package solves that problem by containerizing the tools and handling the execution, file binding, and label standardization for you.
|
|
6
|
+
|
|
7
|
+

|
|
8
|
+
## The Tools
|
|
9
|
+
|
|
10
|
+
This pipeline currently supports the following deep-learning-based segmentation tools. We may add more in the future.
|
|
11
|
+
|
|
12
|
+
1. [**GOUHFI**](https://github.com/mafortin/GOUHFI)
|
|
13
|
+
This tool was designed to handle the challenges of Ultra-High Field MRI (7T+). It utilizes "domain randomization" during training, which allows it to remain robust across different MRI contrasts and resolutions, including standard clinical scans.
|
|
14
|
+
|
|
15
|
+
* **Resolution:** Native (preserves input resolution).
|
|
16
|
+
|
|
17
|
+
* **CSF Availability:** Yes (segments ventricles and subarachnoid space CSF).
|
|
18
|
+
|
|
19
|
+
2. [**SynthSeg**](https://github.com/BBillot/SynthSeg)
|
|
20
|
+
Developed by the FreeSurfer team, this tool is famous for working "out of the box" on almost any kind of MRI scan (different contrasts, resolutions, or messy clinical data) without needing retraining.
|
|
21
|
+
|
|
22
|
+
* **Resolution:** Fixed 1mm isotropic (always resamples input to 1mm).
|
|
23
|
+
|
|
24
|
+
* **CSF Availability:** Yes.
|
|
25
|
+
|
|
26
|
+
3. [**FastSurfer**](https://github.com/Deep-MI/FastSurfer)
|
|
27
|
+
A rapid deep-learning-based segmentation tool that is optimized for speed, finishing in minutes rather than hours.
|
|
28
|
+
|
|
29
|
+
* **Resolution:** Native (but experimental below 0.7mm).
|
|
30
|
+
|
|
31
|
+
* **CSF Availability:** No (segments ventricles, but ignores subarachnoid space CSF).
|
|
32
|
+
|
|
33
|
+
4. [**SimNIBS (Charm)**](https://github.com/simnibs/simnibs)
|
|
34
|
+
The "Complete Head Anatomy Reconstruction Method" from the SimNIBS suite. While designed for modeling brain stimulation (TMS/TES), it produces high-quality segmentation of extra-cerebral tissues (skull, scalp, etc.) in addition to the brain.
|
|
35
|
+
|
|
36
|
+
* **Resolution:** Native (pipeline uses the upsampled output to match input).
|
|
37
|
+
|
|
38
|
+
* **CSF Availability:** Yes.
|
|
39
|
+
* **Segmented regions:** Charm provides the following segmentation labels: White-Matter, Gray-Matter, CSF, Bone, Scalp, Eye_balls, Compact_bone, Spongy_bone, Blood, Muscle, Cartilage, Fat, Electrode, Saline_or_gel
|
|
40
|
+
|
|
41
|
+
## Comparison Output
|
|
42
|
+
|
|
43
|
+
The pipeline can automatically generate a comparison grid so you can quickly inspect the differences between the tools.
|
|
44
|
+
|
|
45
|
+
## Getting Started
|
|
46
|
+
|
|
47
|
+
### Prerequisites
|
|
48
|
+
|
|
49
|
+
You must have Apptainer (or Singularity) installed on your system to run the containers.
|
|
50
|
+
|
|
51
|
+
* Ubuntu/Debian: `sudo apt install apptainer`
|
|
52
|
+
* Conda/Mamba: `conda install -c conda-forge apptainer`
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
### Installation
|
|
56
|
+
|
|
57
|
+
You can install the package directly via pip:
|
|
58
|
+
|
|
59
|
+
```bash
|
|
60
|
+
pip install brainseg-containers
|
|
61
|
+
```
|
|
62
|
+
|
|
63
|
+
*(Optional) If you want to use the plotting and comparison features, install with the `plot` extras:*
|
|
64
|
+
```bash
|
|
65
|
+
pip install brainseg-containers[plot]
|
|
66
|
+
```
|
|
67
|
+
|
|
68
|
+
---
|
|
69
|
+
|
|
70
|
+
## Usage
|
|
71
|
+
|
|
72
|
+
The package provides a simple command-line interface. The first time you run a specific tool, the wrapper will automatically download the corresponding container from the GitHub Container Registry and store it in `~/.brainseg_containers/`.
|
|
73
|
+
|
|
74
|
+
### Basic Command
|
|
75
|
+
|
|
76
|
+
```bash
|
|
77
|
+
brainseg-containers -t <tool_name> -i <input_file.nii.gz> -o <output_file.nii.gz>
|
|
78
|
+
```
|
|
79
|
+
|
|
80
|
+
**Available Tools:** `synthseg`, `gouhfi`, `fastsurfer`, `simnibs`
|
|
81
|
+
|
|
82
|
+
### Examples
|
|
83
|
+
|
|
84
|
+
**Run GOUHFI on a single subject:**
|
|
85
|
+
```bash
|
|
86
|
+
brainseg-containers -t gouhfi -i inputs/sub-01_T1w.nii.gz -o results/sub-01_gouhfi.nii.gz
|
|
87
|
+
```
|
|
88
|
+
|
|
89
|
+
**Run SynthSeg on the same subject:**
|
|
90
|
+
```bash
|
|
91
|
+
brainseg-containers -t synthseg -i inputs/sub-01_T1w.nii.gz -o results/sub-01_synthseg.nii.gz
|
|
92
|
+
```
|
|
93
|
+
|
|
94
|
+
*Note: You can optionally provide a custom path to a pre-downloaded `.sif` image using the `--container` flag.*
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
### Note on Labels
|
|
98
|
+
|
|
99
|
+
Different tools use different numbers to represent brain regions. To make comparison easier, this pipeline automatically **remaps** the output labels of FastSurfer and GOUHFI to match the standard FreeSurfer lookup table.
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
[build-system] # Require setuptool version due to https://github.com/pypa/setuptools/issues/2938
|
|
2
|
+
requires = ["setuptools>=61.0.0", "wheel"]
|
|
3
|
+
|
|
4
|
+
[project]
|
|
5
|
+
dependencies = [
|
|
6
|
+
"nibabel","fastremap"
|
|
7
|
+
]
|
|
8
|
+
name = "brainseg-containers"
|
|
9
|
+
version = "1.0.0.dev0"
|
|
10
|
+
description = "wrapper for brain segmentation tools"
|
|
11
|
+
authors = [{ name = "Marius Causemann", email = "mariusca@simula.no" }]
|
|
12
|
+
license = { file = "LICENSE" }
|
|
13
|
+
readme = "README.md"
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
[project.scripts]
|
|
17
|
+
brainseg = "brainseg.clients.runner:main"
|
|
18
|
+
brainseg_relabel = "brainseg.clients.remap_labels:main"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
[project.optional-dependencies]
|
|
22
|
+
test = []
|
|
23
|
+
dev = ["pdbpp", "ipython", "mypy", "ruff"]
|
|
24
|
+
plot = ["matplotlib","nilearn", "numpy", "pandas"]
|
|
25
|
+
docs = [
|
|
26
|
+
"jupyter-book<2.0.0",
|
|
27
|
+
"jupytext",
|
|
28
|
+
"ipykernel<7.0.0", # Note: Remove once https://github.com/ipython/ipykernel/issues/1450 is in a release
|
|
29
|
+
"sphinx-codeautolink",
|
|
30
|
+
]
|
|
31
|
+
all = ["brainseg-containers[test,dev,docs,plot]"]
|
|
32
|
+
|
|
33
|
+
[tool.pytest.ini_options]
|
|
34
|
+
addopts = ["--import-mode=importlib"]
|
|
35
|
+
testpaths = ["tests"]
|
|
36
|
+
|
|
37
|
+
[tool.mypy]
|
|
38
|
+
ignore_missing_imports = true
|
|
39
|
+
# Folders to exclude
|
|
40
|
+
exclude = ["docs/", "build/"]
|
|
41
|
+
# Folder to check with mypy
|
|
42
|
+
files = ["src", "tests"]
|
|
43
|
+
|
|
44
|
+
[tool.ruff]
|
|
45
|
+
src = ["src", "tests", "docs"]
|
|
46
|
+
line-length = 100
|
|
47
|
+
indent-width = 4
|
|
48
|
+
|
|
49
|
+
[tool.setuptools]
|
|
50
|
+
include-package-data = true
|
|
51
|
+
|
|
52
|
+
[tool.setuptools.package-data]
|
|
53
|
+
"brainseg.data" = ["*.txt"]
|
|
54
|
+
|
|
55
|
+
[tool.ruff.lint]
|
|
56
|
+
select = [
|
|
57
|
+
# Pyflakes
|
|
58
|
+
"F",
|
|
59
|
+
# Pycodestyle
|
|
60
|
+
"E",
|
|
61
|
+
"W",
|
|
62
|
+
# isort
|
|
63
|
+
"I001",
|
|
64
|
+
]
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
[tool.ruff.lint.isort]
|
|
68
|
+
known-first-party = ["brainseg"]
|
|
69
|
+
known-third-party = ["numpy", "pytest"]
|
|
70
|
+
section-order = [
|
|
71
|
+
"future",
|
|
72
|
+
"standard-library",
|
|
73
|
+
"mpi",
|
|
74
|
+
"third-party",
|
|
75
|
+
"first-party",
|
|
76
|
+
"local-folder",
|
|
77
|
+
]
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,155 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import math
|
|
3
|
+
import matplotlib.pyplot as plt
|
|
4
|
+
from nilearn import plotting
|
|
5
|
+
from matplotlib.colors import ListedColormap
|
|
6
|
+
import numpy as np
|
|
7
|
+
import pandas as pd
|
|
8
|
+
import nibabel as nib
|
|
9
|
+
# Dark theme so the figure chrome matches the black background of the overlays.
plt.style.use('dark_background')


# Path to the FreeSurfer color lookup table used for the custom colormap.
# NOTE(review): relative path — resolves against the current working directory,
# not the installed package (which ships LUTs under brainseg/data); confirm a
# "resources/" folder is expected to exist where this script is run.
lut_path = "resources/freesurfer-label-list-lut.txt"
|
|
13
|
+
|
|
14
|
+
def create_exact_colormap(lut_path, alpha=0.5):
    """Build a label-indexed ListedColormap from a FreeSurfer-style LUT file.

    The colormap is "sparse": entry ``i`` of the colormap is the color of
    label ID ``i``, so segmentation values can be mapped 1:1 without any
    rescaling. IDs that do not appear in the LUT stay fully transparent
    (RGBA = 0,0,0,0), letting the background image show through.

    Parameters
    ----------
    lut_path : str
        Whitespace-separated LUT file (``#`` comments allowed) with columns:
        index, name, r, g, b, a.
    alpha : float
        Opacity assigned to every label that is present in the LUT.

    Returns
    -------
    (matplotlib.colors.ListedColormap, int)
        The colormap and the highest label ID (useful as ``vmax``).
    """
    # Parse the LUT; colors are given as 0-255 integers.
    lut = pd.read_csv(
        lut_path,
        sep=r"\s+",
        comment="#",
        header=None,
        names=["index", "name", "r", "g", "b", "a"],
        dtype={"index": int, "r": int, "g": int, "b": int},
    )

    # The colormap must be long enough to index the largest label ID directly
    # (e.g. max ID 14175 needs 14176 entries, 0..14175).
    max_id = lut["index"].max()

    # Override CSF with cyan so it stands out.
    lut.loc[lut["name"] == "CSF", ["r", "g", "b"]] = [0, 255, 255]

    # Start fully transparent: undefined IDs show the underlying image.
    rgba = np.zeros((max_id + 1, 4))

    # Fill in the LUT-defined entries, normalizing 0-255 RGB to 0-1.
    label_ids = lut["index"].values
    rgba[label_ids, 0:3] = lut[["r", "g", "b"]].values / 255.0
    rgba[label_ids, 3] = alpha  # defined labels get the requested opacity

    # Discrete colormap; N == len(rgba), guaranteeing the 1:1 mapping.
    return ListedColormap(rgba, name="FreeSurfer_Discrete"), max_id
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def get_ventricle_center(seg_path):
    """Return nilearn xyz cut coordinates centered on the ventricular system.

    A binary mask of the lateral and 3rd ventricles is built from the
    segmentation at *seg_path* and handed to
    ``nilearn.plotting.find_xyz_cut_coords``.
    """
    print(f"Calculating center from ventricles in: {seg_path}")
    seg_img = nib.load(seg_path)

    # FreeSurfer standard ventricle labels:
    #   4  Left-Lateral-Ventricle
    #   14 3rd-Ventricle
    #   43 Right-Lateral-Ventricle
    ventricle_labels = [4, 14, 43]

    # Binary (float) mask of the ventricles in the same space as the input.
    ventricle_mask = np.isin(seg_img.get_fdata(), ventricle_labels).astype(float)
    mask_img = nib.Nifti1Image(ventricle_mask, seg_img.affine, seg_img.header)

    return plotting.find_xyz_cut_coords(mask_img)
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
# Tools whose outputs use FreeSurfer-style label IDs; for these a shared vmax
# (from the FreeSurfer LUT) keeps panel colors comparable in main().
fs_labeled = ["synthseg", "gouhfi", "fastsurfer"]
|
|
80
|
+
|
|
81
|
+
def main():
    """CLI entry point: render an ortho-view comparison grid of segmentations.

    Each file in ``--segs`` becomes one row, overlaid on the ``--image``
    background at a shared cut point (centered on the ventricles of the
    first segmentation) so all panels show the same anatomical location.
    """
    parser = argparse.ArgumentParser(
        description="Generate a grid of segmentation overlays in ortho view (Sagittal, Coronal, Axial)."
    )

    parser.add_argument("-i", "--image", required=True, help="Path to the T1w anatomical image (background).")
    parser.add_argument("-s", "--segs", required=True, nargs='+', help="List of segmentation NIfTI files.")
    parser.add_argument("-o", "--output", default="seg_comparison_ortho.png", help="Path to save the output image.")

    args = parser.parse_args()

    # NOTE(review): the custom LUT colormap is computed but plot_roi below is
    # called with cmap="tab20"; only vmax is actually used. Confirm whether
    # the custom cmap should be re-enabled.
    cmap, vmax = create_exact_colormap(lut_path, alpha=0.6)

    # 1. Setup grid: single column, one row per segmentation.
    n_segs = len(args.segs)
    cols = 1
    rows = math.ceil(n_segs / cols)

    # Wide, short panels: each row holds a full ortho (3-slice) view.
    fig, axes = plt.subplots(
        rows, cols,
        figsize=(8 * cols, 2.45 * rows),
        gridspec_kw={'wspace': 0, 'hspace': 0}
    )
    plt.subplots_adjust(left=0, right=1, bottom=0, top=1)
    # Flatten axes for consistent indexing (plt.subplots returns a bare Axes
    # object, not an array, when there is a single panel).
    if n_segs > 1:
        axes = axes.flatten()
    else:
        axes = [axes]

    # 2. Determine one shared cut coordinate from the first segmentation so
    # every panel shows the exact same anatomical point.
    cut_coords = get_ventricle_center(args.segs[0])

    print(f"Visualizing ortho slices at coordinates: {np.round(cut_coords,2)}")

    # 3. Loop through segmentations and draw one panel each.
    for i, seg_path in enumerate(args.segs):
        print(f"Processing: {seg_path}")
        ax = axes[i]
        # Panel title = file name stripped of NIfTI extensions and "_seg".
        title = seg_path.split("/")[-1].replace(".nii.gz", "").replace(".nii", "").replace("_seg", "")

        plotting.plot_roi(
            roi_img=seg_path,
            bg_img=args.image,
            axes=ax,
            display_mode='ortho', # Shows Sagittal, Coronal, and Axial cuts
            cut_coords=cut_coords, # Ensures all models show the exact same anatomical point
            cmap="tab20", #cmap,
            # Shared vmax only for FreeSurfer-labeled tools so their label
            # colors stay comparable across panels (see fs_labeled).
            vmax=vmax if title in fs_labeled else None,
            vmin=0,
            #title=title,
            alpha=0.4,
            resampling_interpolation='nearest', # keep label values discrete
            annotate=False,
            draw_cross=True,
            black_bg=True,
            colorbar=False
        )
        # Title text placed in axes-relative coordinates (top-left of panel).
        fig.text(0.01, 0.95, title,
            transform=ax.transAxes,
            horizontalalignment='left',
            verticalalignment='top',
            color="white",
            fontsize=10,
            weight='bold',
            zorder=1000
        )

    plt.savefig(args.output, dpi=300, pad_inches=0.0, bbox_inches='tight')
    print(f"Saved comparison to: {args.output}")
|
|
153
|
+
|
|
154
|
+
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|
|
@@ -0,0 +1,27 @@
|
|
|
1
|
+
import argparse
|
|
2
|
+
import os
|
|
3
|
+
from brainseg.remap import remap_file
|
|
4
|
+
|
|
5
|
+
def main():
    """CLI entry point: remap the labels of a NIfTI segmentation.

    Parses the old/new label-schema files from the command line and
    delegates the actual remapping to ``brainseg.remap.remap_file``.
    """
    parser = argparse.ArgumentParser(
        description="Exchange NIfTI labels from an old schema to a new schema using fastremap."
    )

    # Declarative argument table keeps the four required options uniform.
    required_args = [
        (("-i", "--input"), "Path to the input .nii.gz file"),
        (("-o", "--output"), "Path to save the remapped .nii.gz file"),
        (("--old-txt",), "Text file containing old label mapping"),
        (("--new-txt",), "Text file containing new label mapping"),
    ]
    for flags, help_text in required_args:
        parser.add_argument(*flags, required=True, help=help_text)

    # NOTE(review): this flag is parsed but never forwarded to remap_file —
    # confirm whether in-place remapping should be wired through.
    parser.add_argument("--inplace", action="store_true", help="Perform remap in-place to save memory")

    args = parser.parse_args()

    print(f"--- Processing: {os.path.basename(args.input)} ---")
    remap_file(args.input, args.old_txt, args.new_txt, args.output)
    print(f"Success! Saved to: {args.output}")
|
|
25
|
+
|
|
26
|
+
# Allow running this module directly as a script.
if __name__ == "__main__":
    main()
|