simcortexpp 0.1.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44) hide show
  1. simcortexpp/__init__.py +0 -0
  2. simcortexpp/cli/__init__.py +0 -0
  3. simcortexpp/cli/main.py +81 -0
  4. simcortexpp/configs/__init__.py +0 -0
  5. simcortexpp/configs/deform/__init__.py +0 -0
  6. simcortexpp/configs/deform/eval.yaml +34 -0
  7. simcortexpp/configs/deform/inference.yaml +60 -0
  8. simcortexpp/configs/deform/train.yaml +98 -0
  9. simcortexpp/configs/initsurf/__init__.py +0 -0
  10. simcortexpp/configs/initsurf/generate.yaml +50 -0
  11. simcortexpp/configs/seg/__init__.py +0 -0
  12. simcortexpp/configs/seg/eval.yaml +31 -0
  13. simcortexpp/configs/seg/inference.yaml +35 -0
  14. simcortexpp/configs/seg/train.yaml +42 -0
  15. simcortexpp/deform/__init__.py +0 -0
  16. simcortexpp/deform/data/__init__.py +0 -0
  17. simcortexpp/deform/data/dataloader.py +268 -0
  18. simcortexpp/deform/eval.py +347 -0
  19. simcortexpp/deform/inference.py +244 -0
  20. simcortexpp/deform/models/__init__.py +0 -0
  21. simcortexpp/deform/models/surfdeform.py +356 -0
  22. simcortexpp/deform/train.py +1173 -0
  23. simcortexpp/deform/utils/__init__.py +0 -0
  24. simcortexpp/deform/utils/coords.py +90 -0
  25. simcortexpp/initsurf/__init__.py +0 -0
  26. simcortexpp/initsurf/generate.py +354 -0
  27. simcortexpp/initsurf/paths.py +19 -0
  28. simcortexpp/preproc/__init__.py +0 -0
  29. simcortexpp/preproc/fs_to_mni.py +696 -0
  30. simcortexpp/seg/__init__.py +0 -0
  31. simcortexpp/seg/data/__init__.py +0 -0
  32. simcortexpp/seg/data/dataloader.py +328 -0
  33. simcortexpp/seg/eval.py +248 -0
  34. simcortexpp/seg/inference.py +291 -0
  35. simcortexpp/seg/models/__init__.py +0 -0
  36. simcortexpp/seg/models/unet.py +63 -0
  37. simcortexpp/seg/train.py +432 -0
  38. simcortexpp/utils/__init__.py +0 -0
  39. simcortexpp/utils/tca.py +298 -0
  40. simcortexpp-0.1.0.dist-info/METADATA +334 -0
  41. simcortexpp-0.1.0.dist-info/RECORD +44 -0
  42. simcortexpp-0.1.0.dist-info/WHEEL +5 -0
  43. simcortexpp-0.1.0.dist-info/entry_points.txt +2 -0
  44. simcortexpp-0.1.0.dist-info/top_level.txt +1 -0
@@ -0,0 +1,298 @@
1
+ """
2
+ Topology correction algorithm by Bazin et al.
3
+ Please cite the original papers if you use this code:
4
+ - Bazin et al. Topology correction using fast marching methods and its application to brain segmentation.
5
+ MICCAI, 2005.
6
+ - Bazin et al. Topology correction of segmented medical images using a fast marching algorithm.
7
+ Computer methods and programs in biomedicine, 2007.
8
+
9
+ The algorithm is re-implemented and accelerated using Python+Numba.
10
+
11
+ For the original Java implementation please see:
12
+ - https://github.com/piloubazin/cbstools-public/blob/master/de/mpg/cbs/core/shape/ShapeTopologyCorrection2.java
13
+ Or refer to the Nighres software:
14
+ - https://nighres.readthedocs.io/en/latest/shape/topology_correction.html
15
+ The look up table file "critical186LUT.raw.gz" is downloaded from Nighres:
16
+ - https://nighres.readthedocs.io/en/latest/
17
+ """
18
+
19
+
20
+ from heapq import *
21
+ import numpy as np
22
+ from numba import njit
23
+ import gzip
24
+ from scipy.ndimage import binary_dilation
25
+ from pathlib import Path
26
+
27
class topology():
    """
    Apply the topology correction algorithm by Bazin et al.

    The constructor loads the critical-point look-up table shipped next to
    this module ("critical186LUT.raw.gz", downloaded from Nighres) and runs
    a toy example so the Numba kernels are compiled ahead of the first real
    call.

    apply(inpt, threshold):
        inpt: input volume
        threshold: used to create the initial mask. We set threshold=16
            for CortexODE.
    """

    def __init__(self):
        # The LUT file ships alongside this module.
        lut_file = Path(__file__).resolve().parent / "critical186LUT.raw.gz"
        self.bit, self.lut = tca_init_fill(str(lut_file), threshold=1.0)

    def apply(self, inpt, threshold=1.0):
        """Run hole-filling topology correction on ``inpt`` and return the result."""
        proc_mask, seeds = tca_mask_fill(inpt, threshold)
        corrected = tca_fill(inpt, proc_mask, seeds, self.bit, self.lut)
        return corrected  # the processing mask is intentionally not returned
42
+
43
+
44
@njit
def bit_map():
    """
    Build the 3x3x3 table of powers of two used to encode a neighborhood
    pattern as an integer key.

    The center entry (flat index 13) keeps weight 0 so the center voxel
    never contributes to the key; the 26 neighbors receive 2**0 .. 2**25.
    """
    powers = np.array([2**k for k in range(26)], dtype=np.float64)
    weights = np.zeros(27, dtype=np.float64)
    weights[:13] = powers[:13]
    weights[14:] = powers[13:]
    return weights.copy().reshape(3, 3, 3)
54
+
55
+
56
@njit
def check_topology(img, LUT, bit):
    """
    Decide whether the center voxel of the 3x3x3 binary patch ``img`` can
    be accepted without creating a critical point.

    Returns True when the center voxel is already inside the original
    object, or when the look-up table marks its neighborhood pattern as
    non-critical.
    """
    if img[1, 1, 1] == 1:
        # Already part of the original object: always safe.
        return True

    # Encode the neighborhood pattern as an integer key. The accumulator
    # stays a float so all operand dtypes agree under Numba.
    key = 0.
    for a in range(3):
        for b in range(3):
            for c in range(3):
                key += img[a, b, c] * bit[a, b, c]
    return LUT[int(key)] != 0
75
+
76
+
77
+ """ tca_fill
78
+ This algorithm propagates from background to object.
79
+ It fills all holes and is used to fix WM segmentation.
80
+ """
81
+
82
def tca_mask_fill(levelset, threshold=1.0):
    """
    Initialize the processing mask and seed points for hole filling.

    Returns
    -------
    mask : processing region — the interior object voxels (levelset <=
        threshold, excluding a 2-voxel border) dilated by one voxel.
    init_pts : (N, 3) integer coordinates of the one-voxel dilation shell,
        used to seed the marching front in ``tca_fill``.
    """
    # Object voxels restricted to the volume interior.
    interior = np.zeros_like(levelset)
    interior[2:-2, 2:-2, 2:-2] = 1
    interior *= (levelset <= threshold)

    # Grow the object by one voxel in every direction (26-connectivity).
    mask = binary_dilation(interior, structure=np.ones([3, 3, 3]))

    # Seeds are exactly the voxels added by the dilation.
    shell = np.where((mask - interior) == 1)
    init_pts = np.stack(shell).astype(int).T

    return mask, init_pts
92
+
93
+
94
def tca_init_fill(path, threshold=1.0):
    """
    Initialization for hole-filling topology correction.

    Step 1. load the critical-point look-up table from ``path`` (gzip).
    Step 2. build the neighborhood bit map.
    Step 3. push a toy volume through ``tca_fill`` so Numba compiles the
            kernels ahead of the first real call.
    """
    # Load the look-up table as raw bytes.
    with gzip.open(path, 'rb') as fh:
        LUT = fh.read()

    # Bit map used to encode 3x3x3 neighborhoods.
    bit = bit_map()

    # Toy example: a small above-threshold cube in a below-threshold volume.
    toy = (threshold - 0.1) * np.ones([10, 10, 10])
    toy[4:6, 4:6, 4:6] = threshold + 0.1
    toy_mask, toy_pts = tca_mask_fill(toy, threshold)
    tca_fill(toy, toy_mask, toy_pts, bit, LUT)  # warm-up only; result discarded

    return bit, LUT
115
+
116
+
117
@njit
def tca_fill(levelset, mask, init_pts, bit, LUT):
    """
    Hole-filling topology correction (propagates from background to object).

    Starting from the seed shell ``init_pts`` (from ``tca_mask_fill``), a
    fast-marching front visits voxels of ``mask`` in decreasing level-set
    order; a voxel is accepted only when ``check_topology`` reports that
    adding it does not create a critical point.

    levelset : 3D volume of level-set values (its dtype is preserved).
    mask     : processing region from ``tca_mask_fill``.
    init_pts : (N, 3) integer seed coordinates.
    bit      : 3x3x3 bit map from ``bit_map()``.
    LUT      : raw bytes of the critical-point look-up table.
    Returns the corrected level-set volume.
    """

    dtype = levelset.dtype

    # Cast scalar constants to the level-set dtype so Numba keeps a single
    # floating-point type throughout the loop.
    minDistance = np.array(1e-5, dtype=dtype).item()
    UNKNOWN = np.array(1e11, dtype=dtype).item()

    nx, ny, nz = levelset.shape
    # 6-connected neighborhood offsets
    C6 = [(-1,0,0), (1,0,0), (0,-1,0), (0,1,0), (0,0,-1), (0,0,1)]

    corrected = np.ones_like(levelset) * UNKNOWN
    processed = np.zeros(levelset.shape, dtype=np.uint8)  # 1 = voxel accepted
    inheap = np.zeros(levelset.shape, dtype=np.uint8)  # 1 = voxel queued

    mainval = np.array(1e15, dtype=dtype).item()  # lowest accepted value so far
    maskval = np.array(-1e15, dtype=dtype).item()
    maskval = np.max(mask * levelset)  # value written outside the mask at the end

    """add neighbors to the heap"""
    heap = [] # negated priorities turn heapq's min-heap into a max heap
    for x0,y0,z0 in init_pts:
        # Seeds are accepted unconditionally and keep their level-set value.
        processed[x0,y0,z0] = 1.
        corrected[x0,y0,z0] = levelset[x0,y0,z0]
        if corrected[x0,y0,z0] < mainval:
            mainval = corrected[x0,y0,z0]

        # Queue every unprocessed 6-neighbor of the seed.
        for dx, dy, dz in C6:
            xn = x0 + dx
            yn = y0 + dy
            zn = z0 + dz
            if mask[xn,yn,zn] and not processed[xn,yn,zn]:
                heap.append((-levelset[xn,yn,zn],(xn,yn,zn)))
                inheap[xn,yn,zn] = 1.
    heapify(heap)

    """Run Topology Correction"""
    while len(heap) > 0:

        # pop the heap: candidate with the largest level-set value
        val_, (x, y, z) = heappop(heap)
        val = - val_
        inheap[x, y, z] = 0.

        if processed[x, y, z]:
            continue
        # 3x3x3 patch of already-accepted voxels around the candidate.
        cube = processed[x-1:x+2, y-1:y+2, z-1:z+2]
        non_critical = check_topology(cube, LUT, bit)

        if non_critical:
            # all correct: update and find new neighbors
            corrected[x,y,z] = val
            processed[x,y,z]= 1. # update the current level
            mainval = val

            # find new neighbors; the min(..., val - minDistance) clamp
            # ensures a neighbor never pops before (i.e. above) its parent.
            for dx, dy, dz in C6:
                xn = x + dx
                yn = y + dy
                zn = z + dz
                if mask[xn,yn,zn] and not processed[xn,yn,zn] and not inheap[xn,yn,zn]:
                    prio = -min(levelset[xn,yn,zn], val - minDistance)
                    heappush(heap, (prio, (xn, yn, zn)))
                    inheap[xn,yn,zn] = True

    # Voxels never accepted fall to the lowest accepted value; voxels outside
    # the processing mask are set to maskval.
    corrected += (mainval-corrected) * (1-processed)
    corrected += (maskval-corrected) * (1-mask)

    return corrected
186
+
187
+
188
+
189
+ """ tca_cut (to be validated)
190
+ This algorithm propagates from object to background.
191
+ It cuts all handles and is used to fix GM segmentation.
192
+
193
+ Note: this function is not fully validated because we only use tca_fill for CortexODE.
194
+ """
195
+
196
def tca_mask_cut(levelset, threshold=1.0):
    """
    Initialize the processing mask and seed points for handle cutting.

    Returns
    -------
    mask : processing region — the interior object voxels (levelset <=
        threshold, excluding a 2-voxel border) dilated by one voxel.
    init_pts : (N, 3) integer coordinates of the global minimum (or minima)
        of ``levelset``, used to seed the marching front in ``tca_cut``.
    """
    # Object voxels restricted to the volume interior.
    interior = np.zeros_like(levelset)
    interior[2:-2, 2:-2, 2:-2] = 1
    interior *= (levelset <= threshold)

    # Grow the object by one voxel in every direction (26-connectivity).
    mask = binary_dilation(interior, structure=np.ones([3, 3, 3]))

    # Seed the front at the global minimum of the level set.
    minima = np.where(levelset == np.min(levelset))
    init_pts = np.stack(minima).astype(int).T

    return mask, init_pts
204
+
205
+
206
def tca_init_cut(path, threshold=1.0):
    """
    Initialization for handle-cutting topology correction.

    Step 1. load the critical-point look-up table from ``path`` (gzip).
    Step 2. build the neighborhood bit map.
    Step 3. push a toy volume through ``tca_cut`` so Numba compiles the
            kernels ahead of the first real call.
    """
    # Load the look-up table as raw bytes.
    with gzip.open(path, 'rb') as fh:
        LUT = fh.read()

    # Bit map used to encode 3x3x3 neighborhoods.
    bit = bit_map()

    # Toy example: a small below-threshold cube, with one voxel pushed
    # lower still so the level set has a unique minimum seed.
    toy = (threshold + 0.1) * np.ones([10, 10, 10])
    toy[4:6, 4:6, 4:6] = threshold - 0.1
    toy[5, 5, 5] = threshold - 0.2
    toy_mask, toy_pts = tca_mask_cut(toy, threshold)
    tca_cut(toy, toy_mask, toy_pts, bit, LUT)  # warm-up only; result discarded

    return bit, LUT
228
+
229
+
230
+
231
@njit
def tca_cut(levelset, mask, init_pts, bit, LUT):
    """
    Handle-cutting topology correction (propagates from object to background).

    Mirrors ``tca_fill`` but seeds the front at the global minimum of the
    level set (see ``tca_mask_cut``) and grows in increasing level-set
    order. NOTE: not fully validated — only ``tca_fill`` is used for
    CortexODE.
    """

    """Configuration"""
    minDistance = 1e-5
    UNKNOWN = 10e+10
    nx,ny,nz = levelset.shape
    # connectivity: 6-connected neighborhood offsets
    C6 = [(-1,0,0), (1,0,0), (0,-1,0), (0,1,0), (0,0,-1), (0,0,1)]

    """Initialize indicators"""
    corrected = np.ones_like(levelset) * UNKNOWN # gdm functions
    processed = np.zeros_like(levelset).astype(np.float64)  # 1. = voxel accepted
    inheap = np.zeros_like(levelset).astype(np.float64)  # 1. = voxel queued
    mainval = -1e15  # highest accepted value so far
    maskval = -1e15
    maskval = np.max(mask*levelset)  # value written outside the mask at the end

    """add neighbors to the heap"""
    heap = [] # min-heap on raw values: smallest level-set value pops first
    for x0,y0,z0 in init_pts:
        # Seeds are accepted unconditionally and keep their level-set value.
        processed[x0,y0,z0] = 1.
        corrected[x0,y0,z0] = levelset[x0,y0,z0]
        if corrected[x0,y0,z0] > mainval:
            mainval = corrected[x0,y0,z0]

        # Queue every unprocessed 6-neighbor of the seed.
        for dx, dy, dz in C6:
            xn = x0 + dx
            yn = y0 + dy
            zn = z0 + dz
            if mask[xn,yn,zn] and not processed[xn,yn,zn]:
                heap.append((levelset[xn,yn,zn],(xn,yn,zn)))
                inheap[xn,yn,zn] = 1.
    heapify(heap)


    """Run Topology Correction"""
    while len(heap) > 0:

        # pop the heap: candidate with the smallest level-set value
        val_, (x, y, z) = heappop(heap)
        val = val_
        inheap[x, y, z] = 0.

        if processed[x, y, z]:
            continue
        # 3x3x3 patch of already-accepted voxels around the candidate.
        cube = processed[x-1:x+2, y-1:y+2, z-1:z+2]
        non_critical = check_topology(cube, LUT, bit)

        if non_critical:
            # all correct: update and find new neighbors
            corrected[x,y,z] = val
            processed[x,y,z]= 1. # update the current level
            mainval = val

            # find new neighbors; the max(..., val - minDistance) clamp
            # ensures a neighbor never pops before (i.e. below) its parent.
            for dx, dy, dz in C6:
                xn = x + dx
                yn = y + dy
                zn = z + dz
                if mask[xn,yn,zn] and not processed[xn,yn,zn] and not inheap[xn,yn,zn]:
                    heappush(heap, (max(levelset[xn,yn,zn], val-minDistance), (xn, yn, zn)))
                    inheap[xn,yn,zn] = True

    # Voxels never accepted rise to the highest accepted value; voxels
    # outside the processing mask are set to maskval.
    corrected += (mainval-corrected) * (1-processed)
    corrected += (maskval-corrected) * (1-mask)

    return corrected
@@ -0,0 +1,334 @@
1
+ Metadata-Version: 2.4
2
+ Name: simcortexpp
3
+ Version: 0.1.0
4
+ Summary: SimCortexPP (SCPP) — CLI-first pipeline for cortical surface reconstruction (preproc, seg, initsurf, deform)
5
+ Author: Kaveh Moradkhani
6
+ Requires-Python: >=3.10
7
+ Description-Content-Type: text/markdown
8
+ Requires-Dist: typer>=0.12
9
+ Requires-Dist: numpy>=1.24
10
+ Requires-Dist: pandas>=2.0
11
+ Requires-Dist: nibabel>=5.2
12
+ Requires-Dist: hydra-core>=1.3
13
+ Requires-Dist: omegaconf>=2.3
14
+ Requires-Dist: tqdm>=4.66
15
+ Requires-Dist: tensorboard>=2.14
16
+ Requires-Dist: trimesh>=4.0
17
+ Requires-Dist: openpyxl>=3.1
18
+ Provides-Extra: seg
19
+ Requires-Dist: monai>=1.3; extra == "seg"
20
+ Provides-Extra: deform-metrics
21
+ Requires-Dist: python-fcl; extra == "deform-metrics"
22
+ Requires-Dist: pymeshlab; extra == "deform-metrics"
23
+ Provides-Extra: torch
24
+ Requires-Dist: torch>=2.0; extra == "torch"
25
+
26
+ # SimCortexPP (SCPP)
27
+
28
+ SimCortexPP (SCPP) is a **CLI-first** Python package for cortical surface reconstruction in MNI space. It provides four stages:
29
+
30
+ 1. **Preprocessing (FreeSurfer → MNI152)**
31
+ Export key FreeSurfer volumes/surfaces, register them to MNI152, and write outputs in a **BIDS-derivatives-style** layout.
32
+
33
+ 2. **Segmentation (3D U-Net in MNI space)**
34
+ Train and apply a 3D U-Net to predict a **9-class segmentation** in **MNI152 space**, with inference and evaluation utilities.
35
+
36
+ 3. **Initial Surfaces (InitSurf)**
37
+ Generate initial White Matter and Pial surfaces from segmentation predictions (plus ribbon SDF/probability outputs).
38
+
39
+ 4. **Deformation (Deform)**
40
+ Deform the initial surfaces toward MNI-aligned FreeSurfer surfaces using geometric losses and optional collision metrics, and write **deformed surfaces** as BIDS-derivatives.
41
+
42
+ This README focuses on **how to run the pipeline correctly** (inputs, outputs, folder/file naming, and commands).
43
+
44
+ ---
45
+
46
+ ## Table of Contents
47
+
48
+ - [Installation](#installation)
49
+ - [Configuration](#configuration)
50
+ - [Data and Folder Conventions](#data-and-folder-conventions)
51
+ - [Split File Format](#split-file-format)
52
+ - [Stage 1 — Preprocessing: FreeSurfer → MNI152](#stage-1--preprocessing-freesurfer--mni152)
53
+ - [Stage 2 — Segmentation: 3D U-Net (MNI space)](#stage-2--segmentation-3d-u-net-mni-space)
54
+ - [Stage 3 — Initial Surfaces (InitSurf)](#stage-3--initial-surfaces-initsurf)
55
+ - [Stage 4 — Deformation (Deform)](#stage-4--deformation-deform)
56
+ - [License](#license)
57
+
58
+ ---
59
+
60
+ ## Installation
61
+
62
+ From the repository root:
63
+
64
+ ```bash
65
+ pip install -e .
66
+ scpp --help
67
+ scpp seg --help
68
+ scpp initsurf --help
69
+ scpp deform --help
70
+ ```
71
+
72
+ ### Recommended environment
73
+ - Python 3.10+
74
+ - PyTorch + MONAI
75
+ - `nibabel`, `numpy`, `pandas`, `openpyxl`
76
+ - `trimesh`, `scipy`, `tqdm`
77
+ - External tools for Stage 1: **NiftyReg** (`reg_aladin`, `reg_resample`)
78
+ - Optional (Deform metrics):
79
+ - `python-fcl` for collision metrics
80
+ - `pymeshlab` for SIF (self-intersection fraction) in Deform evaluation
81
+
82
+ ---
83
+
84
+ ## Configuration
85
+
86
+ All stages use Hydra YAML configs shipped with the package (see `src/simcortexpp/configs/<stage>/*.yaml`).
87
+
88
+ You have **two** ways to configure a run:
89
+
90
+ 1) **Edit the stage YAML** (recommended for longer runs / stable experiments), then run commands with no extra arguments, e.g.:
91
+ - `scpp deform eval`
92
+
93
+ 2) **Use Hydra overrides on the CLI** (recommended for quick tests), e.g.:
94
+ - `scpp deform eval dataset.split_name=test outputs.out_dir=/tmp/deform_eval`
95
+
96
+ ---
97
+
98
+ ## Data and Folder Conventions
99
+
100
+ You will typically work with **two roots**:
101
+
102
+ 1) **Code repository (this repository)**
103
+ Contains code, configs, and scripts (no data).
104
+
105
+ 2) **Dataset root (BIDS + derivatives)**
106
+ Each dataset has its own root directory. Recommended structure:
107
+
108
+ ```text
109
+ datasets/<dataset-name>/
110
+ bids/ # raw BIDS dataset
111
+ derivatives/ # processed outputs (BIDS derivatives)
112
+ freesurfer-7.4.1/
113
+ scpp-preproc-0.1/
114
+ scpp-seg-0.1/
115
+ scpp-initsurf-0.1/
116
+ scpp-deform-0.1/
117
+ splits/
118
+ <dataset>_split.csv
119
+ ```
120
+
121
+ SCPP reads inputs from `derivatives/` and writes outputs back to `derivatives/` using BIDS-derivatives-style naming.
122
+
123
+ > Important: keep the dataset root naming consistent (e.g., use `datasets/...` everywhere).
124
+
125
+ ---
126
+
127
+ ## Split File Format
128
+
129
+ A split CSV is required for Segmentation, InitSurf, and Deform.
130
+
131
+ ### Single-dataset split
132
+ Minimal columns:
133
+ - `subject` (e.g., `sub-0001`)
134
+ - `split` in `{train, val, test}`
135
+
136
+ ### Multi-dataset split
137
+ Include an additional column:
138
+ - `dataset` (string key that matches config keys, e.g., `HCP_YA`, `OASIS1`)
139
+
140
+ Example:
141
+ ```csv
142
+ subject,split,dataset
143
+ sub-100307,test,HCP_YA
144
+ sub-101915,test,HCP_YA
145
+ sub-0001,test,OASIS1
146
+ ```
147
+
148
+ ---
149
+
150
+ ## Stage 1 — Preprocessing: FreeSurfer → MNI152
151
+
152
+ This stage exports key FreeSurfer outputs (volumes + surfaces), registers them to **MNI152**, and writes results to a **BIDS-derivatives-style** folder.
153
+
154
+ ### Inputs
155
+ - FreeSurfer derivatives root (contains `sub-*` folders)
156
+ - MNI template (e.g., `src/MNI152_T1_1mm.nii.gz`)
157
+
158
+ ### Dependencies (system tools)
159
+ - **NiftyReg**: `reg_aladin`, `reg_resample` must be in `PATH`
160
+ - **FreeSurfer** tools are recommended (e.g., `mri_convert`, `mris_convert`) for consistent conversions
161
+
162
+ ### Run (all subjects discovered automatically)
163
+ ```bash
164
+ scpp fs-to-mni --freesurfer-root /path/to/datasets/<dataset>/derivatives/freesurfer-7.4.1 --out-deriv-root /path/to/datasets/<dataset>/derivatives/scpp-preproc-0.1 --mni-template /path/to/SimCortexPP/src/MNI152_T1_1mm.nii.gz --decimate 0.3 -v
165
+ ```
166
+
167
+ ### Run (selected subjects)
168
+ ```bash
169
+ scpp fs-to-mni --freesurfer-root /path/to/datasets/<dataset>/derivatives/freesurfer-7.4.1 --out-deriv-root /path/to/datasets/<dataset>/derivatives/scpp-preproc-0.1 --mni-template /path/to/SimCortexPP/src/MNI152_T1_1mm.nii.gz -p sub-0001 -p sub-0019 -v
170
+ ```
171
+
172
+ ### Output layout (example)
173
+ ```text
174
+ scpp-preproc-0.1/
175
+ dataset_description.json
176
+ sub-XXXX/
177
+ ses-01/
178
+ anat/
179
+ sub-XXXX_ses-01_space-MNI152_desc-preproc_T1w.nii.gz
180
+ sub-XXXX_ses-01_space-MNI152_desc-aparc+aseg_dseg.nii.gz
181
+ sub-XXXX_ses-01_space-MNI152_desc-filled_T1w.nii.gz
182
+ sub-XXXX_ses-01_from-T1w_to-MNI152_mode-image_xfm.txt
183
+ surfaces/
184
+ sub-XXXX_ses-01_space-MNI152_hemi-L_white.surf.ply
185
+ sub-XXXX_ses-01_space-MNI152_hemi-L_pial.surf.ply
186
+ sub-XXXX_ses-01_space-MNI152_hemi-R_white.surf.ply
187
+ sub-XXXX_ses-01_space-MNI152_hemi-R_pial.surf.ply
188
+ ```
189
+
190
+ ---
191
+
192
+ ## Stage 2 — Segmentation: 3D U-Net (MNI space)
193
+
194
+ This stage trains and applies a 3D U-Net to predict a **9-class segmentation** in **MNI152 space** using Stage 1 outputs.
195
+
196
+ ### Expected inputs (from Stage 1)
197
+ Under `scpp-preproc-*`, for each subject:
198
+ - `..._desc-preproc_T1w.nii.gz`
199
+ - `..._desc-aparc+aseg_dseg.nii.gz`
200
+ - `..._desc-filled_T1w.nii.gz`
201
+
202
+ ### Output naming (predictions)
203
+ Predictions are written under `scpp-seg-*`:
204
+ - `sub-XXXX/ses-01/anat/sub-XXXX_ses-01_space-MNI152_desc-seg9_dseg.nii.gz`
205
+
206
+ ### Train (single GPU)
207
+ ```bash
208
+ scpp seg train dataset.path=/path/to/datasets/<dataset>/derivatives/scpp-preproc-0.1 dataset.split_file=/path/to/datasets/<dataset>/splits/<dataset>_split.csv outputs.root=/path/to/scpp-runs/seg/exp01 trainer.use_ddp=false
209
+ ```
210
+
211
+ ### Train (multi-GPU, torchrun)
212
+ ```bash
213
+ scpp seg train --torchrun --nproc-per-node 2 dataset.path=/path/to/datasets/<dataset>/derivatives/scpp-preproc-0.1 dataset.split_file=/path/to/datasets/<dataset>/splits/<dataset>_split.csv outputs.root=/path/to/scpp-runs/seg/exp01 trainer.use_ddp=true
214
+ ```
215
+
216
+ ### Inference
217
+ ```bash
218
+ scpp seg infer dataset.path=/path/to/datasets/<dataset>/derivatives/scpp-preproc-0.1 dataset.split_file=/path/to/datasets/<dataset>/splits/<dataset>_split.csv dataset.split_name=test model.ckpt_path=/path/to/seg_best_dice.pt outputs.out_root=/path/to/datasets/<dataset>/derivatives/scpp-seg-0.1
219
+ ```
220
+
221
+ ### Evaluation (multi-dataset example)
222
+ ```bash
223
+ scpp seg eval dataset.split_file=/path/to/datasets/splits/dataset_split.csv dataset.split_name=test dataset.roots.HCP_YA=/path/to/datasets/hcpya-u100/derivatives/scpp-preproc-0.1 dataset.roots.OASIS1=/path/to/datasets/oasis-1/derivatives/scpp-preproc-0.1 outputs.pred_roots.HCP_YA=/path/to/datasets/hcpya-u100/derivatives/scpp-seg-0.1 outputs.pred_roots.OASIS1=/path/to/datasets/oasis-1/derivatives/scpp-seg-0.1 outputs.eval_csv=/path/to/scpp-runs/seg/exp01/evals/seg_eval_test.csv outputs.eval_xlsx=/path/to/scpp-runs/seg/exp01/evals/seg_eval_test.xlsx
224
+ ```
225
+
226
+ ---
227
+
228
+ ## Stage 3 — Initial Surfaces (InitSurf)
229
+
230
+ This stage generates initial cortical surfaces from saved segmentation predictions (not end-to-end).
231
+
232
+ ### Inputs
233
+ - Preproc roots (`scpp-preproc-*`) for MNI T1
234
+ - Seg roots (`scpp-seg-*`) for `..._desc-seg9_dseg.nii.gz`
235
+ - Split CSV (same format as Stage 2)
236
+
237
+ ### Outputs
238
+ BIDS-derivatives-style outputs under `scpp-initsurf-*` (meshes + SDF volumes + ribbon prob):
239
+ ```text
240
+ scpp-initsurf-0.1/
241
+ dataset_description.json
242
+ sub-XXXX/
243
+ ses-01/
244
+ anat/
245
+ sub-XXXX_ses-01_space-MNI152_desc-lh_white_sdf.nii.gz
246
+ sub-XXXX_ses-01_space-MNI152_desc-rh_white_sdf.nii.gz
247
+ sub-XXXX_ses-01_space-MNI152_desc-lh_pial_sdf.nii.gz
248
+ sub-XXXX_ses-01_space-MNI152_desc-rh_pial_sdf.nii.gz
249
+ sub-XXXX_ses-01_space-MNI152_desc-ribbon_sdf.nii.gz
250
+ sub-XXXX_ses-01_space-MNI152_desc-ribbon_prob.nii.gz
251
+ surfaces/
252
+ sub-XXXX_ses-01_space-MNI152_hemi-L_white.surf.ply
253
+ sub-XXXX_ses-01_space-MNI152_hemi-L_pial.surf.ply
254
+ sub-XXXX_ses-01_space-MNI152_hemi-R_white.surf.ply
255
+ sub-XXXX_ses-01_space-MNI152_hemi-R_pial.surf.ply
256
+ ```
257
+
258
+ ### Run (multi-dataset example)
259
+ ```bash
260
+ scpp initsurf generate dataset.split_file=/path/to/datasets/splits/dataset_split.csv dataset.split_name=all dataset.roots.HCP_YA=/path/to/datasets/hcpya-u100/derivatives/scpp-preproc-0.1 dataset.roots.OASIS1=/path/to/datasets/oasis-1/derivatives/scpp-preproc-0.1 dataset.seg_roots.HCP_YA=/path/to/datasets/hcpya-u100/derivatives/scpp-seg-0.1 dataset.seg_roots.OASIS1=/path/to/datasets/oasis-1/derivatives/scpp-seg-0.1 outputs.out_roots.HCP_YA=/path/to/datasets/hcpya-u100/derivatives/scpp-initsurf-0.1 outputs.out_roots.OASIS1=/path/to/datasets/oasis-1/derivatives/scpp-initsurf-0.1 outputs.log_dir=/path/to/scpp-runs/initsurf/exp01/logs_generate
261
+ ```
262
+
263
+ Typical runtime: ~31 s/subject (hardware-dependent).
264
+
265
+ ---
266
+
267
+ ## Stage 4 — Deformation (Deform)
268
+
269
+ This stage deforms InitSurf meshes using input volumes and geometric losses, and writes **deformed** surfaces to a BIDS-derivatives folder.
270
+
271
+ ### Inputs
272
+ - Preproc root (`scpp-preproc-*`): MNI T1 + GT FreeSurfer surfaces in MNI space
273
+ - InitSurf root (`scpp-initsurf-*`): ribbon probability + initial surfaces
274
+ - Split CSV (same format as Stage 2)
275
+
276
+ ### Outputs
277
+ Deformed surfaces under `scpp-deform-*`:
278
+ ```text
279
+ scpp-deform-0.1/
280
+ dataset_description.json
281
+ sub-XXXX/
282
+ ses-01/
283
+ surfaces/
284
+ sub-XXXX_ses-01_space-MNI152_desc-deform_hemi-L_white.surf.ply
285
+ sub-XXXX_ses-01_space-MNI152_desc-deform_hemi-L_pial.surf.ply
286
+ sub-XXXX_ses-01_space-MNI152_desc-deform_hemi-R_white.surf.ply
287
+ sub-XXXX_ses-01_space-MNI152_desc-deform_hemi-R_pial.surf.ply
288
+ ```
289
+
290
+ ### Train (multi-GPU example)
291
+ ```bash
292
+ scpp deform train --torchrun --nproc-per-node 2 outputs.root=/path/to/scpp-runs/deform/exp01
293
+ ```
294
+
295
+ ### Inference
296
+ ```bash
297
+ scpp deform infer
298
+ ```
299
+
300
+ ### Evaluation
301
+ ```bash
302
+ scpp deform eval
303
+ ```
304
+
305
+ Evaluation writes four Excel files:
306
+ - `surface_metrics.xlsx`
307
+ - `collision_metrics.xlsx`
308
+ - `collision_metrics_enhanced.xlsx`
309
+ - `collision_summary.xlsx`
310
+
311
+ ---
312
+
313
+ ## Outputs Summary
314
+
315
+ For each dataset root:
316
+
317
+ - `derivatives/scpp-preproc-0.1/`
318
+ MNI T1, aparc+aseg, filled, transforms, and MNI-aligned FreeSurfer surfaces.
319
+
320
+ - `derivatives/scpp-seg-0.1/`
321
+ 9-class segmentation predictions (`*_desc-seg9_dseg.nii.gz`).
322
+
323
+ - `derivatives/scpp-initsurf-0.1/`
324
+ Initial surfaces (`*.surf.ply`) + SDF volumes + ribbon SDF/probability.
325
+
326
+ - `derivatives/scpp-deform-0.1/`
327
+ Deformed surfaces (`*_desc-deform_*.surf.ply`).
328
+
329
+ ---
330
+
331
+
332
+ ## License
333
+
334
+ Add your license and citation details here if needed.
@@ -0,0 +1,44 @@
1
+ simcortexpp/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ simcortexpp/cli/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
3
+ simcortexpp/cli/main.py,sha256=SZyV66Qdzd-q7zkle09-Y9SzLoPsGIMp1zyZpv1tHek,3108
4
+ simcortexpp/configs/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ simcortexpp/configs/deform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
+ simcortexpp/configs/deform/eval.yaml,sha256=QQFUciVQrxC9q4PqtpENsPFYYQLwv2LFquc7lhSZZXw,1052
7
+ simcortexpp/configs/deform/inference.yaml,sha256=i9HJ6qscq_7aLN1G5wSfgJRHyZ0bv-15OZbaMMQttmk,1737
8
+ simcortexpp/configs/deform/train.yaml,sha256=n0kly9M-1aaO71ASzGRQGnZiecb1RF80dalf2Ss92W4,2103
9
+ simcortexpp/configs/initsurf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ simcortexpp/configs/initsurf/generate.yaml,sha256=36WxtHZz7eouurDEuStFq3s0lZQYMigWvi8REWKAM3Q,1434
11
+ simcortexpp/configs/seg/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
12
+ simcortexpp/configs/seg/eval.yaml,sha256=nwQ2njQR5sZO6WJPswnniJNmhcC55gZoSGzDPQkAnaE,1115
13
+ simcortexpp/configs/seg/inference.yaml,sha256=3QCT2Zp4wSUvE0QJ6uMMhzrBfcRicY2LVlZ3ILwTfgQ,1031
14
+ simcortexpp/configs/seg/train.yaml,sha256=adm84bcodkQyzEFw1w15u1nt6KNczhgaiqVg29t_FJw,922
15
+ simcortexpp/deform/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
+ simcortexpp/deform/eval.py,sha256=3WQ4ZCz1LjkFYaN5HNpMYqfpD2qfDYMXCVV-vQc6f54,12186
17
+ simcortexpp/deform/inference.py,sha256=-N_LOT_KEQYCmP_WNUy-5wUp7RG7KPgpaC3rx3fLA8M,8826
18
+ simcortexpp/deform/train.py,sha256=SGblWWk3LhrPO4jKRUe1I7dVkC0-zepar8bwPXzSGus,46719
19
+ simcortexpp/deform/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
20
+ simcortexpp/deform/data/dataloader.py,sha256=7LQuvz6I7Ie-krJCDWdBBpURy2EkgISyRdGRBcPb1bw,8966
21
+ simcortexpp/deform/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
22
+ simcortexpp/deform/models/surfdeform.py,sha256=kLG6eSOteu19HluQh4g_st7UgNzmpcEDcruMLtsiSBE,13526
23
+ simcortexpp/deform/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
24
+ simcortexpp/deform/utils/coords.py,sha256=efjz2Clr6f3uCEtZxymbAqaEs6tLWX9w_KLoorTCJbg,3010
25
+ simcortexpp/initsurf/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
+ simcortexpp/initsurf/generate.py,sha256=thM2vN31nt6J3byBtkRAqWpO9he8spvZ07FSr7UCsSM,15095
27
+ simcortexpp/initsurf/paths.py,sha256=Dox0pAGZm8k1wCchsek7OEr2XHyiDb_0b0upqQYblKY,783
28
+ simcortexpp/preproc/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
29
+ simcortexpp/preproc/fs_to_mni.py,sha256=LyTla34qkQAfiuc1kgdve20_dTqNWeHkBTMMe_JVcs8,24812
30
+ simcortexpp/seg/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
31
+ simcortexpp/seg/eval.py,sha256=UcMUPseL5-NNHlAjZkOic34yAnjvTj_vwOYMqt-ox0A,9288
32
+ simcortexpp/seg/inference.py,sha256=x885BvuQo9yCzmCsjI6CW4aHtKGOZAQ1lnX-gGddbk8,10254
33
+ simcortexpp/seg/train.py,sha256=pT2o06CqbPfVE5nc377bbGilodqVdBm-BgnG42RJWfE,15247
34
+ simcortexpp/seg/data/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
35
+ simcortexpp/seg/data/dataloader.py,sha256=TFTMf2SEE3Aqyt0HIeIrN8_dOHvPdRBBmo43_lYnFZM,11292
36
+ simcortexpp/seg/models/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
37
+ simcortexpp/seg/models/unet.py,sha256=vG2ZTec2cYRltW4U__3r6oiSZRlsIN5M8kJZ8o21RxE,2379
38
+ simcortexpp/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
39
+ simcortexpp/utils/tca.py,sha256=Tj9_HAZo8_PxeK0e1Q46s0f9kBgPYDUbHBTNYhfKWYI,9171
40
+ simcortexpp-0.1.0.dist-info/METADATA,sha256=s9xM_1Ox5cFxGzySme-t1zwdU-N9YQWY-8svLYX7FDc,11984
41
+ simcortexpp-0.1.0.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
42
+ simcortexpp-0.1.0.dist-info/entry_points.txt,sha256=6VvMqyqopjoDohH8jLw2jNvo-VzHh8sMfv9JO9Jkwp4,50
43
+ simcortexpp-0.1.0.dist-info/top_level.txt,sha256=SKYCpY9Y_P2F-h9EAtlXjJQRs1XlTEMHMqmdoLRnDik,12
44
+ simcortexpp-0.1.0.dist-info/RECORD,,
@@ -0,0 +1,5 @@
1
+ Wheel-Version: 1.0
2
+ Generator: setuptools (82.0.0)
3
+ Root-Is-Purelib: true
4
+ Tag: py3-none-any
5
+
@@ -0,0 +1,2 @@
1
+ [console_scripts]
2
+ scpp = simcortexpp.cli.main:app
@@ -0,0 +1 @@
1
+ simcortexpp