junifer 0.0.5.dev110__py3-none-any.whl → 0.0.5.dev145__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/__init__.py +17 -0
- junifer/_version.py +2 -2
- junifer/api/__init__.py +3 -0
- junifer/api/decorators.py +9 -0
- junifer/api/functions.py +7 -4
- junifer/api/parser.py +3 -0
- junifer/api/queue_context/__init__.py +3 -0
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/tests/test_api_utils.py +2 -0
- junifer/configs/juseless/__init__.py +3 -0
- junifer/configs/juseless/datagrabbers/__init__.py +9 -0
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ucla.py +3 -0
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
- junifer/data/__init__.py +20 -0
- junifer/data/coordinates.py +8 -0
- junifer/data/masks.py +10 -0
- junifer/data/parcellations.py +9 -0
- junifer/data/template_spaces.py +3 -0
- junifer/data/tests/test_masks.py +1 -1
- junifer/data/utils.py +3 -0
- junifer/datagrabber/__init__.py +15 -0
- junifer/datagrabber/aomic/__init__.py +3 -0
- junifer/datagrabber/aomic/id1000.py +3 -0
- junifer/datagrabber/aomic/piop1.py +3 -0
- junifer/datagrabber/aomic/piop2.py +3 -0
- junifer/datagrabber/base.py +3 -0
- junifer/datagrabber/datalad_base.py +3 -0
- junifer/datagrabber/hcp1200/__init__.py +3 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
- junifer/datagrabber/hcp1200/hcp1200.py +3 -0
- junifer/datagrabber/multiple.py +3 -0
- junifer/datagrabber/pattern.py +5 -1
- junifer/datagrabber/pattern_datalad.py +3 -0
- junifer/datagrabber/utils.py +3 -0
- junifer/datareader/__init__.py +3 -0
- junifer/datareader/default.py +3 -0
- junifer/external/BrainPrint/brainprint/__init__.py +4 -0
- junifer/external/BrainPrint/brainprint/_version.py +3 -0
- junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
- junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
- junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
- junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
- junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
- junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
- junifer/markers/__init__.py +22 -0
- junifer/markers/base.py +21 -7
- junifer/markers/brainprint.py +665 -0
- junifer/markers/collection.py +3 -0
- junifer/markers/complexity/__init__.py +10 -0
- junifer/markers/complexity/complexity_base.py +3 -0
- junifer/markers/complexity/hurst_exponent.py +3 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
- junifer/markers/complexity/perm_entropy.py +3 -0
- junifer/markers/complexity/range_entropy.py +3 -0
- junifer/markers/complexity/range_entropy_auc.py +3 -0
- junifer/markers/complexity/sample_entropy.py +3 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +7 -0
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_perm_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_sample_entropy.py +7 -0
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +7 -0
- junifer/markers/complexity/weighted_perm_entropy.py +3 -0
- junifer/markers/ets_rss.py +3 -0
- junifer/markers/falff/__init__.py +3 -0
- junifer/markers/falff/_afni_falff.py +3 -0
- junifer/markers/falff/_junifer_falff.py +3 -0
- junifer/markers/falff/falff_base.py +3 -0
- junifer/markers/falff/falff_parcels.py +3 -0
- junifer/markers/falff/falff_spheres.py +3 -0
- junifer/markers/functional_connectivity/__init__.py +9 -0
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_base.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +3 -0
- junifer/markers/parcel_aggregation.py +3 -0
- junifer/markers/reho/__init__.py +3 -0
- junifer/markers/reho/_afni_reho.py +4 -1
- junifer/markers/reho/_junifer_reho.py +4 -1
- junifer/markers/reho/reho_base.py +2 -0
- junifer/markers/reho/reho_parcels.py +5 -2
- junifer/markers/reho/reho_spheres.py +5 -2
- junifer/markers/sphere_aggregation.py +3 -0
- junifer/markers/temporal_snr/__init__.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_base.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_parcels.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_spheres.py +3 -0
- junifer/markers/tests/test_brainprint.py +47 -0
- junifer/onthefly/__init__.py +3 -0
- junifer/onthefly/read_transform.py +3 -0
- junifer/pipeline/__init__.py +8 -0
- junifer/pipeline/pipeline_step_mixin.py +3 -0
- junifer/pipeline/registry.py +3 -0
- junifer/pipeline/singleton.py +3 -0
- junifer/pipeline/update_meta_mixin.py +3 -0
- junifer/pipeline/utils.py +3 -0
- junifer/pipeline/workdir_manager.py +3 -0
- junifer/preprocess/__init__.py +8 -0
- junifer/preprocess/base.py +4 -1
- junifer/preprocess/confounds/__init__.py +3 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +3 -0
- junifer/preprocess/smoothing/__init__.py +3 -0
- junifer/preprocess/warping/__init__.py +3 -0
- junifer/preprocess/warping/_ants_warper.py +3 -0
- junifer/preprocess/warping/_fsl_warper.py +3 -0
- junifer/stats.py +3 -0
- junifer/storage/__init__.py +8 -0
- junifer/storage/base.py +3 -0
- junifer/storage/hdf5.py +3 -0
- junifer/storage/pandas_base.py +3 -0
- junifer/storage/sqlite.py +3 -0
- junifer/storage/utils.py +9 -0
- junifer/testing/__init__.py +3 -0
- junifer/testing/datagrabbers.py +7 -0
- junifer/testing/utils.py +3 -0
- junifer/utils/__init__.py +10 -0
- junifer/utils/fs.py +3 -0
- junifer/utils/helpers.py +3 -0
- junifer/utils/logging.py +10 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/METADATA +2 -1
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/RECORD +134 -124
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/WHEEL +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,441 @@
|
|
1
|
+
"""
|
2
|
+
Definition of the brainprint analysis execution functions..
|
3
|
+
"""
|
4
|
+
|
5
|
+
import shutil
|
6
|
+
import warnings
|
7
|
+
from pathlib import Path
|
8
|
+
from typing import Dict, Tuple, Union
|
9
|
+
|
10
|
+
import numpy as np
|
11
|
+
from lapy import TriaMesh, shapedna
|
12
|
+
|
13
|
+
from . import __version__
|
14
|
+
from .asymmetry import compute_asymmetry
|
15
|
+
from .surfaces import create_surfaces, read_vtk
|
16
|
+
from .utils.utils import (
|
17
|
+
create_output_paths,
|
18
|
+
export_brainprint_results,
|
19
|
+
test_freesurfer,
|
20
|
+
validate_environment,
|
21
|
+
validate_subject_dir,
|
22
|
+
)
|
23
|
+
|
24
|
+
warnings.filterwarnings("ignore", ".*negative int.*")
|
25
|
+
|
26
|
+
|
27
|
+
def apply_eigenvalues_options(
    eigenvalues: np.ndarray,
    triangular_mesh: TriaMesh,
    norm: str = "none",
    reweight: bool = False,
) -> np.ndarray:
    """
    Post-process ShapeDNA eigenvalues according to the analysis options.

    Parameters
    ----------
    eigenvalues : np.ndarray
        ShapeDNA derived eigenvalues.
    triangular_mesh : TriaMesh
        Surface representation.
    norm : str, optional
        Eigenvalues normalization method (default is "none").
    reweight : bool, optional
        Whether to reweight eigenvalues or not (default is False).

    Returns
    -------
    np.ndarray
        Fixed eigenvalues.
    """
    # Normalization operates on the mesh geometry, so make sure the mesh
    # is consistently oriented first.
    if not triangular_mesh.is_oriented():
        triangular_mesh.orient_()

    result = eigenvalues
    if norm != "none":
        result = shapedna.normalize_ev(
            geom=triangular_mesh,
            evals=result,
            method=norm,
        )
    return shapedna.reweight_ev(result) if reweight else result
|
63
|
+
|
64
|
+
|
65
|
+
def compute_surface_brainprint(
    path: Path,
    return_eigenvectors: bool = True,
    num: int = 50,
    norm: str = "none",
    reweight: bool = False,
    use_cholmod: bool = False,
) -> Tuple[np.ndarray, Union[np.ndarray, None]]:
    """
    Compute BrainPrint eigenvalues and eigenvectors for the given surface.

    Parameters
    ----------
    path : Path
        Path to the *.vtk* surface file.
    return_eigenvectors : bool, optional
        Whether to store eigenvectors in the result (default is True).
    num : int, optional
        Number of eigenvalues to compute (default is 50).
    norm : str, optional
        Eigenvalues normalization method (default is "none").
    reweight : bool, optional
        Whether to reweight eigenvalues or not (default is False).
    use_cholmod : bool, optional
        If True, attempts to use the Cholesky decomposition for improved
        execution speed. Requires the ``scikit-sparse`` library. If it can
        not be found, an error will be thrown.
        If False, will use slower LU decomposition. This is the default.

    Returns
    -------
    Tuple[np.ndarray, Union[np.ndarray, None]]
        Eigenvalues, eigenvectors (if returned).
    """
    mesh = read_vtk(path)
    shape_dna = shapedna.compute_shapedna(
        mesh,
        k=num,
        lump=False,
        aniso=None,
        aniso_smooth=10,
        use_cholmod=use_cholmod,
    )

    vectors = shape_dna["Eigenvectors"] if return_eigenvectors else None

    values = apply_eigenvalues_options(
        shape_dna["Eigenvalues"], mesh, norm, reweight
    )
    # Prepend surface area and volume as the first two entries of the
    # eigenvalue vector.
    values = np.concatenate(
        (
            np.array(mesh.area(), ndmin=1),
            np.array(mesh.volume(), ndmin=1),
            values,
        )
    )
    return values, vectors
|
126
|
+
|
127
|
+
|
128
|
+
def compute_brainprint(
    surfaces: Dict[str, Path],
    keep_eigenvectors: bool = False,
    num: int = 50,
    norm: str = "none",
    reweight: bool = False,
    use_cholmod: bool = False,
) -> Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None]]:
    """
    Compute ShapeDNA descriptors over several surfaces.

    Parameters
    ----------
    surfaces : Dict[str, Path]
        Dictionary mapping from labels to *.vtk* paths.
    keep_eigenvectors : bool, optional
        Whether to also return eigenvectors or not, by default False.
    num : int, optional
        Number of eigenvalues to compute, by default 50.
    norm : str, optional
        Eigenvalues normalization method, by default "none".
    reweight : bool, optional
        Whether to reweight eigenvalues or not, by default False.
    use_cholmod : bool, optional
        If True, attempts to use the Cholesky decomposition for improved
        execution speed. Requires the ``scikit-sparse`` library. If it can
        not be found, an error will be thrown. If False, will use slower LU
        decomposition. This is the default.

    Returns
    -------
    Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None]]
        Surface label to eigenvalues, surface label to eigenvectors (if
        *keep_eigenvectors* is True).
    """
    values: Dict[str, np.ndarray] = {}
    vectors = {} if keep_eigenvectors else None
    for label, vtk_path in surfaces.items():
        try:
            surface_values, surface_vectors = compute_surface_brainprint(
                vtk_path,
                num=num,
                norm=norm,
                reweight=reweight,
                return_eigenvectors=keep_eigenvectors,
                use_cholmod=use_cholmod,
            )
        except Exception as exc:
            warnings.warn(
                "BrainPrint analysis raised the following exception:\n"
                f"{exc}"
            )
            # Placeholder: area + volume rows plus *num* eigenvalues.
            values[label] = ["NaN"] * (num + 2)
            continue
        if len(surface_values) == 0:
            values[label] = ["NaN"] * (num + 2)
        else:
            values[label] = surface_values
        if keep_eigenvectors:
            vectors[label] = surface_vectors
    return values, vectors
|
192
|
+
|
193
|
+
|
194
|
+
def run_brainprint(
    subjects_dir: Path,
    subject_id: str,
    destination: Union[Path, None] = None,
    num: int = 50,
    skip_cortex: bool = False,
    keep_eigenvectors: bool = False,
    norm: str = "none",
    reweight: bool = False,
    asymmetry: bool = False,
    asymmetry_distance: str = "euc",
    keep_temp: bool = False,
    use_cholmod: bool = False,
):
    """
    Run the BrainPrint analysis.

    Parameters
    ----------
    subjects_dir : Path
        FreeSurfer's subjects directory.
    subject_id : str
        The subject identifier, as defined within the FreeSurfer's subjects
        directory.
    destination : Path, optional
        If provided, will use this path as the results root directory, by
        default None.
    num : int, optional
        Number of eigenvalues to compute, by default 50.
    skip_cortex : bool, optional
        If True, cortical surfaces are not created; also forwarded to the
        asymmetry computation, by default False.
    keep_eigenvectors : bool, optional
        Whether to also return eigenvectors or not, by default False.
    norm : str, optional
        Eigenvalues normalization method, by default "none".
    reweight : bool, optional
        Whether to reweight eigenvalues or not, by default False.
    asymmetry : bool, optional
        Whether to calculate asymmetry between lateral structures, by default
        False.
    asymmetry_distance : str, optional
        Distance measurement to use if *asymmetry* is set to True, by default
        "euc".
    keep_temp : bool, optional
        Whether to keep the temporary files directory or not, by default False.
    use_cholmod : bool, optional
        If True, attempts to use the Cholesky decomposition for improved execution
        speed. Requires the ``scikit-sparse`` library. If it can not be found, an error
        will be thrown. If False, will use slower LU decomposition. This is the default.

    Returns
    -------
    Tuple[Dict[str, np.ndarray], Union[Dict[str, np.ndarray], None], Union[Dict[str, float], None]]
        A tuple containing dictionaries with BrainPrint analysis results.

        - Eigenvalues
        - Eigenvectors
        - Distances
    """  # noqa: E501
    validate_environment()
    test_freesurfer()
    subject_dir = validate_subject_dir(subjects_dir, subject_id)
    destination = create_output_paths(
        subject_dir=subject_dir,
        destination=destination,
    )

    surfaces = create_surfaces(subject_dir, destination, skip_cortex=skip_cortex)
    eigenvalues, eigenvectors = compute_brainprint(
        surfaces,
        num=num,
        norm=norm,
        reweight=reweight,
        keep_eigenvectors=keep_eigenvectors,
        use_cholmod=use_cholmod,
    )

    distances = None
    if asymmetry:
        distances = compute_asymmetry(
            eigenvalues,
            distance=asymmetry_distance,
            skip_cortex=skip_cortex,
        )

    csv_name = "{subject_id}.brainprint.csv".format(subject_id=subject_id)
    csv_path = destination / csv_name
    export_brainprint_results(csv_path, eigenvalues, eigenvectors, distances)
    # Temporary surface-extraction artifacts live under <destination>/temp.
    if not keep_temp:
        shutil.rmtree(destination / "temp")
    print(
        "Returning matrices for eigenvalues, eigenvectors, and (optionally) distances."
    )
    print("The eigenvalue matrix contains area and volume as first two rows.")
    return eigenvalues, eigenvectors, distances
|
288
|
+
|
289
|
+
|
290
|
+
class Brainprint:
    """Object-oriented wrapper around the BrainPrint analysis workflow."""

    # Expose the package version on the class for provenance.
    __version__ = __version__

    def __init__(
        self,
        subjects_dir: Path,
        num: int = 50,
        skip_cortex: bool = False,
        keep_eigenvectors: bool = False,
        norm: str = "none",
        reweight: bool = False,
        asymmetry: bool = False,
        asymmetry_distance: str = "euc",
        keep_temp: bool = False,
        environment_validation: bool = True,
        freesurfer_validation: bool = True,
        use_cholmod: bool = False,
    ) -> None:
        """
        Initializes a new :class:`Brainprint` instance.

        Parameters
        ----------
        subjects_dir : Path
            FreeSurfer's subjects directory.
        num : int, optional
            Number of eigenvalues to compute, by default 50.
        skip_cortex : bool, optional
            If True, cortical surfaces are not created, by default False.
        keep_eigenvectors : bool, optional
            Whether to also return eigenvectors or not, by default False.
        norm : str, optional
            Eigenvalues normalization method, by default "none".
        reweight : bool, optional
            Whether to reweight eigenvalues or not, by default False.
        asymmetry : bool, optional
            Whether to calculate asymmetry between lateral structures, by
            default False.
        asymmetry_distance : str, optional
            Distance measurement to use if *asymmetry* is set to True, by
            default "euc".
        keep_temp : bool, optional
            Whether to keep the temporary files directory or not, by default
            False.
        environment_validation : bool, optional
            Whether to run environment validation on creation, by default
            True.
        freesurfer_validation : bool, optional
            Whether to check the FreeSurfer installation on creation, by
            default True.
        use_cholmod : bool, optional
            If True, attempts to use the Cholesky decomposition for improved
            execution speed. Requires the ``scikit-sparse`` library. If it
            can not be found, an error will be thrown. If False, will use
            slower LU decomposition. This is the default.
        """
        self.subjects_dir = subjects_dir
        self.num = num
        self.norm = norm
        self.skip_cortex = skip_cortex
        self.reweight = reweight
        self.keep_eigenvectors = keep_eigenvectors
        self.asymmetry = asymmetry
        self.asymmetry_distance = asymmetry_distance
        self.keep_temp = keep_temp
        self.use_cholmod = use_cholmod

        # Per-run state, populated by :meth:`run`.
        self._subject_id = None
        self._destination = None
        self._eigenvalues = None
        self._eigenvectors = None
        self._distances = None

        if environment_validation:
            validate_environment()
        if freesurfer_validation:
            test_freesurfer()

    def run(
        self, subject_id: str, destination: Union[Path, None] = None
    ) -> Dict[str, Path]:
        """
        Run Brainprint analysis for a specified subject.

        Parameters
        ----------
        subject_id : str
            The ID of the subject to analyze.
        destination : Path, optional
            The destination directory for analysis results, by default None.

        Returns
        -------
        Dict[str, Path]
            A dictionary containing paths to the generated analysis results.
        """
        # Reset cached results from any previous run.
        self._eigenvalues = self._eigenvectors = self._distances = None
        subject_dir = validate_subject_dir(self.subjects_dir, subject_id)
        destination = create_output_paths(
            subject_dir=subject_dir,
            destination=destination,
        )

        surfaces = create_surfaces(
            subject_dir, destination, skip_cortex=self.skip_cortex
        )
        self._eigenvalues, self._eigenvectors = compute_brainprint(
            surfaces,
            num=self.num,
            norm=self.norm,
            reweight=self.reweight,
            keep_eigenvectors=self.keep_eigenvectors,
            use_cholmod=self.use_cholmod,
        )

        if self.asymmetry:
            self._distances = compute_asymmetry(
                self._eigenvalues,
                distance=self.asymmetry_distance,
                skip_cortex=self.skip_cortex,
            )

        self.cleanup(destination=destination)
        return self.export_results(destination=destination, subject_id=subject_id)

    def export_results(self, destination: Path, subject_id: str) -> None:
        """
        Export Brainprint analysis results to a CSV file.

        Parameters
        ----------
        destination : Path
            The destination directory for analysis results.
        subject_id : str
            The ID of the subject being analyzed.

        Returns
        -------
        None
        """
        csv_name = "{subject_id}.brainprint.csv".format(subject_id=subject_id)
        csv_path = destination / csv_name
        # NOTE(review): annotated ``-> None`` but propagates the return value
        # of ``export_brainprint_results`` — confirm the intended return type.
        return export_brainprint_results(
            csv_path, self._eigenvalues, self._eigenvectors, self._distances
        )

    def cleanup(self, destination: Path) -> None:
        """
        Clean up temporary files generated during the analysis.

        Parameters
        ----------
        destination : Path
            The destination directory for analysis results.

        Returns
        -------
        None
        """
        if not self.keep_temp:
            shutil.rmtree(destination / "temp")
|
@@ -0,0 +1,258 @@
|
|
1
|
+
"""
|
2
|
+
Utility module holding surface generation related functions.
|
3
|
+
"""
|
4
|
+
import uuid
|
5
|
+
from pathlib import Path
|
6
|
+
from typing import Dict, List
|
7
|
+
|
8
|
+
from lapy import TriaMesh
|
9
|
+
|
10
|
+
from .utils.utils import run_shell_command
|
11
|
+
|
12
|
+
|
13
|
+
def create_aseg_surface(
    subject_dir: Path, destination: Path, indices: List[str]
) -> Path:
    """
    Generate a surface from the aseg and label files.

    Parameters
    ----------
    subject_dir : Path
        Path to the subject's directory.
    destination : Path
        Path to the destination directory where the surface will be saved.
    indices : List[str]
        List of label indices (as strings; integers are also accepted) to
        include in the surface generation.

    Returns
    -------
    Path
        Path to the generated surface in VTK format.
    """
    aseg_path = subject_dir / "mri/aseg.mgz"
    norm_path = subject_dir / "mri/norm.mgz"
    temp_name = "temp/aseg.{uid}".format(uid=uuid.uuid4())
    indices_mask = destination / f"{temp_name}.mgz"
    # Coerce indices to str so integer label lists are accepted as well
    # (string join below requires string elements).
    str_indices = [str(index) for index in indices]
    # binarize on selected labels (creates temp indices_mask)
    # always binarize first, otherwise pretess may scale aseg if labels are
    # larger than 255 (e.g. aseg+aparc, bug in mri_pretess?)
    binarize_template = "mri_binarize --i {source} --match {match} --o {destination}"
    binarize_command = binarize_template.format(
        source=aseg_path, match=" ".join(str_indices), destination=indices_mask
    )
    run_shell_command(binarize_command)

    label_value = "1"
    # if norm exist, fix label (pretess)
    if norm_path.is_file():
        pretess_template = (
            "mri_pretess {source} {label_value} {norm_path} {destination}"
        )
        pretess_command = pretess_template.format(
            source=indices_mask,
            label_value=label_value,
            norm_path=norm_path,
            destination=indices_mask,
        )
        run_shell_command(pretess_command)

    # runs marching cube to extract surface
    surface_name = "{name}.surf".format(name=temp_name)
    surface_path = destination / surface_name
    extraction_template = "mri_mc {source} {label_value} {destination}"
    extraction_command = extraction_template.format(
        source=indices_mask, label_value=label_value, destination=surface_path
    )
    run_shell_command(extraction_command)

    # convert to vtk
    relative_path = "surfaces/aseg.final.{indices}.vtk".format(
        indices="_".join(str_indices)
    )
    conversion_destination = destination / relative_path
    conversion_template = "mris_convert {source} {destination}"
    conversion_command = conversion_template.format(
        source=surface_path, destination=conversion_destination
    )
    run_shell_command(conversion_command)

    return conversion_destination
|
81
|
+
|
82
|
+
|
83
|
+
def create_aseg_surfaces(subject_dir: Path, destination: Path) -> Dict[str, Path]:
    """
    Create surfaces from FreeSurfer aseg labels.

    Parameters
    ----------
    subject_dir : Path
        Path to the subject's FreeSurfer directory.
    destination : Path
        Path to the destination directory for saving surfaces.

    Returns
    -------
    Dict[str, Path]
        Dictionary of label names mapped to corresponding surface Path objects.
    """
    # Combined and individual aseg label sets:
    # - Left/Right Striatum: Caudate + Putamen + Accumbens
    # - CorpusCallosum: 5 subregions combined
    # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
    # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus
    #   + 3rdVent + CSF
    # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
    # - 3rd-Ventricle: 3rd-Ventricle + CSF
    aseg_labels = {
        "CorpusCallosum": ["251", "252", "253", "254", "255"],
        "Cerebellum": ["7", "8", "16", "46", "47"],
        "Ventricles": ["4", "5", "14", "24", "31", "43", "44", "63"],
        "3rd-Ventricle": ["14", "24"],
        "4th-Ventricle": ["15"],
        "Brain-Stem": ["16"],
        "Left-Striatum": ["11", "12", "26"],
        "Left-Lateral-Ventricle": ["4", "5", "31"],
        "Left-Cerebellum-White-Matter": ["7"],
        "Left-Cerebellum-Cortex": ["8"],
        "Left-Thalamus-Proper": ["10"],
        "Left-Caudate": ["11"],
        "Left-Putamen": ["12"],
        "Left-Pallidum": ["13"],
        "Left-Hippocampus": ["17"],
        "Left-Amygdala": ["18"],
        "Left-Accumbens-area": ["26"],
        "Left-VentralDC": ["28"],
        "Right-Striatum": ["50", "51", "58"],
        "Right-Lateral-Ventricle": ["43", "44", "63"],
        "Right-Cerebellum-White-Matter": ["46"],
        "Right-Cerebellum-Cortex": ["47"],
        "Right-Thalamus-Proper": ["49"],
        "Right-Caudate": ["50"],
        "Right-Putamen": ["51"],
        "Right-Pallidum": ["52"],
        "Right-Hippocampus": ["53"],
        "Right-Amygdala": ["54"],
        "Right-Accumbens-area": ["58"],
        "Right-VentralDC": ["60"],
    }
    surfaces = {}
    for label, indices in aseg_labels.items():
        surfaces[label] = create_aseg_surface(subject_dir, destination, indices)
    return surfaces
|
146
|
+
|
147
|
+
|
148
|
+
def create_cortical_surfaces(subject_dir: Path, destination: Path) -> Dict[str, Path]:
    """
    Create cortical surfaces from FreeSurfer labels.

    Parameters
    ----------
    subject_dir : Path
        Path to the subject's FreeSurfer directory.
    destination : Path
        Path to the destination directory where the surfaces will be saved.

    Returns
    -------
    Dict[str, Path]
        Dictionary mapping label names to associated surface Paths.
    """
    # Label name -> FreeSurfer surface file under <subject_dir>/surf.
    cortical_labels = {
        "lh-white-2d": "lh.white",
        "rh-white-2d": "rh.white",
        "lh-pial-2d": "lh.pial",
        "rh-pial-2d": "rh.pial",
    }
    surfaces = {}
    for label, name in cortical_labels.items():
        source = subject_dir / "surf" / name
        target = destination / "surfaces" / f"{name}.vtk"
        surfaces[label] = surf_to_vtk(source, target)
    return surfaces
|
177
|
+
|
178
|
+
|
179
|
+
def create_surfaces(
    subject_dir: Path, destination: Path, skip_cortex: bool = False
) -> Dict[str, Path]:
    """
    Create surfaces based on FreeSurfer labels.

    Parameters
    ----------
    subject_dir : Path
        Path to the subject's FreeSurfer directory.
    destination : Path
        Path to the destination directory where the surfaces will be saved.
    skip_cortex : bool, optional
        If True, cortical surfaces will not be created (default is False).

    Returns
    -------
    Dict[str, Path]
        Dict mapping label names to the corresponding Path objects of created surfaces.
    """
    surfaces = create_aseg_surfaces(subject_dir, destination)
    if skip_cortex:
        return surfaces
    # Merge in the cortical (white/pial) surfaces.
    surfaces.update(create_cortical_surfaces(subject_dir, destination))
    return surfaces
|
204
|
+
|
205
|
+
|
206
|
+
def read_vtk(path: Path):
    """
    Read a VTK file and return a triangular mesh.

    Parameters
    ----------
    path : Path
        Path to the VTK file to be read.

    Returns
    -------
    TriaMesh
        A triangular mesh object representing the contents of the VTK file.

    Raises
    ------
    RuntimeError
        If there is an issue reading the VTK file or if the file is empty.
    """
    # Single message for both failure modes (read error and empty result).
    message = "Failed to read VTK from the following path: {path}!".format(
        path=path
    )
    try:
        triangular_mesh = TriaMesh.read_vtk(path)
    except Exception as exc:
        # Chain the original exception so the root cause is not lost.
        raise RuntimeError(message) from exc
    if triangular_mesh is None:
        raise RuntimeError(message)
    return triangular_mesh
|
239
|
+
|
240
|
+
|
241
|
+
def surf_to_vtk(source: Path, destination: Path) -> Path:
    """
    Convert a FreeSurfer *.surf* file to *.vtk*.

    Parameters
    ----------
    source : Path
        FreeSurfer *.surf* file.
    destination : Path
        Equivalent *.vtk* file.

    Returns
    -------
    Path
        Resulting *.vtk* file.
    """
    mesh = TriaMesh.read_fssurf(source)
    mesh.write_vtk(destination)
    return destination
|
@@ -0,0 +1 @@
|
|
1
|
+
"""Utilities module."""
|