junifer 0.0.4.dev831__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (206)
  1. junifer/__init__.py +17 -0
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +4 -1
  4. junifer/api/cli.py +91 -1
  5. junifer/api/decorators.py +9 -0
  6. junifer/api/functions.py +56 -10
  7. junifer/api/parser.py +3 -0
  8. junifer/api/queue_context/__init__.py +4 -1
  9. junifer/api/queue_context/gnu_parallel_local_adapter.py +16 -6
  10. junifer/api/queue_context/htcondor_adapter.py +16 -5
  11. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +41 -12
  12. junifer/api/queue_context/tests/test_htcondor_adapter.py +48 -15
  13. junifer/api/res/afni/run_afni_docker.sh +1 -1
  14. junifer/api/res/ants/run_ants_docker.sh +1 -1
  15. junifer/api/res/freesurfer/mri_binarize +3 -0
  16. junifer/api/res/freesurfer/mri_mc +3 -0
  17. junifer/api/res/freesurfer/mri_pretess +3 -0
  18. junifer/api/res/freesurfer/mris_convert +3 -0
  19. junifer/api/res/freesurfer/run_freesurfer_docker.sh +61 -0
  20. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  21. junifer/api/res/{run_conda.sh → run_conda.bash} +1 -1
  22. junifer/api/res/run_conda.zsh +23 -0
  23. junifer/api/res/run_venv.bash +22 -0
  24. junifer/api/res/{run_venv.sh → run_venv.zsh} +1 -1
  25. junifer/api/tests/test_api_utils.py +4 -2
  26. junifer/api/tests/test_cli.py +83 -0
  27. junifer/api/tests/test_functions.py +27 -2
  28. junifer/configs/__init__.py +1 -1
  29. junifer/configs/juseless/__init__.py +4 -1
  30. junifer/configs/juseless/datagrabbers/__init__.py +10 -1
  31. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +4 -3
  32. junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
  33. junifer/configs/juseless/datagrabbers/ixi_vbm.py +4 -3
  34. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +1 -3
  35. junifer/configs/juseless/datagrabbers/ucla.py +12 -9
  36. junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
  37. junifer/data/__init__.py +21 -1
  38. junifer/data/coordinates.py +10 -19
  39. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  40. junifer/data/masks.py +58 -87
  41. junifer/data/parcellations.py +14 -3
  42. junifer/data/template_spaces.py +4 -1
  43. junifer/data/tests/test_masks.py +26 -37
  44. junifer/data/utils.py +3 -0
  45. junifer/datagrabber/__init__.py +18 -1
  46. junifer/datagrabber/aomic/__init__.py +3 -0
  47. junifer/datagrabber/aomic/id1000.py +70 -37
  48. junifer/datagrabber/aomic/piop1.py +69 -36
  49. junifer/datagrabber/aomic/piop2.py +71 -38
  50. junifer/datagrabber/aomic/tests/test_id1000.py +44 -100
  51. junifer/datagrabber/aomic/tests/test_piop1.py +65 -108
  52. junifer/datagrabber/aomic/tests/test_piop2.py +45 -102
  53. junifer/datagrabber/base.py +13 -6
  54. junifer/datagrabber/datalad_base.py +13 -1
  55. junifer/datagrabber/dmcc13_benchmark.py +36 -53
  56. junifer/datagrabber/hcp1200/__init__.py +3 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +4 -1
  59. junifer/datagrabber/multiple.py +45 -6
  60. junifer/datagrabber/pattern.py +170 -62
  61. junifer/datagrabber/pattern_datalad.py +25 -12
  62. junifer/datagrabber/pattern_validation_mixin.py +388 -0
  63. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +46 -19
  65. junifer/datagrabber/tests/test_multiple.py +161 -84
  66. junifer/datagrabber/tests/test_pattern.py +45 -0
  67. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  68. junifer/datagrabber/tests/test_pattern_validation_mixin.py +249 -0
  69. junifer/datareader/__init__.py +4 -1
  70. junifer/datareader/default.py +95 -43
  71. junifer/external/BrainPrint/brainprint/__init__.py +4 -0
  72. junifer/external/BrainPrint/brainprint/_version.py +3 -0
  73. junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
  74. junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
  75. junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
  76. junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
  77. junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
  78. junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
  79. junifer/external/__init__.py +1 -1
  80. junifer/external/nilearn/__init__.py +5 -1
  81. junifer/external/nilearn/junifer_connectivity_measure.py +483 -0
  82. junifer/external/nilearn/junifer_nifti_spheres_masker.py +23 -9
  83. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +1089 -0
  84. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +76 -1
  85. junifer/markers/__init__.py +23 -1
  86. junifer/markers/base.py +68 -28
  87. junifer/markers/brainprint.py +459 -0
  88. junifer/markers/collection.py +10 -2
  89. junifer/markers/complexity/__init__.py +10 -0
  90. junifer/markers/complexity/complexity_base.py +26 -43
  91. junifer/markers/complexity/hurst_exponent.py +3 -0
  92. junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
  93. junifer/markers/complexity/perm_entropy.py +3 -0
  94. junifer/markers/complexity/range_entropy.py +3 -0
  95. junifer/markers/complexity/range_entropy_auc.py +3 -0
  96. junifer/markers/complexity/sample_entropy.py +3 -0
  97. junifer/markers/complexity/tests/test_hurst_exponent.py +11 -3
  98. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +11 -3
  99. junifer/markers/complexity/tests/test_perm_entropy.py +11 -3
  100. junifer/markers/complexity/tests/test_range_entropy.py +11 -3
  101. junifer/markers/complexity/tests/test_range_entropy_auc.py +11 -3
  102. junifer/markers/complexity/tests/test_sample_entropy.py +11 -3
  103. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +11 -3
  104. junifer/markers/complexity/weighted_perm_entropy.py +3 -0
  105. junifer/markers/ets_rss.py +27 -42
  106. junifer/markers/falff/__init__.py +3 -0
  107. junifer/markers/falff/_afni_falff.py +5 -2
  108. junifer/markers/falff/_junifer_falff.py +3 -0
  109. junifer/markers/falff/falff_base.py +20 -46
  110. junifer/markers/falff/falff_parcels.py +56 -27
  111. junifer/markers/falff/falff_spheres.py +60 -29
  112. junifer/markers/falff/tests/test_falff_parcels.py +39 -23
  113. junifer/markers/falff/tests/test_falff_spheres.py +39 -23
  114. junifer/markers/functional_connectivity/__init__.py +9 -0
  115. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +63 -60
  116. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +45 -32
  117. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +49 -36
  118. junifer/markers/functional_connectivity/functional_connectivity_base.py +71 -70
  119. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +34 -25
  120. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +40 -30
  121. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +11 -7
  122. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +27 -7
  123. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +28 -12
  124. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +35 -11
  125. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +36 -62
  126. junifer/markers/parcel_aggregation.py +47 -61
  127. junifer/markers/reho/__init__.py +3 -0
  128. junifer/markers/reho/_afni_reho.py +5 -2
  129. junifer/markers/reho/_junifer_reho.py +4 -1
  130. junifer/markers/reho/reho_base.py +8 -27
  131. junifer/markers/reho/reho_parcels.py +28 -17
  132. junifer/markers/reho/reho_spheres.py +27 -18
  133. junifer/markers/reho/tests/test_reho_parcels.py +8 -3
  134. junifer/markers/reho/tests/test_reho_spheres.py +8 -3
  135. junifer/markers/sphere_aggregation.py +43 -59
  136. junifer/markers/temporal_snr/__init__.py +3 -0
  137. junifer/markers/temporal_snr/temporal_snr_base.py +23 -32
  138. junifer/markers/temporal_snr/temporal_snr_parcels.py +9 -6
  139. junifer/markers/temporal_snr/temporal_snr_spheres.py +9 -6
  140. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +6 -3
  141. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +6 -3
  142. junifer/markers/tests/test_brainprint.py +58 -0
  143. junifer/markers/tests/test_collection.py +9 -8
  144. junifer/markers/tests/test_ets_rss.py +15 -9
  145. junifer/markers/tests/test_markers_base.py +17 -18
  146. junifer/markers/tests/test_parcel_aggregation.py +93 -32
  147. junifer/markers/tests/test_sphere_aggregation.py +72 -19
  148. junifer/onthefly/__init__.py +4 -1
  149. junifer/onthefly/read_transform.py +3 -0
  150. junifer/pipeline/__init__.py +9 -1
  151. junifer/pipeline/pipeline_step_mixin.py +21 -4
  152. junifer/pipeline/registry.py +3 -0
  153. junifer/pipeline/singleton.py +3 -0
  154. junifer/pipeline/tests/test_registry.py +1 -1
  155. junifer/pipeline/update_meta_mixin.py +3 -0
  156. junifer/pipeline/utils.py +67 -1
  157. junifer/pipeline/workdir_manager.py +3 -0
  158. junifer/preprocess/__init__.py +10 -2
  159. junifer/preprocess/base.py +6 -3
  160. junifer/preprocess/confounds/__init__.py +3 -0
  161. junifer/preprocess/confounds/fmriprep_confound_remover.py +47 -60
  162. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +72 -113
  163. junifer/preprocess/smoothing/__init__.py +9 -0
  164. junifer/preprocess/smoothing/_afni_smoothing.py +119 -0
  165. junifer/preprocess/smoothing/_fsl_smoothing.py +116 -0
  166. junifer/preprocess/smoothing/_nilearn_smoothing.py +69 -0
  167. junifer/preprocess/smoothing/smoothing.py +174 -0
  168. junifer/preprocess/smoothing/tests/test_smoothing.py +94 -0
  169. junifer/preprocess/warping/__init__.py +3 -0
  170. junifer/preprocess/warping/_ants_warper.py +3 -0
  171. junifer/preprocess/warping/_fsl_warper.py +3 -0
  172. junifer/stats.py +4 -1
  173. junifer/storage/__init__.py +9 -1
  174. junifer/storage/base.py +40 -1
  175. junifer/storage/hdf5.py +71 -9
  176. junifer/storage/pandas_base.py +3 -0
  177. junifer/storage/sqlite.py +3 -0
  178. junifer/storage/tests/test_hdf5.py +82 -10
  179. junifer/storage/utils.py +9 -0
  180. junifer/testing/__init__.py +4 -1
  181. junifer/testing/datagrabbers.py +13 -6
  182. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  183. junifer/testing/utils.py +3 -0
  184. junifer/utils/__init__.py +13 -2
  185. junifer/utils/fs.py +3 -0
  186. junifer/utils/helpers.py +32 -1
  187. junifer/utils/logging.py +33 -4
  188. junifer/utils/tests/test_logging.py +8 -0
  189. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/METADATA +17 -16
  190. junifer-0.0.5.dist-info/RECORD +275 -0
  191. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/WHEEL +1 -1
  192. junifer/datagrabber/tests/test_datagrabber_utils.py +0 -218
  193. junifer/datagrabber/utils.py +0 -230
  194. junifer/preprocess/ants/__init__.py +0 -4
  195. junifer/preprocess/ants/ants_apply_transforms_warper.py +0 -185
  196. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +0 -56
  197. junifer/preprocess/bold_warper.py +0 -265
  198. junifer/preprocess/fsl/__init__.py +0 -4
  199. junifer/preprocess/fsl/apply_warper.py +0 -179
  200. junifer/preprocess/fsl/tests/test_apply_warper.py +0 -45
  201. junifer/preprocess/tests/test_bold_warper.py +0 -159
  202. junifer-0.0.4.dev831.dist-info/RECORD +0 -257
  203. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/AUTHORS.rst +0 -0
  204. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/LICENSE.md +0 -0
  205. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/entry_points.txt +0 -0
  206. {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,459 @@
+"""Provide class for BrainPrint."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+import uuid
+from pathlib import Path
+from typing import (
+    Any,
+    ClassVar,
+    Dict,
+    List,
+    Optional,
+    Set,
+    Union,
+)
+
+import numpy as np
+import numpy.typing as npt
+
+from ..api.decorators import register_marker
+from ..external.BrainPrint.brainprint.brainprint import (
+    compute_asymmetry,
+    compute_brainprint,
+)
+from ..external.BrainPrint.brainprint.surfaces import surf_to_vtk
+from ..pipeline import WorkDirManager
+from ..utils import logger, run_ext_cmd
+from .base import BaseMarker
+
+
+__all__ = ["BrainPrint"]
+
+
+@register_marker
+class BrainPrint(BaseMarker):
+    """Class for BrainPrint.
+
+    Parameters
+    ----------
+    num : positive int, optional
+        Number of eigenvalues to compute (default 50).
+    skip_cortex : bool, optional
+        Whether to skip cortical surface or not (default False).
+    keep_eigenvectors : bool, optional
+        Whether to also return eigenvectors or not (default False).
+    norm : str, optional
+        Eigenvalues normalization method (default "none").
+    reweight : bool, optional
+        Whether to reweight eigenvalues or not (default False).
+    asymmetry : bool, optional
+        Whether to calculate asymmetry between lateral structures
+        (default False).
+    asymmetry_distance : {"euc"}, optional
+        Distance measurement to use if ``asymmetry=True``:
+
+        * ``"euc"`` : Euclidean
+
+        (default "euc").
+    use_cholmod : bool, optional
+        If True, attempts to use the Cholesky decomposition for improved
+        execution speed. Requires the ``scikit-sparse`` library. If it cannot
+        be found, an error will be thrown. If False, will use slower LU
+        decomposition (default False).
+    name : str, optional
+        The name of the marker. If None, will use the class name (default
+        None).
+
+    """
+
+    _EXT_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, List[str]]]]] = [
+        {
+            "name": "freesurfer",
+            "commands": [
+                "mri_binarize",
+                "mri_pretess",
+                "mri_mc",
+                "mris_convert",
+            ],
+        },
+    ]
+
+    _DEPENDENCIES: ClassVar[Set[str]] = {"lapy", "numpy"}
+
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "FreeSurfer": {
+            "eigenvalues": "scalar_table",
+            "areas": "vector",
+            "volumes": "vector",
+            "distances": "vector",
+        }
+    }
+
+    def __init__(
+        self,
+        num: int = 50,
+        skip_cortex=False,
+        keep_eigenvectors: bool = False,
+        norm: str = "none",
+        reweight: bool = False,
+        asymmetry: bool = False,
+        asymmetry_distance: str = "euc",
+        use_cholmod: bool = False,
+        name: Optional[str] = None,
+    ) -> None:
+        self.num = num
+        self.skip_cortex = skip_cortex
+        self.keep_eigenvectors = keep_eigenvectors
+        self.norm = norm
+        self.reweight = reweight
+        self.asymmetry = asymmetry
+        self.asymmetry_distance = asymmetry_distance
+        self.use_cholmod = use_cholmod
+        super().__init__(name=name, on="FreeSurfer")
+
+    def _create_aseg_surface(
+        self,
+        aseg_path: Path,
+        norm_path: Path,
+        indices: List,
+    ) -> Path:
+        """Generate a surface from the aseg and label files.
+
+        Parameters
+        ----------
+        aseg_path : pathlib.Path
+            The FreeSurfer aseg path.
+        norm_path : pathlib.Path
+            The FreeSurfer norm path.
+        indices : list
+            List of label indices to include in the surface generation.
+
+        Returns
+        -------
+        pathlib.Path
+            Path to the generated surface in VTK format.
+
+        """
+        tempfile_prefix = f"aseg.{uuid.uuid4()}"
+
+        # Set mri_binarize command
+        mri_binarize_output_path = self._tempdir / f"{tempfile_prefix}.mgz"
+        mri_binarize_cmd = [
+            "mri_binarize",
+            f"--i {aseg_path.resolve()}",
+            f"--match {' '.join(indices)}",
+            f"--o {mri_binarize_output_path.resolve()}",
+        ]
+        # Call mri_binarize command
+        run_ext_cmd(name="mri_binarize", cmd=mri_binarize_cmd)
+
+        label_value = "1"
+        # Fix label (pretess)
+        # Set mri_pretess command
+        mri_pretess_cmd = [
+            "mri_pretess",
+            f"{mri_binarize_output_path.resolve()}",
+            f"{label_value}",
+            f"{norm_path.resolve()}",
+            f"{mri_binarize_output_path.resolve()}",
+        ]
+        # Call mri_pretess command
+        run_ext_cmd(name="mri_pretess", cmd=mri_pretess_cmd)
+
+        # Run marching cube to extract surface
+        # Set mri_mc command
+        mri_mc_output_path = self._tempdir / f"{tempfile_prefix}.surf"
+        mri_mc_cmd = [
+            "mri_mc",
+            f"{mri_binarize_output_path.resolve()}",
+            f"{label_value}",
+            f"{mri_mc_output_path.resolve()}",
+        ]
+        # Run mri_mc command
+        run_ext_cmd(name="mri_mc", cmd=mri_mc_cmd)
+
+        # Convert to vtk
+        # Set mris_convert command
+        surface_path = (
+            self._element_tempdir / f"aseg.final.{'_'.join(indices)}.vtk"
+        )
+        mris_convert_cmd = [
+            "mris_convert",
+            f"{mri_mc_output_path.resolve()}",
+            f"{surface_path.resolve()}",
+        ]
+        # Run mris_convert command
+        run_ext_cmd(name="mris_convert", cmd=mris_convert_cmd)
+
+        return surface_path
+
+    def _create_aseg_surfaces(
+        self,
+        aseg_path: Path,
+        norm_path: Path,
+    ) -> Dict[str, Path]:
+        """Create surfaces from FreeSurfer aseg labels.
+
+        Parameters
+        ----------
+        aseg_path : pathlib.Path
+            The FreeSurfer aseg path.
+        norm_path : pathlib.Path
+            The FreeSurfer norm path.
+
+        Returns
+        -------
+        dict
+            Dictionary of label names mapped to corresponding surface paths.
+
+        """
+        # Define aseg labels
+
+        # combined and individual aseg labels:
+        # - Left Striatum: left Caudate + Putamen + Accumbens
+        # - Right Striatum: right Caudate + Putamen + Accumbens
+        # - CorpusCallosum: 5 subregions combined
+        # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
+        # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus +
+        #   3rdVent + CSF
+        # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
+        # - 3rd-Ventricle: 3rd-Ventricle + CSF
+
+        aseg_labels = {
+            "CorpusCallosum": ["251", "252", "253", "254", "255"],
+            "Cerebellum": ["7", "8", "16", "46", "47"],
+            "Ventricles": ["4", "5", "14", "24", "31", "43", "44", "63"],
+            "3rd-Ventricle": ["14", "24"],
+            "4th-Ventricle": ["15"],
+            "Brain-Stem": ["16"],
+            "Left-Striatum": ["11", "12", "26"],
+            "Left-Lateral-Ventricle": ["4", "5", "31"],
+            "Left-Cerebellum-White-Matter": ["7"],
+            "Left-Cerebellum-Cortex": ["8"],
+            "Left-Thalamus-Proper": ["10"],
+            "Left-Caudate": ["11"],
+            "Left-Putamen": ["12"],
+            "Left-Pallidum": ["13"],
+            "Left-Hippocampus": ["17"],
+            "Left-Amygdala": ["18"],
+            "Left-Accumbens-area": ["26"],
+            "Left-VentralDC": ["28"],
+            "Right-Striatum": ["50", "51", "58"],
+            "Right-Lateral-Ventricle": ["43", "44", "63"],
+            "Right-Cerebellum-White-Matter": ["46"],
+            "Right-Cerebellum-Cortex": ["47"],
+            "Right-Thalamus-Proper": ["49"],
+            "Right-Caudate": ["50"],
+            "Right-Putamen": ["51"],
+            "Right-Pallidum": ["52"],
+            "Right-Hippocampus": ["53"],
+            "Right-Amygdala": ["54"],
+            "Right-Accumbens-area": ["58"],
+            "Right-VentralDC": ["60"],
+        }
+        return {
+            label: self._create_aseg_surface(
+                aseg_path=aseg_path,
+                norm_path=norm_path,
+                indices=indices,
+            )
+            for label, indices in aseg_labels.items()
+        }
+
+    def _create_cortical_surfaces(
+        self,
+        lh_white_path: Path,
+        rh_white_path: Path,
+        lh_pial_path: Path,
+        rh_pial_path: Path,
+    ) -> Dict[str, Path]:
+        """Create cortical surfaces from FreeSurfer labels.
+
+        Parameters
+        ----------
+        lh_white_path : pathlib.Path
+            The FreeSurfer lh.white path.
+        rh_white_path : pathlib.Path
+            The FreeSurfer rh.white path.
+        lh_pial_path : pathlib.Path
+            The FreeSurfer lh.pial path.
+        rh_pial_path : pathlib.Path
+            The FreeSurfer rh.pial path.
+
+        Returns
+        -------
+        dict
+            Cortical surface label names with their paths as dictionary.
+
+        """
+        return {
+            "lh-white-2d": surf_to_vtk(
+                lh_white_path.resolve(),
+                (self._element_tempdir / "lh.white.vtk").resolve(),
+            ),
+            "rh-white-2d": surf_to_vtk(
+                rh_white_path.resolve(),
+                (self._element_tempdir / "rh.white.vtk").resolve(),
+            ),
+            "lh-pial-2d": surf_to_vtk(
+                lh_pial_path.resolve(),
+                (self._element_tempdir / "lh.pial.vtk").resolve(),
+            ),
+            "rh-pial-2d": surf_to_vtk(
+                rh_pial_path.resolve(),
+                (self._element_tempdir / "rh.pial.vtk").resolve(),
+            ),
+        }
+
+    def _fix_nan(
+        self,
+        input_data: List[Union[float, str, npt.ArrayLike]],
+    ) -> np.ndarray:
+        """Convert BrainPrint output with string NaN to ``numpy.nan``.
+
+        Parameters
+        ----------
+        input_data : list of str, float or numpy.ndarray-like
+            The data to convert.
+
+        Returns
+        -------
+        np.ndarray
+            The converted data as ``numpy.ndarray``.
+
+        """
+        arr = np.asarray(input_data)
+        arr[arr == "NaN"] = np.nan
+        return arr.astype(np.float64)
+
+    def compute(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict] = None,
+    ) -> Dict:
+        """Compute.
+
+        Parameters
+        ----------
+        input : dict
+            The FreeSurfer data as dictionary.
+        extra_input : dict, optional
+            The other fields in the pipeline data object (default None).
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
+
+            * ``eigenvalues`` : dictionary with the following keys:
+
+              - ``data`` : eigenvalues as ``np.ndarray``
+              - ``col_names`` : surface labels as list of str
+              - ``row_names`` : eigenvalue count labels as list of str
+              - ``row_header_col_name`` : "eigenvalue"
+
+            * ``areas`` : dictionary with the following keys:
+
+              - ``data`` : areas as ``np.ndarray``
+              - ``col_names`` : surface labels as list of str
+
+            * ``volumes`` : dictionary with the following keys:
+
+              - ``data`` : volumes as ``np.ndarray``
+              - ``col_names`` : surface labels as list of str
+
+            * ``distances`` : dictionary with the following keys
+              if ``asymmetry = True``:
+
+              - ``data`` : distances as ``np.ndarray``
+              - ``col_names`` : surface labels as list of str
+
+        References
+        ----------
+        .. [1] Wachinger, C., Golland, P., Kremen, W. et al. (2015)
+               BrainPrint: A discriminative characterization of brain
+               morphology.
+               NeuroImage, Volume 109, Pages 232-248.
+               https://doi.org/10.1016/j.neuroimage.2015.01.032.
+        .. [2] Reuter, M., Wolter, F.E., Peinecke, N. (2006)
+               Laplace-Beltrami spectra as 'Shape-DNA' of surfaces and solids.
+               Computer-Aided Design, Volume 38, Issue 4, Pages 342-366.
+               https://doi.org/10.1016/j.cad.2005.10.011.
+
+        """
+        logger.debug("Computing BrainPrint")
+
+        # Create component-scoped tempdir
+        self._tempdir = WorkDirManager().get_tempdir(prefix="brainprint")
+        # Create element-scoped tempdir so that the files are
+        # available later as nibabel stores file path reference for
+        # loading on computation
+        self._element_tempdir = WorkDirManager().get_element_tempdir(
+            prefix="brainprint"
+        )
+        # Generate surfaces
+        surfaces = self._create_aseg_surfaces(
+            aseg_path=input["aseg"]["path"],
+            norm_path=input["norm"]["path"],
+        )
+        if not self.skip_cortex:
+            cortical_surfaces = self._create_cortical_surfaces(
+                lh_white_path=input["lh_white"]["path"],
+                rh_white_path=input["rh_white"]["path"],
+                lh_pial_path=input["lh_pial"]["path"],
+                rh_pial_path=input["rh_pial"]["path"],
+            )
+            surfaces.update(cortical_surfaces)
+        # Compute brainprint
+        eigenvalues, _ = compute_brainprint(
+            surfaces=surfaces,
+            keep_eigenvectors=self.keep_eigenvectors,
+            num=self.num,
+            norm=self.norm,
+            reweight=self.reweight,
+            use_cholmod=self.use_cholmod,
+        )
+        # Calculate distances (if required)
+        distances = None
+        if self.asymmetry:
+            distances = compute_asymmetry(
+                eigenvalues=eigenvalues,
+                distance=self.asymmetry_distance,
+                skip_cortex=self.skip_cortex,
+            )
+
+        # Delete tempdir
+        WorkDirManager().delete_tempdir(self._tempdir)
+
+        output = {
+            "eigenvalues": {
+                "data": self._fix_nan(
+                    [val[2:] for val in eigenvalues.values()]
+                ).T,
+                "col_names": list(eigenvalues.keys()),
+                "row_names": [f"ev{i}" for i in range(self.num)],
+                "row_header_col_name": "eigenvalue",
+            },
+            "areas": {
+                "data": self._fix_nan(
+                    [val[0] for val in eigenvalues.values()]
+                ),
+                "col_names": list(eigenvalues.keys()),
+            },
+            "volumes": {
+                "data": self._fix_nan(
+                    [val[1] for val in eigenvalues.values()]
+                ),
+                "col_names": list(eigenvalues.keys()),
+            },
+        }
+        if self.asymmetry:
+            output["distances"] = {
+                "data": self._fix_nan(list(distances.values())),
+                "col_names": list(distances.keys()),
+            }
+        return output
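
The hunk above adds the new junifer/markers/brainprint.py (file 87 in the list). A hedged usage sketch follows; the import path assumes BrainPrint is re-exported from junifer.markers like the other registered markers, and the surrounding pipeline wiring (datagrabber, storage) is omitted:

# Hedged sketch, not taken from the diff: constructing the new BrainPrint
# marker with the parameters documented in the hunk above.
from junifer.markers import BrainPrint  # assumed re-export location

marker = BrainPrint(
    num=50,  # eigenvalues per surface
    skip_cortex=False,  # also convert lh/rh white and pial surfaces
    asymmetry=True,  # additionally compute lateral asymmetry distances
    asymmetry_distance="euc",  # Euclidean distance between eigenvalue vectors
)
# Per _MARKER_INOUT_MAPPINGS, the marker consumes the "FreeSurfer" data type
# and stores "eigenvalues" as a scalar table, with "areas", "volumes" and
# "distances" stored as vectors.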
@@ -19,6 +19,9 @@ if TYPE_CHECKING:
     from junifer.datagrabber import BaseDataGrabber
 
 
+__all__ = ["MarkerCollection"]
+
+
 class MarkerCollection:
     """Class for marker collection.
 
@@ -136,9 +139,14 @@ class MarkerCollection:
                     "Validating Preprocessor: "
                     f"{preprocessor.__class__.__name__}"
                 )
+                # Copy existing data types
+                old_t_data = t_data.copy()
+                logger.info(f"Preprocessor input type: {t_data}")
                 # Validate preprocessor
-                t_data = preprocessor.validate(t_data)
-                logger.info(f"Preprocess output type: {t_data}")
+                new_t_data = preprocessor.validate(old_t_data)
+                # Set new data types
+                t_data = list(set(old_t_data) | set(new_t_data))
+                logger.info(f"Preprocessor output type: {t_data}")
 
         for marker in self._markers:
             logger.info(f"Validating Marker: {marker.name}")
@@ -25,3 +25,13 @@ else:
     from .weighted_perm_entropy import WeightedPermEntropy
     from .sample_entropy import SampleEntropy
     from .multiscale_entropy_auc import MultiscaleEntropyAUC
+
+    __all__ = [
+        "HurstExponent",
+        "RangeEntropy",
+        "RangeEntropyAUC",
+        "PermEntropy",
+        "WeightedPermEntropy",
+        "SampleEntropy",
+        "MultiscaleEntropyAUC",
+    ]
@@ -24,6 +24,9 @@ if TYPE_CHECKING:
     import numpy as np
 
 
+__all__ = ["ComplexityBase"]
+
+
 class ComplexityBase(BaseMarker):
     """Base class for complexity computation.
 
@@ -50,6 +53,12 @@ class ComplexityBase(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "neurokit2"}
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "complexity": "vector",
+        },
+    }
+
     def __init__(
         self,
         parcellation: Union[str, List[str]],
@@ -75,33 +84,6 @@ class ComplexityBase(BaseMarker):
             klass=NotImplementedError,
         )
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker.
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "vector"
-
     def compute(
         self,
         input: Dict[str, Any],
@@ -121,29 +103,30 @@ class ComplexityBase(BaseMarker):
         Returns
         -------
         dict
-            The computed result as dictionary. The following keys will be
-            included in the dictionary:
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
 
-            * ``data`` : ROI-wise complexity measures as ``numpy.ndarray``
-            * ``col_names`` : ROI labels for the complexity measures as list
+            * ``complexity`` : dictionary with the following keys:
+
+              - ``data`` : ROI-wise complexity measures as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
 
         """
-        # Initialize a ParcelAggregation
+        # Extract the 2D time series using ParcelAggregation
         parcel_aggregation = ParcelAggregation(
            parcellation=self.parcellation,
            method=self.agg_method,
            method_params=self.agg_method_params,
            masks=self.masks,
            on="BOLD",
-        )
-        # Extract the 2D time series using parcel aggregation
-        parcel_aggregation_map = parcel_aggregation.compute(
-            input=input, extra_input=extra_input
-        )
-
+        ).compute(input=input, extra_input=extra_input)
         # Compute complexity measure
-        parcel_aggregation_map["data"] = self.compute_complexity(
-            parcel_aggregation_map["data"]
-        )
-
-        return parcel_aggregation_map
+        return {
+            "complexity": {
+                "data": self.compute_complexity(
+                    parcel_aggregation["aggregation"]["data"]
+                ),
+                "col_names": parcel_aggregation["aggregation"]["col_names"],
+            }
+        }
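
The complexity_base.py hunks above drop the per-marker get_valid_inputs() / get_output_type() methods in favour of the class-level _MARKER_INOUT_MAPPINGS declaration and nest the result under a "complexity" key. A hedged sketch of a custom subclass under the new layout; the class name, the chosen statistic, and the argument name of compute_complexity are illustrative assumptions, not part of this diff:

# Hedged sketch only: a toy ComplexityBase subclass under the 0.0.5 layout.
# ComplexityBase and register_marker exist in junifer; the marker below is
# hypothetical and for illustration only.
import numpy as np

from junifer.api.decorators import register_marker
from junifer.markers.complexity.complexity_base import ComplexityBase


@register_marker
class StdDevComplexity(ComplexityBase):
    """Toy marker (hypothetical): ROI-wise temporal standard deviation."""

    def compute_complexity(self, extracted_bold_values):
        # extracted_bold_values is the 2D array produced by ParcelAggregation
        # (timepoints x ROIs); return one row of values per ROI so the base
        # class can store it under the "complexity" -> "vector" mapping.
        return np.std(extracted_bold_values, axis=0)[np.newaxis, :]

The input/output declaration itself is inherited from ComplexityBase, so a subclass only supplies the per-ROI computation.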
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["HurstExponent"]
+
+
 @register_marker
 class HurstExponent(ComplexityBase):
     """Class for Hurst exponent of a time series.
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["MultiscaleEntropyAUC"]
+
+
 @register_marker
 class MultiscaleEntropyAUC(ComplexityBase):
     """Class for AUC of multiscale entropy of a time series.
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["PermEntropy"]
+
+
 @register_marker
 class PermEntropy(ComplexityBase):
     """Class for permutation entropy of a time series.
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["RangeEntropy"]
+
+
 @register_marker
 class RangeEntropy(ComplexityBase):
     """Class for range entropy of a time series.
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["RangeEntropyAUC"]
+
+
 @register_marker
 class RangeEntropyAUC(ComplexityBase):
     """Class for AUC of range entropy values of a time series over r = 0 to 1.
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["SampleEntropy"]
+
+
 @register_marker
 class SampleEntropy(ComplexityBase):
     """Class for sample entropy of a time series.