junifer 0.0.5.dev242__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (279) hide show
  1. junifer/__init__.py +2 -31
  2. junifer/__init__.pyi +37 -0
  3. junifer/_version.py +9 -4
  4. junifer/api/__init__.py +3 -5
  5. junifer/api/__init__.pyi +4 -0
  6. junifer/api/decorators.py +14 -19
  7. junifer/api/functions.py +165 -109
  8. junifer/api/py.typed +0 -0
  9. junifer/api/queue_context/__init__.py +2 -4
  10. junifer/api/queue_context/__init__.pyi +5 -0
  11. junifer/api/queue_context/gnu_parallel_local_adapter.py +22 -6
  12. junifer/api/queue_context/htcondor_adapter.py +23 -6
  13. junifer/api/queue_context/py.typed +0 -0
  14. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
  15. junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
  16. junifer/api/tests/test_functions.py +168 -74
  17. junifer/cli/__init__.py +24 -0
  18. junifer/cli/__init__.pyi +3 -0
  19. junifer/{api → cli}/cli.py +141 -125
  20. junifer/cli/parser.py +235 -0
  21. junifer/cli/py.typed +0 -0
  22. junifer/{api → cli}/tests/test_cli.py +8 -8
  23. junifer/{api/tests/test_api_utils.py → cli/tests/test_cli_utils.py} +5 -4
  24. junifer/{api → cli}/tests/test_parser.py +2 -2
  25. junifer/{api → cli}/utils.py +6 -16
  26. junifer/configs/juseless/__init__.py +2 -2
  27. junifer/configs/juseless/__init__.pyi +3 -0
  28. junifer/configs/juseless/datagrabbers/__init__.py +2 -12
  29. junifer/configs/juseless/datagrabbers/__init__.pyi +13 -0
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
  31. junifer/configs/juseless/datagrabbers/py.typed +0 -0
  32. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
  33. junifer/configs/juseless/datagrabbers/ucla.py +4 -4
  34. junifer/configs/juseless/py.typed +0 -0
  35. junifer/conftest.py +25 -0
  36. junifer/data/__init__.py +2 -42
  37. junifer/data/__init__.pyi +29 -0
  38. junifer/data/_dispatch.py +248 -0
  39. junifer/data/coordinates/__init__.py +9 -0
  40. junifer/data/coordinates/__init__.pyi +5 -0
  41. junifer/data/coordinates/_ants_coordinates_warper.py +104 -0
  42. junifer/data/coordinates/_coordinates.py +385 -0
  43. junifer/data/coordinates/_fsl_coordinates_warper.py +81 -0
  44. junifer/data/{tests → coordinates/tests}/test_coordinates.py +26 -33
  45. junifer/data/masks/__init__.py +9 -0
  46. junifer/data/masks/__init__.pyi +6 -0
  47. junifer/data/masks/_ants_mask_warper.py +177 -0
  48. junifer/data/masks/_fsl_mask_warper.py +106 -0
  49. junifer/data/masks/_masks.py +802 -0
  50. junifer/data/{tests → masks/tests}/test_masks.py +67 -63
  51. junifer/data/parcellations/__init__.py +9 -0
  52. junifer/data/parcellations/__init__.pyi +6 -0
  53. junifer/data/parcellations/_ants_parcellation_warper.py +166 -0
  54. junifer/data/parcellations/_fsl_parcellation_warper.py +89 -0
  55. junifer/data/parcellations/_parcellations.py +1388 -0
  56. junifer/data/{tests → parcellations/tests}/test_parcellations.py +165 -295
  57. junifer/data/pipeline_data_registry_base.py +76 -0
  58. junifer/data/py.typed +0 -0
  59. junifer/data/template_spaces.py +44 -79
  60. junifer/data/tests/test_data_utils.py +1 -2
  61. junifer/data/tests/test_template_spaces.py +8 -4
  62. junifer/data/utils.py +109 -4
  63. junifer/datagrabber/__init__.py +2 -26
  64. junifer/datagrabber/__init__.pyi +27 -0
  65. junifer/datagrabber/aomic/__init__.py +2 -4
  66. junifer/datagrabber/aomic/__init__.pyi +5 -0
  67. junifer/datagrabber/aomic/id1000.py +81 -52
  68. junifer/datagrabber/aomic/piop1.py +83 -55
  69. junifer/datagrabber/aomic/piop2.py +85 -56
  70. junifer/datagrabber/aomic/py.typed +0 -0
  71. junifer/datagrabber/aomic/tests/test_id1000.py +19 -12
  72. junifer/datagrabber/aomic/tests/test_piop1.py +52 -18
  73. junifer/datagrabber/aomic/tests/test_piop2.py +50 -17
  74. junifer/datagrabber/base.py +22 -18
  75. junifer/datagrabber/datalad_base.py +71 -34
  76. junifer/datagrabber/dmcc13_benchmark.py +31 -18
  77. junifer/datagrabber/hcp1200/__init__.py +2 -3
  78. junifer/datagrabber/hcp1200/__init__.pyi +4 -0
  79. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
  80. junifer/datagrabber/hcp1200/hcp1200.py +26 -15
  81. junifer/datagrabber/hcp1200/py.typed +0 -0
  82. junifer/datagrabber/hcp1200/tests/test_hcp1200.py +8 -2
  83. junifer/datagrabber/multiple.py +14 -9
  84. junifer/datagrabber/pattern.py +132 -96
  85. junifer/datagrabber/pattern_validation_mixin.py +206 -94
  86. junifer/datagrabber/py.typed +0 -0
  87. junifer/datagrabber/tests/test_datalad_base.py +27 -12
  88. junifer/datagrabber/tests/test_dmcc13_benchmark.py +28 -11
  89. junifer/datagrabber/tests/test_multiple.py +48 -2
  90. junifer/datagrabber/tests/test_pattern_datalad.py +1 -1
  91. junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
  92. junifer/datareader/__init__.py +2 -2
  93. junifer/datareader/__init__.pyi +3 -0
  94. junifer/datareader/default.py +6 -6
  95. junifer/datareader/py.typed +0 -0
  96. junifer/external/nilearn/__init__.py +2 -3
  97. junifer/external/nilearn/__init__.pyi +4 -0
  98. junifer/external/nilearn/junifer_connectivity_measure.py +25 -17
  99. junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
  100. junifer/external/nilearn/py.typed +0 -0
  101. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +17 -16
  102. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
  103. junifer/markers/__init__.py +2 -38
  104. junifer/markers/__init__.pyi +37 -0
  105. junifer/markers/base.py +11 -14
  106. junifer/markers/brainprint.py +12 -14
  107. junifer/markers/complexity/__init__.py +2 -18
  108. junifer/markers/complexity/__init__.pyi +17 -0
  109. junifer/markers/complexity/complexity_base.py +9 -11
  110. junifer/markers/complexity/hurst_exponent.py +7 -7
  111. junifer/markers/complexity/multiscale_entropy_auc.py +7 -7
  112. junifer/markers/complexity/perm_entropy.py +7 -7
  113. junifer/markers/complexity/py.typed +0 -0
  114. junifer/markers/complexity/range_entropy.py +7 -7
  115. junifer/markers/complexity/range_entropy_auc.py +7 -7
  116. junifer/markers/complexity/sample_entropy.py +7 -7
  117. junifer/markers/complexity/tests/test_complexity_base.py +1 -1
  118. junifer/markers/complexity/tests/test_hurst_exponent.py +5 -5
  119. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +5 -5
  120. junifer/markers/complexity/tests/test_perm_entropy.py +5 -5
  121. junifer/markers/complexity/tests/test_range_entropy.py +5 -5
  122. junifer/markers/complexity/tests/test_range_entropy_auc.py +5 -5
  123. junifer/markers/complexity/tests/test_sample_entropy.py +5 -5
  124. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +5 -5
  125. junifer/markers/complexity/weighted_perm_entropy.py +7 -7
  126. junifer/markers/ets_rss.py +12 -11
  127. junifer/markers/falff/__init__.py +2 -3
  128. junifer/markers/falff/__init__.pyi +4 -0
  129. junifer/markers/falff/_afni_falff.py +38 -45
  130. junifer/markers/falff/_junifer_falff.py +16 -19
  131. junifer/markers/falff/falff_base.py +7 -11
  132. junifer/markers/falff/falff_parcels.py +9 -9
  133. junifer/markers/falff/falff_spheres.py +8 -8
  134. junifer/markers/falff/py.typed +0 -0
  135. junifer/markers/falff/tests/test_falff_spheres.py +3 -1
  136. junifer/markers/functional_connectivity/__init__.py +2 -12
  137. junifer/markers/functional_connectivity/__init__.pyi +13 -0
  138. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +9 -8
  139. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +8 -8
  140. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +7 -7
  141. junifer/markers/functional_connectivity/functional_connectivity_base.py +13 -12
  142. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +8 -8
  143. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +7 -7
  144. junifer/markers/functional_connectivity/py.typed +0 -0
  145. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
  146. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
  147. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +6 -6
  148. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +5 -5
  149. junifer/markers/parcel_aggregation.py +22 -17
  150. junifer/markers/py.typed +0 -0
  151. junifer/markers/reho/__init__.py +2 -3
  152. junifer/markers/reho/__init__.pyi +4 -0
  153. junifer/markers/reho/_afni_reho.py +29 -35
  154. junifer/markers/reho/_junifer_reho.py +13 -14
  155. junifer/markers/reho/py.typed +0 -0
  156. junifer/markers/reho/reho_base.py +7 -11
  157. junifer/markers/reho/reho_parcels.py +10 -10
  158. junifer/markers/reho/reho_spheres.py +9 -9
  159. junifer/markers/sphere_aggregation.py +22 -17
  160. junifer/markers/temporal_snr/__init__.py +2 -3
  161. junifer/markers/temporal_snr/__init__.pyi +4 -0
  162. junifer/markers/temporal_snr/py.typed +0 -0
  163. junifer/markers/temporal_snr/temporal_snr_base.py +11 -10
  164. junifer/markers/temporal_snr/temporal_snr_parcels.py +8 -8
  165. junifer/markers/temporal_snr/temporal_snr_spheres.py +7 -7
  166. junifer/markers/tests/test_ets_rss.py +3 -3
  167. junifer/markers/tests/test_parcel_aggregation.py +24 -24
  168. junifer/markers/tests/test_sphere_aggregation.py +6 -6
  169. junifer/markers/utils.py +3 -3
  170. junifer/onthefly/__init__.py +2 -1
  171. junifer/onthefly/_brainprint.py +138 -0
  172. junifer/onthefly/read_transform.py +5 -8
  173. junifer/pipeline/__init__.py +2 -10
  174. junifer/pipeline/__init__.pyi +13 -0
  175. junifer/{markers/collection.py → pipeline/marker_collection.py} +8 -14
  176. junifer/pipeline/pipeline_component_registry.py +294 -0
  177. junifer/pipeline/pipeline_step_mixin.py +15 -11
  178. junifer/pipeline/py.typed +0 -0
  179. junifer/{markers/tests/test_collection.py → pipeline/tests/test_marker_collection.py} +2 -3
  180. junifer/pipeline/tests/test_pipeline_component_registry.py +200 -0
  181. junifer/pipeline/tests/test_pipeline_step_mixin.py +36 -37
  182. junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
  183. junifer/pipeline/tests/test_workdir_manager.py +43 -0
  184. junifer/pipeline/update_meta_mixin.py +21 -17
  185. junifer/pipeline/utils.py +6 -6
  186. junifer/pipeline/workdir_manager.py +19 -5
  187. junifer/preprocess/__init__.py +2 -10
  188. junifer/preprocess/__init__.pyi +11 -0
  189. junifer/preprocess/base.py +10 -10
  190. junifer/preprocess/confounds/__init__.py +2 -2
  191. junifer/preprocess/confounds/__init__.pyi +3 -0
  192. junifer/preprocess/confounds/fmriprep_confound_remover.py +243 -64
  193. junifer/preprocess/confounds/py.typed +0 -0
  194. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +121 -14
  195. junifer/preprocess/py.typed +0 -0
  196. junifer/preprocess/smoothing/__init__.py +2 -2
  197. junifer/preprocess/smoothing/__init__.pyi +3 -0
  198. junifer/preprocess/smoothing/_afni_smoothing.py +40 -40
  199. junifer/preprocess/smoothing/_fsl_smoothing.py +22 -32
  200. junifer/preprocess/smoothing/_nilearn_smoothing.py +35 -14
  201. junifer/preprocess/smoothing/py.typed +0 -0
  202. junifer/preprocess/smoothing/smoothing.py +11 -13
  203. junifer/preprocess/warping/__init__.py +2 -2
  204. junifer/preprocess/warping/__init__.pyi +3 -0
  205. junifer/preprocess/warping/_ants_warper.py +136 -32
  206. junifer/preprocess/warping/_fsl_warper.py +73 -22
  207. junifer/preprocess/warping/py.typed +0 -0
  208. junifer/preprocess/warping/space_warper.py +39 -11
  209. junifer/preprocess/warping/tests/test_space_warper.py +5 -9
  210. junifer/py.typed +0 -0
  211. junifer/stats.py +5 -5
  212. junifer/storage/__init__.py +2 -10
  213. junifer/storage/__init__.pyi +11 -0
  214. junifer/storage/base.py +47 -13
  215. junifer/storage/hdf5.py +95 -33
  216. junifer/storage/pandas_base.py +12 -11
  217. junifer/storage/py.typed +0 -0
  218. junifer/storage/sqlite.py +11 -11
  219. junifer/storage/tests/test_hdf5.py +86 -4
  220. junifer/storage/tests/test_sqlite.py +2 -2
  221. junifer/storage/tests/test_storage_base.py +5 -2
  222. junifer/storage/tests/test_utils.py +33 -7
  223. junifer/storage/utils.py +95 -9
  224. junifer/testing/__init__.py +2 -3
  225. junifer/testing/__init__.pyi +4 -0
  226. junifer/testing/datagrabbers.py +10 -11
  227. junifer/testing/py.typed +0 -0
  228. junifer/testing/registry.py +4 -7
  229. junifer/testing/tests/test_testing_registry.py +9 -17
  230. junifer/tests/test_stats.py +2 -2
  231. junifer/typing/__init__.py +9 -0
  232. junifer/typing/__init__.pyi +31 -0
  233. junifer/typing/_typing.py +68 -0
  234. junifer/utils/__init__.py +2 -12
  235. junifer/utils/__init__.pyi +18 -0
  236. junifer/utils/_config.py +110 -0
  237. junifer/utils/_yaml.py +16 -0
  238. junifer/utils/helpers.py +6 -6
  239. junifer/utils/logging.py +117 -8
  240. junifer/utils/py.typed +0 -0
  241. junifer/{pipeline → utils}/singleton.py +19 -14
  242. junifer/utils/tests/test_config.py +59 -0
  243. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/METADATA +43 -38
  244. junifer-0.0.6.dist-info/RECORD +350 -0
  245. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/WHEEL +1 -1
  246. junifer-0.0.6.dist-info/entry_points.txt +2 -0
  247. junifer/api/parser.py +0 -118
  248. junifer/data/coordinates.py +0 -408
  249. junifer/data/masks.py +0 -670
  250. junifer/data/parcellations.py +0 -1828
  251. junifer/pipeline/registry.py +0 -177
  252. junifer/pipeline/tests/test_registry.py +0 -150
  253. junifer-0.0.5.dev242.dist-info/RECORD +0 -275
  254. junifer-0.0.5.dev242.dist-info/entry_points.txt +0 -2
  255. /junifer/{api → cli}/tests/data/gmd_mean.yaml +0 -0
  256. /junifer/{api → cli}/tests/data/gmd_mean_htcondor.yaml +0 -0
  257. /junifer/{api → cli}/tests/data/partly_cloudy_agg_mean_tian.yml +0 -0
  258. /junifer/data/{VOIs → coordinates/VOIs}/meta/AutobiographicalMemory_VOIs.txt +0 -0
  259. /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAC_VOIs.txt +0 -0
  260. /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAR_VOIs.txt +0 -0
  261. /junifer/data/{VOIs → coordinates/VOIs}/meta/DMNBuckner_VOIs.txt +0 -0
  262. /junifer/data/{VOIs → coordinates/VOIs}/meta/Dosenbach2010_MNI_VOIs.txt +0 -0
  263. /junifer/data/{VOIs → coordinates/VOIs}/meta/Empathy_VOIs.txt +0 -0
  264. /junifer/data/{VOIs → coordinates/VOIs}/meta/Motor_VOIs.txt +0 -0
  265. /junifer/data/{VOIs → coordinates/VOIs}/meta/MultiTask_VOIs.txt +0 -0
  266. /junifer/data/{VOIs → coordinates/VOIs}/meta/PhysioStress_VOIs.txt +0 -0
  267. /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2011_MNI_VOIs.txt +0 -0
  268. /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2013_MNI_VOIs.tsv +0 -0
  269. /junifer/data/{VOIs → coordinates/VOIs}/meta/Rew_VOIs.txt +0 -0
  270. /junifer/data/{VOIs → coordinates/VOIs}/meta/Somatosensory_VOIs.txt +0 -0
  271. /junifer/data/{VOIs → coordinates/VOIs}/meta/ToM_VOIs.txt +0 -0
  272. /junifer/data/{VOIs → coordinates/VOIs}/meta/VigAtt_VOIs.txt +0 -0
  273. /junifer/data/{VOIs → coordinates/VOIs}/meta/WM_VOIs.txt +0 -0
  274. /junifer/data/{VOIs → coordinates/VOIs}/meta/eMDN_VOIs.txt +0 -0
  275. /junifer/data/{VOIs → coordinates/VOIs}/meta/eSAD_VOIs.txt +0 -0
  276. /junifer/data/{VOIs → coordinates/VOIs}/meta/extDMN_VOIs.txt +0 -0
  277. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info/licenses}/AUTHORS.rst +0 -0
  278. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info/licenses}/LICENSE.md +0 -0
  279. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1388 @@
1
+ """Provide class and function for parcellation registry and manipulation."""
2
+
3
+ # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
4
+ # Vera Komeyer <v.komeyer@fz-juelich.de>
5
+ # Synchon Mandal <s.mandal@fz-juelich.de>
6
+ # License: AGPL
7
+
8
+ from itertools import product
9
+ from pathlib import Path
10
+ from typing import TYPE_CHECKING, Any, Optional, Union
11
+
12
+ import nibabel as nib
13
+ import nilearn.image as nimg
14
+ import numpy as np
15
+ import pandas as pd
16
+ from junifer_data import get
17
+
18
+ from ...utils import logger, raise_error, warn_with_log
19
+ from ...utils.singleton import Singleton
20
+ from ..pipeline_data_registry_base import BasePipelineDataRegistry
21
+ from ..utils import (
22
+ JUNIFER_DATA_PARAMS,
23
+ closest_resolution,
24
+ get_dataset_path,
25
+ get_native_warper,
26
+ )
27
+ from ._ants_parcellation_warper import ANTsParcellationWarper
28
+ from ._fsl_parcellation_warper import FSLParcellationWarper
29
+
30
+
31
+ if TYPE_CHECKING:
32
+ from nibabel.nifti1 import Nifti1Image
33
+
34
+
35
+ __all__ = [
36
+ "ParcellationRegistry",
37
+ "merge_parcellations",
38
+ ]
39
+
40
+
41
+ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
42
+ """Class for parcellation data registry.
43
+
44
+ This class is a singleton and is used for managing available parcellation
45
+ data in a centralized manner.
46
+
47
+ """
48
+
49
+ def __init__(self) -> None:
50
+ """Initialize the class."""
51
+ super().__init__()
52
+ # Each entry in registry is a dictionary that must contain at least
53
+ # the following keys:
54
+ # * 'family': the parcellation's family name (e.g., 'Schaefer', 'SUIT')
55
+ # * 'space': the parcellation's space (e.g., 'MNI', 'SUIT')
56
+ # and can also have optional key(s):
57
+ # * 'valid_resolutions': a list of valid resolutions for the
58
+ # parcellation (e.g., [1, 2])
59
+ # The built-in coordinates are files that are shipped with the
60
+ # junifer-data dataset.
61
+ # Make built-in and external dictionaries for validation later
62
+ self._builtin = {}
63
+ self._external = {}
64
+
65
+ # Add SUIT
66
+ self._builtin.update(
67
+ {
68
+ "SUITxSUIT": {
69
+ "family": "SUIT",
70
+ "space": "SUIT",
71
+ },
72
+ "SUITxMNI": {
73
+ "family": "SUIT",
74
+ "space": "MNI152NLin6Asym",
75
+ },
76
+ }
77
+ )
78
+ # Add Schaefer
79
+ for n_rois, t_net in product(range(100, 1001, 100), [7, 17]):
80
+ self._builtin.update(
81
+ {
82
+ f"Schaefer{n_rois}x{t_net}": {
83
+ "family": "Schaefer2018",
84
+ "n_rois": n_rois,
85
+ "yeo_networks": t_net,
86
+ "space": "MNI152NLin6Asym",
87
+ },
88
+ }
89
+ )
90
+ # Add Tian
91
+ for scale in range(1, 5):
92
+ self._builtin.update(
93
+ {
94
+ f"TianxS{scale}x7TxMNI6thgeneration": {
95
+ "family": "Melbourne",
96
+ "scale": scale,
97
+ "magneticfield": "7T",
98
+ "space": "MNI152NLin6Asym",
99
+ },
100
+ f"TianxS{scale}x3TxMNI6thgeneration": {
101
+ "family": "Melbourne",
102
+ "scale": scale,
103
+ "magneticfield": "3T",
104
+ "space": "MNI152NLin6Asym",
105
+ },
106
+ f"TianxS{scale}x3TxMNInonlinear2009cAsym": {
107
+ "family": "Melbourne",
108
+ "scale": scale,
109
+ "magneticfield": "3T",
110
+ "space": "MNI152NLin2009cAsym",
111
+ },
112
+ }
113
+ )
114
+ # Add AICHA
115
+ for version in (1, 2):
116
+ self._builtin.update(
117
+ {
118
+ f"AICHA_v{version}": {
119
+ "family": "AICHA",
120
+ "version": version,
121
+ "space": "IXI549Space",
122
+ },
123
+ }
124
+ )
125
+ # Add Shen
126
+ for year in (2013, 2015, 2019):
127
+ if year == 2013:
128
+ for n_rois in (50, 100, 150):
129
+ self._builtin.update(
130
+ {
131
+ f"Shen_{year}_{n_rois}": {
132
+ "family": "Shen",
133
+ "year": 2013,
134
+ "n_rois": n_rois,
135
+ "space": "MNI152NLin2009cAsym",
136
+ },
137
+ }
138
+ )
139
+ elif year == 2015:
140
+ self._builtin.update(
141
+ {
142
+ "Shen_2015_268": {
143
+ "family": "Shen",
144
+ "year": 2015,
145
+ "n_rois": 268,
146
+ "space": "MNI152NLin2009cAsym",
147
+ },
148
+ }
149
+ )
150
+ elif year == 2019:
151
+ self._builtin.update(
152
+ {
153
+ "Shen_2019_368": {
154
+ "family": "Shen",
155
+ "year": 2019,
156
+ "n_rois": 368,
157
+ "space": "MNI152NLin2009cAsym",
158
+ },
159
+ }
160
+ )
161
+ # Add Yan
162
+ for n_rois, yeo_network in product(range(100, 1001, 100), [7, 17]):
163
+ self._builtin.update(
164
+ {
165
+ f"Yan{n_rois}xYeo{yeo_network}": {
166
+ "family": "Yan2023",
167
+ "n_rois": n_rois,
168
+ "yeo_networks": yeo_network,
169
+ "space": "MNI152NLin6Asym",
170
+ },
171
+ }
172
+ )
173
+ self._builtin.update(
174
+ {
175
+ f"Yan{n_rois}xKong17": {
176
+ "family": "Yan2023",
177
+ "n_rois": n_rois,
178
+ "kong_networks": 17,
179
+ "space": "MNI152NLin6Asym",
180
+ },
181
+ }
182
+ )
183
+ # Add Brainnetome
184
+ for threshold in [0, 25, 50]:
185
+ self._builtin.update(
186
+ {
187
+ f"Brainnetome_thr{threshold}": {
188
+ "family": "Brainnetome",
189
+ "threshold": threshold,
190
+ "space": "MNI152NLin6Asym",
191
+ },
192
+ }
193
+ )
194
+
195
+ # Update registry with built-in ones
196
+ self._registry.update(self._builtin)
197
+
198
+ def register(
199
+ self,
200
+ name: str,
201
+ parcellation_path: Union[str, Path],
202
+ parcels_labels: list[str],
203
+ space: str,
204
+ overwrite: bool = False,
205
+ ) -> None:
206
+ """Register a custom user parcellation.
207
+
208
+ Parameters
209
+ ----------
210
+ name : str
211
+ The name of the parcellation.
212
+ parcellation_path : str or pathlib.Path
213
+ The path to the parcellation file.
214
+ parcels_labels : list of str
215
+ The list of labels for the parcellation.
216
+ space : str
217
+ The template space of the parcellation, e.g., "MNI152NLin6Asym".
218
+ overwrite : bool, optional
219
+ If True, overwrite an existing parcellation with the same name.
220
+ Does not apply to built-in parcellations (default False).
221
+
222
+ Raises
223
+ ------
224
+ ValueError
225
+ If the parcellation ``name`` is a built-in parcellation or
226
+ if the parcellation ``name`` is already registered and
227
+ ``overwrite=False``.
228
+
229
+ """
230
+ # Check for attempt of overwriting built-in parcellations
231
+ if name in self._builtin:
232
+ raise_error(
233
+ f"Parcellation: {name} already registered as "
234
+ "built-in parcellation."
235
+ )
236
+ # Check for attempt of overwriting external parcellations
237
+ if name in self._external:
238
+ if overwrite:
239
+ logger.info(f"Overwriting parcellation: {name}")
240
+ else:
241
+ raise_error(
242
+ f"Parcellation: {name} already registered. Set "
243
+ "`overwrite=True` to update its value."
244
+ )
245
+ # Convert str to Path
246
+ if not isinstance(parcellation_path, Path):
247
+ parcellation_path = Path(parcellation_path)
248
+ # Registration
249
+ logger.info(f"Registering parcellation: {name}")
250
+ # Add user parcellation info
251
+ self._external[name] = {
252
+ "path": parcellation_path,
253
+ "labels": parcels_labels,
254
+ "family": "CustomUserParcellation",
255
+ "space": space,
256
+ }
257
+ # Update registry
258
+ self._registry[name] = {
259
+ "path": parcellation_path,
260
+ "labels": parcels_labels,
261
+ "family": "CustomUserParcellation",
262
+ "space": space,
263
+ }
264
+
265
+ def deregister(self, name: str) -> None:
266
+ """De-register a custom user parcellation.
267
+
268
+ Parameters
269
+ ----------
270
+ name : str
271
+ The name of the parcellation.
272
+
273
+ """
274
+ logger.info(f"De-registering parcellation: {name}")
275
+ # Remove parcellation info
276
+ _ = self._external.pop(name)
277
+ # Update registry
278
+ _ = self._registry.pop(name)
279
+
280
+ def load(
281
+ self,
282
+ name: str,
283
+ target_space: str,
284
+ resolution: Optional[float] = None,
285
+ path_only: bool = False,
286
+ ) -> tuple[Optional["Nifti1Image"], list[str], Path, str]:
287
+ """Load parcellation and labels.
288
+
289
+ Parameters
290
+ ----------
291
+ name : str
292
+ The name of the parcellation.
293
+ target_space : str
294
+ The desired space of the parcellation.
295
+ resolution : float, optional
296
+ The desired resolution of the parcellation to load. If it is not
297
+ available, the closest resolution will be loaded. Preferably, use a
298
+ resolution higher than the desired one. By default, will load the
299
+ highest one (default None).
300
+ path_only : bool, optional
301
+ If True, the parcellation image will not be loaded (default False).
302
+
303
+ Returns
304
+ -------
305
+ Nifti1Image or None
306
+ Loaded parcellation image.
307
+ list of str
308
+ Parcellation labels.
309
+ pathlib.Path
310
+ File path to the parcellation image.
311
+ str
312
+ The space of the parcellation.
313
+
314
+ Raises
315
+ ------
316
+ ValueError
317
+ If ``name`` is invalid or
318
+ if the parcellation family is invalid or
319
+ if the parcellation values and labels
320
+ don't have equal dimension or if the value range is invalid.
321
+
322
+ """
323
+ # Check for valid parcellation name
324
+ if name not in self._registry:
325
+ raise_error(
326
+ f"Parcellation: {name} not found. "
327
+ f"Valid options are: {self.list}"
328
+ )
329
+
330
+ # Copy parcellation definition to avoid edits in original object
331
+ parcellation_definition = self._registry[name].copy()
332
+ t_family = parcellation_definition.pop("family")
333
+ # Remove space conditionally
334
+ if t_family not in ["SUIT", "Melbourne"]:
335
+ space = parcellation_definition.pop("space")
336
+ else:
337
+ space = parcellation_definition["space"]
338
+
339
+ # Check and get highest resolution
340
+ if space != target_space:
341
+ logger.info(
342
+ f"Parcellation will be warped from {space} to {target_space} "
343
+ "using highest resolution"
344
+ )
345
+ resolution = None
346
+
347
+ # Check if the parcellation family is custom or built-in
348
+ if t_family == "CustomUserParcellation":
349
+ parcellation_fname = parcellation_definition["path"]
350
+ parcellation_labels = parcellation_definition["labels"]
351
+ elif t_family in [
352
+ "Schaefer2018",
353
+ "SUIT",
354
+ "Melbourne",
355
+ "AICHA",
356
+ "Shen",
357
+ "Yan2023",
358
+ "Brainnetome",
359
+ ]:
360
+ # Load parcellation and labels
361
+ if t_family == "Schaefer2018":
362
+ parcellation_fname, parcellation_labels = _retrieve_schaefer(
363
+ resolution=resolution,
364
+ **parcellation_definition,
365
+ )
366
+ elif t_family == "SUIT":
367
+ parcellation_fname, parcellation_labels = _retrieve_suit(
368
+ resolution=resolution,
369
+ **parcellation_definition,
370
+ )
371
+ elif t_family == "Melbourne":
372
+ parcellation_fname, parcellation_labels = _retrieve_tian(
373
+ resolution=resolution,
374
+ **parcellation_definition,
375
+ )
376
+ elif t_family == "AICHA":
377
+ parcellation_fname, parcellation_labels = _retrieve_aicha(
378
+ resolution=resolution,
379
+ **parcellation_definition,
380
+ )
381
+ elif t_family == "Shen":
382
+ parcellation_fname, parcellation_labels = _retrieve_shen(
383
+ resolution=resolution,
384
+ **parcellation_definition,
385
+ )
386
+ elif t_family == "Yan2023":
387
+ parcellation_fname, parcellation_labels = _retrieve_yan(
388
+ resolution=resolution,
389
+ **parcellation_definition,
390
+ )
391
+ elif t_family == "Brainnetome":
392
+ parcellation_fname, parcellation_labels = (
393
+ _retrieve_brainnetome(
394
+ resolution=resolution,
395
+ **parcellation_definition,
396
+ )
397
+ )
398
+ else:
399
+ raise_error(f"Unknown parcellation family: {t_family}")
400
+
401
+ # Load parcellation image and values
402
+ logger.info(f"Loading parcellation: {parcellation_fname.absolute()!s}")
403
+ parcellation_img = None
404
+ if not path_only:
405
+ # Load image via nibabel
406
+ parcellation_img = nib.load(parcellation_fname)
407
+ # Get unique values
408
+ parcel_values = np.unique(parcellation_img.get_fdata())
409
+ # Check for dimension
410
+ if len(parcel_values) - 1 != len(parcellation_labels):
411
+ raise_error(
412
+ f"Parcellation {name} has {len(parcel_values) - 1} "
413
+ f"parcels but {len(parcellation_labels)} labels."
414
+ )
415
+ # Sort values
416
+ parcel_values.sort()
417
+ # Check if value range is invalid
418
+ if np.any(np.diff(parcel_values) != 1):
419
+ raise_error(
420
+ f"Parcellation {name} must have all the values in the "
421
+ f"range [0, {len(parcel_values)}]"
422
+ )
423
+
424
+ return parcellation_img, parcellation_labels, parcellation_fname, space
425
+
426
+ def get(
427
+ self,
428
+ parcellations: Union[str, list[str]],
429
+ target_data: dict[str, Any],
430
+ extra_input: Optional[dict[str, Any]] = None,
431
+ ) -> tuple["Nifti1Image", list[str]]:
432
+ """Get parcellation, tailored for the target image.
433
+
434
+ Parameters
435
+ ----------
436
+ parcellations : str or list of str
437
+ The name(s) of the parcellation(s).
438
+ target_data : dict
439
+ The corresponding item of the data object to which the parcellation
440
+ will be applied.
441
+ extra_input : dict, optional
442
+ The other fields in the data object. Useful for accessing other
443
+ data kinds that needs to be used in the computation of
444
+ parcellations (default None).
445
+
446
+ Returns
447
+ -------
448
+ Nifti1Image
449
+ The parcellation image.
450
+ list of str
451
+ Parcellation labels.
452
+
453
+ Raises
454
+ ------
455
+ ValueError
456
+ If ``extra_input`` is None when ``target_data``'s space is native.
457
+
458
+ """
459
+ # Check pre-requirements for space manipulation
460
+ target_space = target_data["space"]
461
+ logger.debug(f"Getting {parcellations} in {target_space} space.")
462
+ # Extra data type requirement check if target space is native
463
+ if target_space == "native":
464
+ # Check for extra inputs
465
+ if extra_input is None:
466
+ raise_error(
467
+ "No extra input provided, requires `Warp` and `T1w` "
468
+ "data types in particular for transformation to "
469
+ f"{target_data['space']} space for further computation."
470
+ )
471
+ # Get native space warper spec
472
+ warper_spec = get_native_warper(
473
+ target_data=target_data,
474
+ other_data=extra_input,
475
+ )
476
+ # Set target standard space to warp file space source
477
+ target_std_space = warper_spec["src"]
478
+ logger.debug(
479
+ f"Target space is native. Will warp from {target_std_space}"
480
+ )
481
+ else:
482
+ # Set target standard space to target space
483
+ target_std_space = target_space
484
+
485
+ # Get the min of the voxels sizes and use it as the resolution
486
+ target_img = target_data["data"]
487
+ resolution = np.min(target_img.header.get_zooms()[:3])
488
+
489
+ # Convert parcellations to list if not already
490
+ if not isinstance(parcellations, list):
491
+ parcellations = [parcellations]
492
+
493
+ # Load the parcellations and labels
494
+ all_parcellations = []
495
+ all_labels = []
496
+ for name in parcellations:
497
+ # Load parcellation
498
+ logger.debug(f"Loading parcellation {name}")
499
+ img, labels, _, space = self.load(
500
+ name=name,
501
+ resolution=resolution,
502
+ target_space=target_space,
503
+ )
504
+
505
+ # Convert parcellation spaces if required;
506
+ # cannot be "native" due to earlier check
507
+ if space != target_std_space:
508
+ logger.debug(
509
+ f"Warping {name} to {target_std_space} space using ANTs."
510
+ )
511
+ raw_img = ANTsParcellationWarper().warp(
512
+ parcellation_name=name,
513
+ parcellation_img=img,
514
+ src=space,
515
+ dst=target_std_space,
516
+ target_data=target_data,
517
+ warp_data=None,
518
+ )
519
+ # Remove extra dimension added by ANTs
520
+ img = nimg.math_img("np.squeeze(img)", img=raw_img)
521
+
522
+ if target_space != "native":
523
+ # No warping is going to happen, just resampling, because
524
+ # we are in the correct space
525
+ logger.debug(f"Resampling {name} to target image.")
526
+ # Resample parcellation to target image
527
+ img = nimg.resample_to_img(
528
+ source_img=img,
529
+ target_img=target_img,
530
+ interpolation="nearest",
531
+ copy=True,
532
+ )
533
+ else:
534
+ # Warp parcellation if target space is native as either
535
+ # the image is in the right non-native space or it's
536
+ # warped from one non-native space to another non-native space
537
+ logger.debug(
538
+ "Warping parcellation to native space using "
539
+ f"{warper_spec['warper']}."
540
+ )
541
+ # extra_input check done earlier and warper_spec exists
542
+ if warper_spec["warper"] == "fsl":
543
+ img = FSLParcellationWarper().warp(
544
+ parcellation_name="native",
545
+ parcellation_img=img,
546
+ target_data=target_data,
547
+ warp_data=warper_spec,
548
+ )
549
+ elif warper_spec["warper"] == "ants":
550
+ img = ANTsParcellationWarper().warp(
551
+ parcellation_name="native",
552
+ parcellation_img=img,
553
+ src="",
554
+ dst="native",
555
+ target_data=target_data,
556
+ warp_data=warper_spec,
557
+ )
558
+
559
+ all_parcellations.append(img)
560
+ all_labels.append(labels)
561
+
562
+ # Avoid merging if there is only one parcellation
563
+ if len(all_parcellations) == 1:
564
+ resampled_parcellation_img = all_parcellations[0]
565
+ labels = all_labels[0]
566
+ # Parcellations are already transformed to target standard space
567
+ else:
568
+ logger.debug("Merging parcellations.")
569
+ resampled_parcellation_img, labels = merge_parcellations(
570
+ parcellations_list=all_parcellations,
571
+ parcellations_names=parcellations,
572
+ labels_lists=all_labels,
573
+ )
574
+
575
+ return resampled_parcellation_img, labels
576
+
577
+
578
def _retrieve_schaefer(
    resolution: Optional[float] = None,
    n_rois: Optional[int] = None,
    yeo_networks: int = 7,
) -> tuple[Path, list[str]]:
    """Retrieve Schaefer parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm and 2mm.
    n_rois : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}, optional
        Granularity of the parcellation to be used (default None).
    yeo_networks : {7, 17}, optional
        Number of Yeo networks to use (default 7).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``n_rois`` or ``yeo_networks``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tn_rois: {n_rois}")
    logger.info(f"\tyeo_networks: {yeo_networks}")

    # Validate granularity
    _valid_n_rois = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    # Validate network count
    _valid_networks = [7, 17]
    if yeo_networks not in _valid_networks:
        raise_error(
            f"The parameter `yeo_networks` ({yeo_networks}) needs to be one "
            f"of the following: {_valid_networks}"
        )

    # Snap requested resolution to the closest shipped one
    resolution = closest_resolution(resolution, [1, 2])

    # Fetch image and label files from the dataset
    path_prefix = Path("parcellations/Schaefer2018/Yeo2011")
    img_fname = (
        f"Schaefer2018_{n_rois}Parcels_{yeo_networks}"
        f"Networks_order_FSLMNI152_{resolution}mm.nii.gz"
    )
    parcellation_img_path = get(
        file_path=path_prefix / img_fname,
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )
    parcellation_label_path = get(
        file_path=path_prefix
        / f"Schaefer2018_{n_rois}Parcels_{yeo_networks}Networks_order.txt",
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )

    # Read the second column of the order file and drop the first
    # underscore-separated token from each entry
    raw_names = (
        pd.read_csv(parcellation_label_path, sep="\t", header=None)
        .iloc[:, 1]
        .to_list()
    )
    labels = ["_".join(name.split("_")[1:]) for name in raw_names]

    return parcellation_img_path, labels
660
+
661
+
662
def _retrieve_tian(
    resolution: Optional[float] = None,
    scale: Optional[int] = None,
    space: str = "MNI152NLin6Asym",
    magneticfield: str = "3T",
) -> tuple[Path, list[str]]:
    """Retrieve Tian parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation depend on the space and magnetic field.
    scale : {1, 2, 3, 4}, optional
        Scale of parcellation (defines granularity) (default None).
    space : {"MNI152NLin6Asym", "MNI152NLin2009cAsym"}, optional
        Space of parcellation (default "MNI152NLin6Asym"). (For more
        information see https://github.com/yetianmed/subcortex)
    magneticfield : {"3T", "7T"}, optional
        Magnetic field (default "3T").

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``scale`` or ``magneticfield`` or
        ``space``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tscale: {scale}")
    logger.info(f"\tspace: {space}")
    logger.info(f"\tmagneticfield: {magneticfield}")

    # Check scale
    _valid_scales = [1, 2, 3, 4]
    if scale not in _valid_scales:
        raise_error(
            f"The parameter `scale` ({scale}) needs to be one of the "
            f"following: {_valid_scales}"
        )

    # Check space and set valid resolutions per magnetic field
    if magneticfield == "3T":
        _valid_spaces = ["MNI152NLin6Asym", "MNI152NLin2009cAsym"]
        if space in _valid_spaces:
            _valid_resolutions = [1, 2]
        else:
            raise_error(
                f"The parameter `space` ({space}) for 3T needs to be one of "
                f"the following: {_valid_spaces}"
            )
    elif magneticfield == "7T":
        _valid_resolutions = [1.6]
        if space != "MNI152NLin6Asym":
            raise_error(
                f"The parameter `space` ({space}) for 7T needs to be "
                "MNI152NLin6Asym"
            )
    else:
        raise_error(
            f"The parameter `magneticfield` ({magneticfield}) needs to be "
            "one of the following: 3T or 7T"
        )
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Fetch file paths
    if magneticfield == "3T":
        parcellation_fname_base_3T = Path(
            "parcellations/Melbourne/v1.4/3T/Subcortex-Only"
        )
        # File name carries optional suffixes: MNI152NLin6Asym has no space
        # suffix, and only the 1mm variant carries a "_1mm" suffix
        space_suffix = "" if space == "MNI152NLin6Asym" else "_2009cAsym"
        resolution_suffix = "_1mm" if resolution == 1 else ""
        parcellation_fname = parcellation_fname_base_3T / (
            f"Tian_Subcortex_S{scale}_{magneticfield}"
            f"{space_suffix}{resolution_suffix}.nii.gz"
        )

        parcellation_img_path = get(
            file_path=parcellation_fname,
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        parcellation_label_path = get(
            file_path=parcellation_fname_base_3T
            / f"Tian_Subcortex_S{scale}_3T_label.txt",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        # Load labels from the first column of the label file
        labels = pd.read_csv(parcellation_label_path, sep=" ", header=None)[
            0
        ].to_list()
    elif magneticfield == "7T":
        parcellation_img_path = get(
            file_path=Path(
                "parcellations/Melbourne/v1.4/7T/"
                f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz"
            ),
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        # Define 7T labels (b/c currently no labels file available for 7T)
        scale7Trois = {1: 16, 2: 34, 3: 54, 4: 62}
        labels = [f"parcel_{x}" for x in range(1, scale7Trois[scale] + 1)]
        logger.info(
            "Currently there are no labels provided for the 7T Tian "
            "parcellation. A simple numbering scheme for distinction was "
            "therefore used."
        )

    return parcellation_img_path, labels
800
+
801
+
802
def _retrieve_suit(
    resolution: Optional[float],
    space: str = "MNI152NLin6Asym",
) -> tuple[Path, list[str]]:
    """Retrieve SUIT parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). The only available resolution for this
        parcellation is 1mm.
    space : {"MNI152NLin6Asym", "SUIT"}, optional
        Space of parcellation (default "MNI152NLin6Asym"). (For more
        information see http://www.diedrichsenlab.org/imaging/suit.htm).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``space``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tspace: {space}")

    # Check space
    _valid_spaces = ["MNI152NLin6Asym", "SUIT"]
    if space not in _valid_spaces:
        raise_error(
            f"The parameter `space` ({space}) needs to be one of the "
            f"following: {_valid_spaces}"
        )

    # Check resolution; only 1mm files are available, so any requested
    # resolution snaps to 1mm
    _valid_resolutions = [1]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Format space if MNI; required for the file name
    if space == "MNI152NLin6Asym":
        space = "MNI"

    # Fetch file paths
    path_prefix = Path("parcellations/SUIT")
    parcellation_img_path = get(
        file_path=path_prefix / f"SUIT_{space}Space_{resolution}mm.nii",
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )
    parcellation_label_path = get(
        file_path=path_prefix / f"SUIT_{space}Space_{resolution}mm.tsv",
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )

    # Load labels from the "name" column of the TSV
    labels = pd.read_csv(parcellation_label_path, sep="\t", usecols=["name"])[
        "name"
    ].to_list()

    return parcellation_img_path, labels
872
+
873
+
874
def _retrieve_aicha(
    resolution: Optional[float] = None,
    version: int = 2,
) -> tuple[Path, list[str]]:
    """Retrieve AICHA parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolution for this
        parcellation is 2mm.
    version : {1, 2}, optional
        The version of the parcellation to use (default 2).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``version``.

    Warns
    -----
    RuntimeWarning
        Until the authors confirm the space, the warning will be issued.

    Notes
    -----
    The resolution of the parcellation is 2mm and although v2 provides
    1mm, it is only for display purpose as noted in the release document.

    """
    # Warn on every retrieval until the parcellation space is confirmed
    # by the authors
    warn_with_log(
        "The current space for AICHA parcellations are IXI549Space, but are "
        "not confirmed by authors, until that this warning will be issued."
    )

    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tversion: {version}")

    # Validate version
    _valid_version = [1, 2]
    if version not in _valid_version:
        raise_error(
            f"The parameter `version` ({version}) needs to be one of the "
            f"following: {_valid_version}"
        )

    # NOTE(review): docstring states 2mm but only 1 is listed here -- confirm
    _valid_resolutions = [1]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Fetch image path
    path_prefix = Path(f"parcellations/AICHA/v{version}")
    parcellation_img_path = get(
        file_path=path_prefix / "AICHA.nii",
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )
    # Label file differs between versions
    if version == 1:
        parcellation_label_path = get(
            file_path=path_prefix / "AICHA_vol1.txt",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
    elif version == 2:
        parcellation_label_path = get(
            file_path=path_prefix / "AICHA_vol3.txt",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )

    # Load labels from the first column, skipping the header row
    label_frame = pd.read_csv(
        parcellation_label_path,
        sep="\t",
        header=None,
        skiprows=[0],
    )
    labels = label_frame[0].to_list()

    return parcellation_img_path, labels
966
+
967
+
968
def _retrieve_shen(
    resolution: Optional[float] = None,
    year: int = 2015,
    n_rois: int = 268,
) -> tuple[Path, list[str]]:
    """Retrieve Shen parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this parcellation
        are 1mm and 2mm for ``year = 2013`` and ``year = 2015`` but fixed to
        1mm for ``year = 2019``.
    year : {2013, 2015, 2019}, optional
        The year of the parcellation to use (default 2015).
    n_rois : int, optional
        Number of ROIs. Can be ``50, 100, or 150`` for ``year = 2013`` but is
        fixed at ``268`` for ``year = 2015`` and at ``368`` for
        ``year = 2019``.

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value or combination is provided for ``year`` and
        ``n_rois``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tyear: {year}")
    logger.info(f"\tn_rois: {n_rois}")

    # Check resolution
    _valid_resolutions = [1, 2]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Check year value
    _valid_year = (2013, 2015, 2019)
    if year not in _valid_year:
        raise_error(
            f"The parameter `year` ({year}) needs to be one of the "
            f"following: {_valid_year}"
        )

    # Check n_rois value
    _valid_n_rois = [50, 100, 150, 268, 368]
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    # Check combinations; each message names the parameter that actually
    # conflicts with the year
    if resolution == 2 and year == 2019:
        raise_error(
            "The parameter combination `resolution = 2` and `year = 2019` is "
            "invalid"
        )
    if n_rois in (268, 368) and year == 2013:
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            "`year = 2013` is invalid"
        )
    if n_rois in (50, 100, 150) and year in (2015, 2019):
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            f"`year = {year}` is invalid"
        )
    if (n_rois == 268 and year == 2019) or (n_rois == 368 and year == 2015):
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            f"`year = {year}` is invalid"
        )

    # Fetch file paths based on year
    path_prefix = Path(f"parcellations/Shen/{year}")
    if year == 2013:
        parcellation_img_path = get(
            file_path=path_prefix / f"fconn_atlas_{n_rois}_{resolution}mm.nii",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        parcellation_label_path = get(
            file_path=path_prefix / f"Group_seg{n_rois}_BAindexing_setA.txt",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        labels = (
            pd.read_csv(
                parcellation_label_path,
                sep=",",
                header=None,
                skiprows=[0],
            )[1]
            .map(lambda x: x.strip())  # fix formatting
            .to_list()
        )
    elif year == 2015:
        parcellation_img_path = get(
            file_path=path_prefix
            / f"shen_{resolution}mm_268_parcellation.nii.gz",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        # NOTE(review): labels are plain ints here, not str as annotated --
        # confirm downstream tolerates this
        labels = list(range(1, 269))
    elif year == 2019:
        parcellation_img_path = get(
            file_path=path_prefix / "Shen_1mm_368_parcellation.nii.gz",
            dataset_path=get_dataset_path(),
            **JUNIFER_DATA_PARAMS,
        )
        # NOTE(review): labels are plain ints here, not str as annotated --
        # confirm downstream tolerates this
        labels = list(range(1, 369))

    return parcellation_img_path, labels
1092
+
1093
+
1094
def _retrieve_yan(
    resolution: Optional[float] = None,
    n_rois: Optional[int] = None,
    yeo_networks: Optional[int] = None,
    kong_networks: Optional[int] = None,
) -> tuple[Path, list[str]]:
    """Retrieve Yan parcellation.

    Parameters
    ----------
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm and 2mm.
    n_rois : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}, optional
        Granularity of the parcellation to be used (default None).
    yeo_networks : {7, 17}, optional
        Number of Yeo networks to use (default None).
    kong_networks : {17}, optional
        Number of Kong networks to use (default None).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``n_rois``, ``yeo_networks`` or
        ``kong_networks``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tn_rois: {n_rois}")
    logger.info(f"\tyeo_networks: {yeo_networks}")
    logger.info(f"\tkong_networks: {kong_networks}")

    # Exactly one of the two network schemes must be requested; reject
    # both-set and neither-set
    if bool(yeo_networks) == bool(kong_networks):
        raise_error(
            "Either one of `yeo_networks` or `kong_networks` need to be "
            "specified."
        )

    # Snap requested resolution to the closest shipped one
    _valid_resolutions = [1, 2]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Validate granularity
    _valid_n_rois = list(range(100, 1001, 100))
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    # Pick file names per network scheme
    pre_path_prefix = Path("parcellations/Yan2023")
    if yeo_networks:
        # Validate Yeo network count
        _valid_yeo_networks = [7, 17]
        if yeo_networks not in _valid_yeo_networks:
            raise_error(
                f"The parameter `yeo_networks` ({yeo_networks}) needs to be "
                f"one of the following: {_valid_yeo_networks}"
            )
        path_prefix = pre_path_prefix / "Yeo2011"
        img_fname = (
            f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_FSLMNI152_"
            f"{resolution}mm.nii.gz"
        )
        lut_fname = f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_LUT.txt"
    elif kong_networks:
        # Validate Kong network count
        _valid_kong_networks = [17]
        if kong_networks not in _valid_kong_networks:
            raise_error(
                f"The parameter `kong_networks` ({kong_networks}) needs to be "
                f"one of the following: {_valid_kong_networks}"
            )
        path_prefix = pre_path_prefix / "Kong2022"
        img_fname = (
            f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_FSLMNI152_"
            f"{resolution}mm.nii.gz"
        )
        lut_fname = f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_LUT.txt"

    # Fetch file paths
    parcellation_img_path = get(
        file_path=path_prefix / img_fname,
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )
    parcellation_label_path = get(
        file_path=path_prefix / lut_fname,
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )

    # Load labels from the second column of the LUT
    labels = pd.read_csv(parcellation_label_path, sep=" ", header=None)[
        1
    ].to_list()

    return parcellation_img_path, labels
1217
+
1218
+
1219
def _retrieve_brainnetome(
    resolution: Optional[float] = None,
    threshold: Optional[int] = None,
) -> tuple[Path, list[str]]:
    """Retrieve Brainnetome parcellation.

    Parameters
    ----------
    resolution : {1.0, 1.25, 2.0}, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm, 1.25mm and 2mm.
    threshold : {0, 25, 50}, optional
        The threshold for the probabilistic maps of subregion (default None).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``threshold``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tthreshold: {threshold}")

    # Snap requested resolution to the closest shipped one
    resolution = closest_resolution(resolution, [1.0, 1.25, 2.0])

    # Validate threshold
    _valid_threshold = [0, 25, 50]
    if threshold not in _valid_threshold:
        raise_error(
            f"The parameter `threshold` ({threshold}) needs to be one of the "
            f"following: {_valid_threshold}"
        )
    # File names use "1" / "2", not "1.0" / "2.0"
    if resolution in (1.0, 2.0):
        resolution = int(resolution)

    # Fetch file path
    parcellation_img_path = get(
        file_path=Path(
            "parcellations/Brainnetome/"
            f"BNA-maxprob-thr{threshold}-{resolution}mm.nii.gz"
        ),
        dataset_path=get_dataset_path(),
        **JUNIFER_DATA_PARAMS,
    )

    # Build labels from (region, subdivision-count) pairs in atlas order.
    # Each label appears twice (presumably left/right hemispheres, per the
    # "L(R)" tag -- TODO confirm) and sorted() interleaves the duplicates.
    # Note: the trailing space in "MVOcC " is intentional and matches the
    # label text used by the atlas files.
    region_counts = [
        ("SFG", 7), ("MFG", 7), ("IFG", 6), ("OrG", 6), ("PrG", 6),
        ("PCL", 2), ("STG", 6), ("MTG", 4), ("ITG", 7), ("FuG", 3),
        ("PhG", 6), ("pSTS", 2), ("SPL", 5), ("IPL", 6), ("PCun", 4),
        ("PoG", 4), ("INS", 6), ("CG", 7), ("MVOcC ", 5), ("LOcC", 4),
        ("LOcC", 2), ("Amyg", 2), ("Hipp", 2), ("BG", 6), ("Tha", 8),
    ]
    labels: list[str] = []
    for region, count in region_counts:
        names = [f"{region}_L(R)_{count}_{i}" for i in range(1, count + 1)]
        labels.extend(sorted(names * 2))

    return parcellation_img_path, labels
1308
+
1309
+
1310
def merge_parcellations(
    parcellations_list: list["Nifti1Image"],
    parcellations_names: list[str],
    labels_lists: list[list[str]],
) -> tuple["Nifti1Image", list[str]]:
    """Merge all parcellations from a list into one parcellation.

    Parameters
    ----------
    parcellations_list : list of niimg-like object
        List of parcellations to merge.
    parcellations_names: list of str
        List of names for parcellations at the corresponding indices.
    labels_lists : list of list of str
        A list of lists. Each list in the list contains the labels for the
        parcellation at the corresponding index.

    Returns
    -------
    parcellation : niimg-like object
        The parcellation that results from merging the list of input
        parcellations.
    labels : list of str
        List of labels for the resultant parcellation.

    """
    # Work on copies so the caller's label lists are never mutated
    labels_lists = [list(t_labels) for t_labels in labels_lists]

    # Check for duplicated labels
    labels_lists_flat = [item for sublist in labels_lists for item in sublist]
    if len(labels_lists_flat) != len(set(labels_lists_flat)):
        warn_with_log(
            "The parcellations have duplicated labels. "
            "Each label will be prefixed with the parcellation name."
        )
        # Disambiguate by prefixing each label with its parcellation name
        labels_lists = [
            [f"{parcellations_names[i]}_{t_label}" for t_label in t_labels]
            for i, t_labels in enumerate(labels_lists)
        ]
    overlapping_voxels = False
    ref_parc = parcellations_list[0]
    # Copy so the first image's cached data array is not modified in place
    parc_data = ref_parc.get_fdata().copy()

    labels = labels_lists[0]

    for t_parc, t_labels in zip(parcellations_list[1:], labels_lists[1:]):
        if t_parc.shape != ref_parc.shape:
            warn_with_log(
                "The parcellations have different resolutions! "
                "Resampling all parcellations to the first one in the list."
            )
            t_parc = nimg.resample_to_img(
                t_parc, ref_parc, interpolation="nearest", copy=True
            )

        # Get the data from this parcellation
        t_parc_data = t_parc.get_fdata().copy()  # must be copied
        # Increase the values of each ROI to match the labels
        t_parc_data[t_parc_data != 0] += len(labels)

        # Only set new values for the voxels that are 0
        # This makes sure that the voxels that are in multiple
        # parcellations are assigned to the parcellation that was
        # first in the list.
        if np.any(parc_data[t_parc_data != 0] != 0):
            overlapping_voxels = True

        parc_data[parc_data == 0] += t_parc_data[parc_data == 0]
        labels.extend(t_labels)

    if overlapping_voxels:
        warn_with_log(
            "The parcellations have overlapping voxels. "
            "The overlapping voxels will be assigned to the "
            "parcellation that was first in the list."
        )

    parcellation_img_res = nimg.new_img_like(parcellations_list[0], parc_data)

    return parcellation_img_res, labels