junifer 0.0.5.dev242__py3-none-any.whl → 0.0.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (279)
  1. junifer/__init__.py +2 -31
  2. junifer/__init__.pyi +37 -0
  3. junifer/_version.py +9 -4
  4. junifer/api/__init__.py +3 -5
  5. junifer/api/__init__.pyi +4 -0
  6. junifer/api/decorators.py +14 -19
  7. junifer/api/functions.py +165 -109
  8. junifer/api/py.typed +0 -0
  9. junifer/api/queue_context/__init__.py +2 -4
  10. junifer/api/queue_context/__init__.pyi +5 -0
  11. junifer/api/queue_context/gnu_parallel_local_adapter.py +22 -6
  12. junifer/api/queue_context/htcondor_adapter.py +23 -6
  13. junifer/api/queue_context/py.typed +0 -0
  14. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
  15. junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
  16. junifer/api/tests/test_functions.py +168 -74
  17. junifer/cli/__init__.py +24 -0
  18. junifer/cli/__init__.pyi +3 -0
  19. junifer/{api → cli}/cli.py +141 -125
  20. junifer/cli/parser.py +235 -0
  21. junifer/cli/py.typed +0 -0
  22. junifer/{api → cli}/tests/test_cli.py +8 -8
  23. junifer/{api/tests/test_api_utils.py → cli/tests/test_cli_utils.py} +5 -4
  24. junifer/{api → cli}/tests/test_parser.py +2 -2
  25. junifer/{api → cli}/utils.py +6 -16
  26. junifer/configs/juseless/__init__.py +2 -2
  27. junifer/configs/juseless/__init__.pyi +3 -0
  28. junifer/configs/juseless/datagrabbers/__init__.py +2 -12
  29. junifer/configs/juseless/datagrabbers/__init__.pyi +13 -0
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
  31. junifer/configs/juseless/datagrabbers/py.typed +0 -0
  32. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
  33. junifer/configs/juseless/datagrabbers/ucla.py +4 -4
  34. junifer/configs/juseless/py.typed +0 -0
  35. junifer/conftest.py +25 -0
  36. junifer/data/__init__.py +2 -42
  37. junifer/data/__init__.pyi +29 -0
  38. junifer/data/_dispatch.py +248 -0
  39. junifer/data/coordinates/__init__.py +9 -0
  40. junifer/data/coordinates/__init__.pyi +5 -0
  41. junifer/data/coordinates/_ants_coordinates_warper.py +104 -0
  42. junifer/data/coordinates/_coordinates.py +385 -0
  43. junifer/data/coordinates/_fsl_coordinates_warper.py +81 -0
  44. junifer/data/{tests → coordinates/tests}/test_coordinates.py +26 -33
  45. junifer/data/masks/__init__.py +9 -0
  46. junifer/data/masks/__init__.pyi +6 -0
  47. junifer/data/masks/_ants_mask_warper.py +177 -0
  48. junifer/data/masks/_fsl_mask_warper.py +106 -0
  49. junifer/data/masks/_masks.py +802 -0
  50. junifer/data/{tests → masks/tests}/test_masks.py +67 -63
  51. junifer/data/parcellations/__init__.py +9 -0
  52. junifer/data/parcellations/__init__.pyi +6 -0
  53. junifer/data/parcellations/_ants_parcellation_warper.py +166 -0
  54. junifer/data/parcellations/_fsl_parcellation_warper.py +89 -0
  55. junifer/data/parcellations/_parcellations.py +1388 -0
  56. junifer/data/{tests → parcellations/tests}/test_parcellations.py +165 -295
  57. junifer/data/pipeline_data_registry_base.py +76 -0
  58. junifer/data/py.typed +0 -0
  59. junifer/data/template_spaces.py +44 -79
  60. junifer/data/tests/test_data_utils.py +1 -2
  61. junifer/data/tests/test_template_spaces.py +8 -4
  62. junifer/data/utils.py +109 -4
  63. junifer/datagrabber/__init__.py +2 -26
  64. junifer/datagrabber/__init__.pyi +27 -0
  65. junifer/datagrabber/aomic/__init__.py +2 -4
  66. junifer/datagrabber/aomic/__init__.pyi +5 -0
  67. junifer/datagrabber/aomic/id1000.py +81 -52
  68. junifer/datagrabber/aomic/piop1.py +83 -55
  69. junifer/datagrabber/aomic/piop2.py +85 -56
  70. junifer/datagrabber/aomic/py.typed +0 -0
  71. junifer/datagrabber/aomic/tests/test_id1000.py +19 -12
  72. junifer/datagrabber/aomic/tests/test_piop1.py +52 -18
  73. junifer/datagrabber/aomic/tests/test_piop2.py +50 -17
  74. junifer/datagrabber/base.py +22 -18
  75. junifer/datagrabber/datalad_base.py +71 -34
  76. junifer/datagrabber/dmcc13_benchmark.py +31 -18
  77. junifer/datagrabber/hcp1200/__init__.py +2 -3
  78. junifer/datagrabber/hcp1200/__init__.pyi +4 -0
  79. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
  80. junifer/datagrabber/hcp1200/hcp1200.py +26 -15
  81. junifer/datagrabber/hcp1200/py.typed +0 -0
  82. junifer/datagrabber/hcp1200/tests/test_hcp1200.py +8 -2
  83. junifer/datagrabber/multiple.py +14 -9
  84. junifer/datagrabber/pattern.py +132 -96
  85. junifer/datagrabber/pattern_validation_mixin.py +206 -94
  86. junifer/datagrabber/py.typed +0 -0
  87. junifer/datagrabber/tests/test_datalad_base.py +27 -12
  88. junifer/datagrabber/tests/test_dmcc13_benchmark.py +28 -11
  89. junifer/datagrabber/tests/test_multiple.py +48 -2
  90. junifer/datagrabber/tests/test_pattern_datalad.py +1 -1
  91. junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
  92. junifer/datareader/__init__.py +2 -2
  93. junifer/datareader/__init__.pyi +3 -0
  94. junifer/datareader/default.py +6 -6
  95. junifer/datareader/py.typed +0 -0
  96. junifer/external/nilearn/__init__.py +2 -3
  97. junifer/external/nilearn/__init__.pyi +4 -0
  98. junifer/external/nilearn/junifer_connectivity_measure.py +25 -17
  99. junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
  100. junifer/external/nilearn/py.typed +0 -0
  101. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +17 -16
  102. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
  103. junifer/markers/__init__.py +2 -38
  104. junifer/markers/__init__.pyi +37 -0
  105. junifer/markers/base.py +11 -14
  106. junifer/markers/brainprint.py +12 -14
  107. junifer/markers/complexity/__init__.py +2 -18
  108. junifer/markers/complexity/__init__.pyi +17 -0
  109. junifer/markers/complexity/complexity_base.py +9 -11
  110. junifer/markers/complexity/hurst_exponent.py +7 -7
  111. junifer/markers/complexity/multiscale_entropy_auc.py +7 -7
  112. junifer/markers/complexity/perm_entropy.py +7 -7
  113. junifer/markers/complexity/py.typed +0 -0
  114. junifer/markers/complexity/range_entropy.py +7 -7
  115. junifer/markers/complexity/range_entropy_auc.py +7 -7
  116. junifer/markers/complexity/sample_entropy.py +7 -7
  117. junifer/markers/complexity/tests/test_complexity_base.py +1 -1
  118. junifer/markers/complexity/tests/test_hurst_exponent.py +5 -5
  119. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +5 -5
  120. junifer/markers/complexity/tests/test_perm_entropy.py +5 -5
  121. junifer/markers/complexity/tests/test_range_entropy.py +5 -5
  122. junifer/markers/complexity/tests/test_range_entropy_auc.py +5 -5
  123. junifer/markers/complexity/tests/test_sample_entropy.py +5 -5
  124. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +5 -5
  125. junifer/markers/complexity/weighted_perm_entropy.py +7 -7
  126. junifer/markers/ets_rss.py +12 -11
  127. junifer/markers/falff/__init__.py +2 -3
  128. junifer/markers/falff/__init__.pyi +4 -0
  129. junifer/markers/falff/_afni_falff.py +38 -45
  130. junifer/markers/falff/_junifer_falff.py +16 -19
  131. junifer/markers/falff/falff_base.py +7 -11
  132. junifer/markers/falff/falff_parcels.py +9 -9
  133. junifer/markers/falff/falff_spheres.py +8 -8
  134. junifer/markers/falff/py.typed +0 -0
  135. junifer/markers/falff/tests/test_falff_spheres.py +3 -1
  136. junifer/markers/functional_connectivity/__init__.py +2 -12
  137. junifer/markers/functional_connectivity/__init__.pyi +13 -0
  138. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +9 -8
  139. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +8 -8
  140. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +7 -7
  141. junifer/markers/functional_connectivity/functional_connectivity_base.py +13 -12
  142. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +8 -8
  143. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +7 -7
  144. junifer/markers/functional_connectivity/py.typed +0 -0
  145. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
  146. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
  147. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +6 -6
  148. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +5 -5
  149. junifer/markers/parcel_aggregation.py +22 -17
  150. junifer/markers/py.typed +0 -0
  151. junifer/markers/reho/__init__.py +2 -3
  152. junifer/markers/reho/__init__.pyi +4 -0
  153. junifer/markers/reho/_afni_reho.py +29 -35
  154. junifer/markers/reho/_junifer_reho.py +13 -14
  155. junifer/markers/reho/py.typed +0 -0
  156. junifer/markers/reho/reho_base.py +7 -11
  157. junifer/markers/reho/reho_parcels.py +10 -10
  158. junifer/markers/reho/reho_spheres.py +9 -9
  159. junifer/markers/sphere_aggregation.py +22 -17
  160. junifer/markers/temporal_snr/__init__.py +2 -3
  161. junifer/markers/temporal_snr/__init__.pyi +4 -0
  162. junifer/markers/temporal_snr/py.typed +0 -0
  163. junifer/markers/temporal_snr/temporal_snr_base.py +11 -10
  164. junifer/markers/temporal_snr/temporal_snr_parcels.py +8 -8
  165. junifer/markers/temporal_snr/temporal_snr_spheres.py +7 -7
  166. junifer/markers/tests/test_ets_rss.py +3 -3
  167. junifer/markers/tests/test_parcel_aggregation.py +24 -24
  168. junifer/markers/tests/test_sphere_aggregation.py +6 -6
  169. junifer/markers/utils.py +3 -3
  170. junifer/onthefly/__init__.py +2 -1
  171. junifer/onthefly/_brainprint.py +138 -0
  172. junifer/onthefly/read_transform.py +5 -8
  173. junifer/pipeline/__init__.py +2 -10
  174. junifer/pipeline/__init__.pyi +13 -0
  175. junifer/{markers/collection.py → pipeline/marker_collection.py} +8 -14
  176. junifer/pipeline/pipeline_component_registry.py +294 -0
  177. junifer/pipeline/pipeline_step_mixin.py +15 -11
  178. junifer/pipeline/py.typed +0 -0
  179. junifer/{markers/tests/test_collection.py → pipeline/tests/test_marker_collection.py} +2 -3
  180. junifer/pipeline/tests/test_pipeline_component_registry.py +200 -0
  181. junifer/pipeline/tests/test_pipeline_step_mixin.py +36 -37
  182. junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
  183. junifer/pipeline/tests/test_workdir_manager.py +43 -0
  184. junifer/pipeline/update_meta_mixin.py +21 -17
  185. junifer/pipeline/utils.py +6 -6
  186. junifer/pipeline/workdir_manager.py +19 -5
  187. junifer/preprocess/__init__.py +2 -10
  188. junifer/preprocess/__init__.pyi +11 -0
  189. junifer/preprocess/base.py +10 -10
  190. junifer/preprocess/confounds/__init__.py +2 -2
  191. junifer/preprocess/confounds/__init__.pyi +3 -0
  192. junifer/preprocess/confounds/fmriprep_confound_remover.py +243 -64
  193. junifer/preprocess/confounds/py.typed +0 -0
  194. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +121 -14
  195. junifer/preprocess/py.typed +0 -0
  196. junifer/preprocess/smoothing/__init__.py +2 -2
  197. junifer/preprocess/smoothing/__init__.pyi +3 -0
  198. junifer/preprocess/smoothing/_afni_smoothing.py +40 -40
  199. junifer/preprocess/smoothing/_fsl_smoothing.py +22 -32
  200. junifer/preprocess/smoothing/_nilearn_smoothing.py +35 -14
  201. junifer/preprocess/smoothing/py.typed +0 -0
  202. junifer/preprocess/smoothing/smoothing.py +11 -13
  203. junifer/preprocess/warping/__init__.py +2 -2
  204. junifer/preprocess/warping/__init__.pyi +3 -0
  205. junifer/preprocess/warping/_ants_warper.py +136 -32
  206. junifer/preprocess/warping/_fsl_warper.py +73 -22
  207. junifer/preprocess/warping/py.typed +0 -0
  208. junifer/preprocess/warping/space_warper.py +39 -11
  209. junifer/preprocess/warping/tests/test_space_warper.py +5 -9
  210. junifer/py.typed +0 -0
  211. junifer/stats.py +5 -5
  212. junifer/storage/__init__.py +2 -10
  213. junifer/storage/__init__.pyi +11 -0
  214. junifer/storage/base.py +47 -13
  215. junifer/storage/hdf5.py +95 -33
  216. junifer/storage/pandas_base.py +12 -11
  217. junifer/storage/py.typed +0 -0
  218. junifer/storage/sqlite.py +11 -11
  219. junifer/storage/tests/test_hdf5.py +86 -4
  220. junifer/storage/tests/test_sqlite.py +2 -2
  221. junifer/storage/tests/test_storage_base.py +5 -2
  222. junifer/storage/tests/test_utils.py +33 -7
  223. junifer/storage/utils.py +95 -9
  224. junifer/testing/__init__.py +2 -3
  225. junifer/testing/__init__.pyi +4 -0
  226. junifer/testing/datagrabbers.py +10 -11
  227. junifer/testing/py.typed +0 -0
  228. junifer/testing/registry.py +4 -7
  229. junifer/testing/tests/test_testing_registry.py +9 -17
  230. junifer/tests/test_stats.py +2 -2
  231. junifer/typing/__init__.py +9 -0
  232. junifer/typing/__init__.pyi +31 -0
  233. junifer/typing/_typing.py +68 -0
  234. junifer/utils/__init__.py +2 -12
  235. junifer/utils/__init__.pyi +18 -0
  236. junifer/utils/_config.py +110 -0
  237. junifer/utils/_yaml.py +16 -0
  238. junifer/utils/helpers.py +6 -6
  239. junifer/utils/logging.py +117 -8
  240. junifer/utils/py.typed +0 -0
  241. junifer/{pipeline → utils}/singleton.py +19 -14
  242. junifer/utils/tests/test_config.py +59 -0
  243. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/METADATA +43 -38
  244. junifer-0.0.6.dist-info/RECORD +350 -0
  245. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/WHEEL +1 -1
  246. junifer-0.0.6.dist-info/entry_points.txt +2 -0
  247. junifer/api/parser.py +0 -118
  248. junifer/data/coordinates.py +0 -408
  249. junifer/data/masks.py +0 -670
  250. junifer/data/parcellations.py +0 -1828
  251. junifer/pipeline/registry.py +0 -177
  252. junifer/pipeline/tests/test_registry.py +0 -150
  253. junifer-0.0.5.dev242.dist-info/RECORD +0 -275
  254. junifer-0.0.5.dev242.dist-info/entry_points.txt +0 -2
  255. /junifer/{api → cli}/tests/data/gmd_mean.yaml +0 -0
  256. /junifer/{api → cli}/tests/data/gmd_mean_htcondor.yaml +0 -0
  257. /junifer/{api → cli}/tests/data/partly_cloudy_agg_mean_tian.yml +0 -0
  258. /junifer/data/{VOIs → coordinates/VOIs}/meta/AutobiographicalMemory_VOIs.txt +0 -0
  259. /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAC_VOIs.txt +0 -0
  260. /junifer/data/{VOIs → coordinates/VOIs}/meta/CogAR_VOIs.txt +0 -0
  261. /junifer/data/{VOIs → coordinates/VOIs}/meta/DMNBuckner_VOIs.txt +0 -0
  262. /junifer/data/{VOIs → coordinates/VOIs}/meta/Dosenbach2010_MNI_VOIs.txt +0 -0
  263. /junifer/data/{VOIs → coordinates/VOIs}/meta/Empathy_VOIs.txt +0 -0
  264. /junifer/data/{VOIs → coordinates/VOIs}/meta/Motor_VOIs.txt +0 -0
  265. /junifer/data/{VOIs → coordinates/VOIs}/meta/MultiTask_VOIs.txt +0 -0
  266. /junifer/data/{VOIs → coordinates/VOIs}/meta/PhysioStress_VOIs.txt +0 -0
  267. /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2011_MNI_VOIs.txt +0 -0
  268. /junifer/data/{VOIs → coordinates/VOIs}/meta/Power2013_MNI_VOIs.tsv +0 -0
  269. /junifer/data/{VOIs → coordinates/VOIs}/meta/Rew_VOIs.txt +0 -0
  270. /junifer/data/{VOIs → coordinates/VOIs}/meta/Somatosensory_VOIs.txt +0 -0
  271. /junifer/data/{VOIs → coordinates/VOIs}/meta/ToM_VOIs.txt +0 -0
  272. /junifer/data/{VOIs → coordinates/VOIs}/meta/VigAtt_VOIs.txt +0 -0
  273. /junifer/data/{VOIs → coordinates/VOIs}/meta/WM_VOIs.txt +0 -0
  274. /junifer/data/{VOIs → coordinates/VOIs}/meta/eMDN_VOIs.txt +0 -0
  275. /junifer/data/{VOIs → coordinates/VOIs}/meta/eSAD_VOIs.txt +0 -0
  276. /junifer/data/{VOIs → coordinates/VOIs}/meta/extDMN_VOIs.txt +0 -0
  277. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info/licenses}/AUTHORS.rst +0 -0
  278. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info/licenses}/LICENSE.md +0 -0
  279. {junifer-0.0.5.dev242.dist-info → junifer-0.0.6.dist-info}/top_level.txt +0 -0
@@ -1,1828 +0,0 @@
1
- """Functions for parcellation manipulation."""
2
-
3
- # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
4
- # Vera Komeyer <v.komeyer@fz-juelich.de>
5
- # Synchon Mandal <s.mandal@fz-juelich.de>
6
- # License: AGPL
7
-
8
- import io
9
- import shutil
10
- import tarfile
11
- import tempfile
12
- import typing
13
- import zipfile
14
- from pathlib import Path
15
- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Tuple, Union
16
-
17
- import httpx
18
- import nibabel as nib
19
- import numpy as np
20
- import pandas as pd
21
- from nilearn import datasets, image
22
-
23
- from ..pipeline import WorkDirManager
24
- from ..utils import logger, raise_error, run_ext_cmd, warn_with_log
25
- from .template_spaces import get_template, get_xfm
26
- from .utils import closest_resolution
27
-
28
-
29
- if TYPE_CHECKING:
30
- from nibabel import Nifti1Image
31
-
32
-
33
- __all__ = [
34
- "register_parcellation",
35
- "list_parcellations",
36
- "get_parcellation",
37
- "load_parcellation",
38
- "merge_parcellations",
39
- ]
40
-
41
-
42
- # A dictionary containing all supported parcellations and their respective
43
- # valid parameters.
44
-
45
- # Each entry is a dictionary that must contain at least the following keys:
46
- # * 'family': the parcellation's family name (e.g. 'Schaefer', 'SUIT')
47
- # * 'space': the parcellation's space (e.g., 'MNI', 'SUIT')
48
-
49
- # Optional keys:
50
- # * 'valid_resolutions': a list of valid resolutions for the parcellation
51
- # (e.g. [1, 2])
52
-
53
- # TODO: have separate dictionary for built-in
54
- _available_parcellations: Dict[str, Dict[Any, Any]] = {
55
- "SUITxSUIT": {"family": "SUIT", "space": "SUIT"},
56
- "SUITxMNI": {"family": "SUIT", "space": "MNI152NLin6Asym"},
57
- }
58
-
59
- # Add Schaefer parcellation info
60
- for n_rois in range(100, 1001, 100):
61
- for t_net in [7, 17]:
62
- t_name = f"Schaefer{n_rois}x{t_net}"
63
- _available_parcellations[t_name] = {
64
- "family": "Schaefer",
65
- "n_rois": n_rois,
66
- "yeo_networks": t_net,
67
- "space": "MNI152NLin6Asym",
68
- }
69
- # Add Tian parcellation info
70
- for scale in range(1, 5):
71
- t_name = f"TianxS{scale}x7TxMNI6thgeneration"
72
- _available_parcellations[t_name] = {
73
- "family": "Tian",
74
- "scale": scale,
75
- "magneticfield": "7T",
76
- "space": "MNI152NLin6Asym",
77
- }
78
- t_name = f"TianxS{scale}x3TxMNI6thgeneration"
79
- _available_parcellations[t_name] = {
80
- "family": "Tian",
81
- "scale": scale,
82
- "magneticfield": "3T",
83
- "space": "MNI152NLin6Asym",
84
- }
85
- t_name = f"TianxS{scale}x3TxMNInonlinear2009cAsym"
86
- _available_parcellations[t_name] = {
87
- "family": "Tian",
88
- "scale": scale,
89
- "magneticfield": "3T",
90
- "space": "MNI152NLin2009cAsym",
91
- }
92
- # Add AICHA parcellation info
93
- for version in (1, 2):
94
- _available_parcellations[f"AICHA_v{version}"] = {
95
- "family": "AICHA",
96
- "version": version,
97
- "space": "IXI549Space",
98
- }
99
- # Add Shen parcellation info
100
- for year in (2013, 2015, 2019):
101
- if year == 2013:
102
- for n_rois in (50, 100, 150):
103
- _available_parcellations[f"Shen_{year}_{n_rois}"] = {
104
- "family": "Shen",
105
- "year": 2013,
106
- "n_rois": n_rois,
107
- "space": "MNI152NLin2009cAsym",
108
- }
109
- elif year == 2015:
110
- _available_parcellations["Shen_2015_268"] = {
111
- "family": "Shen",
112
- "year": 2015,
113
- "n_rois": 268,
114
- "space": "MNI152NLin2009cAsym",
115
- }
116
- elif year == 2019:
117
- _available_parcellations["Shen_2019_368"] = {
118
- "family": "Shen",
119
- "year": 2019,
120
- "n_rois": 368,
121
- "space": "MNI152NLin2009cAsym",
122
- }
123
- # Add Yan parcellation info
124
- for n_rois in range(100, 1001, 100):
125
- # Add Yeo networks
126
- for yeo_network in [7, 17]:
127
- _available_parcellations[f"Yan{n_rois}xYeo{yeo_network}"] = {
128
- "family": "Yan",
129
- "n_rois": n_rois,
130
- "yeo_networks": yeo_network,
131
- "space": "MNI152NLin6Asym",
132
- }
133
- # Add Kong networks
134
- _available_parcellations[f"Yan{n_rois}xKong17"] = {
135
- "family": "Yan",
136
- "n_rois": n_rois,
137
- "kong_networks": 17,
138
- "space": "MNI152NLin6Asym",
139
- }
140
- # Add Brainnetome parcellation info
141
- for threshold in [0, 25, 50]:
142
- _available_parcellations[f"Brainnetome_thr{threshold}"] = {
143
- "family": "Brainnetome",
144
- "threshold": threshold,
145
- "space": "MNI152NLin6Asym",
146
- }
147
-
148
-
149
- def register_parcellation(
150
- name: str,
151
- parcellation_path: Union[str, Path],
152
- parcels_labels: List[str],
153
- space: str,
154
- overwrite: bool = False,
155
- ) -> None:
156
- """Register a custom user parcellation.
157
-
158
- Parameters
159
- ----------
160
- name : str
161
- The name of the parcellation.
162
- parcellation_path : str or pathlib.Path
163
- The path to the parcellation file.
164
- parcels_labels : list of str
165
- The list of labels for the parcellation.
166
- space : str
167
- The template space of the parcellation, for e.g., "MNI152NLin6Asym".
168
- overwrite : bool, optional
169
- If True, overwrite an existing parcellation with the same name.
170
- Does not apply to built-in parcellations (default False).
171
-
172
- Raises
173
- ------
174
- ValueError
175
- If the parcellation name is already registered and overwrite is set to
176
- False or if the parcellation name is a built-in parcellation.
177
-
178
- """
179
- # Check for attempt of overwriting built-in parcellations
180
- if name in _available_parcellations:
181
- if overwrite is True:
182
- logger.info(f"Overwriting {name} parcellation")
183
- if (
184
- _available_parcellations[name]["family"]
185
- != "CustomUserParcellation"
186
- ):
187
- raise_error(
188
- f"Cannot overwrite {name} parcellation. "
189
- "It is a built-in parcellation."
190
- )
191
- else:
192
- raise_error(
193
- f"Parcellation {name} already registered. Set "
194
- "`overwrite=True` to update its value."
195
- )
196
- # Convert str to Path
197
- if not isinstance(parcellation_path, Path):
198
- parcellation_path = Path(parcellation_path)
199
- # Add user parcellation info
200
- _available_parcellations[name] = {
201
- "path": str(parcellation_path.absolute()),
202
- "labels": parcels_labels,
203
- "family": "CustomUserParcellation",
204
- "space": space,
205
- }
206
-
207
-
208
- def list_parcellations() -> List[str]:
209
- """List all the available parcellations.
210
-
211
- Returns
212
- -------
213
- list of str
214
- A list with all available parcellations.
215
-
216
- """
217
- return sorted(_available_parcellations.keys())
218
-
219
-
220
- def get_parcellation(
221
- parcellation: List[str],
222
- target_data: Dict[str, Any],
223
- extra_input: Optional[Dict[str, Any]] = None,
224
- ) -> Tuple["Nifti1Image", List[str]]:
225
- """Get parcellation, tailored for the target image.
226
-
227
- Parameters
228
- ----------
229
- parcellation : list of str
230
- The name(s) of the parcellation(s).
231
- target_data : dict
232
- The corresponding item of the data object to which the parcellation
233
- will be applied.
234
- extra_input : dict, optional
235
- The other fields in the data object. Useful for accessing other data
236
- kinds that needs to be used in the computation of parcellations
237
- (default None).
238
-
239
- Returns
240
- -------
241
- Nifti1Image
242
- The parcellation image.
243
- list of str
244
- Parcellation labels.
245
-
246
- Raises
247
- ------
248
- RuntimeError
249
- If warp / transformation file extension is not ".mat" or ".h5".
250
- ValueError
251
- If ``extra_input`` is None when ``target_data``'s space is native.
252
-
253
- """
254
- # Check pre-requirements for space manipulation
255
- target_space = target_data["space"]
256
- # Set target standard space to target space
257
- target_std_space = target_space
258
- # Extra data type requirement check if target space is native
259
- if target_space == "native":
260
- # Check for extra inputs
261
- if extra_input is None:
262
- raise_error(
263
- "No extra input provided, requires `Warp` and `T1w` "
264
- "data types in particular for transformation to "
265
- f"{target_data['space']} space for further computation."
266
- )
267
- # Set target standard space to warp file space source
268
- target_std_space = extra_input["Warp"]["src"]
269
-
270
- # Get the min of the voxels sizes and use it as the resolution
271
- target_img = target_data["data"]
272
- resolution = np.min(target_img.header.get_zooms()[:3])
273
-
274
- # Create component-scoped tempdir
275
- tempdir = WorkDirManager().get_tempdir(prefix="parcellations")
276
- # Create element-scoped tempdir so that warped parcellation is
277
- # available later as nibabel stores file path reference for
278
- # loading on computation
279
- element_tempdir = WorkDirManager().get_element_tempdir(
280
- prefix="parcellations"
281
- )
282
-
283
- # Load the parcellations
284
- all_parcellations = []
285
- all_labels = []
286
- for name in parcellation:
287
- img, labels, _, space = load_parcellation(
288
- name=name,
289
- resolution=resolution,
290
- )
291
-
292
- # Convert parcellation spaces if required
293
- if space != target_std_space:
294
- # Get xfm file
295
- xfm_file_path = get_xfm(src=space, dst=target_std_space)
296
- # Get target standard space template
297
- target_std_space_template_img = get_template(
298
- space=target_std_space,
299
- target_data=target_data,
300
- extra_input=extra_input,
301
- )
302
-
303
- # Save parcellation image to a component-scoped tempfile
304
- parcellation_path = tempdir / f"{name}.nii.gz"
305
- nib.save(img, parcellation_path)
306
-
307
- # Save template
308
- target_std_space_template_path = (
309
- tempdir / f"{target_std_space}_T1w_{resolution}.nii.gz"
310
- )
311
- nib.save(
312
- target_std_space_template_img, target_std_space_template_path
313
- )
314
-
315
- # Set warped parcellation path
316
- warped_parcellation_path = element_tempdir / (
317
- f"{name}_warped_from_{space}_to_" f"{target_std_space}.nii.gz"
318
- )
319
-
320
- logger.debug(
321
- f"Using ANTs to warp {name} "
322
- f"from {space} to {target_std_space}"
323
- )
324
- # Set antsApplyTransforms command
325
- apply_transforms_cmd = [
326
- "antsApplyTransforms",
327
- "-d 3",
328
- "-e 3",
329
- "-n 'GenericLabel[NearestNeighbor]'",
330
- f"-i {parcellation_path.resolve()}",
331
- f"-r {target_std_space_template_path.resolve()}",
332
- f"-t {xfm_file_path.resolve()}",
333
- f"-o {warped_parcellation_path.resolve()}",
334
- ]
335
- # Call antsApplyTransforms
336
- run_ext_cmd(name="antsApplyTransforms", cmd=apply_transforms_cmd)
337
-
338
- raw_img = nib.load(warped_parcellation_path)
339
- # Remove extra dimension added by ANTs
340
- img = image.math_img("np.squeeze(img)", img=raw_img)
341
-
342
- # Resample parcellation to target image
343
- img_to_merge = image.resample_to_img(
344
- source_img=img,
345
- target_img=target_img,
346
- interpolation="nearest",
347
- copy=True,
348
- )
349
-
350
- all_parcellations.append(img_to_merge)
351
- all_labels.append(labels)
352
-
353
- # Avoid merging if there is only one parcellation
354
- if len(all_parcellations) == 1:
355
- resampled_parcellation_img = all_parcellations[0]
356
- labels = all_labels[0]
357
- # Parcellations are already transformed to target standard space
358
- else:
359
- resampled_parcellation_img, labels = merge_parcellations(
360
- parcellations_list=all_parcellations,
361
- parcellations_names=parcellation,
362
- labels_lists=all_labels,
363
- )
364
-
365
- # Warp parcellation if target space is native
366
- if target_space == "native":
367
- # Save parcellation image to a component-scoped tempfile
368
- prewarp_parcellation_path = tempdir / "prewarp_parcellation.nii.gz"
369
- nib.save(resampled_parcellation_img, prewarp_parcellation_path)
370
-
371
- # Create an element-scoped tempfile for warped output
372
- warped_parcellation_path = (
373
- element_tempdir / "parcellation_warped.nii.gz"
374
- )
375
-
376
- # Check for warp file type to use correct tool
377
- warp_file_ext = extra_input["Warp"]["path"].suffix
378
- if warp_file_ext == ".mat":
379
- logger.debug("Using FSL for parcellation warping")
380
- # Set applywarp command
381
- applywarp_cmd = [
382
- "applywarp",
383
- "--interp=nn",
384
- f"-i {prewarp_parcellation_path.resolve()}",
385
- # use resampled reference
386
- f"-r {target_data['reference_path'].resolve()}",
387
- f"-w {extra_input['Warp']['path'].resolve()}",
388
- f"-o {warped_parcellation_path.resolve()}",
389
- ]
390
- # Call applywarp
391
- run_ext_cmd(name="applywarp", cmd=applywarp_cmd)
392
-
393
- elif warp_file_ext == ".h5":
394
- logger.debug("Using ANTs for parcellation warping")
395
- # Set antsApplyTransforms command
396
- apply_transforms_cmd = [
397
- "antsApplyTransforms",
398
- "-d 3",
399
- "-e 3",
400
- "-n 'GenericLabel[NearestNeighbor]'",
401
- f"-i {prewarp_parcellation_path.resolve()}",
402
- # use resampled reference
403
- f"-r {target_data['reference_path'].resolve()}",
404
- f"-t {extra_input['Warp']['path'].resolve()}",
405
- f"-o {warped_parcellation_path.resolve()}",
406
- ]
407
- # Call antsApplyTransforms
408
- run_ext_cmd(name="antsApplyTransforms", cmd=apply_transforms_cmd)
409
-
410
- else:
411
- raise_error(
412
- msg=(
413
- "Unknown warp / transformation file extension: "
414
- f"{warp_file_ext}"
415
- ),
416
- klass=RuntimeError,
417
- )
418
-
419
- # Load nifti
420
- resampled_parcellation_img = nib.load(warped_parcellation_path)
421
-
422
- # Delete tempdir
423
- WorkDirManager().delete_tempdir(tempdir)
424
-
425
- return resampled_parcellation_img, labels # type: ignore
426
-
427
-
428
- def load_parcellation(
429
- name: str,
430
- parcellations_dir: Union[str, Path, None] = None,
431
- resolution: Optional[float] = None,
432
- path_only: bool = False,
433
- ) -> Tuple[Optional["Nifti1Image"], List[str], Path, str]:
434
- """Load a brain parcellation (including a label file).
435
-
436
- If it is a built-in parcellation and the file is not present in the
437
- ``parcellations_dir`` directory, it will be downloaded.
438
-
439
- Parameters
440
- ----------
441
- name : str
442
- The name of the parcellation. Check valid options by calling
443
- :func:`.list_parcellations`.
444
- parcellations_dir : str or pathlib.Path, optional
445
- Path where the parcellations files are stored. The default location is
446
- "$HOME/junifer/data/parcellations" (default None).
447
- resolution : float, optional
448
- The desired resolution of the parcellation to load. If it is not
449
- available, the closest resolution will be loaded. Preferably, use a
450
- resolution higher than the desired one. By default, will load the
451
- highest one (default None).
452
- path_only : bool, optional
453
- If True, the parcellation image will not be loaded (default False).
454
-
455
- Returns
456
- -------
457
- Nifti1Image or None
458
- Loaded parcellation image.
459
- list of str
460
- Parcellation labels.
461
- pathlib.Path
462
- File path to the parcellation image.
463
- str
464
- The space of the parcellation.
465
-
466
- Raises
467
- ------
468
- ValueError
469
- If ``name`` is invalid or if the parcellation values and labels
470
- don't have equal dimension or if the value range is invalid.
471
-
472
- """
473
- # Check for valid parcellation name
474
- if name not in _available_parcellations:
475
- raise_error(
476
- f"Parcellation {name} not found. "
477
- f"Valid options are: {list_parcellations()}"
478
- )
479
-
480
- # Copy parcellation definition to avoid edits in original object
481
- parcellation_definition = _available_parcellations[name].copy()
482
- t_family = parcellation_definition.pop("family")
483
- # Remove space conditionally
484
- if t_family not in ["SUIT", "Tian"]:
485
- space = parcellation_definition.pop("space")
486
- else:
487
- space = parcellation_definition["space"]
488
-
489
- # Check if the parcellation family is custom or built-in
490
- if t_family == "CustomUserParcellation":
491
- parcellation_fname = Path(parcellation_definition["path"])
492
- parcellation_labels = parcellation_definition["labels"]
493
- else:
494
- parcellation_fname, parcellation_labels = _retrieve_parcellation(
495
- family=t_family,
496
- parcellations_dir=parcellations_dir,
497
- resolution=resolution,
498
- **parcellation_definition,
499
- )
500
-
501
- # Load parcellation image and values
502
- logger.info(f"Loading parcellation {parcellation_fname.absolute()!s}")
503
- parcellation_img = None
504
- if path_only is False:
505
- # Load image via nibabel
506
- parcellation_img = nib.load(parcellation_fname)
507
- # Get unique values
508
- parcel_values = np.unique(parcellation_img.get_fdata())
509
- # Check for dimension
510
- if len(parcel_values) - 1 != len(parcellation_labels):
511
- raise_error(
512
- f"Parcellation {name} has {len(parcel_values) - 1} parcels "
513
- f"but {len(parcellation_labels)} labels."
514
- )
515
- # Sort values
516
- parcel_values.sort()
517
- # Check if value range is invalid
518
- if np.any(np.diff(parcel_values) != 1):
519
- raise_error(
520
- f"Parcellation {name} must have all the values in the range "
521
- f"[0, {len(parcel_values)}]."
522
- )
523
-
524
- # Type-cast to remove errors
525
- parcellation_img = typing.cast("Nifti1Image", parcellation_img)
526
- return parcellation_img, parcellation_labels, parcellation_fname, space
527
-
528
-
529
def _retrieve_parcellation(
    family: str,
    parcellations_dir: Union[str, Path, None] = None,
    resolution: Optional[float] = None,
    **kwargs,
) -> Tuple[Path, List[str]]:
    """Retrieve a brain parcellation object from nilearn or online source.

    Only returns one parcellation per call. Call function multiple times for
    different parameter specifications. Only retrieves parcellation if it is
    not yet in parcellations_dir.

    Parameters
    ----------
    family : {"Schaefer", "SUIT", "Tian", "AICHA", "Shen", "Yan", \
"Brainnetome"}
        The name of the parcellation family.
    parcellations_dir : str or pathlib.Path, optional
        Path where the retrieved parcellations file are stored. The default
        location is "$HOME/junifer/data/parcellations" (default None).
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None).
    **kwargs
        Use to specify parcellation-specific keyword arguments found in the
        following section.

    Other Parameters
    ----------------
    * Schaefer :
        ``n_rois`` : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}
            Granularity of parcellation to be used.
        ``yeo_network`` : {7, 17}, optional
            Number of Yeo networks to use (default 7).
    * Tian :
        ``scale`` : {1, 2, 3, 4}
            Scale of parcellation (defines granularity).
        ``space`` : {"MNI152NLin6Asym", "MNI152NLin2009cAsym"}, optional
            Space of parcellation (default "MNI152NLin6Asym"). (For more
            information see https://github.com/yetianmed/subcortex)
        ``magneticfield`` : {"3T", "7T"}, optional
            Magnetic field (default "3T").
    * SUIT :
        ``space`` : {"MNI152NLin6Asym", "SUIT"}, optional
            Space of parcellation (default "MNI152NLin6Asym"). (For more
            information see http://www.diedrichsenlab.org/imaging/suit.htm).
    * AICHA :
        ``version`` : {1, 2}, optional
            Version of parcellation (default 2).
    * Shen :
        ``year`` : {2013, 2015, 2019}, optional
            Year of the parcellation to use (default 2015).
        ``n_rois`` : int, optional
            Number of ROIs to use. Can be ``50, 100, or 150`` for
            ``year = 2013`` but is fixed at ``268`` for ``year = 2015`` and
            at ``368`` for ``year = 2019``.
    * Yan :
        ``n_rois`` : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}
            Granularity of the parcellation to be used.
        ``yeo_networks`` : {7, 17}, optional
            Number of Yeo networks to use (default None).
        ``kong_networks`` : {17}, optional
            Number of Kong networks to use (default None).
    * Brainnetome :
        ``threshold`` : {0, 25, 50}
            Threshold for the probabilistic maps of subregion.

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If the parcellation's name is invalid.

    """
    # Resolve the storage directory; create the default location on demand
    # so a fresh installation works out of the box.
    if parcellations_dir is None:
        parcellations_dir = (
            Path().home() / "junifer" / "data" / "parcellations"
        )
        parcellations_dir.mkdir(exist_ok=True, parents=True)
    elif not isinstance(parcellations_dir, Path):
        # Accept plain strings for convenience
        parcellations_dir = Path(parcellations_dir)

    logger.info(f"Fetching one of {family} parcellations.")

    # Dispatch table: one retrieval function per supported family
    retrievers = {
        "Schaefer": _retrieve_schaefer,
        "SUIT": _retrieve_suit,
        "Tian": _retrieve_tian,
        "AICHA": _retrieve_aicha,
        "Shen": _retrieve_shen,
        "Yan": _retrieve_yan,
        "Brainnetome": _retrieve_brainnetome,
    }
    if family not in retrievers:
        raise_error(
            f"The provided parcellation name {family} cannot be retrieved."
        )
    # Delegate to the family-specific retriever
    return retrievers[family](
        parcellations_dir=parcellations_dir,
        resolution=resolution,
        **kwargs,
    )
671
-
672
-
673
def _retrieve_schaefer(
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    n_rois: Optional[int] = None,
    yeo_networks: int = 7,
) -> Tuple[Path, List[str]]:
    """Retrieve Schaefer parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm and 2mm.
    n_rois : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}, optional
        Granularity of the parcellation to be used (default None).
    yeo_networks : {7, 17}, optional
        Number of Yeo networks to use (default 7).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    ValueError
        If invalid value is provided for ``n_rois`` or ``yeo_networks`` or if
        there is a problem fetching the parcellation.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tn_rois: {n_rois}")
    logger.info(f"\tyeo_networks: {yeo_networks}")

    # Validate granularity
    _valid_n_rois = [100, 200, 300, 400, 500, 600, 700, 800, 900, 1000]
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    # Validate network count
    _valid_networks = [7, 17]
    if yeo_networks not in _valid_networks:
        raise_error(
            f"The parameter `yeo_networks` ({yeo_networks}) needs to be one "
            f"of the following: {_valid_networks}"
        )

    # Snap requested resolution to the closest available one
    resolution = closest_resolution(resolution, [1, 2])

    # Expected on-disk locations of the image and label files
    base_dir = parcellations_dir / "schaefer_2018"
    parcellation_fname = base_dir / (
        f"Schaefer2018_{n_rois}Parcels_{yeo_networks}Networks_order_"
        f"FSLMNI152_{resolution}mm.nii.gz"
    )
    parcellation_lname = base_dir / (
        f"Schaefer2018_{n_rois}Parcels_{yeo_networks}Networks_order.txt"
    )

    # Fetch via nilearn only when something is missing locally
    if not (parcellation_fname.exists() and parcellation_lname.exists()):
        logger.info(
            "At least one of the parcellation files are missing. "
            "Fetching using nilearn."
        )
        datasets.fetch_atlas_schaefer_2018(
            n_rois=n_rois,
            yeo_networks=yeo_networks,
            resolution_mm=resolution,  # type: ignore we know it's 1 or 2
            data_dir=parcellations_dir.resolve(),
        )

    # Read the second column of the label file and strip the leading
    # "<n>Networks" token from each entry.
    raw_names = (
        pd.read_csv(parcellation_lname, sep="\t", header=None)
        .iloc[:, 1]
        .to_list()
    )
    labels = ["_".join(name.split("_")[1:]) for name in raw_names]

    return parcellation_fname, labels
772
-
773
-
774
def _retrieve_tian(
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    scale: Optional[int] = None,
    space: str = "MNI152NLin6Asym",
    magneticfield: str = "3T",
) -> Tuple[Path, List[str]]:
    """Retrieve Tian parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation depend on the space and magnetic field.
    scale : {1, 2, 3, 4}, optional
        Scale of parcellation (defines granularity) (default None).
    space : {"MNI152NLin6Asym", "MNI152NLin2009cAsym"}, optional
        Space of parcellation (default "MNI152NLin6Asym"). (For more
        information see https://github.com/yetianmed/subcortex)
    magneticfield : {"3T", "7T"}, optional
        Magnetic field (default "3T").

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value is provided for ``scale`` or ``magneticfield`` or
        ``space``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tscale: {scale}")
    logger.info(f"\tspace: {space}")
    logger.info(f"\tmagneticfield: {magneticfield}")

    # Check scale
    _valid_scales = [1, 2, 3, 4]
    if scale not in _valid_scales:
        raise_error(
            f"The parameter `scale` ({scale}) needs to be one of the "
            f"following: {_valid_scales}"
        )

    # Check resolution; the valid set depends on field strength and space
    _valid_resolutions = []  # avoid pylance error
    if magneticfield == "3T":
        _valid_spaces = ["MNI152NLin6Asym", "MNI152NLin2009cAsym"]
        if space == "MNI152NLin6Asym":
            _valid_resolutions = [1, 2]
        elif space == "MNI152NLin2009cAsym":
            _valid_resolutions = [2]
        else:
            raise_error(
                f"The parameter `space` ({space}) for 3T needs to be one of "
                f"the following: {_valid_spaces}"
            )
    elif magneticfield == "7T":
        _valid_resolutions = [1.6]
        if space != "MNI152NLin6Asym":
            raise_error(
                f"The parameter `space` ({space}) for 7T needs to be "
                f"MNI152NLin6Asym"
            )
    else:
        raise_error(
            f"The parameter `magneticfield` ({magneticfield}) needs to be "
            f"one of the following: 3T or 7T"
        )
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Define parcellation and label file names
    if magneticfield == "3T":
        parcellation_fname_base_3T = (
            parcellations_dir / "Tian2020MSA_v1.1" / "3T" / "Subcortex-Only"
        )
        parcellation_lname = parcellation_fname_base_3T / (
            f"Tian_Subcortex_S{scale}_3T_label.txt"
        )
        if space == "MNI152NLin6Asym":
            parcellation_fname = parcellation_fname_base_3T / (
                f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz"
            )
            # 1mm variant ships under a different file name
            if resolution == 1:
                parcellation_fname = (
                    parcellation_fname_base_3T
                    / f"Tian_Subcortex_S{scale}_{magneticfield}_1mm.nii.gz"
                )
        elif space == "MNI152NLin2009cAsym":
            # NOTE(review): `space` is rewritten here to match the short form
            # used in the distributed file names; later code must not rely on
            # the original value.
            space = "2009cAsym"
            parcellation_fname = parcellation_fname_base_3T / (
                f"Tian_Subcortex_S{scale}_{magneticfield}_{space}.nii.gz"
            )
    elif magneticfield == "7T":
        parcellation_fname_base_7T = (
            parcellations_dir / "Tian2020MSA_v1.1" / "7T"
        )
        parcellation_fname_base_7T.mkdir(exist_ok=True, parents=True)
        parcellation_fname = (
            parcellations_dir
            / "Tian2020MSA_v1.1"
            / f"{magneticfield}"
            / (f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz")
        )
        # define 7T labels (b/c currently no labels file available for 7T)
        scale7Trois = {1: 16, 2: 34, 3: 54, 4: 62}
        labels = [
            ("parcel_" + str(x)) for x in np.arange(1, scale7Trois[scale] + 1)
        ]
        parcellation_lname = parcellation_fname_base_7T / (
            f"Tian_Subcortex_S{scale}_7T_labelnumbering.txt"
        )
        # NOTE(review): the synthetic 7T label file is (re)written on every
        # call, even when the parcellation image already exists locally.
        with open(parcellation_lname, "w") as filehandle:
            for listitem in labels:
                filehandle.write(f"{listitem}\n")
        logger.info(
            "Currently there are no labels provided for the 7T Tian "
            "parcellation. A simple numbering scheme for distinction was "
            "therefore used."
        )

    # Check existence of parcellation; download and unpack if missing
    if not (parcellation_fname.exists() and parcellation_lname.exists()):
        logger.info(
            "At least one of the parcellation files are missing, fetching."
        )
        # Set URL
        url = (
            "https://www.nitrc.org/frs/download.php/12012/Tian2020MSA_v1.1.zip"
        )

        logger.info(f"Downloading TIAN from {url}")
        # Store initial download in a tempdir
        with tempfile.TemporaryDirectory() as tmpdir:
            # Make HTTP request
            try:
                resp = httpx.get(url)
                resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                # Set tempfile for storing initial content and unzipping
                zip_fname = Path(tmpdir) / "Tian2020MSA_v1.1.zip"
                # Open tempfile and write content
                with open(zip_fname, "wb") as f:
                    f.write(resp.content)
                # Unzip tempfile
                with zipfile.ZipFile(zip_fname, "r") as zip_ref:
                    zip_ref.extractall(parcellations_dir.as_posix())
                # Clean after unzipping
                if (parcellations_dir / "__MACOSX").exists():
                    shutil.rmtree((parcellations_dir / "__MACOSX").as_posix())

    # Load labels; for 7T this re-reads the file written above, replacing the
    # in-memory list built earlier
    labels = pd.read_csv(parcellation_lname, sep=" ", header=None)[0].to_list()

    return parcellation_fname, labels
948
-
949
-
950
def _retrieve_suit(
    parcellations_dir: Path,
    resolution: Optional[float],
    space: str = "MNI152NLin6Asym",
) -> Tuple[Path, List[str]]:
    """Retrieve SUIT parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). The only available resolution for this
        parcellation is 1mm.
    space : {"MNI152NLin6Asym", "SUIT"}, optional
        Space of parcellation (default "MNI152NLin6Asym"). (For more
        information see http://www.diedrichsenlab.org/imaging/suit.htm).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value is provided for ``space``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tspace: {space}")

    # Check space
    _valid_spaces = ["MNI152NLin6Asym", "SUIT"]
    if space not in _valid_spaces:
        raise_error(
            f"The parameter `space` ({space}) needs to be one of the "
            f"following: {_valid_spaces}"
        )

    # Check resolution; only 1mm is accepted here
    _valid_resolutions = [1]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Format space if MNI; required for the file name
    if space == "MNI152NLin6Asym":
        space = "MNI"

    # Define parcellation and label file names
    parcellation_fname = (
        parcellations_dir / "SUIT" / (f"SUIT_{space}Space_{resolution}mm.nii")
    )
    parcellation_lname = (
        parcellations_dir / "SUIT" / (f"SUIT_{space}Space_{resolution}mm.tsv")
    )

    # Check existence of parcellation; download both files if missing
    if not (parcellation_fname.exists() and parcellation_lname.exists()):
        logger.info(
            "At least one of the parcellation files is missing, fetching."
        )
        # Create local directory if not present
        parcellation_fname.parent.mkdir(exist_ok=True, parents=True)
        # Set URL
        url_basis = (
            "https://github.com/DiedrichsenLab/cerebellar_atlases/raw"
            "/master/Diedrichsen_2009"
        )
        if space == "MNI":
            url = f"{url_basis}/atl-Anatom_space-MNI_dseg.nii"
        else:  # if not MNI, then SUIT
            url = f"{url_basis}/atl-Anatom_space-SUIT_dseg.nii"
        url_labels = f"{url_basis}/atl-Anatom.tsv"

        # Make HTTP requests; follow_redirects needed for GitHub raw URLs
        with httpx.Client(follow_redirects=True) as client:
            # Download parcellation file
            logger.info(f"Downloading SUIT parcellation from {url}")
            try:
                img_resp = client.get(url)
                img_resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                with open(parcellation_fname, "wb") as f:
                    f.write(img_resp.content)
            # Download label file
            logger.info(f"Downloading SUIT labels from {url_labels}")
            try:
                label_resp = client.get(url_labels)
                label_resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                # Load labels and persist only the "name" column locally
                labels = pd.read_csv(
                    io.StringIO(label_resp.content.decode("utf-8")),
                    sep="\t",
                    usecols=["name"],
                )
                labels.to_csv(parcellation_lname, sep="\t", index=False)

    # Load labels from the local (possibly just written) file
    labels = pd.read_csv(parcellation_lname, sep="\t", usecols=["name"])[
        "name"
    ].to_list()

    return parcellation_fname, labels
1074
-
1075
-
1076
def _retrieve_aicha(
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    version: int = 2,
) -> Tuple[Path, List[str]]:
    """Retrieve AICHA parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolution for this
        parcellation is 2mm.
    version : {1, 2}, optional
        The version of the parcellation to use (default 2).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value is provided for ``version``.

    Warns
    -----
    RuntimeWarning
        Until the authors confirm the space, the warning will be issued.

    Notes
    -----
    The resolution of the parcellation is 2mm and although v2 provides
    1mm, it is only for display purpose as noted in the release document.

    """
    # Issue warning until space is confirmed by authors
    warn_with_log(
        "The current space for AICHA parcellations are IXI549Space, but are "
        "not confirmed by authors, until that this warning will be issued."
    )

    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tversion: {version}")

    # Check version
    _valid_version = [1, 2]
    if version not in _valid_version:
        raise_error(
            f"The parameter `version` ({version}) needs to be one of the "
            f"following: {_valid_version}"
        )

    # Check resolution
    # NOTE(review): docstring and Notes say the parcellation is 2mm, yet only
    # 1 is listed as a valid resolution here — confirm the intended value.
    _valid_resolutions = [1]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Define parcellation and label file names
    parcellation_fname = (
        parcellations_dir / f"AICHA_v{version}" / "AICHA" / "AICHA.nii"
    )
    # Label file differs per version; initialized empty to satisfy linters
    parcellation_lname = Path()
    if version == 1:
        parcellation_lname = (
            parcellations_dir
            / f"AICHA_v{version}"
            / "AICHA"
            / "AICHA_vol1.txt"
        )
    elif version == 2:
        parcellation_lname = (
            parcellations_dir
            / f"AICHA_v{version}"
            / "AICHA"
            / "AICHA_vol3.txt"
        )

    # Check existence of parcellation; download and unpack if missing
    if not (parcellation_fname.exists() and parcellation_lname.exists()):
        logger.info(
            "At least one of the parcellation files are missing, fetching."
        )
        # Set file name on server according to version
        server_filename = ""
        if version == 1:
            server_filename = "aicha_v1.zip"
        elif version == 2:
            server_filename = "AICHA_v2.tar.zip"
        # Set URL
        url = f"http://www.gin.cnrs.fr/wp-content/uploads/{server_filename}"

        logger.info(f"Downloading AICHA v{version} from {url}")
        # Store initial download in a tempdir
        with tempfile.TemporaryDirectory() as tmpdir:
            # Make HTTP request
            try:
                resp = httpx.get(url, follow_redirects=True)
                resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                # Set tempfile for storing initial content and unzipping
                parcellation_zip_path = Path(tmpdir) / server_filename
                # Open tempfile and write content
                with open(parcellation_zip_path, "wb") as f:
                    f.write(resp.content)
                # Unzip tempfile
                with zipfile.ZipFile(parcellation_zip_path, "r") as zip_ref:
                    if version == 1:
                        zip_ref.extractall(
                            (parcellations_dir / "AICHA_v1").as_posix()
                        )
                    elif version == 2:
                        # v2 ships as a tar archive inside the zip
                        zip_ref.extractall(Path(tmpdir).as_posix())
                        # Extract tarfile for v2
                        with tarfile.TarFile(
                            Path(tmpdir) / "aicha_v2.tar", "r"
                        ) as tar_ref:
                            tar_ref.extractall(
                                (parcellations_dir / "AICHA_v2").as_posix()
                            )
                # Cleanup after unzipping
                if (
                    parcellations_dir / f"AICHA_v{version}" / "__MACOSX"
                ).exists():
                    shutil.rmtree(
                        (
                            parcellations_dir
                            / f"AICHA_v{version}"
                            / "__MACOSX"
                        ).as_posix()
                    )

    # Load labels; first row of the label file is a header and is skipped
    labels = pd.read_csv(
        parcellation_lname, sep="\t", header=None, skiprows=[0]  # type: ignore
    )[0].to_list()

    return parcellation_fname, labels
1229
-
1230
-
1231
def _retrieve_shen(  # noqa: C901
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    year: int = 2015,
    n_rois: int = 268,
) -> Tuple[Path, List[str]]:
    """Retrieve Shen parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm and 2mm for ``year = 2013`` and ``year = 2015``
        but fixed to 1mm for ``year = 2019``.
    year : {2013, 2015, 2019}, optional
        The year of the parcellation to use (default 2015).
    n_rois : int, optional
        Number of ROIs. Can be ``50, 100, or 150`` for ``year = 2013`` but is
        fixed at ``268`` for ``year = 2015`` and at ``368`` for
        ``year = 2019``.

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value or combination is provided for ``year`` and
        ``n_rois``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tyear: {year}")
    logger.info(f"\tn_rois: {n_rois}")

    # Check resolution
    _valid_resolutions = [1, 2]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Check year value
    _valid_year = (2013, 2015, 2019)
    if year not in _valid_year:
        raise_error(
            f"The parameter `year` ({year}) needs to be one of the "
            f"following: {_valid_year}"
        )

    # Check n_rois value
    _valid_n_rois = [50, 100, 150, 268, 368]
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    # Check combinations
    if resolution == 2 and year == 2019:
        raise_error(
            "The parameter combination `resolution = 2` and `year = 2019` is "
            "invalid"
        )
    # BUGFIX: the three messages below previously interpolated `resolution`
    # although the offending parameter is `n_rois`, producing misleading
    # errors; they now report the actual invalid combination.
    if n_rois in (268, 368) and year == 2013:
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            "`year = 2013` is invalid"
        )
    if n_rois in (50, 100, 150) and year in (2015, 2019):
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            f"`year = {year}` is invalid"
        )
    if (n_rois == 268 and year == 2019) or (n_rois == 368 and year == 2015):
        raise_error(
            f"The parameter combination `n_rois = {n_rois}` and "
            f"`year = {year}` is invalid"
        )

    # Define parcellation and label file names; the label file only exists
    # for the 2013 release, later releases use numeric labels (see below)
    if year == 2013:
        parcellation_fname = (
            parcellations_dir
            / "Shen_2013"
            / "shenetal_neuroimage2013"
            / f"fconn_atlas_{n_rois}_{resolution}mm.nii"
        )
        parcellation_lname = (
            parcellations_dir
            / "Shen_2013"
            / "shenetal_neuroimage2013"
            / f"Group_seg{n_rois}_BAindexing_setA.txt"
        )
    elif year == 2015:
        parcellation_fname = (
            parcellations_dir
            / "Shen_2015"
            / f"shen_{resolution}mm_268_parcellation.nii.gz"
        )
    elif year == 2019:
        parcellation_fname = (
            parcellations_dir
            / "Shen_2019"
            / "Shen_1mm_368_parcellation.nii.gz"
        )

    # Check existence of parcellation; download and unpack if missing
    if not parcellation_fname.exists():
        logger.info(
            "At least one of the parcellation files are missing, fetching."
        )

        # Set URL based on year
        url = ""
        if year == 2013:
            url = "https://www.nitrc.org/frs/download.php/5785/shenetal_neuroimage2013_funcatlas.zip"
        elif year == 2015:
            # Set URL based on resolution
            if resolution == 1:
                url = "https://www.nitrc.org/frs/download.php/7976/shen_1mm_268_parcellation.nii.gz"
            elif resolution == 2:
                url = "https://www.nitrc.org/frs/download.php/7977/shen_2mm_268_parcellation.nii.gz"
        elif year == 2019:
            url = "https://www.nitrc.org/frs/download.php/11629/shen_368.zip"

        logger.info(f"Downloading Shen {year} from {url}")
        # Store initial download in a tempdir
        with tempfile.TemporaryDirectory() as tmpdir:
            # Make HTTP request
            try:
                resp = httpx.get(url)
                resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                # 2013 and 2019 are zip archives; 2015 is a bare NIfTI file
                if year in (2013, 2019):
                    parcellation_zip_path = Path(tmpdir) / f"Shen{year}.zip"
                    # Open tempfile and write content
                    with open(parcellation_zip_path, "wb") as f:
                        f.write(resp.content)
                    # Unzip tempfile
                    with zipfile.ZipFile(
                        parcellation_zip_path, "r"
                    ) as zip_ref:
                        zip_ref.extractall(
                            (parcellations_dir / f"Shen_{year}").as_posix()
                        )
                    # Cleanup after unzipping
                    if (
                        parcellations_dir / f"Shen_{year}" / "__MACOSX"
                    ).exists():
                        shutil.rmtree(
                            (
                                parcellations_dir / f"Shen_{year}" / "__MACOSX"
                            ).as_posix()
                        )
                elif year == 2015:
                    img_dir_path = parcellations_dir / "Shen_2015"
                    # Create local directory if not present
                    img_dir_path.mkdir(parents=True, exist_ok=True)
                    img_path = (
                        img_dir_path
                        / f"shen_{resolution}mm_268_parcellation.nii.gz"
                    )
                    # Create local file if not present
                    img_path.touch(exist_ok=True)
                    # Open tempfile and write content
                    with open(img_path, "wb") as f:
                        f.write(resp.content)

    # Load labels based on year
    if year == 2013:
        labels = (
            pd.read_csv(
                parcellation_lname,  # type: ignore
                sep=",",  # type: ignore
                header=None,  # type: ignore
                skiprows=[0],  # type: ignore
            )[1]
            .map(lambda x: x.strip())  # fix formatting
            .to_list()
        )
    elif year == 2015:
        # NOTE(review): numeric labels (ints) despite the List[str] return
        # annotation — kept as-is since callers may rely on it
        labels = list(range(1, 269))
    elif year == 2019:
        labels = list(range(1, 369))

    return parcellation_fname, labels
1433
-
1434
-
1435
def _retrieve_yan(
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    n_rois: Optional[int] = None,
    yeo_networks: Optional[int] = None,
    kong_networks: Optional[int] = None,
) -> Tuple[Path, List[str]]:
    """Retrieve Yan parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : float, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm and 2mm.
    n_rois : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}, optional
        Granularity of the parcellation to be used (default None).
    yeo_networks : {7, 17}, optional
        Number of Yeo networks to use (default None).
    kong_networks : {17}, optional
        Number of Kong networks to use (default None).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value is provided for ``n_rois``, ``yeo_networks`` or
        ``kong_networks``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tn_rois: {n_rois}")
    logger.info(f"\tyeo_networks: {yeo_networks}")
    logger.info(f"\tkong_networks: {kong_networks}")

    # Allow single network type: exactly one of the two must be given
    if (not yeo_networks and not kong_networks) or (
        yeo_networks and kong_networks
    ):
        raise_error(
            "Either one of `yeo_networks` or `kong_networks` need to be "
            "specified."
        )

    # Check resolution
    _valid_resolutions = [1, 2]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Check n_rois value
    _valid_n_rois = list(range(100, 1001, 100))
    if n_rois not in _valid_n_rois:
        raise_error(
            f"The parameter `n_rois` ({n_rois}) needs to be one of the "
            f"following: {_valid_n_rois}"
        )

    parcellation_fname = Path()
    parcellation_lname = Path()
    if yeo_networks:
        # Check yeo_networks value
        _valid_yeo_networks = [7, 17]
        if yeo_networks not in _valid_yeo_networks:
            raise_error(
                f"The parameter `yeo_networks` ({yeo_networks}) needs to be "
                f"one of the following: {_valid_yeo_networks}"
            )
        # Define image and label file according to network
        parcellation_fname = (
            parcellations_dir
            / "Yan_2023"
            / (
                f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_FSLMNI152_"
                f"{resolution}mm.nii.gz"
            )
        )
        parcellation_lname = (
            parcellations_dir
            / "Yan_2023"
            / f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_LUT.txt"
        )
    elif kong_networks:
        # Check kong_networks value
        _valid_kong_networks = [17]
        if kong_networks not in _valid_kong_networks:
            raise_error(
                f"The parameter `kong_networks` ({kong_networks}) needs to be "
                f"one of the following: {_valid_kong_networks}"
            )
        # Define image and label file according to network
        parcellation_fname = (
            parcellations_dir
            / "Yan_2023"
            / (
                f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_FSLMNI152_"
                f"{resolution}mm.nii.gz"
            )
        )
        parcellation_lname = (
            parcellations_dir
            / "Yan_2023"
            / f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_LUT.txt"
        )

    # Check for existence of parcellation; fetch if either file is missing.
    # BUGFIX: the original condition used `and`, so nothing was downloaded
    # when exactly one of the two files was missing and the later
    # ``pd.read_csv(parcellation_lname)`` would then fail.
    if not (parcellation_fname.exists() and parcellation_lname.exists()):
        logger.info(
            "At least one of the parcellation files are missing, fetching."
        )

        # Set URL based on network
        img_url = ""
        label_url = ""
        if yeo_networks:
            img_url = (
                "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
                "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
                f"parcellations/MNI/yeo{yeo_networks}/{n_rois}Parcels_Yeo2011"
                f"_{yeo_networks}Networks_FSLMNI152_{resolution}mm.nii.gz"
            )
            label_url = (
                "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
                "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
                f"parcellations/MNI/yeo{yeo_networks}/freeview_lut/{n_rois}"
                f"Parcels_Yeo2011_{yeo_networks}Networks_LUT.txt"
            )
        elif kong_networks:
            img_url = (
                "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
                "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
                f"parcellations/MNI/kong17/{n_rois}Parcels_Kong2022"
                f"_17Networks_FSLMNI152_{resolution}mm.nii.gz"
            )
            label_url = (
                "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
                "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
                f"parcellations/MNI/kong17/freeview_lut/{n_rois}Parcels_"
                "Kong2022_17Networks_LUT.txt"
            )

        # Make HTTP requests; reuse one client for both downloads
        with httpx.Client() as client:
            # Download parcellation file
            logger.info(f"Downloading Yan 2023 parcellation from {img_url}")
            try:
                img_resp = client.get(img_url)
                img_resp.raise_for_status()
            except httpx.HTTPError as exc:
                # NOTE(review): ``exc.response`` only exists for
                # httpx.HTTPStatusError; a transport-level error would fail
                # here with a different error — confirm intended behavior.
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                parcellation_img_path = Path(parcellation_fname)
                # Create local directory if not present
                parcellation_img_path.parent.mkdir(parents=True, exist_ok=True)
                # Create local file if not present
                parcellation_img_path.touch(exist_ok=True)
                # Open file and write content
                with open(parcellation_img_path, "wb") as f:
                    f.write(img_resp.content)
            # Download label file
            logger.info(f"Downloading Yan 2023 labels from {label_url}")
            try:
                label_resp = client.get(label_url)
                label_resp.raise_for_status()
            except httpx.HTTPError as exc:
                raise_error(
                    f"Error response {exc.response.status_code} while "
                    f"requesting {exc.request.url!r}",
                    klass=RuntimeError,
                )
            else:
                parcellation_labels_path = Path(parcellation_lname)
                # Create local file if not present
                parcellation_labels_path.touch(exist_ok=True)
                # Open file and write content
                with open(parcellation_labels_path, "wb") as f:
                    f.write(label_resp.content)

    # Load label file (space-separated LUT; column 1 holds the label names)
    labels = pd.read_csv(parcellation_lname, sep=" ", header=None)[1].to_list()

    return parcellation_fname, labels
1633
-
1634
def _retrieve_brainnetome(
    parcellations_dir: Path,
    resolution: Optional[float] = None,
    threshold: Optional[int] = None,
) -> Tuple[Path, List[str]]:
    """Retrieve Brainnetome parcellation.

    Parameters
    ----------
    parcellations_dir : pathlib.Path
        The path to the parcellation data directory.
    resolution : {1.0, 1.25, 2.0}, optional
        The desired resolution of the parcellation to load. If it is not
        available, the closest resolution will be loaded. Preferably, use a
        resolution higher than the desired one. By default, will load the
        highest one (default None). Available resolutions for this
        parcellation are 1mm, 1.25mm and 2mm.
    threshold : {0, 25, 50}, optional
        The threshold for the probabilistic maps of subregion (default None).

    Returns
    -------
    pathlib.Path
        File path to the parcellation image.
    list of str
        Parcellation labels.

    Raises
    ------
    RuntimeError
        If there is a problem fetching files.
    ValueError
        If invalid value is provided for ``threshold``.

    """
    logger.info("Parcellation parameters:")
    logger.info(f"\tresolution: {resolution}")
    logger.info(f"\tthreshold: {threshold}")

    # Snap the requested resolution to the closest available one
    _valid_resolutions = [1.0, 1.25, 2.0]
    resolution = closest_resolution(resolution, _valid_resolutions)

    # Validate threshold
    _valid_threshold = [0, 25, 50]
    if threshold not in _valid_threshold:
        raise_error(
            f"The parameter `threshold` ({threshold}) needs to be one of the "
            f"following: {_valid_threshold}"
        )
    # Integer-valued resolutions appear without a decimal point in the
    # remote and local file names, hence the int() conversion
    if resolution in [1.0, 2.0]:
        resolution = int(resolution)

    parcellation_fname = (
        parcellations_dir
        / "BNA246"
        / f"BNA-maxprob-thr{threshold}-{resolution}mm.nii.gz"
    )

    # Fetch the image only when it is not already cached locally
    if not parcellation_fname.exists():
        url = (
            "http://neurovault.org/media/images/1625/"
            f"BNA-maxprob-thr{threshold}-{resolution}mm.nii.gz"
        )

        logger.info(f"Downloading Brainnetome from {url}")
        # Make HTTP request
        try:
            resp = httpx.get(url, follow_redirects=True)
            resp.raise_for_status()
        except httpx.HTTPError as exc:
            raise_error(
                f"Error response {exc.response.status_code} while "
                f"requesting {exc.request.url!r}",
                klass=RuntimeError,
            )
        else:
            # Ensure the target directory and file exist, then store bytes
            parcellation_fname.parent.mkdir(parents=True, exist_ok=True)
            parcellation_fname.touch(exist_ok=True)
            parcellation_fname.write_bytes(resp.content)

    # Build labels from a (region, subregion count) table. Each subregion
    # label appears twice (left and right hemisphere), hence the `* 2`
    # followed by sort to interleave the duplicates.
    _region_spec = [
        ("SFG", 7),
        ("MFG", 7),
        ("IFG", 6),
        ("OrG", 6),
        ("PrG", 6),
        ("PCL", 2),
        ("STG", 6),
        ("MTG", 4),
        ("ITG", 7),
        ("FuG", 3),
        ("PhG", 6),
        ("pSTS", 2),
        ("SPL", 5),
        ("IPL", 6),
        ("PCun", 4),
        ("PoG", 4),
        ("INS", 6),
        ("CG", 7),
        ("MVOcC ", 5),  # trailing space kept to match the original labels
        ("LOcC", 4),
        ("LOcC", 2),
        ("Amyg", 2),
        ("Hipp", 2),
        ("BG", 6),
        ("Tha", 8),
    ]
    labels: List[str] = []
    for region, n_sub in _region_spec:
        labels.extend(
            sorted(
                [f"{region}_L(R)_{n_sub}_{i}" for i in range(1, n_sub + 1)]
                * 2
            )
        )

    return parcellation_fname, labels
1749
-
1750
def merge_parcellations(
    parcellations_list: List["Nifti1Image"],
    parcellations_names: List[str],
    labels_lists: List[List[str]],
) -> Tuple["Nifti1Image", List[str]]:
    """Merge all parcellations from a list into one parcellation.

    Parameters
    ----------
    parcellations_list : list of niimg-like object
        List of parcellations to merge.
    parcellations_names: list of str
        List of names for parcellations at the corresponding indices.
    labels_lists : list of list of str
        A list of lists. Each list in the list contains the labels for the
        parcellation at the corresponding index. The input lists are not
        modified.

    Returns
    -------
    parcellation : niimg-like object
        The parcellation that results from merging the list of input
        parcellations.
    labels : list of str
        List of labels for the resultant parcellation.

    """
    # Work on shallow copies so the caller's label lists are never mutated
    # (BUGFIX: the original prefixed and extended the input lists in place).
    labels_lists = [list(t_labels) for t_labels in labels_lists]

    # Check for duplicated labels
    labels_lists_flat = [item for sublist in labels_lists for item in sublist]
    if len(labels_lists_flat) != len(set(labels_lists_flat)):
        warn_with_log(
            "The parcellations have duplicated labels. "
            "Each label will be prefixed with the parcellation name."
        )
        for i_parcellation, t_labels in enumerate(labels_lists):
            labels_lists[i_parcellation] = [
                f"{parcellations_names[i_parcellation]}_{t_label}"
                for t_label in t_labels
            ]
    overlapping_voxels = False
    ref_parc = parcellations_list[0]
    # Copy so the first input image's data is not modified in place
    # (BUGFIX: nibabel's ``get_fdata()`` returns a cached array, so the
    # original in-place accumulation corrupted the caller's image).
    parc_data = ref_parc.get_fdata().copy()

    labels = labels_lists[0]

    for t_parc, t_labels in zip(parcellations_list[1:], labels_lists[1:]):
        if t_parc.shape != ref_parc.shape:
            warn_with_log(
                "The parcellations have different resolutions!"
                "Resampling all parcellations to the first one in the list."
            )
            t_parc = image.resample_to_img(
                t_parc, ref_parc, interpolation="nearest", copy=True
            )

        # Get the data from this parcellation
        t_parc_data = t_parc.get_fdata().copy()  # must be copied
        # Increase the values of each ROI to match the labels
        t_parc_data[t_parc_data != 0] += len(labels)

        # Only set new values for the voxels that are 0
        # This makes sure that the voxels that are in multiple
        # parcellations are assigned to the parcellation that was
        # first in the list.
        if np.any(parc_data[t_parc_data != 0] != 0):
            overlapping_voxels = True

        parc_data[parc_data == 0] += t_parc_data[parc_data == 0]
        labels.extend(t_labels)

    if overlapping_voxels:
        warn_with_log(
            "The parcellations have overlapping voxels. "
            "The overlapping voxels will be assigned to the "
            "parcellation that was first in the list."
        )

    parcellation_img_res = image.new_img_like(parcellations_list[0], parc_data)

    return parcellation_img_res, labels