junifer 0.0.6.dev418__py3-none-any.whl → 0.0.6.dev445__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- junifer/_version.py +2 -2
- junifer/cli/tests/test_cli_utils.py +0 -2
- junifer/data/coordinates/_coordinates.py +128 -105
- junifer/data/coordinates/tests/test_coordinates.py +1 -2
- junifer/data/masks/_masks.py +81 -59
- junifer/data/masks/tests/test_masks.py +5 -2
- junifer/data/parcellations/_parcellations.py +297 -678
- junifer/data/parcellations/tests/test_parcellations.py +82 -211
- junifer/data/template_spaces.py +15 -87
- junifer/data/utils.py +103 -2
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/METADATA +1 -2
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/RECORD +17 -17
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/WHEEL +0 -0
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.6.dev418.dist-info → junifer-0.0.6.dev445.dist-info}/top_level.txt +0 -0
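Most of the changes in junifer/data replace ad-hoc HTTP and nilearn downloads with retrieval from the junifer-data DataLad dataset, via the check_dataset and fetch_file_via_datalad helpers added to junifer/data/utils.py. The following minimal sketch shows the retrieval pattern that the hunks below introduce; the helper signatures and behaviour are inferred from how they are called in this diff, not from separate documentation.

from junifer.data.utils import check_dataset, fetch_file_via_datalad

# Get a handle on the junifer-data DataLad dataset
# (behaviour inferred from the diff: called without arguments).
dataset = check_dataset()

# Fetch a single file from the dataset; the helper returns a local path
# once DataLad has retrieved the file content.
parcellation_img_path = fetch_file_via_datalad(
    dataset=dataset,
    file_path=dataset.pathobj
    / "parcellations"
    / "SUIT"
    / "SUIT_MNISpace_1mm.nii",
)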
@@ -5,31 +5,30 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-import io
-import shutil
-import tarfile
-import tempfile
-import zipfile
 from itertools import product
 from pathlib import Path
 from typing import TYPE_CHECKING, Any, Optional, Union

-import httpx
 import nibabel as nib
 import nilearn.image as nimg
 import numpy as np
 import pandas as pd
-from nilearn import datasets

 from ...utils import logger, raise_error, warn_with_log
 from ...utils.singleton import Singleton
 from ..pipeline_data_registry_base import BasePipelineDataRegistry
-from ..utils import
+from ..utils import (
+    check_dataset,
+    closest_resolution,
+    fetch_file_via_datalad,
+    get_native_warper,
+)
 from ._ants_parcellation_warper import ANTsParcellationWarper
 from ._fsl_parcellation_warper import FSLParcellationWarper


 if TYPE_CHECKING:
+    from datalad.api import Dataset
     from nibabel.nifti1 import Nifti1Image


@@ -49,6 +48,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):

     def __init__(self) -> None:
         """Initialize the class."""
+        super().__init__()
         # Each entry in registry is a dictionary that must contain at least
         # the following keys:
         # * 'family': the parcellation's family name (e.g., 'Schaefer', 'SUIT')
@@ -56,6 +56,8 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         # and can also have optional key(s):
         # * 'valid_resolutions': a list of valid resolutions for the
         # parcellation (e.g., [1, 2])
+        # The built-in coordinates are files that are shipped with the
+        # junifer-data dataset.
         # Make built-in and external dictionaries for validation later
         self._builtin = {}
         self._external = {}
@@ -63,8 +65,14 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         # Add SUIT
         self._builtin.update(
             {
-                "SUITxSUIT": {
-
+                "SUITxSUIT": {
+                    "family": "SUIT",
+                    "space": "SUIT",
+                },
+                "SUITxMNI": {
+                    "family": "SUIT",
+                    "space": "MNI152NLin6Asym",
+                },
             }
         )
         # Add Schaefer
@@ -72,7 +80,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
             self._builtin.update(
                 {
                     f"Schaefer{n_rois}x{t_net}": {
-                        "family": "
+                        "family": "Schaefer2018",
                         "n_rois": n_rois,
                         "yeo_networks": t_net,
                         "space": "MNI152NLin6Asym",
@@ -84,19 +92,19 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
             self._builtin.update(
                 {
                     f"TianxS{scale}x7TxMNI6thgeneration": {
-                        "family": "
+                        "family": "Melbourne",
                         "scale": scale,
                         "magneticfield": "7T",
                         "space": "MNI152NLin6Asym",
                     },
                     f"TianxS{scale}x3TxMNI6thgeneration": {
-                        "family": "
+                        "family": "Melbourne",
                         "scale": scale,
                         "magneticfield": "3T",
                         "space": "MNI152NLin6Asym",
                     },
                     f"TianxS{scale}x3TxMNInonlinear2009cAsym": {
-                        "family": "
+                        "family": "Melbourne",
                         "scale": scale,
                         "magneticfield": "3T",
                         "space": "MNI152NLin2009cAsym",
@@ -155,7 +163,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
             self._builtin.update(
                 {
                     f"Yan{n_rois}xYeo{yeo_network}": {
-                        "family": "
+                        "family": "Yan2023",
                         "n_rois": n_rois,
                         "yeo_networks": yeo_network,
                         "space": "MNI152NLin6Asym",
@@ -165,7 +173,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
             self._builtin.update(
                 {
                     f"Yan{n_rois}xKong17": {
-                        "family": "
+                        "family": "Yan2023",
                         "n_rois": n_rois,
                         "kong_networks": 17,
                         "space": "MNI152NLin6Asym",
@@ -184,8 +192,8 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
             }
         )

-        #
-        self._registry
+        # Update registry with built-in ones
+        self._registry.update(self._builtin)

     def register(
         self,
@@ -214,20 +222,21 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         Raises
         ------
         ValueError
-            If the parcellation ``name`` is
-            ``
-
+            If the parcellation ``name`` is a built-in parcellation or
+            if the parcellation ``name`` is already registered and
+            ``overwrite=False``.

         """
         # Check for attempt of overwriting built-in parcellations
         if name in self._builtin:
+            raise_error(
+                f"Parcellation: {name} already registered as "
+                "built-in parcellation."
+            )
+        # Check for attempt of overwriting external parcellations
+        if name in self._external:
             if overwrite:
                 logger.info(f"Overwriting parcellation: {name}")
-                if self._registry[name]["family"] != "CustomUserParcellation":
-                    raise_error(
-                        f"Parcellation: {name} already registered as "
-                        "built-in parcellation."
-                    )
             else:
                 raise_error(
                     f"Parcellation: {name} already registered. Set "
@@ -236,6 +245,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         # Convert str to Path
         if not isinstance(parcellation_path, Path):
             parcellation_path = Path(parcellation_path)
+        # Registration
         logger.info(f"Registering parcellation: {name}")
         # Add user parcellation info
         self._external[name] = {
@@ -271,24 +281,17 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         self,
         name: str,
         target_space: str,
-        parcellations_dir: Union[str, Path, None] = None,
         resolution: Optional[float] = None,
         path_only: bool = False,
     ) -> tuple[Optional["Nifti1Image"], list[str], Path, str]:
         """Load parcellation and labels.

-        If it is a built-in parcellation and the file is not present in the
-        ``parcellations_dir`` directory, it will be downloaded.
-
         Parameters
         ----------
         name : str
             The name of the parcellation.
         target_space : str
             The desired space of the parcellation.
-        parcellations_dir : str or pathlib.Path, optional
-            Path where the parcellations files are stored. The default location
-            is "$HOME/junifer/data/parcellations" (default None).
         resolution : float, optional
             The desired resolution of the parcellation to load. If it is not
             available, the closest resolution will be loaded. Preferably, use a
@@ -312,6 +315,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         ------
         ValueError
             If ``name`` is invalid or
+            if the parcellation family is invalid or
             if the parcellation values and labels
             don't have equal dimension or if the value range is invalid.

@@ -327,7 +331,7 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         parcellation_definition = self._registry[name].copy()
         t_family = parcellation_definition.pop("family")
         # Remove space conditionally
-        if t_family not in ["SUIT", "
+        if t_family not in ["SUIT", "Melbourne"]:
             space = parcellation_definition.pop("space")
         else:
             space = parcellation_definition["space"]
@@ -342,15 +346,66 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):

         # Check if the parcellation family is custom or built-in
         if t_family == "CustomUserParcellation":
-            parcellation_fname =
+            parcellation_fname = parcellation_definition["path"]
             parcellation_labels = parcellation_definition["labels"]
+        elif t_family in [
+            "Schaefer2018",
+            "SUIT",
+            "Melbourne",
+            "AICHA",
+            "Shen",
+            "Yan2023",
+            "Brainnetome",
+        ]:
+            # Get dataset
+            dataset = check_dataset()
+            # Load parcellation and labels
+            if t_family == "Schaefer2018":
+                parcellation_fname, parcellation_labels = _retrieve_schaefer(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "SUIT":
+                parcellation_fname, parcellation_labels = _retrieve_suit(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "Melbourne":
+                parcellation_fname, parcellation_labels = _retrieve_tian(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "AICHA":
+                parcellation_fname, parcellation_labels = _retrieve_aicha(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "Shen":
+                parcellation_fname, parcellation_labels = _retrieve_shen(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "Yan2023":
+                parcellation_fname, parcellation_labels = _retrieve_yan(
+                    dataset=dataset,
+                    resolution=resolution,
+                    **parcellation_definition,
+                )
+            elif t_family == "Brainnetome":
+                parcellation_fname, parcellation_labels = (
+                    _retrieve_brainnetome(
+                        dataset=dataset,
+                        resolution=resolution,
+                        **parcellation_definition,
+                    )
+                )
         else:
-
-                family=t_family,
-                parcellations_dir=parcellations_dir,
-                resolution=resolution,
-                **parcellation_definition,
-            )
+            raise_error(f"Unknown parcellation family: {t_family}")

         # Load parcellation image and values
         logger.info(f"Loading parcellation: {parcellation_fname.absolute()!s}")
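The new loading path selects the retrieval helper through the if/elif chain shown above. A functionally equivalent formulation — shown here only as an illustrative sketch, not as what the package does — would be a dispatch table keyed by family name, with the same retrieval functions and keyword arguments as in the diff:

# Hypothetical alternative to the elif chain above: a dispatch table.
_RETRIEVERS = {
    "Schaefer2018": _retrieve_schaefer,
    "SUIT": _retrieve_suit,
    "Melbourne": _retrieve_tian,
    "AICHA": _retrieve_aicha,
    "Shen": _retrieve_shen,
    "Yan2023": _retrieve_yan,
    "Brainnetome": _retrieve_brainnetome,
}

retriever = _RETRIEVERS.get(t_family)
if retriever is None:
    raise_error(f"Unknown parcellation family: {t_family}")
parcellation_fname, parcellation_labels = retriever(
    dataset=dataset,
    resolution=resolution,
    **parcellation_definition,
)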
@@ -529,152 +584,8 @@ class ParcellationRegistry(BasePipelineDataRegistry, metaclass=Singleton):
         return resampled_parcellation_img, labels


-def _retrieve_parcellation(
-    family: str,
-    parcellations_dir: Union[str, Path, None] = None,
-    resolution: Optional[float] = None,
-    **kwargs,
-) -> tuple[Path, list[str]]:
-    """Retrieve a brain parcellation object from nilearn or online source.
-
-    Only returns one parcellation per call. Call function multiple times for
-    different parameter specifications. Only retrieves parcellation if it is
-    not yet in parcellations_dir.
-
-    Parameters
-    ----------
-    family : {"Schaefer", "SUIT", "Tian", "AICHA", "Shen", "Yan"}
-        The name of the parcellation family.
-    parcellations_dir : str or pathlib.Path, optional
-        Path where the retrieved parcellations file are stored. The default
-        location is "$HOME/junifer/data/parcellations" (default None).
-    resolution : float, optional
-        The desired resolution of the parcellation to load. If it is not
-        available, the closest resolution will be loaded. Preferably, use a
-        resolution higher than the desired one. By default, will load the
-        highest one (default None).
-    **kwargs
-        Use to specify parcellation-specific keyword arguments found in the
-        following section.
-
-    Other Parameters
-    ----------------
-    * Schaefer :
-        ``n_rois`` : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}
-            Granularity of parcellation to be used.
-        ``yeo_network`` : {7, 17}, optional
-            Number of Yeo networks to use (default 7).
-    * Tian :
-        ``scale`` : {1, 2, 3, 4}
-            Scale of parcellation (defines granularity).
-        ``space`` : {"MNI152NLin6Asym", "MNI152NLin2009cAsym"}, optional
-            Space of parcellation (default "MNI152NLin6Asym"). (For more
-            information see https://github.com/yetianmed/subcortex)
-        ``magneticfield`` : {"3T", "7T"}, optional
-            Magnetic field (default "3T").
-    * SUIT :
-        ``space`` : {"MNI152NLin6Asym", "SUIT"}, optional
-            Space of parcellation (default "MNI"). (For more information
-            see http://www.diedrichsenlab.org/imaging/suit.htm).
-    * AICHA :
-        ``version`` : {1, 2}, optional
-            Version of parcellation (default 2).
-    * Shen :
-        ``year`` : {2013, 2015, 2019}, optional
-            Year of the parcellation to use (default 2015).
-        ``n_rois`` : int, optional
-            Number of ROIs to use. Can be ``50, 100, or 150`` for
-            ``year = 2013`` but is fixed at ``268`` for ``year = 2015`` and at
-            ``368`` for ``year = 2019``.
-    * Yan :
-        ``n_rois`` : {100, 200, 300, 400, 500, 600, 700, 800, 900, 1000}
-            Granularity of the parcellation to be used.
-        ``yeo_networks`` : {7, 17}, optional
-            Number of Yeo networks to use (default None).
-        ``kong_networks`` : {17}, optional
-            Number of Kong networks to use (default None).
-    * Brainnetome :
-        ``threshold`` : {0, 25, 50}
-            Threshold for the probabilistic maps of subregion.
-
-    Returns
-    -------
-    pathlib.Path
-        File path to the parcellation image.
-    list of str
-        Parcellation labels.
-
-    Raises
-    ------
-    ValueError
-        If the parcellation's name is invalid.
-
-    """
-    if parcellations_dir is None:
-        parcellations_dir = (
-            Path().home() / "junifer" / "data" / "parcellations"
-        )
-        # Create default junifer data directory if not present
-        parcellations_dir.mkdir(exist_ok=True, parents=True)
-    # Convert str to Path
-    elif not isinstance(parcellations_dir, Path):
-        parcellations_dir = Path(parcellations_dir)
-
-    logger.info(f"Fetching one of {family} parcellations.")
-
-    # Retrieval details per family
-    if family == "Schaefer":
-        parcellation_fname, parcellation_labels = _retrieve_schaefer(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "SUIT":
-        parcellation_fname, parcellation_labels = _retrieve_suit(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "Tian":
-        parcellation_fname, parcellation_labels = _retrieve_tian(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "AICHA":
-        parcellation_fname, parcellation_labels = _retrieve_aicha(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "Shen":
-        parcellation_fname, parcellation_labels = _retrieve_shen(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "Yan":
-        parcellation_fname, parcellation_labels = _retrieve_yan(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    elif family == "Brainnetome":
-        parcellation_fname, parcellation_labels = _retrieve_brainnetome(
-            parcellations_dir=parcellations_dir,
-            resolution=resolution,
-            **kwargs,
-        )
-    else:
-        raise_error(
-            f"The provided parcellation name {family} cannot be retrieved."
-        )
-
-    return parcellation_fname, parcellation_labels
-
-
 def _retrieve_schaefer(
-
+    dataset: "Dataset",
     resolution: Optional[float] = None,
     n_rois: Optional[int] = None,
     yeo_networks: int = 7,
@@ -683,8 +594,8 @@ def _retrieve_schaefer(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : float, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -706,8 +617,7 @@ def _retrieve_schaefer(
     Raises
     ------
     ValueError
-        If invalid value is provided for ``n_rois`` or ``yeo_networks
-        there is a problem fetching the parcellation.
+        If invalid value is provided for ``n_rois`` or ``yeo_networks``.

     """
     logger.info("Parcellation parameters:")
@@ -735,47 +645,40 @@ def _retrieve_schaefer(
     _valid_resolutions = [1, 2]
     resolution = closest_resolution(resolution, _valid_resolutions)

-    #
-
-
-
+    # Fetch file paths
+    parcellation_img_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "Schaefer2018"
+        / "Yeo2011"
         / (
             f"Schaefer2018_{n_rois}Parcels_{yeo_networks}Networks_order_"
             f"FSLMNI152_{resolution}mm.nii.gz"
-        )
+        ),
     )
-
-
-
-        /
+    parcellation_label_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "Schaefer2018"
+        / "Yeo2011"
+        / (f"Schaefer2018_{n_rois}Parcels_{yeo_networks}Networks_order.txt"),
     )

-    # Check existence of parcellation
-    if not (parcellation_fname.exists() and parcellation_lname.exists()):
-        logger.info(
-            "At least one of the parcellation files are missing. "
-            "Fetching using nilearn."
-        )
-        datasets.fetch_atlas_schaefer_2018(
-            n_rois=n_rois,
-            yeo_networks=yeo_networks,
-            resolution_mm=resolution,  # type: ignore we know it's 1 or 2
-            data_dir=parcellations_dir.resolve(),
-        )
-
     # Load labels
     labels = [
         "_".join(x.split("_")[1:])
-        for x in pd.read_csv(
+        for x in pd.read_csv(parcellation_label_path, sep="\t", header=None)
         .iloc[:, 1]
         .to_list()
     ]

-    return
+    return parcellation_img_path, labels


 def _retrieve_tian(
-
+    dataset: "Dataset",
     resolution: Optional[float] = None,
     scale: Optional[int] = None,
     space: str = "MNI152NLin6Asym",
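The Schaefer labels are now read from the tab-separated order file shipped in the dataset: the second column holds the full label and the leading network-count token is stripped with "_".join(x.split("_")[1:]). A small illustration of that transformation (the example label only illustrates the assumed file format, it is not copied from the file):

raw_label = "7Networks_LH_Vis_1"  # assumed shape of a column-1 entry
clean_label = "_".join(raw_label.split("_")[1:])
assert clean_label == "LH_Vis_1"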
@@ -785,8 +688,8 @@ def _retrieve_tian(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : float, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -810,8 +713,6 @@ def _retrieve_tian(

     Raises
     ------
-    RuntimeError
-        If there is a problem fetching files.
     ValueError
         If invalid value is provided for ``scale`` or ``magneticfield`` or
         ``space``.
@@ -832,13 +733,10 @@ def _retrieve_tian(
         )

     # Check resolution
-    _valid_resolutions = []  # avoid pylance error
     if magneticfield == "3T":
         _valid_spaces = ["MNI152NLin6Asym", "MNI152NLin2009cAsym"]
-        if space
+        if space in _valid_spaces:
             _valid_resolutions = [1, 2]
-        elif space == "MNI152NLin2009cAsym":
-            _valid_resolutions = [2]
         else:
             raise_error(
                 f"The parameter `space` ({space}) for 3T needs to be one of "
@@ -858,100 +756,76 @@ def _retrieve_tian(
         )
     resolution = closest_resolution(resolution, _valid_resolutions)

-    #
+    # Fetch file paths
     if magneticfield == "3T":
         parcellation_fname_base_3T = (
-
-
-
-
+            dataset.pathobj
+            / "parcellations"
+            / "Melbourne"
+            / "v1.4"
+            / "3T"
+            / "Subcortex-Only"
         )
         if space == "MNI152NLin6Asym":
-            parcellation_fname = parcellation_fname_base_3T / (
-                f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz"
-            )
             if resolution == 1:
                 parcellation_fname = (
                     parcellation_fname_base_3T
                     / f"Tian_Subcortex_S{scale}_{magneticfield}_1mm.nii.gz"
                 )
+            else:
+                parcellation_fname = parcellation_fname_base_3T / (
+                    f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz"
+                )
         elif space == "MNI152NLin2009cAsym":
             space = "2009cAsym"
-
-
-
-
-
-
+            if resolution == 1:
+                parcellation_fname = parcellation_fname_base_3T / (
+                    f"Tian_Subcortex_S{scale}_{magneticfield}_{space}_1mm.nii.gz"
+                )
+            else:
+                parcellation_fname = parcellation_fname_base_3T / (
+                    f"Tian_Subcortex_S{scale}_{magneticfield}_{space}.nii.gz"
+                )
+
+        parcellation_img_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=parcellation_fname,
         )
-
-
-
-        / "
-
-
+        parcellation_label_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=parcellation_fname_base_3T
+            / f"Tian_Subcortex_S{scale}_3T_label.txt",
+        )
+        # Load labels
+        labels = pd.read_csv(parcellation_label_path, sep=" ", header=None)[
+            0
+        ].to_list()
+    elif magneticfield == "7T":
+        parcellation_img_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
+            / "Melbourne"
+            / "v1.4"
+            / "7T"
+            / f"Tian_Subcortex_S{scale}_{magneticfield}.nii.gz",
         )
         # define 7T labels (b/c currently no labels file available for 7T)
         scale7Trois = {1: 16, 2: 34, 3: 54, 4: 62}
         labels = [
             ("parcel_" + str(x)) for x in np.arange(1, scale7Trois[scale] + 1)
         ]
-        parcellation_lname = parcellation_fname_base_7T / (
-            f"Tian_Subcortex_S{scale}_7T_labelnumbering.txt"
-        )
-        with open(parcellation_lname, "w") as filehandle:
-            for listitem in labels:
-                filehandle.write(f"{listitem}\n")
         logger.info(
             "Currently there are no labels provided for the 7T Tian "
             "parcellation. A simple numbering scheme for distinction was "
             "therefore used."
         )

-
-    if not (parcellation_fname.exists() and parcellation_lname.exists()):
-        logger.info(
-            "At least one of the parcellation files are missing, fetching."
-        )
-        # Set URL
-        url = (
-            "https://www.nitrc.org/frs/download.php/12012/Tian2020MSA_v1.1.zip"
-        )
-
-        logger.info(f"Downloading TIAN from {url}")
-        # Store initial download in a tempdir
-        with tempfile.TemporaryDirectory() as tmpdir:
-            # Make HTTP request
-            try:
-                resp = httpx.get(url)
-                resp.raise_for_status()
-            except httpx.HTTPError as exc:
-                raise_error(
-                    f"Error response {exc.response.status_code} while "
-                    f"requesting {exc.request.url!r}",
-                    klass=RuntimeError,
-                )
-            else:
-                # Set tempfile for storing initial content and unzipping
-                zip_fname = Path(tmpdir) / "Tian2020MSA_v1.1.zip"
-                # Open tempfile and write content
-                with open(zip_fname, "wb") as f:
-                    f.write(resp.content)
-                # Unzip tempfile
-                with zipfile.ZipFile(zip_fname, "r") as zip_ref:
-                    zip_ref.extractall(parcellations_dir.as_posix())
-                # Clean after unzipping
-                if (parcellations_dir / "__MACOSX").exists():
-                    shutil.rmtree((parcellations_dir / "__MACOSX").as_posix())
-
-    # Load labels
-    labels = pd.read_csv(parcellation_lname, sep=" ", header=None)[0].to_list()
-
-    return parcellation_fname, labels
+    return parcellation_img_path, labels


 def _retrieve_suit(
-
+    dataset: "Dataset",
     resolution: Optional[float],
     space: str = "MNI152NLin6Asym",
 ) -> tuple[Path, list[str]]:
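For the 7T Melbourne (Tian) parcellation no label file is available, so _retrieve_tian synthesizes labels from the known ROI count per scale, as the hunk above shows. For example, scale 2 has 34 ROIs:

import numpy as np

scale7Trois = {1: 16, 2: 34, 3: 54, 4: 62}
scale = 2
labels = [("parcel_" + str(x)) for x in np.arange(1, scale7Trois[scale] + 1)]
# -> ["parcel_1", "parcel_2", ..., "parcel_34"]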
@@ -959,8 +833,8 @@ def _retrieve_suit(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : float, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -980,8 +854,6 @@ def _retrieve_suit(

     Raises
     ------
-    RuntimeError
-        If there is a problem fetching files.
     ValueError
         If invalid value is provided for ``space``.

@@ -1006,78 +878,32 @@ def _retrieve_suit(
     if space == "MNI152NLin6Asym":
         space = "MNI"

-    #
-
-
+    # Fetch file paths
+    parcellation_img_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "SUIT"
+        / f"SUIT_{space}Space_{resolution}mm.nii",
     )
-
-
+    parcellation_label_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "SUIT"
+        / f"SUIT_{space}Space_{resolution}mm.tsv",
     )

-    # Check existence of parcellation
-    if not (parcellation_fname.exists() and parcellation_lname.exists()):
-        logger.info(
-            "At least one of the parcellation files is missing, fetching."
-        )
-        # Create local directory if not present
-        parcellation_fname.parent.mkdir(exist_ok=True, parents=True)
-        # Set URL
-        url_basis = (
-            "https://github.com/DiedrichsenLab/cerebellar_atlases/raw"
-            "/master/Diedrichsen_2009"
-        )
-        if space == "MNI":
-            url = f"{url_basis}/atl-Anatom_space-MNI_dseg.nii"
-        else:  # if not MNI, then SUIT
-            url = f"{url_basis}/atl-Anatom_space-SUIT_dseg.nii"
-        url_labels = f"{url_basis}/atl-Anatom.tsv"
-
-        # Make HTTP requests
-        with httpx.Client(follow_redirects=True) as client:
-            # Download parcellation file
-            logger.info(f"Downloading SUIT parcellation from {url}")
-            try:
-                img_resp = client.get(url)
-                img_resp.raise_for_status()
-            except httpx.HTTPError as exc:
-                raise_error(
-                    f"Error response {exc.response.status_code} while "
-                    f"requesting {exc.request.url!r}",
-                    klass=RuntimeError,
-                )
-            else:
-                with open(parcellation_fname, "wb") as f:
-                    f.write(img_resp.content)
-            # Download label file
-            logger.info(f"Downloading SUIT labels from {url_labels}")
-            try:
-                label_resp = client.get(url_labels)
-                label_resp.raise_for_status()
-            except httpx.HTTPError as exc:
-                raise_error(
-                    f"Error response {exc.response.status_code} while "
-                    f"requesting {exc.request.url!r}",
-                    klass=RuntimeError,
-                )
-            else:
-                # Load labels
-                labels = pd.read_csv(
-                    io.StringIO(label_resp.content.decode("utf-8")),
-                    sep="\t",
-                    usecols=["name"],
-                )
-                labels.to_csv(parcellation_lname, sep="\t", index=False)
-
     # Load labels
-    labels = pd.read_csv(
+    labels = pd.read_csv(parcellation_label_path, sep="\t", usecols=["name"])[
         "name"
     ].to_list()

-    return
+    return parcellation_img_path, labels


 def _retrieve_aicha(
-
+    dataset: "Dataset",
     resolution: Optional[float] = None,
     version: int = 2,
 ) -> tuple[Path, list[str]]:
@@ -1085,8 +911,8 @@ def _retrieve_aicha(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : float, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -1105,8 +931,6 @@ def _retrieve_aicha(

     Raises
     ------
-    RuntimeError
-        If there is a problem fetching files.
     ValueError
         If invalid value is provided for ``version``.

@@ -1143,99 +967,48 @@ def _retrieve_aicha(
     _valid_resolutions = [1]
     resolution = closest_resolution(resolution, _valid_resolutions)

-    #
-
-
+    # Fetch file paths
+    parcellation_img_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "AICHA"
+        / f"v{version}"
+        / "AICHA.nii",
     )
-
+    # Conditional label file fetch
     if version == 1:
-
-
-
+        parcellation_label_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
             / "AICHA"
-            / "
+            / f"v{version}"
+            / "AICHA_vol1.txt",
         )
     elif version == 2:
-
-
-
+        parcellation_label_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
             / "AICHA"
-            / "
+            / f"v{version}"
+            / "AICHA_vol3.txt",
         )

-    # Check existence of parcellation
-    if not (parcellation_fname.exists() and parcellation_lname.exists()):
-        logger.info(
-            "At least one of the parcellation files are missing, fetching."
-        )
-        # Set file name on server according to version
-        server_filename = ""
-        if version == 1:
-            server_filename = "aicha_v1.zip"
-        elif version == 2:
-            server_filename = "AICHA_v2.tar.zip"
-        # Set URL
-        url = f"http://www.gin.cnrs.fr/wp-content/uploads/{server_filename}"
-
-        logger.info(f"Downloading AICHA v{version} from {url}")
-        # Store initial download in a tempdir
-        with tempfile.TemporaryDirectory() as tmpdir:
-            # Make HTTP request
-            try:
-                resp = httpx.get(url, follow_redirects=True)
-                resp.raise_for_status()
-            except httpx.HTTPError as exc:
-                raise_error(
-                    f"Error response {exc.response.status_code} while "
-                    f"requesting {exc.request.url!r}",
-                    klass=RuntimeError,
-                )
-            else:
-                # Set tempfile for storing initial content and unzipping
-                parcellation_zip_path = Path(tmpdir) / server_filename
-                # Open tempfile and write content
-                with open(parcellation_zip_path, "wb") as f:
-                    f.write(resp.content)
-                # Unzip tempfile
-                with zipfile.ZipFile(parcellation_zip_path, "r") as zip_ref:
-                    if version == 1:
-                        zip_ref.extractall(
-                            (parcellations_dir / "AICHA_v1").as_posix()
-                        )
-                    elif version == 2:
-                        zip_ref.extractall(Path(tmpdir).as_posix())
-                        # Extract tarfile for v2
-                        with tarfile.TarFile(
-                            Path(tmpdir) / "aicha_v2.tar", "r"
-                        ) as tar_ref:
-                            tar_ref.extractall(
-                                (parcellations_dir / "AICHA_v2").as_posix()
-                            )
-                # Cleanup after unzipping
-                if (
-                    parcellations_dir / f"AICHA_v{version}" / "__MACOSX"
-                ).exists():
-                    shutil.rmtree(
-                        (
-                            parcellations_dir
-                            / f"AICHA_v{version}"
-                            / "__MACOSX"
-                        ).as_posix()
-                    )
-
     # Load labels
     labels = pd.read_csv(
-
+        parcellation_label_path,
         sep="\t",
         header=None,
-        skiprows=[0],
+        skiprows=[0],
     )[0].to_list()

-    return
+    return parcellation_img_path, labels


-def _retrieve_shen(
-
+def _retrieve_shen(
+    dataset: "Dataset",
     resolution: Optional[float] = None,
     year: int = 2015,
     n_rois: int = 268,
|
|
1244
1017
|
|
1245
1018
|
Parameters
|
1246
1019
|
----------
|
1247
|
-
|
1248
|
-
The
|
1020
|
+
dataset : datalad.api.Dataset
|
1021
|
+
The datalad dataset to fetch parcellation from.
|
1249
1022
|
resolution : float, optional
|
1250
1023
|
The desired resolution of the parcellation to load. If it is not
|
1251
1024
|
available, the closest resolution will be loaded. Preferably, use a
|
@@ -1269,8 +1042,6 @@ def _retrieve_shen( # noqa: C901
|
|
1269
1042
|
|
1270
1043
|
Raises
|
1271
1044
|
------
|
1272
|
-
RuntimeError
|
1273
|
-
If there is a problem fetching files.
|
1274
1045
|
ValueError
|
1275
1046
|
If invalid value or combination is provided for ``year`` and
|
1276
1047
|
``n_rois``.
|
@@ -1323,123 +1094,60 @@ def _retrieve_shen( # noqa: C901
|
|
1323
1094
|
f"`year = {year}` is invalid"
|
1324
1095
|
)
|
1325
1096
|
|
1326
|
-
#
|
1097
|
+
# Fetch file paths based on year
|
1327
1098
|
if year == 2013:
|
1328
|
-
|
1329
|
-
|
1330
|
-
|
1331
|
-
/ "
|
1332
|
-
/
|
1333
|
-
|
1334
|
-
|
1335
|
-
parcellations_dir
|
1336
|
-
/ "Shen_2013"
|
1337
|
-
/ "shenetal_neuroimage2013"
|
1338
|
-
/ f"Group_seg{n_rois}_BAindexing_setA.txt"
|
1339
|
-
)
|
1340
|
-
elif year == 2015:
|
1341
|
-
parcellation_fname = (
|
1342
|
-
parcellations_dir
|
1343
|
-
/ "Shen_2015"
|
1344
|
-
/ f"shen_{resolution}mm_268_parcellation.nii.gz"
|
1345
|
-
)
|
1346
|
-
elif year == 2019:
|
1347
|
-
parcellation_fname = (
|
1348
|
-
parcellations_dir
|
1349
|
-
/ "Shen_2019"
|
1350
|
-
/ "Shen_1mm_368_parcellation.nii.gz"
|
1099
|
+
parcellation_img_path = fetch_file_via_datalad(
|
1100
|
+
dataset=dataset,
|
1101
|
+
file_path=dataset.pathobj
|
1102
|
+
/ "parcellations"
|
1103
|
+
/ "Shen"
|
1104
|
+
/ "2013"
|
1105
|
+
/ f"fconn_atlas_{n_rois}_{resolution}mm.nii",
|
1351
1106
|
)
|
1352
|
-
|
1353
|
-
|
1354
|
-
|
1355
|
-
|
1356
|
-
|
1107
|
+
parcellation_label_path = fetch_file_via_datalad(
|
1108
|
+
dataset=dataset,
|
1109
|
+
file_path=dataset.pathobj
|
1110
|
+
/ "parcellations"
|
1111
|
+
/ "Shen"
|
1112
|
+
/ "2013"
|
1113
|
+
/ f"Group_seg{n_rois}_BAindexing_setA.txt",
|
1357
1114
|
)
|
1358
|
-
|
1359
|
-
# Set URL based on year
|
1360
|
-
url = ""
|
1361
|
-
if year == 2013:
|
1362
|
-
url = "https://www.nitrc.org/frs/download.php/5785/shenetal_neuroimage2013_funcatlas.zip"
|
1363
|
-
elif year == 2015:
|
1364
|
-
# Set URL based on resolution
|
1365
|
-
if resolution == 1:
|
1366
|
-
url = "https://www.nitrc.org/frs/download.php/7976/shen_1mm_268_parcellation.nii.gz"
|
1367
|
-
elif resolution == 2:
|
1368
|
-
url = "https://www.nitrc.org/frs/download.php/7977/shen_2mm_268_parcellation.nii.gz"
|
1369
|
-
elif year == 2019:
|
1370
|
-
url = "https://www.nitrc.org/frs/download.php/11629/shen_368.zip"
|
1371
|
-
|
1372
|
-
logger.info(f"Downloading Shen {year} from {url}")
|
1373
|
-
# Store initial download in a tempdir
|
1374
|
-
with tempfile.TemporaryDirectory() as tmpdir:
|
1375
|
-
# Make HTTP request
|
1376
|
-
try:
|
1377
|
-
resp = httpx.get(url)
|
1378
|
-
resp.raise_for_status()
|
1379
|
-
except httpx.HTTPError as exc:
|
1380
|
-
raise_error(
|
1381
|
-
f"Error response {exc.response.status_code} while "
|
1382
|
-
f"requesting {exc.request.url!r}",
|
1383
|
-
klass=RuntimeError,
|
1384
|
-
)
|
1385
|
-
else:
|
1386
|
-
if year in (2013, 2019):
|
1387
|
-
parcellation_zip_path = Path(tmpdir) / f"Shen{year}.zip"
|
1388
|
-
# Open tempfile and write content
|
1389
|
-
with open(parcellation_zip_path, "wb") as f:
|
1390
|
-
f.write(resp.content)
|
1391
|
-
# Unzip tempfile
|
1392
|
-
with zipfile.ZipFile(
|
1393
|
-
parcellation_zip_path, "r"
|
1394
|
-
) as zip_ref:
|
1395
|
-
zip_ref.extractall(
|
1396
|
-
(parcellations_dir / f"Shen_{year}").as_posix()
|
1397
|
-
)
|
1398
|
-
# Cleanup after unzipping
|
1399
|
-
if (
|
1400
|
-
parcellations_dir / f"Shen_{year}" / "__MACOSX"
|
1401
|
-
).exists():
|
1402
|
-
shutil.rmtree(
|
1403
|
-
(
|
1404
|
-
parcellations_dir / f"Shen_{year}" / "__MACOSX"
|
1405
|
-
).as_posix()
|
1406
|
-
)
|
1407
|
-
elif year == 2015:
|
1408
|
-
img_dir_path = parcellations_dir / "Shen_2015"
|
1409
|
-
# Create local directory if not present
|
1410
|
-
img_dir_path.mkdir(parents=True, exist_ok=True)
|
1411
|
-
img_path = (
|
1412
|
-
img_dir_path
|
1413
|
-
/ f"shen_{resolution}mm_268_parcellation.nii.gz"
|
1414
|
-
)
|
1415
|
-
# Create local file if not present
|
1416
|
-
img_path.touch(exist_ok=True)
|
1417
|
-
# Open tempfile and write content
|
1418
|
-
with open(img_path, "wb") as f:
|
1419
|
-
f.write(resp.content)
|
1420
|
-
|
1421
|
-
# Load labels based on year
|
1422
|
-
if year == 2013:
|
1423
1115
|
labels = (
|
1424
1116
|
pd.read_csv(
|
1425
|
-
|
1426
|
-
sep=",",
|
1427
|
-
header=None,
|
1428
|
-
skiprows=[0],
|
1117
|
+
parcellation_label_path,
|
1118
|
+
sep=",",
|
1119
|
+
header=None,
|
1120
|
+
skiprows=[0],
|
1429
1121
|
)[1]
|
1430
1122
|
.map(lambda x: x.strip()) # fix formatting
|
1431
1123
|
.to_list()
|
1432
1124
|
)
|
1433
1125
|
elif year == 2015:
|
1126
|
+
parcellation_img_path = fetch_file_via_datalad(
|
1127
|
+
dataset=dataset,
|
1128
|
+
file_path=dataset.pathobj
|
1129
|
+
/ "parcellations"
|
1130
|
+
/ "Shen"
|
1131
|
+
/ "2015"
|
1132
|
+
/ f"shen_{resolution}mm_268_parcellation.nii.gz",
|
1133
|
+
)
|
1434
1134
|
labels = list(range(1, 269))
|
1435
1135
|
elif year == 2019:
|
1136
|
+
parcellation_img_path = fetch_file_via_datalad(
|
1137
|
+
dataset=dataset,
|
1138
|
+
file_path=dataset.pathobj
|
1139
|
+
/ "parcellations"
|
1140
|
+
/ "Shen"
|
1141
|
+
/ "2019"
|
1142
|
+
/ "Shen_1mm_368_parcellation.nii.gz",
|
1143
|
+
)
|
1436
1144
|
labels = list(range(1, 369))
|
1437
1145
|
|
1438
|
-
return
|
1146
|
+
return parcellation_img_path, labels
|
1439
1147
|
|
1440
1148
|
|
1441
1149
|
def _retrieve_yan(
|
1442
|
-
|
1150
|
+
dataset: "Dataset",
|
1443
1151
|
resolution: Optional[float] = None,
|
1444
1152
|
n_rois: Optional[int] = None,
|
1445
1153
|
yeo_networks: Optional[int] = None,
|
@@ -1449,8 +1157,8 @@ def _retrieve_yan(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : float, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -1473,8 +1181,6 @@ def _retrieve_yan(

     Raises
     ------
-    RuntimeError
-        If there is a problem fetching files.
     ValueError
         If invalid value is provided for ``n_rois``, ``yeo_networks`` or
         ``kong_networks``.
@@ -1507,8 +1213,7 @@ def _retrieve_yan(
             f"following: {_valid_n_rois}"
         )

-
-    parcellation_lname = Path()
+    # Fetch file paths based on networks
     if yeo_networks:
         # Check yeo_networks value
         _valid_yeo_networks = [7, 17]
@@ -1517,19 +1222,25 @@ def _retrieve_yan(
                 f"The parameter `yeo_networks` ({yeo_networks}) needs to be "
                 f"one of the following: {_valid_yeo_networks}"
             )
-
-
-
-
+
+        parcellation_img_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
+            / "Yan2023"
+            / "Yeo2011"
             / (
                 f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_FSLMNI152_"
                 f"{resolution}mm.nii.gz"
-            )
+            ),
         )
-
-
-
-        /
+        parcellation_label_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
+            / "Yan2023"
+            / "Yeo2011"
+            / f"{n_rois}Parcels_Yeo2011_{yeo_networks}Networks_LUT.txt",
         )
     elif kong_networks:
         # Check kong_networks value
@@ -1539,106 +1250,37 @@ def _retrieve_yan(
                 f"The parameter `kong_networks` ({kong_networks}) needs to be "
                 f"one of the following: {_valid_kong_networks}"
             )
-
-
-
-
+
+        parcellation_img_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
+            / "Yan2023"
+            / "Kong2022"
             / (
                 f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_FSLMNI152_"
                 f"{resolution}mm.nii.gz"
-            )
-        )
-        parcellation_lname = (
-            parcellations_dir
-            / "Yan_2023"
-            / f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_LUT.txt"
+            ),
         )
-
-
-
-
-
+        parcellation_label_path = fetch_file_via_datalad(
+            dataset=dataset,
+            file_path=dataset.pathobj
+            / "parcellations"
+            / "Yan2023"
+            / "Kong2022"
+            / f"{n_rois}Parcels_Kong2022_{kong_networks}Networks_LUT.txt",
         )

-    # Set URL based on network
-    img_url = ""
-    label_url = ""
-    if yeo_networks:
-        img_url = (
-            "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
-            "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
-            f"parcellations/MNI/yeo{yeo_networks}/{n_rois}Parcels_Yeo2011"
-            f"_{yeo_networks}Networks_FSLMNI152_{resolution}mm.nii.gz"
-        )
-        label_url = (
-            "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
-            "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
-            f"parcellations/MNI/yeo{yeo_networks}/freeview_lut/{n_rois}"
-            f"Parcels_Yeo2011_{yeo_networks}Networks_LUT.txt"
-        )
-    elif kong_networks:
-        img_url = (
-            "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
-            "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
-            f"parcellations/MNI/kong17/{n_rois}Parcels_Kong2022"
-            f"_17Networks_FSLMNI152_{resolution}mm.nii.gz"
-        )
-        label_url = (
-            "https://raw.githubusercontent.com/ThomasYeoLab/CBIG/"
-            "master/stable_projects/brain_parcellation/Yan2023_homotopic/"
-            f"parcellations/MNI/kong17/freeview_lut/{n_rois}Parcels_"
-            "Kong2022_17Networks_LUT.txt"
-        )
-
-    # Make HTTP requests
-    with httpx.Client() as client:
-        # Download parcellation file
-        logger.info(f"Downloading Yan 2023 parcellation from {img_url}")
-        try:
-            img_resp = client.get(img_url)
-            img_resp.raise_for_status()
-        except httpx.HTTPError as exc:
-            raise_error(
-                f"Error response {exc.response.status_code} while "
-                f"requesting {exc.request.url!r}",
-                klass=RuntimeError,
-            )
-        else:
-            parcellation_img_path = Path(parcellation_fname)
-            # Create local directory if not present
-            parcellation_img_path.parent.mkdir(parents=True, exist_ok=True)
-            # Create local file if not present
-            parcellation_img_path.touch(exist_ok=True)
-            # Open file and write content
-            with open(parcellation_img_path, "wb") as f:
-                f.write(img_resp.content)
-        # Download label file
-        logger.info(f"Downloading Yan 2023 labels from {label_url}")
-        try:
-            label_resp = client.get(label_url)
-            label_resp.raise_for_status()
-        except httpx.HTTPError as exc:
-            raise_error(
-                f"Error response {exc.response.status_code} while "
-                f"requesting {exc.request.url!r}",
-                klass=RuntimeError,
-            )
-        else:
-            parcellation_labels_path = Path(parcellation_lname)
-            # Create local file if not present
-            parcellation_labels_path.touch(exist_ok=True)
-            # Open file and write content
-            with open(parcellation_labels_path, "wb") as f:
-                f.write(label_resp.content)
-
     # Load label file
-    labels = pd.read_csv(
+    labels = pd.read_csv(parcellation_label_path, sep=" ", header=None)[
+        1
+    ].to_list()

-    return
+    return parcellation_img_path, labels


 def _retrieve_brainnetome(
-
+    dataset: "Dataset",
     resolution: Optional[float] = None,
     threshold: Optional[int] = None,
 ) -> tuple[Path, list[str]]:
@@ -1646,8 +1288,8 @@ def _retrieve_brainnetome(

     Parameters
     ----------
-
-        The
+    dataset : datalad.api.Dataset
+        The datalad dataset to fetch parcellation from.
     resolution : {1.0, 1.25, 2.0}, optional
         The desired resolution of the parcellation to load. If it is not
         available, the closest resolution will be loaded. Preferably, use a
@@ -1666,8 +1308,6 @@ def _retrieve_brainnetome(

     Raises
     ------
-    RuntimeError
-        If there is a problem fetching files.
     ValueError
         If invalid value is provided for ``threshold``.

@@ -1691,36 +1331,15 @@ def _retrieve_brainnetome(
     if resolution in [1.0, 2.0]:
         resolution = int(resolution)

-
-
-
-
+    # Fetch file path
+    parcellation_img_path = fetch_file_via_datalad(
+        dataset=dataset,
+        file_path=dataset.pathobj
+        / "parcellations"
+        / "Brainnetome"
+        / f"BNA-maxprob-thr{threshold}-{resolution}mm.nii.gz",
     )

-    # Check for existence of parcellation
-    if not parcellation_fname.exists():
-        # Set URL
-        url = f"http://neurovault.org/media/images/1625/BNA-maxprob-thr{threshold}-{resolution}mm.nii.gz"
-
-        logger.info(f"Downloading Brainnetome from {url}")
-        # Make HTTP request
-        try:
-            resp = httpx.get(url, follow_redirects=True)
-            resp.raise_for_status()
-        except httpx.HTTPError as exc:
-            raise_error(
-                f"Error response {exc.response.status_code} while "
-                f"requesting {exc.request.url!r}",
-                klass=RuntimeError,
-            )
-        else:
-            # Create local directory if not present
-            parcellation_fname.parent.mkdir(parents=True, exist_ok=True)
-            # Create file if not present
-            parcellation_fname.touch(exist_ok=True)
-            # Open file and write bytes
-            parcellation_fname.write_bytes(resp.content)
-
     # Load labels
     labels = (
         sorted([f"SFG_L(R)_7_{i}" for i in range(1, 8)] * 2)
@@ -1750,7 +1369,7 @@ def _retrieve_brainnetome(
         + sorted([f"Tha_L(R)_8_{i}" for i in range(1, 9)] * 2)
     )

-    return
+    return parcellation_img_path, labels


 def merge_parcellations(