junifer 0.0.7.dev153__py3-none-any.whl → 0.0.7.dev171__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +2 -2
- junifer/conftest.py +32 -0
- junifer/markers/__init__.pyi +12 -2
- junifer/markers/falff/__init__.pyi +2 -1
- junifer/markers/falff/falff_maps.py +151 -0
- junifer/markers/falff/tests/test_falff_maps.py +167 -0
- junifer/markers/functional_connectivity/__init__.pyi +4 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_maps.py +115 -0
- junifer/markers/functional_connectivity/functional_connectivity_maps.py +95 -0
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_maps.py +74 -0
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_maps.py +110 -0
- junifer/markers/maps_aggregation.py +201 -0
- junifer/markers/reho/__init__.pyi +2 -1
- junifer/markers/reho/reho_maps.py +161 -0
- junifer/markers/reho/tests/test_reho_maps.py +135 -0
- junifer/markers/temporal_snr/__init__.pyi +2 -1
- junifer/markers/temporal_snr/temporal_snr_maps.py +80 -0
- junifer/markers/temporal_snr/tests/test_temporal_snr_maps.py +67 -0
- junifer/markers/tests/test_maps_aggregation.py +430 -0
- junifer/typing/_typing.py +1 -3
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/METADATA +1 -1
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/RECORD +27 -15
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/WHEEL +0 -0
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/licenses/AUTHORS.rst +0 -0
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/licenses/LICENSE.md +0 -0
- {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev171.dist-info}/top_level.txt +0 -0
junifer/_version.py
CHANGED
@@ -28,7 +28,7 @@ version_tuple: VERSION_TUPLE
 commit_id: COMMIT_ID
 __commit_id__: COMMIT_ID
 
-__version__ = version = '0.0.7.dev153'
-__version_tuple__ = version_tuple = (0, 0, 7, 'dev153')
+__version__ = version = '0.0.7.dev171'
+__version_tuple__ = version_tuple = (0, 0, 7, 'dev171')
 
 __commit_id__ = commit_id = None
junifer/conftest.py
CHANGED
@@ -1,10 +1,14 @@
 """Provide conftest for pytest."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
+# Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
+from pathlib import Path
+
 import pytest
 
+from junifer.datagrabber import PatternDataladDataGrabber
 from junifer.utils.singleton import Singleton
 
 
@@ -23,3 +27,31 @@ def reset_singletons() -> None:
     # Force deleting the singletons
     for elem in to_remove:
         del elem
+
+
+@pytest.fixture
+def maps_datagrabber(tmp_path: Path) -> PatternDataladDataGrabber:
+    """Return a PatternDataladDataGrabber for maps testing.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+
+    """
+    dg = PatternDataladDataGrabber(
+        uri="https://github.com/OpenNeuroDatasets/ds005226.git",
+        types=["BOLD"],
+        patterns={
+            "BOLD": {
+                "pattern": (
+                    "derivatives/pre-processed_data/space-MNI/{subject}/"
+                    "{subject-padded}_task-{task}_run-{run}_space-MNI152NLin6Asym"
+                    "_res-2_desc-preproc_bold.nii.gz"
+                ),
+                "space": "MNI152NLin6Asym",
+            },
+        },
+        replacements=["subject", "subject-padded", "task", "run"],
+    )
+    return dg
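For orientation, a minimal sketch of how a test consumes the new maps_datagrabber fixture (the test name below is hypothetical; pytest injects the fixture defined above, and the element tuple mirrors the one used by the new test modules further down):

from junifer.datagrabber import PatternDataladDataGrabber
from junifer.datareader import DefaultDataReader


def test_maps_marker_smoke(
    maps_datagrabber: PatternDataladDataGrabber,
) -> None:
    # pytest injects the fixture defined in conftest.py
    with maps_datagrabber as dg:
        # Element order follows the declared replacements:
        # (subject, subject-padded, task, run)
        element = dg[("sub-01", "sub-001", "rest", "1")]
        element_data = DefaultDataReader().fit_transform(element)
        # The reader returns a dict keyed by data type
        assert "BOLD" in element_data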
junifer/markers/__init__.pyi
CHANGED
@@ -1,17 +1,23 @@
 __all__ = [
     "BaseMarker",
     "RSSETSMarker",
+    "MapsAggregation",
     "ParcelAggregation",
     "SphereAggregation",
+    "FunctionalConnectivityMaps",
     "FunctionalConnectivityParcels",
     "FunctionalConnectivitySpheres",
     "CrossParcellationFC",
+    "EdgeCentricFCMaps",
     "EdgeCentricFCParcels",
     "EdgeCentricFCSpheres",
+    "ReHoMaps",
     "ReHoParcels",
     "ReHoSpheres",
+    "ALFFMaps",
     "ALFFParcels",
     "ALFFSpheres",
+    "TemporalSNRMaps",
     "TemporalSNRParcels",
     "TemporalSNRSpheres",
     "BrainPrint",
@@ -19,18 +25,22 @@ __all__ = [
 
 from .base import BaseMarker
 from .ets_rss import RSSETSMarker
+from .maps_aggregation import MapsAggregation
 from .parcel_aggregation import ParcelAggregation
 from .sphere_aggregation import SphereAggregation
 from .functional_connectivity import (
+    FunctionalConnectivityMaps,
     FunctionalConnectivityParcels,
     FunctionalConnectivitySpheres,
     CrossParcellationFC,
+    EdgeCentricFCMaps,
     EdgeCentricFCParcels,
     EdgeCentricFCSpheres,
 )
-from .reho import ReHoParcels, ReHoSpheres
-from .falff import ALFFParcels, ALFFSpheres
+from .reho import ReHoMaps, ReHoParcels, ReHoSpheres
+from .falff import ALFFMaps, ALFFParcels, ALFFSpheres
 from .temporal_snr import (
+    TemporalSNRMaps,
     TemporalSNRParcels,
     TemporalSNRSpheres,
 )
junifer/markers/falff/falff_maps.py
ADDED
@@ -0,0 +1,151 @@
+"""Provide class for ALFF / fALFF on maps."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import Any, Optional, Union
+
+from ...api.decorators import register_marker
+from ...utils import logger
+from ..maps_aggregation import MapsAggregation
+from .falff_base import ALFFBase
+
+
+__all__ = ["ALFFMaps"]
+
+
+@register_marker
+class ALFFMaps(ALFFBase):
+    """Class for ALFF / fALFF on maps.
+
+    Parameters
+    ----------
+    maps : str
+        The name of the map(s) to use.
+        See :func:`.list_data` for options.
+    using : {"junifer", "afni"}
+        Implementation to use for computing ALFF:
+
+        * "junifer" : Use ``junifer``'s own ALFF implementation
+        * "afni" : Use AFNI's ``3dRSFC``
+
+    highpass : positive float, optional
+        The highpass cutoff frequency for the bandpass filter. If 0,
+        it will not apply a highpass filter (default 0.01).
+    lowpass : positive float, optional
+        The lowpass cutoff frequency for the bandpass filter (default 0.1).
+    tr : positive float, optional
+        The Repetition Time of the BOLD data. If None, will extract
+        the TR from NIfTI header (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    name : str, optional
+        The name of the marker. If None, will use the class name (default
+        None).
+
+    Notes
+    -----
+    The ``tr`` parameter is crucial for the correctness of fALFF/ALFF
+    computation. If a dataset is correctly preprocessed, the ``tr`` should be
+    extracted from the NIfTI without any issue. However, it has been
+    reported that some preprocessed data might not have the correct ``tr`` in
+    the NIfTI header.
+
+    ALFF/fALFF are computed using a bandpass butterworth filter. See
+    :func:`scipy.signal.butter` and :func:`scipy.signal.filtfilt` for more
+    details.
+
+    """
+
+    def __init__(
+        self,
+        maps: str,
+        using: str,
+        highpass: float = 0.01,
+        lowpass: float = 0.1,
+        tr: Optional[float] = None,
+        masks: Union[str, dict, list[Union[dict, str]], None] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        # Superclass init first to validate `using` parameter
+        super().__init__(
+            highpass=highpass,
+            lowpass=lowpass,
+            using=using,
+            tr=tr,
+            name=name,
+        )
+        self.maps = maps
+        self.masks = masks
+
+    def compute(
+        self,
+        input: dict[str, Any],
+        extra_input: Optional[dict[str, Any]] = None,
+    ) -> dict[str, Any]:
+        """Compute.
+
+        Parameters
+        ----------
+        input : dict
+            The BOLD data as dictionary.
+        extra_input : dict, optional
+            The other fields in the pipeline data object (default None).
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
+
+            * ``alff`` : dictionary with the following keys:
+
+              - ``data`` : ROI values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
+
+            * ``falff`` : dictionary with the following keys:
+
+              - ``data`` : ROI values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
+
+        """
+        logger.info("Calculating ALFF / fALFF for maps")
+
+        # Compute ALFF + fALFF
+        alff_output, falff_output, alff_output_path, falff_output_path = (
+            self._compute(input_data=input)
+        )
+
+        # Perform aggregation on ALFF + fALFF
+        aggregation_alff_input = dict(input.items())
+        aggregation_falff_input = dict(input.items())
+        aggregation_alff_input["data"] = alff_output
+        aggregation_falff_input["data"] = falff_output
+        aggregation_alff_input["path"] = alff_output_path
+        aggregation_falff_input["path"] = falff_output_path
+
+        return {
+            "alff": {
+                **MapsAggregation(
+                    maps=self.maps,
+                    masks=self.masks,
+                    on="BOLD",
+                ).compute(
+                    input=aggregation_alff_input,
+                    extra_input=extra_input,
+                )["aggregation"],
+            },
+            "falff": {
+                **MapsAggregation(
+                    maps=self.maps,
+                    masks=self.masks,
+                    on="BOLD",
+                ).compute(
+                    input=aggregation_falff_input,
+                    extra_input=extra_input,
+                )["aggregation"],
+            },
+        }
junifer/markers/falff/tests/test_falff_maps.py
ADDED
@@ -0,0 +1,167 @@
+"""Provide tests for ALFFMaps."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+import logging
+from pathlib import Path
+
+import pytest
+import scipy as sp
+
+from junifer.datagrabber import PatternDataladDataGrabber
+from junifer.datareader import DefaultDataReader
+from junifer.markers import ALFFMaps
+from junifer.pipeline import WorkDirManager
+from junifer.pipeline.utils import _check_afni
+from junifer.storage import HDF5FeatureStorage
+
+
+MAPS = "Smith_rsn_10"
+
+
+@pytest.mark.parametrize(
+    "feature",
+    [
+        "alff",
+        "falff",
+    ],
+)
+def test_ALFFMaps_get_output_type(feature: str) -> None:
+    """Test ALFFMaps get_output_type().
+
+    Parameters
+    ----------
+    feature : str
+        The parametrized feature name.
+
+    """
+    assert "vector" == ALFFMaps(
+        maps=MAPS,
+        using="junifer",
+    ).get_output_type(input_type="BOLD", output_feature=feature)
+
+
+def test_ALFFMaps(
+    caplog: pytest.LogCaptureFixture,
+    tmp_path: Path,
+    maps_datagrabber: PatternDataladDataGrabber,
+) -> None:
+    """Test ALFFMaps.
+
+    Parameters
+    ----------
+    caplog : pytest.LogCaptureFixture
+        The pytest.LogCaptureFixture object.
+    tmp_path : pathlib.Path
+        The path to the test directory.
+    maps_datagrabber : PatternDataladDataGrabber
+        The testing PatternDataladDataGrabber, as fixture.
+
+    """
+    # Update workdir to current test's tmp_path
+    WorkDirManager().workdir = tmp_path
+
+    with caplog.at_level(logging.DEBUG):
+        with maps_datagrabber as dg:
+            element = dg[("sub-01", "sub-001", "rest", "1")]
+            element_data = DefaultDataReader().fit_transform(element)
+
+            # Initialize marker
+            marker = ALFFMaps(
+                maps=MAPS,
+                using="junifer",
+            )
+            # Check correct output
+            for name in ["alff", "falff"]:
+                assert "vector" == marker.get_output_type(
+                    input_type="BOLD", output_feature=name
+                )
+
+            # Fit transform marker on data
+            output = marker.fit_transform(element_data)
+
+            assert "Creating cache" in caplog.text
+
+            # Get BOLD output
+            assert "BOLD" in output
+            for feature in output["BOLD"].keys():
+                output_bold = output["BOLD"][feature]
+                # Assert BOLD output keys
+                assert "data" in output_bold
+                assert "col_names" in output_bold
+
+                output_bold_data = output_bold["data"]
+                # Assert BOLD output data dimension
+                assert output_bold_data.ndim == 2
+                assert output_bold_data.shape == (1, 10)
+
+            # Reset log capture
+            caplog.clear()
+            # Initialize storage
+            storage = HDF5FeatureStorage(tmp_path / "falff_maps.hdf5")
+            # Fit transform marker on data with storage
+            marker.fit_transform(
+                input=element_data,
+                storage=storage,
+            )
+            # Check stored feature name
+            features = storage.list_features()
+            assert any(
+                x["name"] in ["BOLD_ALFFMaps_alff", "BOLD_ALFFMaps_falff"]
+                for x in features.values()
+            )
+            # Cache working correctly
+            assert "Creating cache" not in caplog.text
+
+
+@pytest.mark.skipif(
+    _check_afni() is False, reason="requires AFNI to be in PATH"
+)
+def test_ALFFMaps_comparison(
+    tmp_path: Path, maps_datagrabber: PatternDataladDataGrabber
+) -> None:
+    """Test ALFFMaps implementation comparison.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+    maps_datagrabber : PatternDataladDataGrabber
+        The testing PatternDataladDataGrabber, as fixture.
+
+    """
+    # Update workdir to current test's tmp_path
+    WorkDirManager().workdir = tmp_path
+
+    with maps_datagrabber as dg:
+        element = dg[("sub-01", "sub-001", "rest", "1")]
+        element_data = DefaultDataReader().fit_transform(element)
+
+        # Initialize marker
+        junifer_marker = ALFFMaps(
+            maps=MAPS,
+            using="junifer",
+        )
+        # Fit transform marker on data
+        junifer_output = junifer_marker.fit_transform(element_data)
+        # Get BOLD output
+        junifer_output_bold = junifer_output["BOLD"]
+
+        # Initialize marker
+        afni_marker = ALFFMaps(
+            maps=MAPS,
+            using="afni",
+        )
+        # Fit transform marker on data
+        afni_output = afni_marker.fit_transform(element_data)
+        # Get BOLD output
+        afni_output_bold = afni_output["BOLD"]
+
+        for feature in afni_output_bold.keys():
+            # Check for Pearson correlation coefficient
+            r, _ = sp.stats.pearsonr(
+                junifer_output_bold[feature]["data"][0],
+                afni_output_bold[feature]["data"][0],
+            )
+            assert r > 0.97
junifer/markers/functional_connectivity/__init__.pyi
CHANGED
@@ -1,13 +1,17 @@
 __all__ = [
+    "FunctionalConnectivityMaps",
     "FunctionalConnectivityParcels",
     "FunctionalConnectivitySpheres",
     "CrossParcellationFC",
+    "EdgeCentricFCMaps",
     "EdgeCentricFCParcels",
     "EdgeCentricFCSpheres",
 ]
 
+from .functional_connectivity_maps import FunctionalConnectivityMaps
 from .functional_connectivity_parcels import FunctionalConnectivityParcels
 from .functional_connectivity_spheres import FunctionalConnectivitySpheres
 from .crossparcellation_functional_connectivity import CrossParcellationFC
+from .edge_functional_connectivity_maps import EdgeCentricFCMaps
 from .edge_functional_connectivity_parcels import EdgeCentricFCParcels
 from .edge_functional_connectivity_spheres import EdgeCentricFCSpheres
junifer/markers/functional_connectivity/edge_functional_connectivity_maps.py
ADDED
@@ -0,0 +1,115 @@
+"""Provide class for edge-centric functional connectivity using maps."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import Any, Optional, Union
+
+from ...api.decorators import register_marker
+from ..maps_aggregation import MapsAggregation
+from ..utils import _ets
+from .functional_connectivity_base import FunctionalConnectivityBase
+
+
+__all__ = ["EdgeCentricFCMaps"]
+
+
+@register_marker
+class EdgeCentricFCMaps(FunctionalConnectivityBase):
+    """Class for edge-centric FC using maps.
+
+    Parameters
+    ----------
+    maps : str
+        The name of the map(s) to use.
+        See :func:`.list_data` for options.
+    conn_method : str, optional
+        The method to perform connectivity measure using.
+        See :class:`.JuniferConnectivityMeasure` for options
+        (default "correlation").
+    conn_method_params : dict, optional
+        Parameters to pass to :class:`.JuniferConnectivityMeasure`.
+        If None, ``{"empirical": True}`` will be used, which would mean
+        :class:`sklearn.covariance.EmpiricalCovariance` is used to compute
+        covariance. If usage of :class:`sklearn.covariance.LedoitWolf` is
+        desired, ``{"empirical": False}`` should be passed
+        (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    name : str, optional
+        The name of the marker. If None, will use
+        ``BOLD_EdgeCentricFCParcels`` (default None).
+
+    References
+    ----------
+    .. [1] Jo et al. (2021)
+           Subject identification using edge-centric functional connectivity.
+           https://doi.org/10.1016/j.neuroimage.2021.118204
+
+    """
+
+    def __init__(
+        self,
+        maps: str,
+        conn_method: str = "correlation",
+        conn_method_params: Optional[dict] = None,
+        masks: Union[str, dict, list[Union[dict, str]], None] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        self.maps = maps
+        super().__init__(
+            conn_method=conn_method,
+            conn_method_params=conn_method_params,
+            masks=masks,
+            name=name,
+        )
+
+    def aggregate(
+        self, input: dict[str, Any], extra_input: Optional[dict] = None
+    ) -> dict:
+        """Perform maps aggregation and ETS computation.
+
+        Parameters
+        ----------
+        input : dict
+            A single input from the pipeline data object in which to compute
+            the marker.
+        extra_input : dict, optional
+            The other fields in the pipeline data object. Useful for accessing
+            other data kind that needs to be used in the computation. For
+            example, the functional connectivity markers can make use of the
+            confounds if available (default None).
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
+
+            * ``aggregation`` : dictionary with the following keys:
+
+              - ``data`` : ROI values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
+
+        """
+        # Perform aggregation
+        aggregation = MapsAggregation(
+            maps=self.maps,
+            masks=self.masks,
+            on="BOLD",
+        ).compute(input, extra_input=extra_input)
+        # Compute edgewise timeseries
+        ets, edge_names = _ets(
+            bold_ts=aggregation["aggregation"]["data"],
+            roi_names=aggregation["aggregation"]["col_names"],
+        )
+
+        return {
+            "aggregation": {
+                "data": ets,
+                "col_names": edge_names,
+            },
+        }
junifer/markers/functional_connectivity/functional_connectivity_maps.py
ADDED
@@ -0,0 +1,95 @@
+"""Provide class for functional connectivity using maps."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import Any, Optional, Union
+
+from ...api.decorators import register_marker
+from ..maps_aggregation import MapsAggregation
+from .functional_connectivity_base import FunctionalConnectivityBase
+
+
+__all__ = ["FunctionalConnectivityMaps"]
+
+
+@register_marker
+class FunctionalConnectivityMaps(FunctionalConnectivityBase):
+    """Class for functional connectivity using maps.
+
+    Parameters
+    ----------
+    maps : str
+        The name of the map(s) to use.
+        See :func:`.list_data` for options.
+    conn_method : str, optional
+        The method to perform connectivity measure using.
+        See :class:`.JuniferConnectivityMeasure` for options
+        (default "correlation").
+    conn_method_params : dict, optional
+        Parameters to pass to :class:`.JuniferConnectivityMeasure`.
+        If None, ``{"empirical": True}`` will be used, which would mean
+        :class:`sklearn.covariance.EmpiricalCovariance` is used to compute
+        covariance. If usage of :class:`sklearn.covariance.LedoitWolf` is
+        desired, ``{"empirical": False}`` should be passed
+        (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    name : str, optional
+        The name of the marker. If None, will use
+        ``BOLD_FunctionalConnectivityMaps`` (default None).
+
+    """
+
+    def __init__(
+        self,
+        maps: str,
+        conn_method: str = "correlation",
+        conn_method_params: Optional[dict] = None,
+        masks: Union[str, dict, list[Union[dict, str]], None] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        self.maps = maps
+        super().__init__(
+            conn_method=conn_method,
+            conn_method_params=conn_method_params,
+            masks=masks,
+            name=name,
+        )
+
+    def aggregate(
+        self, input: dict[str, Any], extra_input: Optional[dict] = None
+    ) -> dict:
+        """Perform maps aggregation.
+
+        Parameters
+        ----------
+        input : dict
+            A single input from the pipeline data object in which to compute
+            the marker.
+        extra_input : dict, optional
+            The other fields in the pipeline data object. Useful for accessing
+            other data kind that needs to be used in the computation. For
+            example, the functional connectivity markers can make use of the
+            confounds if available (default None).
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
+
+            * ``aggregation`` : dictionary with the following keys:
+
+              - ``data`` : ROI values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
+
+        """
+        return MapsAggregation(
+            maps=self.maps,
+            masks=self.masks,
+            on="BOLD",
+        ).compute(input=input, extra_input=extra_input)
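The test modules for the two new connectivity markers appear in the file summary but their contents are not shown in this diff, so the following is a minimal usage sketch under stated assumptions: the datagrabber configuration is copied from the maps_datagrabber fixture above, the map name "Smith_rsn_10" is the one used by the ALFFMaps tests, and running it requires the DataLad dataset to be fetchable.

from junifer.datagrabber import PatternDataladDataGrabber
from junifer.datareader import DefaultDataReader
from junifer.markers import EdgeCentricFCMaps, FunctionalConnectivityMaps

# Datagrabber configured as in the maps_datagrabber fixture above
dg = PatternDataladDataGrabber(
    uri="https://github.com/OpenNeuroDatasets/ds005226.git",
    types=["BOLD"],
    patterns={
        "BOLD": {
            "pattern": (
                "derivatives/pre-processed_data/space-MNI/{subject}/"
                "{subject-padded}_task-{task}_run-{run}_space-MNI152NLin6Asym"
                "_res-2_desc-preproc_bold.nii.gz"
            ),
            "space": "MNI152NLin6Asym",
        },
    },
    replacements=["subject", "subject-padded", "task", "run"],
)

with dg:
    element_data = DefaultDataReader().fit_transform(
        dg[("sub-01", "sub-001", "rest", "1")]
    )

# Map-based FC: aggregates BOLD onto the map components via MapsAggregation,
# then computes the chosen connectivity measure (default "correlation")
fc_marker = FunctionalConnectivityMaps(maps="Smith_rsn_10")
fc_output = fc_marker.fit_transform(element_data)

# Edge-centric variant: builds edge time series from the aggregated signals
# before computing connectivity
efc_marker = EdgeCentricFCMaps(maps="Smith_rsn_10")
efc_output = efc_marker.fit_transform(element_data)

print(fc_output["BOLD"].keys())
print(efc_output["BOLD"].keys())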