junifer 0.0.7.dev153__py3-none-any.whl → 0.0.7.dev169__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (27) hide show
  1. junifer/_version.py +2 -2
  2. junifer/conftest.py +32 -0
  3. junifer/markers/__init__.pyi +12 -2
  4. junifer/markers/falff/__init__.pyi +2 -1
  5. junifer/markers/falff/falff_maps.py +151 -0
  6. junifer/markers/falff/tests/test_falff_maps.py +167 -0
  7. junifer/markers/functional_connectivity/__init__.pyi +4 -0
  8. junifer/markers/functional_connectivity/edge_functional_connectivity_maps.py +115 -0
  9. junifer/markers/functional_connectivity/functional_connectivity_maps.py +95 -0
  10. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_maps.py +74 -0
  11. junifer/markers/functional_connectivity/tests/test_functional_connectivity_maps.py +110 -0
  12. junifer/markers/maps_aggregation.py +201 -0
  13. junifer/markers/reho/__init__.pyi +2 -1
  14. junifer/markers/reho/reho_maps.py +161 -0
  15. junifer/markers/reho/tests/test_reho_maps.py +135 -0
  16. junifer/markers/temporal_snr/__init__.pyi +2 -1
  17. junifer/markers/temporal_snr/temporal_snr_maps.py +80 -0
  18. junifer/markers/temporal_snr/tests/test_temporal_snr_maps.py +67 -0
  19. junifer/markers/tests/test_maps_aggregation.py +430 -0
  20. junifer/typing/_typing.py +1 -3
  21. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/METADATA +1 -1
  22. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/RECORD +27 -15
  23. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/WHEEL +0 -0
  24. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/entry_points.txt +0 -0
  25. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/licenses/AUTHORS.rst +0 -0
  26. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/licenses/LICENSE.md +0 -0
  27. {junifer-0.0.7.dev153.dist-info → junifer-0.0.7.dev169.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,74 @@
1
+ """Provide tests for edge-centric functional connectivity using maps."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ from pathlib import Path
7
+
8
+ import pytest
9
+
10
+ from junifer.datagrabber import PatternDataladDataGrabber
11
+ from junifer.datareader import DefaultDataReader
12
+ from junifer.markers import EdgeCentricFCMaps
13
+ from junifer.storage import HDF5FeatureStorage
14
+
15
+
16
@pytest.mark.parametrize(
    "conn_method_params",
    [
        {"empirical": False},
        {"empirical": True},
    ],
)
def test_EdgeCentricFCMaps(
    tmp_path: Path,
    maps_datagrabber: PatternDataladDataGrabber,
    conn_method_params: dict[str, bool],
) -> None:
    """Test EdgeCentricFCMaps.

    Parameters
    ----------
    tmp_path : pathlib.Path
        The path to the test directory.
    maps_datagrabber : PatternDataladDataGrabber
        The testing PatternDataladDataGrabber, as fixture.
    conn_method_params : dict
        The parametrized parameters to connectivity measure method.

    """
    with maps_datagrabber as datagrabber:
        # Fetch and read the test element
        test_element = datagrabber[("sub-01", "sub-001", "rest", "1")]
        test_data = DefaultDataReader().fit_transform(test_element)
        # Set up the marker under test
        marker = EdgeCentricFCMaps(
            maps="Smith_rsn_10",
            conn_method="correlation",
            conn_method_params=conn_method_params,
        )
        # The marker should declare a matrix output for this feature
        assert (
            marker.get_output_type(
                input_type="BOLD", output_feature="functional_connectivity"
            )
            == "matrix"
        )

        # Compute the marker
        bold_output = marker.fit_transform(test_data)["BOLD"][
            "functional_connectivity"
        ]

        # 10 ROIs yield C(10, 2) edges in the edge time series
        expected_edges = 10 * (10 - 1) // 2
        for key in ("data", "row_names", "col_names"):
            assert key in bold_output
        assert bold_output["data"].shape == (expected_edges, expected_edges)
        assert len(set(bold_output["row_names"])) == expected_edges
        assert len(set(bold_output["col_names"])) == expected_edges

        # Compute again with storage and verify the stored feature name
        storage = HDF5FeatureStorage(uri=tmp_path / "test_edge_fc_maps.hdf5")
        marker.fit_transform(input=test_data, storage=storage)
        stored_features = storage.list_features()
        assert any(
            feature["name"]
            == "BOLD_EdgeCentricFCMaps_functional_connectivity"
            for feature in stored_features.values()
        )
@@ -0,0 +1,110 @@
1
+ """Provide tests for functional connectivity using maps."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ from pathlib import Path
7
+ from typing import TYPE_CHECKING
8
+
9
+ import pytest
10
+ from nilearn.connectome import ConnectivityMeasure
11
+ from nilearn.maskers import NiftiMapsMasker
12
+ from numpy.testing import assert_array_almost_equal
13
+ from sklearn.covariance import EmpiricalCovariance, LedoitWolf
14
+
15
+ from junifer.data import MapsRegistry
16
+ from junifer.datagrabber import PatternDataladDataGrabber
17
+ from junifer.datareader import DefaultDataReader
18
+ from junifer.markers import FunctionalConnectivityMaps
19
+ from junifer.storage import HDF5FeatureStorage
20
+
21
+
22
+ if TYPE_CHECKING:
23
+ from sklearn.base import BaseEstimator
24
+
25
+
26
@pytest.mark.parametrize(
    "conn_method_params, cov_estimator",
    [
        ({"empirical": False}, LedoitWolf(store_precision=False)),
        ({"empirical": True}, EmpiricalCovariance(store_precision=False)),
    ],
)
def test_FunctionalConnectivityMaps(
    tmp_path: Path,
    maps_datagrabber: PatternDataladDataGrabber,
    conn_method_params: dict[str, bool],
    cov_estimator: type["BaseEstimator"],
) -> None:
    """Test FunctionalConnectivityMaps.

    Parameters
    ----------
    tmp_path : pathlib.Path
        The path to the test directory.
    maps_datagrabber : PatternDataladDataGrabber
        The testing PatternDataladDataGrabber, as fixture.
    conn_method_params : dict
        The parametrized parameters to connectivity measure method.
    cov_estimator : estimator object
        The parametrized covariance estimator matching
        ``conn_method_params``.

    """
    with maps_datagrabber as dg:
        element = dg[("sub-01", "sub-001", "rest", "1")]
        element_data = DefaultDataReader().fit_transform(element)
        # Setup marker
        marker = FunctionalConnectivityMaps(
            maps="Smith_rsn_10",
            conn_method="correlation",
            conn_method_params=conn_method_params,
        )
        # Check correct output
        assert "matrix" == marker.get_output_type(
            input_type="BOLD", output_feature="functional_connectivity"
        )

        # Fit-transform the data
        fc = marker.fit_transform(element_data)
        fc_bold = fc["BOLD"]["functional_connectivity"]

        # 10 maps -> 10 x 10 connectivity matrix
        assert "data" in fc_bold
        assert "row_names" in fc_bold
        assert "col_names" in fc_bold
        assert fc_bold["data"].shape == (10, 10)
        assert len(set(fc_bold["row_names"])) == 10
        assert len(set(fc_bold["col_names"])) == 10

        # Compare with nilearn
        # Load testing maps for the target data
        maps, _ = MapsRegistry().get(
            maps="Smith_rsn_10",
            target_data=element_data["BOLD"],
        )
        # Extract timeseries
        masker = NiftiMapsMasker(maps_img=maps, standardize=False)
        extracted_timeseries = masker.fit_transform(
            element_data["BOLD"]["data"]
        )
        # Compute the connectivity measure
        connectivity_measure = ConnectivityMeasure(
            cov_estimator=cov_estimator,
            kind="correlation",  # type: ignore
        ).fit_transform([extracted_timeseries])[0]

        # Check that FC are almost equal
        assert_array_almost_equal(
            connectivity_measure, fc_bold["data"], decimal=3
        )

        # Store
        storage = HDF5FeatureStorage(
            uri=tmp_path / "test_fc_maps.hdf5",
        )
        marker.fit_transform(input=element_data, storage=storage)
        features = storage.list_features()
        assert any(
            x["name"]
            == "BOLD_FunctionalConnectivityMaps_functional_connectivity"
            for x in features.values()
        )
@@ -0,0 +1,201 @@
1
+ """Provide class for maps aggregation."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ from typing import Any, ClassVar, Optional, Union
7
+
8
+ from nilearn.maskers import NiftiMapsMasker
9
+
10
+ from ..api.decorators import register_marker
11
+ from ..data import get_data
12
+ from ..stats import get_aggfunc_by_name
13
+ from ..typing import Dependencies, MarkerInOutMappings
14
+ from ..utils import logger, raise_error, warn_with_log
15
+ from .base import BaseMarker
16
+
17
+
18
+ __all__ = ["MapsAggregation"]
19
+
20
+
21
@register_marker
class MapsAggregation(BaseMarker):
    """Class for maps aggregation.

    Parameters
    ----------
    maps : str
        The name of the map(s) to use.
        See :func:`.list_data` for options.
    time_method : str, optional
        The method to use to aggregate the time series over the time points,
        after aggregation (only applicable to BOLD data). If None,
        it will not operate on the time dimension (default None).
    time_method_params : dict, optional
        The parameters to pass to the time aggregation method (default None).
    masks : str, dict or list of dict or str, optional
        The specification of the masks to apply to regions before extracting
        signals. Check :ref:`Using Masks <using_masks>` for more details.
        If None, will not apply any mask (default None).
    on : {"T1w", "T2w", "BOLD", "VBM_GM", "VBM_WM", "VBM_CSF", "fALFF", \
        "GCOR", "LCOR"} or list of the options, optional
        The data types to apply the marker to. If None, will work on all
        available data (default None).
    name : str, optional
        The name of the marker. If None, will use the class name (default
        None).

    Raises
    ------
    ValueError
        If ``time_method`` is specified for non-BOLD data or if
        ``time_method_params`` is not None when ``time_method`` is None.

    """

    _DEPENDENCIES: ClassVar[Dependencies] = {"nilearn", "numpy"}

    _MARKER_INOUT_MAPPINGS: ClassVar[MarkerInOutMappings] = {
        "T1w": {
            "aggregation": "vector",
        },
        "T2w": {
            "aggregation": "vector",
        },
        "BOLD": {
            "aggregation": "timeseries",
        },
        "VBM_GM": {
            "aggregation": "vector",
        },
        "VBM_WM": {
            "aggregation": "vector",
        },
        "VBM_CSF": {
            "aggregation": "vector",
        },
        "fALFF": {
            "aggregation": "vector",
        },
        "GCOR": {
            "aggregation": "vector",
        },
        "LCOR": {
            "aggregation": "vector",
        },
    }

    def __init__(
        self,
        maps: str,
        time_method: Optional[str] = None,
        time_method_params: Optional[dict[str, Any]] = None,
        masks: Union[str, dict, list[Union[dict, str]], None] = None,
        on: Union[list[str], str, None] = None,
        name: Optional[str] = None,
    ) -> None:
        self.maps = maps
        self.masks = masks
        super().__init__(on=on, name=name)

        # Verify after super init so self._on is set
        if "BOLD" not in self._on and time_method is not None:
            raise_error(
                "`time_method` can only be used with BOLD data. "
                "Please remove `time_method` parameter."
            )
        if time_method is None and time_method_params is not None:
            raise_error(
                "`time_method_params` can only be used with `time_method`. "
                "Please remove `time_method_params` parameter."
            )
        self.time_method = time_method
        self.time_method_params = time_method_params or {}

    def compute(
        self, input: dict[str, Any], extra_input: Optional[dict] = None
    ) -> dict:
        """Compute.

        Parameters
        ----------
        input : dict
            A single input from the pipeline data object in which to compute
            the marker.
        extra_input : dict, optional
            The other fields in the pipeline data object. Useful for accessing
            other data kind that needs to be used in the computation. For
            example, the functional connectivity markers can make use of the
            confounds if available (default None).

        Returns
        -------
        dict
            The computed result as dictionary. This will be either returned
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

            * ``aggregation`` : dictionary with the following keys:

              - ``data`` : ROI values as ``numpy.ndarray``
              - ``col_names`` : ROI labels as list of str

        Warns
        -----
        RuntimeWarning
            If time aggregation is required but only one time point is
            available.

        """
        t_input_img = input["data"]
        logger.debug("Maps aggregation")

        # Get maps tailored to target image
        maps_img, labels = get_data(
            kind="maps",
            names=self.maps,
            target_data=input,
            extra_input=extra_input,
        )

        # Load mask
        mask_img = None
        if self.masks is not None:
            logger.debug(f"Masking with {self.masks}")
            # Get tailored mask
            mask_img = get_data(
                kind="mask",
                names=self.masks,
                target_data=input,
                extra_input=extra_input,
            )

        # Initialize masker
        logger.debug("Masking")
        masker = NiftiMapsMasker(
            maps_img=maps_img,
            mask_img=mask_img,
            target_affine=t_input_img.affine,
        )
        # Mask the input data and extract data
        data = masker.fit_transform(t_input_img)

        # Apply time dimension aggregation if required
        if self.time_method is not None:
            if data.shape[0] > 1:
                logger.debug("Aggregating time dimension")
                time_agg_func = get_aggfunc_by_name(
                    self.time_method, func_params=self.time_method_params
                )
                data = time_agg_func(data, axis=0)
            else:
                warn_with_log(
                    "No time dimension to aggregate as only one time point is "
                    "available."
                )
        # Format the output
        return {
            "aggregation": {
                "data": data,
                "col_names": labels,
            },
        }
@@ -1,4 +1,5 @@
1
- __all__ = ["ReHoParcels", "ReHoSpheres"]
1
+ __all__ = ["ReHoMaps", "ReHoParcels", "ReHoSpheres"]
2
2
 
3
+ from .reho_maps import ReHoMaps
3
4
  from .reho_parcels import ReHoParcels
4
5
  from .reho_spheres import ReHoSpheres
@@ -0,0 +1,161 @@
1
+ """Provide class for regional homogeneity (ReHo) on maps."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ from typing import Any, Optional, Union
7
+
8
+ import numpy as np
9
+
10
+ from ...api.decorators import register_marker
11
+ from ...utils import logger
12
+ from ..maps_aggregation import MapsAggregation
13
+ from .reho_base import ReHoBase
14
+
15
+
16
+ __all__ = ["ReHoMaps"]
17
+
18
+
19
@register_marker
class ReHoMaps(ReHoBase):
    """Class for regional homogeneity on maps.

    Parameters
    ----------
    maps : str
        The name of the map(s) to use.
        See :func:`.list_data` for options.
    using : {"junifer", "afni"}
        Implementation to use for computing ReHo:

        * "junifer" : Use ``junifer``'s own ReHo implementation
        * "afni" : Use AFNI's ``3dReHo``

    reho_params : dict, optional
        Extra parameters for computing ReHo map as a dictionary (default None).
        If ``using="afni"``, then the valid keys are:

        * ``nneigh`` : {7, 19, 27}, optional (default 27)
            Number of voxels in the neighbourhood, inclusive. Can be:

            - 7 : for facewise neighbours only
            - 19 : for face- and edge-wise neighbours
            - 27 : for face-, edge-, and node-wise neighbors

        * ``neigh_rad`` : positive float, optional
            The radius of a desired neighbourhood (default None).
        * ``neigh_x`` : positive float, optional
            The semi-radius for x-axis of ellipsoidal volumes (default None).
        * ``neigh_y`` : positive float, optional
            The semi-radius for y-axis of ellipsoidal volumes (default None).
        * ``neigh_z`` : positive float, optional
            The semi-radius for z-axis of ellipsoidal volumes (default None).
        * ``box_rad`` : positive int, optional
            The number of voxels outward in a given cardinal direction for a
            cubic box centered on a given voxel (default None).
        * ``box_x`` : positive int, optional
            The number of voxels for +/- x-axis of cuboidal volumes
            (default None).
        * ``box_y`` : positive int, optional
            The number of voxels for +/- y-axis of cuboidal volumes
            (default None).
        * ``box_z`` : positive int, optional
            The number of voxels for +/- z-axis of cuboidal volumes
            (default None).

        else if ``using="junifer"``, then the valid keys are:

        * ``nneigh`` : {7, 19, 27, 125}, optional (default 27)
            Number of voxels in the neighbourhood, inclusive. Can be:

            * 7 : for facewise neighbours only
            * 19 : for face- and edge-wise neighbours
            * 27 : for face-, edge-, and node-wise neighbors
            * 125 : for 5x5 cuboidal volume

    masks : str, dict or list of dict or str, optional
        The specification of the masks to apply to regions before extracting
        signals. Check :ref:`Using Masks <using_masks>` for more details.
        If None, will not apply any mask (default None).
    name : str, optional
        The name of the marker. If None, it will use the class name
        (default None).

    """

    def __init__(
        self,
        maps: str,
        using: str,
        reho_params: Optional[dict] = None,
        masks: Union[str, dict, list[Union[dict, str]], None] = None,
        name: Optional[str] = None,
    ) -> None:
        # Superclass init first to validate `using` parameter
        super().__init__(using=using, name=name)
        self.maps = maps
        self.reho_params = reho_params
        self.masks = masks

    def compute(
        self,
        input: dict[str, Any],
        extra_input: Optional[dict[str, Any]] = None,
    ) -> dict[str, Any]:
        """Compute.

        Parameters
        ----------
        input : dict
            The BOLD data as dictionary.
        extra_input : dict, optional
            The other fields in the pipeline data object (default None).

        Returns
        -------
        dict
            The computed result as dictionary. This will be either returned
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

            * ``reho`` : dictionary with the following keys:

              - ``data`` : ROI values as ``numpy.ndarray``
              - ``col_names`` : ROI labels as list of str

        """
        logger.info("Calculating ReHo for maps")

        # Compute voxelwise reho
        # If the input data space is "native", then reho_file_path points to
        # the input data path as it might be required for maps
        # transformation to native space.
        if self.reho_params is not None:
            reho_map, reho_file_path = self._compute(
                input_data=input, **self.reho_params
            )
        else:
            reho_map, reho_file_path = self._compute(input_data=input)

        # Perform aggregation on reho map
        aggregation_input = dict(input.items())
        aggregation_input["data"] = reho_map
        aggregation_input["path"] = reho_file_path
        maps_aggregation = MapsAggregation(
            maps=self.maps,
            masks=self.masks,
            on="BOLD",
        ).compute(
            input=aggregation_input,
            extra_input=extra_input,
        )

        return {
            "reho": {
                # Only use the first row and expand row dimension
                "data": maps_aggregation["aggregation"]["data"][0][
                    np.newaxis, :
                ],
                "col_names": maps_aggregation["aggregation"]["col_names"],
            }
        }
@@ -0,0 +1,135 @@
1
+ """Provide tests for ReHoMaps."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ import logging
7
+ from pathlib import Path
8
+
9
+ import pytest
10
+ import scipy as sp
11
+
12
+ from junifer.datagrabber import PatternDataladDataGrabber
13
+ from junifer.datareader import DefaultDataReader
14
+ from junifer.markers import ReHoMaps
15
+ from junifer.pipeline import WorkDirManager
16
+ from junifer.pipeline.utils import _check_afni
17
+ from junifer.storage import HDF5FeatureStorage
18
+
19
+
20
def test_ReHoMaps(
    caplog: pytest.LogCaptureFixture,
    tmp_path: Path,
    maps_datagrabber: PatternDataladDataGrabber,
) -> None:
    """Test ReHoMaps.

    Parameters
    ----------
    caplog : pytest.LogCaptureFixture
        The pytest.LogCaptureFixture object.
    tmp_path : pathlib.Path
        The path to the test directory.
    maps_datagrabber : PatternDataladDataGrabber
        The testing PatternDataladDataGrabber, as fixture.

    """
    # Point the work directory at this test's tmp_path
    WorkDirManager().workdir = tmp_path

    with caplog.at_level(logging.DEBUG):
        with maps_datagrabber as datagrabber:
            test_element = datagrabber[("sub-01", "sub-001", "rest", "1")]
            test_data = DefaultDataReader().fit_transform(test_element)

            # Set up the marker under test
            marker = ReHoMaps(
                maps="Smith_rsn_10",
                using="junifer",
            )
            # The marker should declare a vector output for this feature
            assert (
                marker.get_output_type(
                    input_type="BOLD", output_feature="reho"
                )
                == "vector"
            )

            # Compute the marker
            output = marker.fit_transform(test_data)

            assert "Creating cache" in caplog.text

            # Inspect the BOLD output
            assert "BOLD" in output
            reho_output = output["BOLD"]["reho"]
            for key in ("data", "col_names"):
                assert key in reho_output

            reho_data = reho_output["data"]
            # ReHo values come out 2D and strictly inside (0, 1)
            assert reho_data.ndim == 2
            assert ((reho_data > 0) & (reho_data < 1)).all()

            # Reset log capture
            caplog.clear()
            # Compute again with storage and check stored feature name
            storage = HDF5FeatureStorage(tmp_path / "reho_maps.hdf5")
            marker.fit_transform(
                input=test_data,
                storage=storage,
            )
            stored_features = storage.list_features()
            assert any(
                feature["name"] == "BOLD_ReHoMaps_reho"
                for feature in stored_features.values()
            )
            # Second run should hit the cache, so no new cache creation
            assert "Creating cache" not in caplog.text
91
+
92
+
93
@pytest.mark.skipif(
    _check_afni() is False, reason="requires AFNI to be in PATH"
)
def test_ReHoMaps_comparison(
    tmp_path: Path, maps_datagrabber: PatternDataladDataGrabber
) -> None:
    """Test ReHoMaps implementation comparison.

    Parameters
    ----------
    tmp_path : pathlib.Path
        The path to the test directory.
    maps_datagrabber : PatternDataladDataGrabber
        The testing PatternDataladDataGrabber, as fixture.

    """
    # Point the work directory at this test's tmp_path
    WorkDirManager().workdir = tmp_path

    with maps_datagrabber as datagrabber:
        test_element = datagrabber[("sub-01", "sub-001", "rest", "1")]
        test_data = DefaultDataReader().fit_transform(test_element)

        # Compute ReHo with both implementations
        results = {}
        for implementation in ("junifer", "afni"):
            marker = ReHoMaps(maps="Smith_rsn_10", using=implementation)
            results[implementation] = marker.fit_transform(test_data)[
                "BOLD"
            ]["reho"]

        # The two implementations diverge, so only require a weak positive
        # Pearson correlation between their ReHo estimates
        r, _ = sp.stats.pearsonr(
            results["junifer"]["data"].flatten(),
            results["afni"]["data"].flatten(),
        )
        assert r >= 0.2  # this is very bad, but they differ...
@@ -1,4 +1,5 @@
1
- __all__ = ["TemporalSNRParcels", "TemporalSNRSpheres"]
1
+ __all__ = ["TemporalSNRMaps", "TemporalSNRParcels", "TemporalSNRSpheres"]
2
2
 
3
+ from .temporal_snr_maps import TemporalSNRMaps
3
4
  from .temporal_snr_parcels import TemporalSNRParcels
4
5
  from .temporal_snr_spheres import TemporalSNRSpheres