junifer 0.0.5__py3-none-any.whl → 0.0.5.dev11__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/__init__.py +0 -17
- junifer/_version.py +2 -2
- junifer/api/__init__.py +1 -4
- junifer/api/cli.py +1 -91
- junifer/api/decorators.py +0 -9
- junifer/api/functions.py +10 -56
- junifer/api/parser.py +0 -3
- junifer/api/queue_context/__init__.py +1 -4
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/tests/test_api_utils.py +2 -4
- junifer/api/tests/test_cli.py +0 -83
- junifer/api/tests/test_functions.py +2 -27
- junifer/configs/__init__.py +1 -1
- junifer/configs/juseless/__init__.py +1 -4
- junifer/configs/juseless/datagrabbers/__init__.py +1 -10
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +0 -3
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +0 -3
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +0 -3
- junifer/configs/juseless/datagrabbers/tests/test_ucla.py +3 -1
- junifer/configs/juseless/datagrabbers/ucla.py +9 -12
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +0 -3
- junifer/data/__init__.py +1 -21
- junifer/data/coordinates.py +19 -10
- junifer/data/masks.py +87 -58
- junifer/data/parcellations.py +3 -14
- junifer/data/template_spaces.py +1 -4
- junifer/data/tests/test_masks.py +37 -26
- junifer/data/utils.py +0 -3
- junifer/datagrabber/__init__.py +1 -18
- junifer/datagrabber/aomic/__init__.py +0 -3
- junifer/datagrabber/aomic/id1000.py +37 -70
- junifer/datagrabber/aomic/piop1.py +36 -69
- junifer/datagrabber/aomic/piop2.py +38 -71
- junifer/datagrabber/aomic/tests/test_id1000.py +99 -44
- junifer/datagrabber/aomic/tests/test_piop1.py +108 -65
- junifer/datagrabber/aomic/tests/test_piop2.py +102 -45
- junifer/datagrabber/base.py +6 -13
- junifer/datagrabber/datalad_base.py +1 -13
- junifer/datagrabber/dmcc13_benchmark.py +53 -36
- junifer/datagrabber/hcp1200/__init__.py +0 -3
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +0 -3
- junifer/datagrabber/hcp1200/hcp1200.py +1 -4
- junifer/datagrabber/multiple.py +6 -45
- junifer/datagrabber/pattern.py +62 -170
- junifer/datagrabber/pattern_datalad.py +12 -25
- junifer/datagrabber/tests/test_datagrabber_utils.py +218 -0
- junifer/datagrabber/tests/test_datalad_base.py +4 -4
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +19 -46
- junifer/datagrabber/tests/test_multiple.py +84 -161
- junifer/datagrabber/tests/test_pattern.py +0 -45
- junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
- junifer/datagrabber/utils.py +230 -0
- junifer/datareader/__init__.py +1 -4
- junifer/datareader/default.py +43 -95
- junifer/external/__init__.py +1 -1
- junifer/external/nilearn/__init__.py +1 -5
- junifer/external/nilearn/junifer_nifti_spheres_masker.py +9 -23
- junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +1 -76
- junifer/markers/__init__.py +1 -23
- junifer/markers/base.py +28 -68
- junifer/markers/collection.py +2 -10
- junifer/markers/complexity/__init__.py +0 -10
- junifer/markers/complexity/complexity_base.py +43 -26
- junifer/markers/complexity/hurst_exponent.py +0 -3
- junifer/markers/complexity/multiscale_entropy_auc.py +0 -3
- junifer/markers/complexity/perm_entropy.py +0 -3
- junifer/markers/complexity/range_entropy.py +0 -3
- junifer/markers/complexity/range_entropy_auc.py +0 -3
- junifer/markers/complexity/sample_entropy.py +0 -3
- junifer/markers/complexity/tests/test_hurst_exponent.py +3 -11
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +3 -11
- junifer/markers/complexity/tests/test_perm_entropy.py +3 -11
- junifer/markers/complexity/tests/test_range_entropy.py +3 -11
- junifer/markers/complexity/tests/test_range_entropy_auc.py +3 -11
- junifer/markers/complexity/tests/test_sample_entropy.py +3 -11
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +3 -11
- junifer/markers/complexity/weighted_perm_entropy.py +0 -3
- junifer/markers/ets_rss.py +42 -27
- junifer/markers/falff/__init__.py +0 -3
- junifer/markers/falff/_afni_falff.py +2 -5
- junifer/markers/falff/_junifer_falff.py +0 -3
- junifer/markers/falff/falff_base.py +46 -20
- junifer/markers/falff/falff_parcels.py +27 -56
- junifer/markers/falff/falff_spheres.py +29 -60
- junifer/markers/falff/tests/test_falff_parcels.py +23 -39
- junifer/markers/falff/tests/test_falff_spheres.py +23 -39
- junifer/markers/functional_connectivity/__init__.py +0 -9
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +60 -63
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +32 -45
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +36 -49
- junifer/markers/functional_connectivity/functional_connectivity_base.py +70 -71
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +25 -34
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +30 -40
- junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +7 -11
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +7 -27
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +12 -28
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +11 -35
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +62 -36
- junifer/markers/parcel_aggregation.py +61 -47
- junifer/markers/reho/__init__.py +0 -3
- junifer/markers/reho/_afni_reho.py +2 -5
- junifer/markers/reho/_junifer_reho.py +1 -4
- junifer/markers/reho/reho_base.py +27 -8
- junifer/markers/reho/reho_parcels.py +17 -28
- junifer/markers/reho/reho_spheres.py +18 -27
- junifer/markers/reho/tests/test_reho_parcels.py +3 -8
- junifer/markers/reho/tests/test_reho_spheres.py +3 -8
- junifer/markers/sphere_aggregation.py +59 -43
- junifer/markers/temporal_snr/__init__.py +0 -3
- junifer/markers/temporal_snr/temporal_snr_base.py +32 -23
- junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -9
- junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -9
- junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +3 -6
- junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +3 -6
- junifer/markers/tests/test_collection.py +8 -9
- junifer/markers/tests/test_ets_rss.py +9 -15
- junifer/markers/tests/test_markers_base.py +18 -17
- junifer/markers/tests/test_parcel_aggregation.py +32 -93
- junifer/markers/tests/test_sphere_aggregation.py +19 -72
- junifer/onthefly/__init__.py +1 -4
- junifer/onthefly/read_transform.py +0 -3
- junifer/pipeline/__init__.py +1 -9
- junifer/pipeline/pipeline_step_mixin.py +4 -21
- junifer/pipeline/registry.py +0 -3
- junifer/pipeline/singleton.py +0 -3
- junifer/pipeline/tests/test_registry.py +1 -1
- junifer/pipeline/update_meta_mixin.py +0 -3
- junifer/pipeline/utils.py +1 -67
- junifer/pipeline/workdir_manager.py +0 -3
- junifer/preprocess/__init__.py +2 -10
- junifer/preprocess/ants/__init__.py +4 -0
- junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
- junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
- junifer/preprocess/base.py +3 -6
- junifer/preprocess/bold_warper.py +265 -0
- junifer/preprocess/confounds/__init__.py +0 -3
- junifer/preprocess/confounds/fmriprep_confound_remover.py +60 -47
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +113 -72
- junifer/preprocess/fsl/__init__.py +4 -0
- junifer/preprocess/fsl/apply_warper.py +179 -0
- junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
- junifer/preprocess/tests/test_bold_warper.py +159 -0
- junifer/preprocess/warping/__init__.py +0 -3
- junifer/preprocess/warping/_ants_warper.py +0 -3
- junifer/preprocess/warping/_fsl_warper.py +0 -3
- junifer/stats.py +1 -4
- junifer/storage/__init__.py +1 -9
- junifer/storage/base.py +1 -40
- junifer/storage/hdf5.py +9 -71
- junifer/storage/pandas_base.py +0 -3
- junifer/storage/sqlite.py +0 -3
- junifer/storage/tests/test_hdf5.py +10 -82
- junifer/storage/utils.py +0 -9
- junifer/testing/__init__.py +1 -4
- junifer/testing/datagrabbers.py +6 -13
- junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
- junifer/testing/utils.py +0 -3
- junifer/utils/__init__.py +2 -13
- junifer/utils/fs.py +0 -3
- junifer/utils/helpers.py +1 -32
- junifer/utils/logging.py +4 -33
- junifer/utils/tests/test_logging.py +0 -8
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/METADATA +16 -17
- junifer-0.0.5.dev11.dist-info/RECORD +259 -0
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/WHEEL +1 -1
- junifer/api/res/freesurfer/mri_binarize +0 -3
- junifer/api/res/freesurfer/mri_mc +0 -3
- junifer/api/res/freesurfer/mri_pretess +0 -3
- junifer/api/res/freesurfer/mris_convert +0 -3
- junifer/api/res/freesurfer/run_freesurfer_docker.sh +0 -61
- junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
- junifer/datagrabber/pattern_validation_mixin.py +0 -388
- junifer/datagrabber/tests/test_pattern_validation_mixin.py +0 -249
- junifer/external/BrainPrint/brainprint/__init__.py +0 -4
- junifer/external/BrainPrint/brainprint/_version.py +0 -3
- junifer/external/BrainPrint/brainprint/asymmetry.py +0 -91
- junifer/external/BrainPrint/brainprint/brainprint.py +0 -441
- junifer/external/BrainPrint/brainprint/surfaces.py +0 -258
- junifer/external/BrainPrint/brainprint/utils/__init__.py +0 -1
- junifer/external/BrainPrint/brainprint/utils/_config.py +0 -112
- junifer/external/BrainPrint/brainprint/utils/utils.py +0 -188
- junifer/external/nilearn/junifer_connectivity_measure.py +0 -483
- junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +0 -1089
- junifer/markers/brainprint.py +0 -459
- junifer/markers/tests/test_brainprint.py +0 -58
- junifer/preprocess/smoothing/__init__.py +0 -9
- junifer/preprocess/smoothing/_afni_smoothing.py +0 -119
- junifer/preprocess/smoothing/_fsl_smoothing.py +0 -116
- junifer/preprocess/smoothing/_nilearn_smoothing.py +0 -69
- junifer/preprocess/smoothing/smoothing.py +0 -174
- junifer/preprocess/smoothing/tests/test_smoothing.py +0 -94
- junifer-0.0.5.dist-info/RECORD +0 -275
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/top_level.txt +0 -0
junifer/markers/complexity/tests/test_weighted_perm_entropy.py
CHANGED
@@ -13,7 +13,6 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import WeightedPermEntropy  # noqa: E402
-from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -24,9 +23,6 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
-@pytest.mark.skipif(
-    _check_ants() is False, reason="requires ANTs to be in PATH"
-)
 def test_compute() -> None:
     """Test WeightedPermEntropy compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -39,19 +35,15 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["
+        assert feature_map["BOLD"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test WeightedPermEntropy get_output_type()."""
-…
-…
-    ).get_output_type(input_type="BOLD", output_feature="complexity")
+    marker = WeightedPermEntropy(parcellation=PARCELLATION)
+    assert marker.get_output_type("BOLD") == "vector"
 
 
-@pytest.mark.skipif(
-    _check_ants() is False, reason="requires ANTs to be in PATH"
-)
 def test_store(tmp_path: Path) -> None:
     """Test WeightedPermEntropy store().
 
junifer/markers/complexity/weighted_perm_entropy.py
CHANGED
@@ -14,9 +14,6 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
-__all__ = ["WeightedPermEntropy"]
-
-
 @register_marker
 class WeightedPermEntropy(ComplexityBase):
     """Class for weighted permutation entropy of a time series.
junifer/markers/ets_rss.py
CHANGED
@@ -17,9 +17,6 @@ from .parcel_aggregation import ParcelAggregation
 from .utils import _ets
 
 
-__all__ = ["RSSETSMarker"]
-
-
 @register_marker
 class RSSETSMarker(BaseMarker):
     """Class for root sum of squares of edgewise timeseries.
@@ -47,12 +44,6 @@ class RSSETSMarker(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
 
-    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
-        "BOLD": {
-            "rss_ets": "timeseries",
-        },
-    }
-
     def __init__(
         self,
         parcellation: Union[str, List[str]],
@@ -67,6 +58,33 @@ class RSSETSMarker(BaseMarker):
         self.masks = masks
         super().__init__(name=name)
 
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this marker.
+
+        """
+        return ["BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the marker.
+
+        Returns
+        -------
+        str
+            The storage type output by the marker.
+
+        """
+        return "timeseries"
+
     def compute(
         self,
         input: Dict[str, Any],
@@ -88,9 +106,8 @@ class RSSETSMarker(BaseMarker):
         Returns
         -------
         dict
-            The computed result as dictionary.
-…
-            with this as a parameter. The dictionary has the following keys:
+            The computed result as dictionary. The dictionary has the following
+            keys:
 
             * ``data`` : the actual computed values as a numpy.ndarray
             * ``col_names`` : the column labels for the computed values as list
@@ -104,22 +121,20 @@ class RSSETSMarker(BaseMarker):
 
         """
         logger.debug("Calculating root sum of squares of edgewise timeseries.")
-        #
-…
+        # Initialize a ParcelAggregation
+        parcel_aggregation = ParcelAggregation(
             parcellation=self.parcellation,
             method=self.agg_method,
             method_params=self.agg_method_params,
             masks=self.masks,
-        )
-        # Compute
-…
-…
-…
-…
-…
-…
-…
-…
-…
-            }
-        }
+        )
+        # Compute the parcel aggregation
+        out = parcel_aggregation.compute(input=input, extra_input=extra_input)
+        edge_ts, _ = _ets(out["data"])
+        # Compute the RSS
+        out["data"] = np.sum(edge_ts**2, 1) ** 0.5
+        # Make it 2D
+        out["data"] = out["data"][:, np.newaxis]
+        # Set correct column label
+        out["col_names"] = ["root_sum_of_squares_ets"]
+        return out
junifer/markers/falff/_afni_falff.py
CHANGED
@@ -26,9 +26,6 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
-__all__ = ["AFNIALFF"]
-
-
 @singleton
 class AFNIALFF:
     """Class for computing ALFF using AFNI.
@@ -124,7 +121,7 @@ class AFNIALFF:
         convert_alff_cmd = [
             "3dAFNItoNIFTI",
             f"-prefix {alff_afni_to_nifti_out_path.resolve()}",
-            f"{alff_falff_out_path_prefix}_ALFF+
+            f"{alff_falff_out_path_prefix}_ALFF+tlrc.BRIK",
         ]
         # Call 3dAFNItoNIFTI
         run_ext_cmd(name="3dAFNItoNIFTI", cmd=convert_alff_cmd)
@@ -136,7 +133,7 @@ class AFNIALFF:
         convert_falff_cmd = [
             "3dAFNItoNIFTI",
             f"-prefix {falff_afni_to_nifti_out_path.resolve()}",
-            f"{alff_falff_out_path_prefix}_fALFF+
+            f"{alff_falff_out_path_prefix}_fALFF+tlrc.BRIK",
         ]
         # Call 3dAFNItoNIFTI
         run_ext_cmd(name="3dAFNItoNIFTI", cmd=convert_falff_cmd)
junifer/markers/falff/falff_base.py
CHANGED
@@ -29,14 +29,13 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
-__all__ = ["ALFFBase"]
-
-
 class ALFFBase(BaseMarker):
     """Base class for (fractional) Amplitude Low Frequency Fluctuation.
 
     Parameters
     ----------
+    fractional : bool
+        Whether to compute fractional ALFF.
     highpass : positive float
         Highpass cutoff frequency.
     lowpass : positive float
@@ -83,15 +82,9 @@ class ALFFBase(BaseMarker):
         },
     ]
 
-    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
-        "BOLD": {
-            "alff": "vector",
-            "falff": "vector",
-        },
-    }
-
     def __init__(
         self,
+        fractional: bool,
         highpass: float,
         lowpass: float,
         using: str,
@@ -114,12 +107,45 @@ class ALFFBase(BaseMarker):
         )
         self.using = using
         self.tr = tr
+        self.fractional = fractional
+
+        # Create a name based on the class name if none is provided
+        if name is None:
+            suffix = "_fractional" if fractional else ""
+            name = f"{self.__class__.__name__}{suffix}"
         super().__init__(on="BOLD", name=name)
 
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this marker.
+
+        """
+        return ["BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the marker.
+
+        Returns
+        -------
+        str
+            The storage type output by the marker.
+
+        """
+        return "vector"
+
     def _compute(
         self,
         input_data: Dict[str, Any],
-    ) -> Tuple["Nifti1Image",
+    ) -> Tuple["Nifti1Image", Path]:
         """Compute ALFF and fALFF.
 
         Parameters
@@ -132,13 +158,9 @@ class ALFFBase(BaseMarker):
         Returns
         -------
         Niimg-like object
-            The ALFF as NIfTI.
-        Niimg-like object
-            The fALFF as NIfTI.
-        pathlib.Path
-            The path to the ALFF as NIfTI.
+            The ALFF / fALFF as NIfTI.
         pathlib.Path
-            The path to the fALFF as NIfTI.
+            The path to the ALFF / fALFF as NIfTI.
 
         """
         logger.debug("Calculating ALFF and fALFF")
@@ -161,7 +183,11 @@ class ALFFBase(BaseMarker):
         # parcellation / coordinates to native space, else the
         # path should be passed for use later if required.
         # TODO(synchon): will be taken care in #292
-        if input_data["space"] == "native":
-            return
+        if input_data["space"] == "native" and self.fractional:
+            return falff, input_data["path"]
+        elif input_data["space"] == "native" and not self.fractional:
+            return alff, input_data["path"]
+        elif input_data["space"] != "native" and self.fractional:
+            return falff, falff_path
         else:
-            return alff,
+            return alff, alff_path
junifer/markers/falff/falff_parcels.py
CHANGED
@@ -14,9 +14,6 @@ from ..parcel_aggregation import ParcelAggregation
 from .falff_base import ALFFBase
 
 
-__all__ = ["ALFFParcels"]
-
-
 @register_marker
 class ALFFParcels(ALFFBase):
     """Class for ALFF / fALFF on parcels.
@@ -26,6 +23,8 @@ class ALFFParcels(ALFFBase):
     parcellation : str or list of str
         The name(s) of the parcellation(s). Check valid options by calling
         :func:`.list_parcellations`.
+    fractional : bool
+        Whether to compute fractional ALFF.
     using : {"junifer", "afni"}
         Implementation to use for computing ALFF:
 
@@ -71,6 +70,7 @@ class ALFFParcels(ALFFBase):
     def __init__(
         self,
         parcellation: Union[str, List[str]],
+        fractional: bool,
         using: str,
         highpass: float = 0.01,
         lowpass: float = 0.1,
@@ -82,6 +82,7 @@ class ALFFParcels(ALFFBase):
     ) -> None:
         # Superclass init first to validate `using` parameter
         super().__init__(
+            fractional=fractional,
             highpass=highpass,
             lowpass=lowpass,
             using=using,
@@ -110,63 +111,33 @@ class ALFFParcels(ALFFBase):
         Returns
         -------
         dict
-            The computed result as dictionary.
-…
-            with this as a parameter. The dictionary has the following keys:
-
-            * ``alff`` : dictionary with the following keys:
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            The computed result as dictionary. The dictionary has the following
+            keys:
 
-            * ``
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            * ``data`` : the actual computed values as a numpy.ndarray
+            * ``col_names`` : the column labels for the computed values as list
 
         """
         logger.info("Calculating ALFF / fALFF for parcels")
 
-        # Compute ALFF
-…
-…
+        # Compute ALFF / fALFF
+        output_data, output_file_path = self._compute(input_data=input)
+
+        # Initialize parcel aggregation
+        parcel_aggregation = ParcelAggregation(
+            parcellation=self.parcellation,
+            method=self.agg_method,
+            method_params=self.agg_method_params,
+            masks=self.masks,
+            on="BOLD",
+        )
+        # Perform aggregation on ALFF / fALFF
+        parcel_aggregation_input = dict(input.items())
+        parcel_aggregation_input["data"] = output_data
+        parcel_aggregation_input["path"] = output_file_path
+        output = parcel_aggregation.compute(
+            input=parcel_aggregation_input,
+            extra_input=extra_input,
         )
 
-…
-        aggregation_alff_input = dict(input.items())
-        aggregation_falff_input = dict(input.items())
-        aggregation_alff_input["data"] = alff_output
-        aggregation_falff_input["data"] = falff_output
-        aggregation_alff_input["path"] = alff_output_path
-        aggregation_falff_input["path"] = falff_output_path
-
-        return {
-            "alff": {
-                **ParcelAggregation(
-                    parcellation=self.parcellation,
-                    method=self.agg_method,
-                    method_params=self.agg_method_params,
-                    masks=self.masks,
-                    on="BOLD",
-                ).compute(
-                    input=aggregation_alff_input,
-                    extra_input=extra_input,
-                )[
-                    "aggregation"
-                ],
-            },
-            "falff": {
-                **ParcelAggregation(
-                    parcellation=self.parcellation,
-                    method=self.agg_method,
-                    method_params=self.agg_method_params,
-                    masks=self.masks,
-                    on="BOLD",
-                ).compute(
-                    input=aggregation_falff_input,
-                    extra_input=extra_input,
-                )[
-                    "aggregation"
-                ],
-            },
-        }
+        return output
junifer/markers/falff/falff_spheres.py
CHANGED
@@ -14,9 +14,6 @@ from ..sphere_aggregation import SphereAggregation
 from .falff_base import ALFFBase
 
 
-__all__ = ["ALFFSpheres"]
-
-
 @register_marker
 class ALFFSpheres(ALFFBase):
     """Class for computing ALFF / fALFF on spheres.
@@ -26,6 +23,8 @@ class ALFFSpheres(ALFFBase):
     coords : str
         The name of the coordinates list to use. See
         :func:`.list_coordinates` for options.
+    fractional : bool
+        Whether to compute fractional ALFF.
     using : {"junifer", "afni"}
         Implementation to use for computing ALFF:
 
@@ -78,6 +77,7 @@ class ALFFSpheres(ALFFBase):
     def __init__(
         self,
         coords: str,
+        fractional: bool,
         using: str,
         radius: Optional[float] = None,
         allow_overlap: bool = False,
@@ -91,6 +91,7 @@ class ALFFSpheres(ALFFBase):
     ) -> None:
         # Superclass init first to validate `using` parameter
         super().__init__(
+            fractional=fractional,
            highpass=highpass,
            lowpass=lowpass,
            using=using,
@@ -121,67 +122,35 @@ class ALFFSpheres(ALFFBase):
         Returns
         -------
         dict
-            The computed result as dictionary.
-…
-            with this as a parameter. The dictionary has the following keys:
-
-            * ``alff`` : dictionary with the following keys:
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            The computed result as dictionary. The dictionary has the following
+            keys:
 
-            * ``
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            * ``data`` : the actual computed values as a numpy.ndarray
+            * ``col_names`` : the column labels for the computed values as list
 
         """
         logger.info("Calculating ALFF / fALFF for spheres")
 
-        # Compute ALFF
-…
-…
+        # Compute ALFF / fALFF
+        output_data, output_file_path = self._compute(input_data=input)
+
+        # Initialize sphere aggregation
+        sphere_aggregation = SphereAggregation(
+            coords=self.coords,
+            radius=self.radius,
+            allow_overlap=self.allow_overlap,
+            method=self.agg_method,
+            method_params=self.agg_method_params,
+            masks=self.masks,
+            on="BOLD",
         )
-
         # Perform aggregation on ALFF / fALFF
-…
-…
-…
-…
-…
-…
-…
-…
-…
-                **SphereAggregation(
-                    coords=self.coords,
-                    radius=self.radius,
-                    allow_overlap=self.allow_overlap,
-                    method=self.agg_method,
-                    method_params=self.agg_method_params,
-                    masks=self.masks,
-                    on="BOLD",
-                ).compute(
-                    input=aggregation_alff_input,
-                    extra_input=extra_input,
-                )[
-                    "aggregation"
-                ],
-            },
-            "falff": {
-                **SphereAggregation(
-                    coords=self.coords,
-                    radius=self.radius,
-                    allow_overlap=self.allow_overlap,
-                    method=self.agg_method,
-                    method_params=self.agg_method_params,
-                    masks=self.masks,
-                    on="BOLD",
-                ).compute(
-                    input=aggregation_falff_input,
-                    extra_input=extra_input,
-                )[
-                    "aggregation"
-                ],
-            },
-        }
+        sphere_aggregation_input = dict(input.items())
+        sphere_aggregation_input["data"] = output_data
+        sphere_aggregation_input["path"] = output_file_path
+        output = sphere_aggregation.compute(
+            input=sphere_aggregation_input,
+            extra_input=extra_input,
+        )
+
+        return output
junifer/markers/falff/tests/test_falff_parcels.py
CHANGED
@@ -21,28 +21,6 @@ from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
 PARCELLATION = "TianxS1x3TxMNInonlinear2009cAsym"
 
 
-@pytest.mark.parametrize(
-    "feature",
-    [
-        "alff",
-        "falff",
-    ],
-)
-def test_ALFFParcels_get_output_type(feature: str) -> None:
-    """Test ALFFParcels get_output_type().
-
-    Parameters
-    ----------
-    feature : str
-        The parametrized feature name.
-
-    """
-    assert "vector" == ALFFParcels(
-        parcellation=PARCELLATION,
-        using="junifer",
-    ).get_output_type(input_type="BOLD", output_feature=feature)
-
-
 def test_ALFFParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
     """Test ALFFParcels.
 
@@ -63,6 +41,7 @@ def test_ALFFParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
         # Initialize marker
         marker = ALFFParcels(
             parcellation=PARCELLATION,
+            fractional=False,
             using="junifer",
         )
         # Fit transform marker on data
@@ -72,16 +51,15 @@ def test_ALFFParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
 
         # Get BOLD output
         assert "BOLD" in output
-…
-…
-…
-…
-        assert "col_names" in output_bold
+        output_bold = output["BOLD"]
+        # Assert BOLD output keys
+        assert "data" in output_bold
+        assert "col_names" in output_bold
 
-…
-…
-…
-…
+        output_bold_data = output_bold["data"]
+        # Assert BOLD output data dimension
+        assert output_bold_data.ndim == 2
+        assert output_bold_data.shape == (1, 16)
 
         # Reset log capture
         caplog.clear()
@@ -99,13 +77,18 @@ def test_ALFFParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
 @pytest.mark.skipif(
     _check_afni() is False, reason="requires AFNI to be in PATH"
 )
-def test_ALFFParcels_comparison(tmp_path: Path) -> None:
+@pytest.mark.parametrize(
+    "fractional", [True, False], ids=["fractional", "non-fractional"]
+)
+def test_ALFFParcels_comparison(tmp_path: Path, fractional: bool) -> None:
     """Test ALFFParcels implementation comparison.
 
     Parameters
     ----------
     tmp_path : pathlib.Path
         The path to the test directory.
+    fractional : bool
+        Whether to compute fractional ALFF or not.
 
     """
     with PartlyCloudyTestingDataGrabber() as dg:
@@ -116,6 +99,7 @@ def test_ALFFParcels_comparison(tmp_path: Path) -> None:
         # Initialize marker
         junifer_marker = ALFFParcels(
             parcellation=PARCELLATION,
+            fractional=fractional,
             using="junifer",
         )
         # Fit transform marker on data
@@ -126,6 +110,7 @@ def test_ALFFParcels_comparison(tmp_path: Path) -> None:
         # Initialize marker
         afni_marker = ALFFParcels(
             parcellation=PARCELLATION,
+            fractional=fractional,
             using="afni",
         )
         # Fit transform marker on data
@@ -133,10 +118,9 @@ def test_ALFFParcels_comparison(tmp_path: Path) -> None:
         # Get BOLD output
         afni_output_bold = afni_output["BOLD"]
 
-        for
-…
-…
-…
-…
-…
-            assert r > 0.97
+        # Check for Pearson correlation coefficient
+        r, _ = sp.stats.pearsonr(
+            junifer_output_bold["data"][0],
+            afni_output_bold["data"][0],
+        )
+        assert r > 0.97