junifer 0.0.4.dev831__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions as they appear in their respective public registries.
- junifer/__init__.py +17 -0
- junifer/_version.py +2 -2
- junifer/api/__init__.py +4 -1
- junifer/api/cli.py +91 -1
- junifer/api/decorators.py +9 -0
- junifer/api/functions.py +56 -10
- junifer/api/parser.py +3 -0
- junifer/api/queue_context/__init__.py +4 -1
- junifer/api/queue_context/gnu_parallel_local_adapter.py +16 -6
- junifer/api/queue_context/htcondor_adapter.py +16 -5
- junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +41 -12
- junifer/api/queue_context/tests/test_htcondor_adapter.py +48 -15
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/freesurfer/mri_binarize +3 -0
- junifer/api/res/freesurfer/mri_mc +3 -0
- junifer/api/res/freesurfer/mri_pretess +3 -0
- junifer/api/res/freesurfer/mris_convert +3 -0
- junifer/api/res/freesurfer/run_freesurfer_docker.sh +61 -0
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/res/{run_conda.sh → run_conda.bash} +1 -1
- junifer/api/res/run_conda.zsh +23 -0
- junifer/api/res/run_venv.bash +22 -0
- junifer/api/res/{run_venv.sh → run_venv.zsh} +1 -1
- junifer/api/tests/test_api_utils.py +4 -2
- junifer/api/tests/test_cli.py +83 -0
- junifer/api/tests/test_functions.py +27 -2
- junifer/configs/__init__.py +1 -1
- junifer/configs/juseless/__init__.py +4 -1
- junifer/configs/juseless/datagrabbers/__init__.py +10 -1
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +4 -3
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +4 -3
- junifer/configs/juseless/datagrabbers/tests/test_ucla.py +1 -3
- junifer/configs/juseless/datagrabbers/ucla.py +12 -9
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
- junifer/data/__init__.py +21 -1
- junifer/data/coordinates.py +10 -19
- junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
- junifer/data/masks.py +58 -87
- junifer/data/parcellations.py +14 -3
- junifer/data/template_spaces.py +4 -1
- junifer/data/tests/test_masks.py +26 -37
- junifer/data/utils.py +3 -0
- junifer/datagrabber/__init__.py +18 -1
- junifer/datagrabber/aomic/__init__.py +3 -0
- junifer/datagrabber/aomic/id1000.py +70 -37
- junifer/datagrabber/aomic/piop1.py +69 -36
- junifer/datagrabber/aomic/piop2.py +71 -38
- junifer/datagrabber/aomic/tests/test_id1000.py +44 -100
- junifer/datagrabber/aomic/tests/test_piop1.py +65 -108
- junifer/datagrabber/aomic/tests/test_piop2.py +45 -102
- junifer/datagrabber/base.py +13 -6
- junifer/datagrabber/datalad_base.py +13 -1
- junifer/datagrabber/dmcc13_benchmark.py +36 -53
- junifer/datagrabber/hcp1200/__init__.py +3 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
- junifer/datagrabber/hcp1200/hcp1200.py +4 -1
- junifer/datagrabber/multiple.py +45 -6
- junifer/datagrabber/pattern.py +170 -62
- junifer/datagrabber/pattern_datalad.py +25 -12
- junifer/datagrabber/pattern_validation_mixin.py +388 -0
- junifer/datagrabber/tests/test_datalad_base.py +4 -4
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +46 -19
- junifer/datagrabber/tests/test_multiple.py +161 -84
- junifer/datagrabber/tests/test_pattern.py +45 -0
- junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
- junifer/datagrabber/tests/test_pattern_validation_mixin.py +249 -0
- junifer/datareader/__init__.py +4 -1
- junifer/datareader/default.py +95 -43
- junifer/external/BrainPrint/brainprint/__init__.py +4 -0
- junifer/external/BrainPrint/brainprint/_version.py +3 -0
- junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
- junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
- junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
- junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
- junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
- junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
- junifer/external/__init__.py +1 -1
- junifer/external/nilearn/__init__.py +5 -1
- junifer/external/nilearn/junifer_connectivity_measure.py +483 -0
- junifer/external/nilearn/junifer_nifti_spheres_masker.py +23 -9
- junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +1089 -0
- junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +76 -1
- junifer/markers/__init__.py +23 -1
- junifer/markers/base.py +68 -28
- junifer/markers/brainprint.py +459 -0
- junifer/markers/collection.py +10 -2
- junifer/markers/complexity/__init__.py +10 -0
- junifer/markers/complexity/complexity_base.py +26 -43
- junifer/markers/complexity/hurst_exponent.py +3 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
- junifer/markers/complexity/perm_entropy.py +3 -0
- junifer/markers/complexity/range_entropy.py +3 -0
- junifer/markers/complexity/range_entropy_auc.py +3 -0
- junifer/markers/complexity/sample_entropy.py +3 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +11 -3
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +11 -3
- junifer/markers/complexity/tests/test_perm_entropy.py +11 -3
- junifer/markers/complexity/tests/test_range_entropy.py +11 -3
- junifer/markers/complexity/tests/test_range_entropy_auc.py +11 -3
- junifer/markers/complexity/tests/test_sample_entropy.py +11 -3
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +11 -3
- junifer/markers/complexity/weighted_perm_entropy.py +3 -0
- junifer/markers/ets_rss.py +27 -42
- junifer/markers/falff/__init__.py +3 -0
- junifer/markers/falff/_afni_falff.py +5 -2
- junifer/markers/falff/_junifer_falff.py +3 -0
- junifer/markers/falff/falff_base.py +20 -46
- junifer/markers/falff/falff_parcels.py +56 -27
- junifer/markers/falff/falff_spheres.py +60 -29
- junifer/markers/falff/tests/test_falff_parcels.py +39 -23
- junifer/markers/falff/tests/test_falff_spheres.py +39 -23
- junifer/markers/functional_connectivity/__init__.py +9 -0
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +63 -60
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +45 -32
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +49 -36
- junifer/markers/functional_connectivity/functional_connectivity_base.py +71 -70
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +34 -25
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +40 -30
- junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +11 -7
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +27 -7
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +28 -12
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +35 -11
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +36 -62
- junifer/markers/parcel_aggregation.py +47 -61
- junifer/markers/reho/__init__.py +3 -0
- junifer/markers/reho/_afni_reho.py +5 -2
- junifer/markers/reho/_junifer_reho.py +4 -1
- junifer/markers/reho/reho_base.py +8 -27
- junifer/markers/reho/reho_parcels.py +28 -17
- junifer/markers/reho/reho_spheres.py +27 -18
- junifer/markers/reho/tests/test_reho_parcels.py +8 -3
- junifer/markers/reho/tests/test_reho_spheres.py +8 -3
- junifer/markers/sphere_aggregation.py +43 -59
- junifer/markers/temporal_snr/__init__.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_base.py +23 -32
- junifer/markers/temporal_snr/temporal_snr_parcels.py +9 -6
- junifer/markers/temporal_snr/temporal_snr_spheres.py +9 -6
- junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +6 -3
- junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +6 -3
- junifer/markers/tests/test_brainprint.py +58 -0
- junifer/markers/tests/test_collection.py +9 -8
- junifer/markers/tests/test_ets_rss.py +15 -9
- junifer/markers/tests/test_markers_base.py +17 -18
- junifer/markers/tests/test_parcel_aggregation.py +93 -32
- junifer/markers/tests/test_sphere_aggregation.py +72 -19
- junifer/onthefly/__init__.py +4 -1
- junifer/onthefly/read_transform.py +3 -0
- junifer/pipeline/__init__.py +9 -1
- junifer/pipeline/pipeline_step_mixin.py +21 -4
- junifer/pipeline/registry.py +3 -0
- junifer/pipeline/singleton.py +3 -0
- junifer/pipeline/tests/test_registry.py +1 -1
- junifer/pipeline/update_meta_mixin.py +3 -0
- junifer/pipeline/utils.py +67 -1
- junifer/pipeline/workdir_manager.py +3 -0
- junifer/preprocess/__init__.py +10 -2
- junifer/preprocess/base.py +6 -3
- junifer/preprocess/confounds/__init__.py +3 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +47 -60
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +72 -113
- junifer/preprocess/smoothing/__init__.py +9 -0
- junifer/preprocess/smoothing/_afni_smoothing.py +119 -0
- junifer/preprocess/smoothing/_fsl_smoothing.py +116 -0
- junifer/preprocess/smoothing/_nilearn_smoothing.py +69 -0
- junifer/preprocess/smoothing/smoothing.py +174 -0
- junifer/preprocess/smoothing/tests/test_smoothing.py +94 -0
- junifer/preprocess/warping/__init__.py +3 -0
- junifer/preprocess/warping/_ants_warper.py +3 -0
- junifer/preprocess/warping/_fsl_warper.py +3 -0
- junifer/stats.py +4 -1
- junifer/storage/__init__.py +9 -1
- junifer/storage/base.py +40 -1
- junifer/storage/hdf5.py +71 -9
- junifer/storage/pandas_base.py +3 -0
- junifer/storage/sqlite.py +3 -0
- junifer/storage/tests/test_hdf5.py +82 -10
- junifer/storage/utils.py +9 -0
- junifer/testing/__init__.py +4 -1
- junifer/testing/datagrabbers.py +13 -6
- junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
- junifer/testing/utils.py +3 -0
- junifer/utils/__init__.py +13 -2
- junifer/utils/fs.py +3 -0
- junifer/utils/helpers.py +32 -1
- junifer/utils/logging.py +33 -4
- junifer/utils/tests/test_logging.py +8 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/METADATA +17 -16
- junifer-0.0.5.dist-info/RECORD +275 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/WHEEL +1 -1
- junifer/datagrabber/tests/test_datagrabber_utils.py +0 -218
- junifer/datagrabber/utils.py +0 -230
- junifer/preprocess/ants/__init__.py +0 -4
- junifer/preprocess/ants/ants_apply_transforms_warper.py +0 -185
- junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +0 -56
- junifer/preprocess/bold_warper.py +0 -265
- junifer/preprocess/fsl/__init__.py +0 -4
- junifer/preprocess/fsl/apply_warper.py +0 -179
- junifer/preprocess/fsl/tests/test_apply_warper.py +0 -45
- junifer/preprocess/tests/test_bold_warper.py +0 -159
- junifer-0.0.4.dev831.dist-info/RECORD +0 -257
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/top_level.txt +0 -0
junifer/markers/complexity/tests/test_hurst_exponent.py
CHANGED
@@ -14,6 +14,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import HurstExponent  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -24,6 +25,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test HurstExponent compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -36,15 +40,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test HurstExponent get_output_type()."""
-
-
+    assert "vector" == HurstExponent(
+        parcellation=PARCELLATION
+    ).get_output_type(input_type="BOLD", output_feature="complexity")
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test HurstExponent store().
 
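The six complexity-marker test files that follow receive the same three changes: an ANTs-gated skipif, results nested under the output feature name, and the new two-argument get_output_type() call. A minimal sketch of the updated usage, pieced together from these hunks (the element key "sub001" and the reader step are assumptions not shown in the diff; running it needs the testing data and ANTs on PATH):

    from junifer.datareader import DefaultDataReader
    from junifer.markers.complexity import HurstExponent
    from junifer.testing.datagrabbers import SPMAuditoryTestingDataGrabber

    marker = HurstExponent(parcellation="Schaefer100x17")
    # Storage type is now resolved per (input type, output feature).
    assert (
        marker.get_output_type(input_type="BOLD", output_feature="complexity")
        == "vector"
    )

    with SPMAuditoryTestingDataGrabber() as dg:
        # "sub001" is an assumed element key for the testing datagrabber.
        element_data = DefaultDataReader().fit_transform(dg["sub001"])
        feature_map = marker.fit_transform(element_data)
        # Computed values are nested under the output feature name.
        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2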
junifer/markers/complexity/tests/test_multiscale_entropy_auc.py
CHANGED
@@ -13,6 +13,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import MultiscaleEntropyAUC  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -23,6 +24,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test MultiscaleEntropyAUC compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -35,15 +39,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test MultiscaleEntropyAUC get_output_type()."""
-
-
+    assert "vector" == MultiscaleEntropyAUC(
+        parcellation=PARCELLATION
+    ).get_output_type(input_type="BOLD", output_feature="complexity")
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test MultiscaleEntropyAUC store().
 
junifer/markers/complexity/tests/test_perm_entropy.py
CHANGED
@@ -13,6 +13,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import PermEntropy  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -23,6 +24,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test PermEntropy compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -35,15 +39,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test PermEntropy get_output_type()."""
-
-
+    assert "vector" == PermEntropy(parcellation=PARCELLATION).get_output_type(
+        input_type="BOLD", output_feature="complexity"
+    )
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test PermEntropy store().
 
junifer/markers/complexity/tests/test_range_entropy.py
CHANGED
@@ -14,6 +14,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import RangeEntropy  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -24,6 +25,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test RangeEntropy compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -36,15 +40,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test RangeEntropy get_output_type()."""
-
-
+    assert "vector" == RangeEntropy(parcellation=PARCELLATION).get_output_type(
+        input_type="BOLD", output_feature="complexity"
+    )
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test RangeEntropy store().
 
junifer/markers/complexity/tests/test_range_entropy_auc.py
CHANGED
@@ -14,6 +14,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import RangeEntropyAUC  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -24,6 +25,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test RangeEntropyAUC compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -36,15 +40,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test RangeEntropyAUC get_output_type()."""
-
-
+    assert "vector" == RangeEntropyAUC(
+        parcellation=PARCELLATION
+    ).get_output_type(input_type="BOLD", output_feature="complexity")
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test RangeEntropyAUC store().
 
junifer/markers/complexity/tests/test_sample_entropy.py
CHANGED
@@ -13,6 +13,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import SampleEntropy  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -23,6 +24,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test SampleEntropy compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -35,15 +39,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test SampleEntropy get_output_type()."""
-
-
+    assert "vector" == SampleEntropy(
+        parcellation=PARCELLATION
+    ).get_output_type(input_type="BOLD", output_feature="complexity")
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test SampleEntropy store().
 
junifer/markers/complexity/tests/test_weighted_perm_entropy.py
CHANGED
@@ -13,6 +13,7 @@ pytest.importorskip("neurokit2")
 
 from junifer.datareader import DefaultDataReader  # noqa: E402
 from junifer.markers.complexity import WeightedPermEntropy  # noqa: E402
+from junifer.pipeline.utils import _check_ants  # noqa: E402
 from junifer.storage import SQLiteFeatureStorage  # noqa: E402
 from junifer.testing.datagrabbers import (  # noqa: E402
     SPMAuditoryTestingDataGrabber,
@@ -23,6 +24,9 @@ from junifer.testing.datagrabbers import (  # noqa: E402
 PARCELLATION = "Schaefer100x17"
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_compute() -> None:
     """Test WeightedPermEntropy compute()."""
     with SPMAuditoryTestingDataGrabber() as dg:
@@ -35,15 +39,19 @@ def test_compute() -> None:
         # Compute the marker
         feature_map = marker.fit_transform(element_data)
         # Assert the dimension of timeseries
-        assert feature_map["BOLD"]["data"].ndim == 2
+        assert feature_map["BOLD"]["complexity"]["data"].ndim == 2
 
 
 def test_get_output_type() -> None:
     """Test WeightedPermEntropy get_output_type()."""
-
-
+    assert "vector" == WeightedPermEntropy(
+        parcellation=PARCELLATION
+    ).get_output_type(input_type="BOLD", output_feature="complexity")
 
 
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
 def test_store(tmp_path: Path) -> None:
     """Test WeightedPermEntropy store().
 
junifer/markers/complexity/weighted_perm_entropy.py
CHANGED
@@ -14,6 +14,9 @@ from ...utils import logger, warn_with_log
 from .complexity_base import ComplexityBase
 
 
+__all__ = ["WeightedPermEntropy"]
+
+
 @register_marker
 class WeightedPermEntropy(ComplexityBase):
     """Class for weighted permutation entropy of a time series.
junifer/markers/ets_rss.py
CHANGED
@@ -17,6 +17,9 @@ from .parcel_aggregation import ParcelAggregation
 from .utils import _ets
 
 
+__all__ = ["RSSETSMarker"]
+
+
 @register_marker
 class RSSETSMarker(BaseMarker):
     """Class for root sum of squares of edgewise timeseries.
@@ -44,6 +47,12 @@ class RSSETSMarker(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "rss_ets": "timeseries",
+        },
+    }
+
     def __init__(
         self,
         parcellation: Union[str, List[str]],
@@ -58,33 +67,6 @@ class RSSETSMarker(BaseMarker):
         self.masks = masks
         super().__init__(name=name)
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker.
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "timeseries"
-
     def compute(
         self,
         input: Dict[str, Any],
@@ -106,8 +88,9 @@ class RSSETSMarker(BaseMarker):
         Returns
         -------
         dict
-            The computed result as dictionary.
-
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
 
             * ``data`` : the actual computed values as a numpy.ndarray
             * ``col_names`` : the column labels for the computed values as list
@@ -121,20 +104,22 @@ class RSSETSMarker(BaseMarker):
 
         """
         logger.debug("Calculating root sum of squares of edgewise timeseries.")
-        #
-
+        # Perform aggregation
+        aggregation = ParcelAggregation(
             parcellation=self.parcellation,
             method=self.agg_method,
             method_params=self.agg_method_params,
             masks=self.masks,
-        )
-        # Compute
-
-
-
-
-
-
-
-
+        ).compute(input=input, extra_input=extra_input)
+        # Compute edgewise timeseries
+        edge_ts, _ = _ets(aggregation["aggregation"]["data"])
+        # Compute the RSS of edgewise timeseries
+        rss = np.sum(edge_ts**2, 1) ** 0.5
+
+        return {
+            "rss_ets": {
+                # Make it 2D
+                "data": rss[:, np.newaxis],
+                "col_names": ["root_sum_of_squares_ets"],
+            }
+        }
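The rewritten compute() chains parcel aggregation, edgewise timeseries extraction, and a root-sum-of-squares reduction per timepoint, and returns the result keyed by the "rss_ets" feature. The reduction and the returned structure, reproduced as a standalone sketch on dummy data (the shapes and the random input are assumptions for illustration only):

    import numpy as np

    rng = np.random.default_rng(0)
    # (timepoints, edges); e.g. 100 parcels give 100 * 99 / 2 = 4950 edges.
    edge_ts = rng.standard_normal((120, 4950))

    # Root sum of squares over edges at each timepoint, kept 2D for storage.
    rss = np.sum(edge_ts**2, axis=1) ** 0.5
    out = {
        "rss_ets": {
            "data": rss[:, np.newaxis],
            "col_names": ["root_sum_of_squares_ets"],
        }
    }
    assert out["rss_ets"]["data"].shape == (120, 1)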
junifer/markers/falff/_afni_falff.py
CHANGED
@@ -26,6 +26,9 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
+__all__ = ["AFNIALFF"]
+
+
 @singleton
 class AFNIALFF:
     """Class for computing ALFF using AFNI.
@@ -121,7 +124,7 @@
         convert_alff_cmd = [
             "3dAFNItoNIFTI",
             f"-prefix {alff_afni_to_nifti_out_path.resolve()}",
-            f"{alff_falff_out_path_prefix}_ALFF+
+            f"{alff_falff_out_path_prefix}_ALFF+orig.BRIK",
         ]
         # Call 3dAFNItoNIFTI
         run_ext_cmd(name="3dAFNItoNIFTI", cmd=convert_alff_cmd)
@@ -133,7 +136,7 @@
         convert_falff_cmd = [
             "3dAFNItoNIFTI",
             f"-prefix {falff_afni_to_nifti_out_path.resolve()}",
-            f"{alff_falff_out_path_prefix}_fALFF+
+            f"{alff_falff_out_path_prefix}_fALFF+orig.BRIK",
         ]
         # Call 3dAFNItoNIFTI
        run_ext_cmd(name="3dAFNItoNIFTI", cmd=convert_falff_cmd)
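Both hunks point 3dAFNItoNIFTI at the +orig view of the BRIK output produced earlier in the pipeline. Assembled with placeholder paths (hypothetical, not from the diff), the fixed ALFF conversion command would read roughly as follows:

    from pathlib import Path

    # Hypothetical paths, for illustration only.
    alff_falff_out_path_prefix = Path("/tmp/falff_work/output")
    alff_afni_to_nifti_out_path = Path("/tmp/falff_work/alff.nii")

    convert_alff_cmd = [
        "3dAFNItoNIFTI",
        f"-prefix {alff_afni_to_nifti_out_path.resolve()}",
        f"{alff_falff_out_path_prefix}_ALFF+orig.BRIK",  # explicit +orig view
    ]
    print(" ".join(convert_alff_cmd))
    # e.g. 3dAFNItoNIFTI -prefix .../alff.nii .../output_ALFF+orig.BRIK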
junifer/markers/falff/falff_base.py
CHANGED
@@ -29,13 +29,14 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
+__all__ = ["ALFFBase"]
+
+
 class ALFFBase(BaseMarker):
     """Base class for (fractional) Amplitude Low Frequency Fluctuation.
 
     Parameters
     ----------
-    fractional : bool
-        Whether to compute fractional ALFF.
     highpass : positive float
         Highpass cutoff frequency.
     lowpass : positive float
@@ -82,9 +83,15 @@ class ALFFBase(BaseMarker):
         },
     ]
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "alff": "vector",
+            "falff": "vector",
+        },
+    }
+
     def __init__(
         self,
-        fractional: bool,
         highpass: float,
         lowpass: float,
         using: str,
@@ -107,45 +114,12 @@ class ALFFBase(BaseMarker):
         )
         self.using = using
         self.tr = tr
-        self.fractional = fractional
-
-        # Create a name based on the class name if none is provided
-        if name is None:
-            suffix = "_fractional" if fractional else ""
-            name = f"{self.__class__.__name__}{suffix}"
         super().__init__(on="BOLD", name=name)
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker.
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "vector"
-
     def _compute(
         self,
         input_data: Dict[str, Any],
-    ) -> Tuple["Nifti1Image", Path]:
+    ) -> Tuple["Nifti1Image", "Nifti1Image", Path, Path]:
         """Compute ALFF and fALFF.
 
         Parameters
@@ -158,9 +132,13 @@ class ALFFBase(BaseMarker):
         Returns
         -------
         Niimg-like object
-            The ALFF
+            The ALFF as NIfTI.
+        Niimg-like object
+            The fALFF as NIfTI.
+        pathlib.Path
+            The path to the ALFF as NIfTI.
         pathlib.Path
-            The path to the
+            The path to the fALFF as NIfTI.
 
         """
         logger.debug("Calculating ALFF and fALFF")
@@ -183,11 +161,7 @@ class ALFFBase(BaseMarker):
         # parcellation / coordinates to native space, else the
         # path should be passed for use later if required.
         # TODO(synchon): will be taken care in #292
-        if input_data["space"] == "native"
-            return falff, input_data["path"]
-        elif input_data["space"] == "native" and not self.fractional:
-            return alff, input_data["path"]
-        elif input_data["space"] != "native" and self.fractional:
-            return falff, falff_path
+        if input_data["space"] == "native":
+            return alff, falff, input_data["path"], input_data["path"]
         else:
-            return alff, alff_path
+            return alff, falff, alff_path, falff_path
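Throughout this release, get_valid_inputs()/get_output_type() method pairs give way to a declarative _MARKER_INOUT_MAPPINGS table, and ALFFBase now hands back both ALFF and fALFF (images and paths) from a single _compute() call instead of switching on a fractional flag. A rough sketch of how such a table resolves storage types (the lookup helper is illustrative, not junifer API):

    from typing import Dict

    # Data type -> output feature -> storage type, as declared for ALFFBase above.
    _MARKER_INOUT_MAPPINGS: Dict[str, Dict[str, str]] = {
        "BOLD": {
            "alff": "vector",
            "falff": "vector",
        },
    }

    def storage_type(input_type: str, output_feature: str) -> str:
        """Illustrative lookup mirroring what get_output_type() presumably consults."""
        return _MARKER_INOUT_MAPPINGS[input_type][output_feature]

    assert storage_type("BOLD", "falff") == "vector"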