junifer 0.0.4.dev831__py3-none-any.whl → 0.0.5__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.
- junifer/__init__.py +17 -0
- junifer/_version.py +2 -2
- junifer/api/__init__.py +4 -1
- junifer/api/cli.py +91 -1
- junifer/api/decorators.py +9 -0
- junifer/api/functions.py +56 -10
- junifer/api/parser.py +3 -0
- junifer/api/queue_context/__init__.py +4 -1
- junifer/api/queue_context/gnu_parallel_local_adapter.py +16 -6
- junifer/api/queue_context/htcondor_adapter.py +16 -5
- junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +41 -12
- junifer/api/queue_context/tests/test_htcondor_adapter.py +48 -15
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/freesurfer/mri_binarize +3 -0
- junifer/api/res/freesurfer/mri_mc +3 -0
- junifer/api/res/freesurfer/mri_pretess +3 -0
- junifer/api/res/freesurfer/mris_convert +3 -0
- junifer/api/res/freesurfer/run_freesurfer_docker.sh +61 -0
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/res/{run_conda.sh → run_conda.bash} +1 -1
- junifer/api/res/run_conda.zsh +23 -0
- junifer/api/res/run_venv.bash +22 -0
- junifer/api/res/{run_venv.sh → run_venv.zsh} +1 -1
- junifer/api/tests/test_api_utils.py +4 -2
- junifer/api/tests/test_cli.py +83 -0
- junifer/api/tests/test_functions.py +27 -2
- junifer/configs/__init__.py +1 -1
- junifer/configs/juseless/__init__.py +4 -1
- junifer/configs/juseless/datagrabbers/__init__.py +10 -1
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +4 -3
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +4 -3
- junifer/configs/juseless/datagrabbers/tests/test_ucla.py +1 -3
- junifer/configs/juseless/datagrabbers/ucla.py +12 -9
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
- junifer/data/__init__.py +21 -1
- junifer/data/coordinates.py +10 -19
- junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
- junifer/data/masks.py +58 -87
- junifer/data/parcellations.py +14 -3
- junifer/data/template_spaces.py +4 -1
- junifer/data/tests/test_masks.py +26 -37
- junifer/data/utils.py +3 -0
- junifer/datagrabber/__init__.py +18 -1
- junifer/datagrabber/aomic/__init__.py +3 -0
- junifer/datagrabber/aomic/id1000.py +70 -37
- junifer/datagrabber/aomic/piop1.py +69 -36
- junifer/datagrabber/aomic/piop2.py +71 -38
- junifer/datagrabber/aomic/tests/test_id1000.py +44 -100
- junifer/datagrabber/aomic/tests/test_piop1.py +65 -108
- junifer/datagrabber/aomic/tests/test_piop2.py +45 -102
- junifer/datagrabber/base.py +13 -6
- junifer/datagrabber/datalad_base.py +13 -1
- junifer/datagrabber/dmcc13_benchmark.py +36 -53
- junifer/datagrabber/hcp1200/__init__.py +3 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
- junifer/datagrabber/hcp1200/hcp1200.py +4 -1
- junifer/datagrabber/multiple.py +45 -6
- junifer/datagrabber/pattern.py +170 -62
- junifer/datagrabber/pattern_datalad.py +25 -12
- junifer/datagrabber/pattern_validation_mixin.py +388 -0
- junifer/datagrabber/tests/test_datalad_base.py +4 -4
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +46 -19
- junifer/datagrabber/tests/test_multiple.py +161 -84
- junifer/datagrabber/tests/test_pattern.py +45 -0
- junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
- junifer/datagrabber/tests/test_pattern_validation_mixin.py +249 -0
- junifer/datareader/__init__.py +4 -1
- junifer/datareader/default.py +95 -43
- junifer/external/BrainPrint/brainprint/__init__.py +4 -0
- junifer/external/BrainPrint/brainprint/_version.py +3 -0
- junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
- junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
- junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
- junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
- junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
- junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
- junifer/external/__init__.py +1 -1
- junifer/external/nilearn/__init__.py +5 -1
- junifer/external/nilearn/junifer_connectivity_measure.py +483 -0
- junifer/external/nilearn/junifer_nifti_spheres_masker.py +23 -9
- junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +1089 -0
- junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +76 -1
- junifer/markers/__init__.py +23 -1
- junifer/markers/base.py +68 -28
- junifer/markers/brainprint.py +459 -0
- junifer/markers/collection.py +10 -2
- junifer/markers/complexity/__init__.py +10 -0
- junifer/markers/complexity/complexity_base.py +26 -43
- junifer/markers/complexity/hurst_exponent.py +3 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
- junifer/markers/complexity/perm_entropy.py +3 -0
- junifer/markers/complexity/range_entropy.py +3 -0
- junifer/markers/complexity/range_entropy_auc.py +3 -0
- junifer/markers/complexity/sample_entropy.py +3 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +11 -3
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +11 -3
- junifer/markers/complexity/tests/test_perm_entropy.py +11 -3
- junifer/markers/complexity/tests/test_range_entropy.py +11 -3
- junifer/markers/complexity/tests/test_range_entropy_auc.py +11 -3
- junifer/markers/complexity/tests/test_sample_entropy.py +11 -3
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +11 -3
- junifer/markers/complexity/weighted_perm_entropy.py +3 -0
- junifer/markers/ets_rss.py +27 -42
- junifer/markers/falff/__init__.py +3 -0
- junifer/markers/falff/_afni_falff.py +5 -2
- junifer/markers/falff/_junifer_falff.py +3 -0
- junifer/markers/falff/falff_base.py +20 -46
- junifer/markers/falff/falff_parcels.py +56 -27
- junifer/markers/falff/falff_spheres.py +60 -29
- junifer/markers/falff/tests/test_falff_parcels.py +39 -23
- junifer/markers/falff/tests/test_falff_spheres.py +39 -23
- junifer/markers/functional_connectivity/__init__.py +9 -0
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +63 -60
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +45 -32
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +49 -36
- junifer/markers/functional_connectivity/functional_connectivity_base.py +71 -70
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +34 -25
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +40 -30
- junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +11 -7
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +27 -7
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +28 -12
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +35 -11
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +36 -62
- junifer/markers/parcel_aggregation.py +47 -61
- junifer/markers/reho/__init__.py +3 -0
- junifer/markers/reho/_afni_reho.py +5 -2
- junifer/markers/reho/_junifer_reho.py +4 -1
- junifer/markers/reho/reho_base.py +8 -27
- junifer/markers/reho/reho_parcels.py +28 -17
- junifer/markers/reho/reho_spheres.py +27 -18
- junifer/markers/reho/tests/test_reho_parcels.py +8 -3
- junifer/markers/reho/tests/test_reho_spheres.py +8 -3
- junifer/markers/sphere_aggregation.py +43 -59
- junifer/markers/temporal_snr/__init__.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_base.py +23 -32
- junifer/markers/temporal_snr/temporal_snr_parcels.py +9 -6
- junifer/markers/temporal_snr/temporal_snr_spheres.py +9 -6
- junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +6 -3
- junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +6 -3
- junifer/markers/tests/test_brainprint.py +58 -0
- junifer/markers/tests/test_collection.py +9 -8
- junifer/markers/tests/test_ets_rss.py +15 -9
- junifer/markers/tests/test_markers_base.py +17 -18
- junifer/markers/tests/test_parcel_aggregation.py +93 -32
- junifer/markers/tests/test_sphere_aggregation.py +72 -19
- junifer/onthefly/__init__.py +4 -1
- junifer/onthefly/read_transform.py +3 -0
- junifer/pipeline/__init__.py +9 -1
- junifer/pipeline/pipeline_step_mixin.py +21 -4
- junifer/pipeline/registry.py +3 -0
- junifer/pipeline/singleton.py +3 -0
- junifer/pipeline/tests/test_registry.py +1 -1
- junifer/pipeline/update_meta_mixin.py +3 -0
- junifer/pipeline/utils.py +67 -1
- junifer/pipeline/workdir_manager.py +3 -0
- junifer/preprocess/__init__.py +10 -2
- junifer/preprocess/base.py +6 -3
- junifer/preprocess/confounds/__init__.py +3 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +47 -60
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +72 -113
- junifer/preprocess/smoothing/__init__.py +9 -0
- junifer/preprocess/smoothing/_afni_smoothing.py +119 -0
- junifer/preprocess/smoothing/_fsl_smoothing.py +116 -0
- junifer/preprocess/smoothing/_nilearn_smoothing.py +69 -0
- junifer/preprocess/smoothing/smoothing.py +174 -0
- junifer/preprocess/smoothing/tests/test_smoothing.py +94 -0
- junifer/preprocess/warping/__init__.py +3 -0
- junifer/preprocess/warping/_ants_warper.py +3 -0
- junifer/preprocess/warping/_fsl_warper.py +3 -0
- junifer/stats.py +4 -1
- junifer/storage/__init__.py +9 -1
- junifer/storage/base.py +40 -1
- junifer/storage/hdf5.py +71 -9
- junifer/storage/pandas_base.py +3 -0
- junifer/storage/sqlite.py +3 -0
- junifer/storage/tests/test_hdf5.py +82 -10
- junifer/storage/utils.py +9 -0
- junifer/testing/__init__.py +4 -1
- junifer/testing/datagrabbers.py +13 -6
- junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
- junifer/testing/utils.py +3 -0
- junifer/utils/__init__.py +13 -2
- junifer/utils/fs.py +3 -0
- junifer/utils/helpers.py +32 -1
- junifer/utils/logging.py +33 -4
- junifer/utils/tests/test_logging.py +8 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/METADATA +17 -16
- junifer-0.0.5.dist-info/RECORD +275 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/WHEEL +1 -1
- junifer/datagrabber/tests/test_datagrabber_utils.py +0 -218
- junifer/datagrabber/utils.py +0 -230
- junifer/preprocess/ants/__init__.py +0 -4
- junifer/preprocess/ants/ants_apply_transforms_warper.py +0 -185
- junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +0 -56
- junifer/preprocess/bold_warper.py +0 -265
- junifer/preprocess/fsl/__init__.py +0 -4
- junifer/preprocess/fsl/apply_warper.py +0 -179
- junifer/preprocess/fsl/tests/test_apply_warper.py +0 -45
- junifer/preprocess/tests/test_bold_warper.py +0 -159
- junifer-0.0.4.dev831.dist-info/RECORD +0 -257
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.4.dev831.dist-info → junifer-0.0.5.dist-info}/top_level.txt +0 -0
junifer/preprocess/smoothing/smoothing.py ADDED
@@ -0,0 +1,174 @@
+"""Provide class for smoothing."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import Any, ClassVar, Dict, List, Optional, Tuple, Type, Union
+
+from ...api.decorators import register_preprocessor
+from ...utils import logger, raise_error
+from ..base import BasePreprocessor
+from ._afni_smoothing import AFNISmoothing
+from ._fsl_smoothing import FSLSmoothing
+from ._nilearn_smoothing import NilearnSmoothing
+
+
+__all__ = ["Smoothing"]
+
+
+@register_preprocessor
+class Smoothing(BasePreprocessor):
+    """Class for smoothing.
+
+    Parameters
+    ----------
+    using : {"nilearn", "afni", "fsl"}
+        Implementation to use for smoothing:
+
+        * "nilearn" : Use :func:`nilearn.image.smooth_img`
+        * "afni" : Use AFNI's ``3dBlurToFWHM``
+        * "fsl" : Use FSL SUSAN's ``susan``
+
+    on : {"T1w", "T2w", "BOLD"} or list of the options
+        The data type to apply smoothing to.
+    smoothing_params : dict, optional
+        Extra parameters for smoothing as a dictionary (default None).
+        If ``using="nilearn"``, then the valid keys are:
+
+        * ``fmhw`` : scalar, ``numpy.ndarray``, tuple or list of scalar, \
+            "fast" or None
+            Smoothing strength, as a full-width at half maximum, in
+            millimeters:
+
+            - If nonzero scalar, width is identical in all 3 directions.
+            - If ``numpy.ndarray``, tuple, or list, it must have 3 elements,
+              giving the FWHM along each axis. If any of the elements is 0 or
+              None, smoothing is not performed along that axis.
+            - If ``"fast"``, a fast smoothing will be performed with a filter
+              ``[0.2, 1, 0.2]`` in each direction and a normalisation to
+              preserve the local average value.
+            - If None, no filtering is performed (useful when just removal of
+              non-finite values is needed).
+
+        else if ``using="afni"``, then the valid keys are:
+
+        * ``fwhm`` : int or float
+            Smooth until the value. AFNI estimates the smoothing and then
+            applies smoothing to reach ``fwhm``.
+
+        else if ``using="fsl"``, then the valid keys are:
+
+        * ``brightness_threshold`` : float
+            Threshold to discriminate between noise and the underlying image.
+            The value should be set greater than the noise level and less than
+            the contrast of the underlying image.
+        * ``fwhm`` : float
+            Spatial extent of smoothing.
+
+    """
+
+    _CONDITIONAL_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, Type]]]] = [
+        {
+            "using": "nilearn",
+            "depends_on": NilearnSmoothing,
+        },
+        {
+            "using": "afni",
+            "depends_on": AFNISmoothing,
+        },
+        {
+            "using": "fsl",
+            "depends_on": FSLSmoothing,
+        },
+    ]
+
+    def __init__(
+        self,
+        using: str,
+        on: Union[List[str], str],
+        smoothing_params: Optional[Dict] = None,
+    ) -> None:
+        """Initialize the class."""
+        # Validate `using` parameter
+        valid_using = [dep["using"] for dep in self._CONDITIONAL_DEPENDENCIES]
+        if using not in valid_using:
+            raise_error(
+                f"Invalid value for `using`, should be one of: {valid_using}"
+            )
+        self.using = using
+        self.smoothing_params = (
+            smoothing_params if smoothing_params is not None else {}
+        )
+        super().__init__(on=on)
+
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this
+            preprocessor.
+
+        """
+        return ["T1w", "T2w", "BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the preprocessor.
+
+        Returns
+        -------
+        str
+            The data type output by the preprocessor.
+
+        """
+        # Does not add any new keys
+        return input_type
+
+    def preprocess(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Tuple[Dict[str, Any], Optional[Dict[str, Dict[str, Any]]]]:
+        """Preprocess.
+
+        Parameters
+        ----------
+        input : dict
+            The input from the Junifer Data object.
+        extra_input : dict, optional
+            The other fields in the Junifer Data object.
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary.
+        None
+            Extra "helper" data types as dictionary to add to the Junifer Data
+            object.
+
+        """
+        logger.debug("Smoothing")
+
+        # Conditional preprocessor
+        if self.using == "nilearn":
+            preprocessor = NilearnSmoothing()
+        elif self.using == "afni":
+            preprocessor = AFNISmoothing()
+        elif self.using == "fsl":
+            preprocessor = FSLSmoothing()
+        # Smooth
+        output = preprocessor.preprocess(  # type: ignore
+            data=input["data"],
+            **self.smoothing_params,
+        )
+
+        # Modify target data
+        input["data"] = output
+
+        return input, None
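For orientation, here is a minimal usage sketch of the new preprocessor. It is not part of the diff: it simply mirrors the pattern exercised by the test file below (testing datagrabber, then DefaultDataReader, then Smoothing), and the fwhm value of 6 mm is an arbitrary illustration rather than a recommendation.

from junifer.datareader import DefaultDataReader
from junifer.preprocess import Smoothing
from junifer.testing.datagrabbers import SPMAuditoryTestingDataGrabber

with SPMAuditoryTestingDataGrabber() as dg:
    # Read the data for one subject of the testing dataset
    element_data = DefaultDataReader().fit_transform(dg["sub001"])
    # Smooth the BOLD image via nilearn; fwhm=6 (mm) is an example value
    smoothed = Smoothing(
        using="nilearn",
        on="BOLD",
        smoothing_params={"fwhm": 6},
    ).fit_transform(element_data)
    # The preprocessor returns the (updated) Junifer Data object as a dict
    print(list(smoothed.keys()))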
junifer/preprocess/smoothing/tests/test_smoothing.py ADDED
@@ -0,0 +1,94 @@
+"""Provide tests for Smoothing."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+
+import pytest
+
+from junifer.datareader import DefaultDataReader
+from junifer.pipeline.utils import _check_afni, _check_fsl
+from junifer.preprocess import Smoothing
+from junifer.testing.datagrabbers import SPMAuditoryTestingDataGrabber
+
+
+@pytest.mark.parametrize(
+    "data_type",
+    ["T1w", "BOLD"],
+)
+def test_Smoothing_nilearn(data_type: str) -> None:
+    """Test Smoothing using nilearn.
+
+    Parameters
+    ----------
+    data_type : str
+        The parametrized data type.
+
+    """
+    with SPMAuditoryTestingDataGrabber() as dg:
+        # Read data
+        element_data = DefaultDataReader().fit_transform(dg["sub001"])
+        # Preprocess data
+        output = Smoothing(
+            using="nilearn",
+            on=data_type,
+            smoothing_params={"fwhm": "fast"},
+        ).fit_transform(element_data)
+
+        assert isinstance(output, dict)
+
+
+@pytest.mark.parametrize(
+    "data_type",
+    ["T1w", "BOLD"],
+)
+@pytest.mark.skipif(
+    _check_afni() is False, reason="requires AFNI to be in PATH"
+)
+def test_Smoothing_afni(data_type: str) -> None:
+    """Test Smoothing using AFNI.
+
+    Parameters
+    ----------
+    data_type : str
+        The parametrized data type.
+
+    """
+    with SPMAuditoryTestingDataGrabber() as dg:
+        # Read data
+        element_data = DefaultDataReader().fit_transform(dg["sub001"])
+        # Preprocess data
+        output = Smoothing(
+            using="afni",
+            on=data_type,
+            smoothing_params={"fwhm": 3},
+        ).fit_transform(element_data)
+
+        assert isinstance(output, dict)
+
+
+@pytest.mark.parametrize(
+    "data_type",
+    ["T1w", "BOLD"],
+)
+@pytest.mark.skipif(_check_fsl() is False, reason="requires FSL to be in PATH")
+def test_Smoothing_fsl(data_type: str) -> None:
+    """Test Smoothing using FSL.
+
+    Parameters
+    ----------
+    data_type : str
+        The parametrized data type.
+
+    """
+    with SPMAuditoryTestingDataGrabber() as dg:
+        # Read data
+        element_data = DefaultDataReader().fit_transform(dg["sub001"])
+        # Preprocess data
+        output = Smoothing(
+            using="fsl",
+            on=data_type,
+            smoothing_params={"brightness_threshold": 10.0, "fwhm": 3.0},
+        ).fit_transform(element_data)
+
+        assert isinstance(output, dict)
junifer/stats.py CHANGED
@@ -1,4 +1,4 @@
-"""
+"""Statistical functions and helpers."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 #          Synchon Mandal <s.mandal@fz-juelich.de>
@@ -13,6 +13,9 @@ from scipy.stats.mstats import winsorize
 from .utils import logger, raise_error
 
 
+__all__ = ["get_aggfunc_by_name", "count", "winsorized_mean", "select"]
+
+
 def get_aggfunc_by_name(
     name: str, func_params: Optional[Dict[str, Any]] = None
 ) -> Callable:
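The new __all__ above pins down the public surface of junifer.stats. A rough illustrative sketch of that surface follows (not from the diff): the function name and signature come from the lines above, while the "limits" value is an assumption mirroring scipy.stats.mstats.winsorize, which stats.py imports.

import numpy as np

from junifer.stats import get_aggfunc_by_name

# Resolve an aggregation callable by name; "winsorized_mean" is listed in
# __all__ above. The "limits" entry is assumed to be passed through to
# scipy.stats.mstats.winsorize.
agg = get_aggfunc_by_name(
    "winsorized_mean", func_params={"limits": [0.1, 0.1]}
)

values = np.array([1.0, 2.0, 3.0, 4.0, 100.0])
print(agg(values))  # mean after winsorizing the extreme values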
junifer/storage/__init__.py CHANGED
@@ -1,4 +1,4 @@
-"""
+"""Storages for storing extracted features."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 #          Synchon Mandal <s.mandal@fz-juelich.de>
@@ -8,3 +8,11 @@ from .base import BaseFeatureStorage
 from .pandas_base import PandasBaseFeatureStorage
 from .sqlite import SQLiteFeatureStorage
 from .hdf5 import HDF5FeatureStorage
+
+
+__all__ = [
+    "BaseFeatureStorage",
+    "PandasBaseFeatureStorage",
+    "SQLiteFeatureStorage",
+    "HDF5FeatureStorage",
+]
junifer/storage/base.py CHANGED
@@ -15,6 +15,9 @@ from ..utils import raise_error
 from .utils import process_meta
 
 
+__all__ = ["BaseFeatureStorage"]
+
+
 class BaseFeatureStorage(ABC):
     """Abstract base class for feature storage.
 
@@ -189,7 +192,7 @@ class BaseFeatureStorage(ABC):
 
         Parameters
         ----------
-        kind : {"matrix", "timeseries", "vector"}
+        kind : {"matrix", "timeseries", "vector", "scalar_table"}
             The storage kind.
         **kwargs
             The keyword arguments.
@@ -218,6 +221,10 @@ class BaseFeatureStorage(ABC):
             )
         elif kind == "vector":
             self.store_vector(meta_md5=meta_md5, element=t_element, **kwargs)
+        elif kind == "scalar_table":
+            self.store_scalar_table(
+                meta_md5=meta_md5, element=t_element, **kwargs
+            )
 
     def store_matrix(
         self,
@@ -313,6 +320,38 @@ class BaseFeatureStorage(ABC):
             klass=NotImplementedError,
         )
 
+    def store_scalar_table(
+        self,
+        meta_md5: str,
+        element: Dict,
+        data: np.ndarray,
+        col_names: Optional[Iterable[str]] = None,
+        row_names: Optional[Iterable[str]] = None,
+        row_header_col_name: Optional[str] = "feature",
+    ) -> None:
+        """Store table with scalar values.
+
+        Parameters
+        ----------
+        meta_md5 : str
+            The metadata MD5 hash.
+        element : dict
+            The element as a dictionary.
+        data : numpy.ndarray
+            The timeseries data to store.
+        col_names : list or tuple of str, optional
+            The column labels (default None).
+        row_names : str, optional
+            The row labels (default None).
+        row_header_col_name : str, optional
+            The column name for the row header column (default "feature").
+
+        """
+        raise_error(
+            msg="Concrete classes need to implement store_scalar_table().",
+            klass=NotImplementedError,
+        )
+
     @abstractmethod
     def collect(self) -> None:
         """Collect data."""
junifer/storage/hdf5.py CHANGED
@@ -26,6 +26,9 @@ from .base import BaseFeatureStorage
 from .utils import element_to_prefix, matrix_to_vector, store_matrix_checks
 
 
+__all__ = ["HDF5FeatureStorage"]
+
+
 def _create_chunk(
     chunk_data: List[np.ndarray],
     kind: str,
@@ -56,7 +59,8 @@ def _create_chunk(
     Raises
     ------
     ValueError
-        If `kind` is not one of ['vector', 'matrix', 'timeseries'].
+        If `kind` is not one of ['vector', 'matrix', 'timeseries',
+        'scalar_table'].
 
     """
     if kind in ["vector", "matrix"]:
@@ -77,7 +81,7 @@ def _create_chunk(
             chunk_size=tuple(array_chunk_size),
             n_chunk=i_chunk,
         )
-    elif kind == "timeseries":
+    elif kind in ["timeseries", "scalar_table"]:
        out = ChunkedList(
            data=chunk_data,
            size=element_count,
@@ -86,7 +90,8 @@ def _create_chunk(
    else:
        raise_error(
            f"Invalid kind: {kind}. "
-            "Must be one of ['vector', 'matrix', 'timeseries']."
+            "Must be one of ['vector', 'matrix', 'timeseries',"
+            "'scalar_table']."
        )
    return out
 
@@ -146,7 +151,7 @@ class HDF5FeatureStorage(BaseFeatureStorage):
         uri.parent.mkdir(parents=True, exist_ok=True)
 
         # Available storage kinds
-        storage_types = ["vector", "timeseries", "matrix"]
+        storage_types = ["vector", "timeseries", "matrix", "scalar_table"]
 
         super().__init__(
             uri=uri,
@@ -169,7 +174,7 @@ class HDF5FeatureStorage(BaseFeatureStorage):
            storage.
 
        """
-        return ["matrix", "vector", "timeseries"]
+        return ["matrix", "vector", "timeseries", "scalar_table"]
 
     def _fetch_correct_uri_for_io(self, element: Optional[Dict]) -> str:
         """Return proper URI for I/O based on `element`.
@@ -508,6 +513,26 @@ class HDF5FeatureStorage(BaseFeatureStorage):
            columns = hdf_data["column_headers"]
            # Convert data from 3D to 2D
            reshaped_data = np.concatenate(all_data, axis=0)
+        elif hdf_data["kind"] == "scalar_table":
+            # Create dictionary for aggregating index data
+            element_idx = defaultdict(list)
+            all_data = []
+            for idx, element in enumerate(hdf_data["element"]):
+                # Get row count for the element
+                t_data = hdf_data["data"][idx]
+                all_data.append(t_data)
+                n_rows = len(hdf_data["row_headers"])
+                # Set rows for the index
+                for key, val in element.items():
+                    element_idx[key].extend([val] * n_rows)
+                # Add extra column for row header column name
+                element_idx[hdf_data["row_header_column_name"]].extend(
+                    hdf_data["row_headers"]
+                )
+            # Set column headers for dataframe
+            columns = hdf_data["column_headers"]
+            # Convert data from 3D to 2D
+            reshaped_data = np.concatenate(all_data, axis=0)
 
        # Create dataframe for index
        idx_df = pd.DataFrame(data=element_idx)  # type: ignore
@@ -643,7 +668,7 @@ class HDF5FeatureStorage(BaseFeatureStorage):
 
         Parameters
         ----------
-        kind : {"matrix", "vector", "timeseries"}
+        kind : {"matrix", "vector", "timeseries", "scalar_table"}
             The storage kind.
         meta_md5 : str
             The metadata MD5 hash.
@@ -739,8 +764,8 @@ class HDF5FeatureStorage(BaseFeatureStorage):
        )
 
        t_data = stored_data["data"]
-        if kind == "timeseries":
-            t_data
+        if kind in ["timeseries", "scalar_table"]:
+            t_data += data
        else:
            t_data = np.concatenate((t_data, data), axis=-1)
        # Existing entry; append to existing
@@ -921,6 +946,43 @@ class HDF5FeatureStorage(BaseFeatureStorage):
            row_header_column_name="timepoint",
        )
 
+    def store_scalar_table(
+        self,
+        meta_md5: str,
+        element: Dict,
+        data: np.ndarray,
+        col_names: Optional[Iterable[str]] = None,
+        row_names: Optional[Iterable[str]] = None,
+        row_header_col_name: Optional[str] = "feature",
+    ) -> None:
+        """Store table with scalar values.
+
+        Parameters
+        ----------
+        meta_md5 : str
+            The metadata MD5 hash.
+        element : dict
+            The element as a dictionary.
+        data : numpy.ndarray
+            The scalar table data to store.
+        col_names : list or tuple of str, optional
+            The column labels (default None).
+        row_names : str, optional
+            The row labels (default None).
+        row_header_col_name : str, optional
+            The column name for the row header column (default "feature").
+
+        """
+        self._store_data(
+            kind="scalar_table",
+            meta_md5=meta_md5,
+            element=[element],  # convert to list
+            data=[data],  # convert to list
+            column_headers=col_names,
+            row_headers=row_names,
+            row_header_column_name=row_header_col_name,
+        )
+
     def collect(self) -> None:
         """Implement data collection.
 
@@ -1029,7 +1091,7 @@ class HDF5FeatureStorage(BaseFeatureStorage):
            kind = static_data["kind"]
 
            # Append the "dynamic" data
-            if kind == "timeseries":
+            if kind in ["timeseries", "scalar_table"]:
                chunk_data.extend(t_data["data"])
            else:
                chunk_data.append(t_data["data"])
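To make the new "scalar_table" storage kind concrete, here is a minimal sketch (not from the diff) that calls the HDF5 backend directly, outside the usual marker pipeline. The URI, element, labels and the placeholder meta_md5 are made-up illustrations; in practice the hash is derived from the marker metadata.

from pathlib import Path

import numpy as np

from junifer.storage import HDF5FeatureStorage

storage = HDF5FeatureStorage(uri=Path("/tmp/junifer_demo.hdf5"))
storage.store_scalar_table(
    meta_md5="0" * 32,  # placeholder hash; normally computed from marker meta
    element={"subject": "sub-01"},  # element the table belongs to
    data=np.array([[1.2, 3.4], [5.6, 7.8]]),  # rows = features, cols = labels
    col_names=["left", "right"],
    row_names=["area", "volume"],
    row_header_col_name="feature",
)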
junifer/storage/pandas_base.py CHANGED
junifer/storage/sqlite.py CHANGED