junifer 0.0.6.dev248__py3-none-any.whl → 0.0.6.dev252__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +2 -2
- junifer/api/decorators.py +1 -2
- junifer/api/functions.py +18 -18
- junifer/api/queue_context/gnu_parallel_local_adapter.py +4 -4
- junifer/api/queue_context/htcondor_adapter.py +4 -4
- junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
- junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
- junifer/api/tests/test_functions.py +32 -32
- junifer/cli/cli.py +3 -3
- junifer/cli/parser.py +4 -4
- junifer/cli/tests/test_cli.py +5 -5
- junifer/cli/utils.py +5 -6
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
- junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
- junifer/configs/juseless/datagrabbers/ucla.py +4 -4
- junifer/data/_dispatch.py +11 -14
- junifer/data/coordinates/_ants_coordinates_warper.py +3 -3
- junifer/data/coordinates/_coordinates.py +6 -6
- junifer/data/coordinates/_fsl_coordinates_warper.py +3 -3
- junifer/data/masks/_ants_mask_warper.py +3 -3
- junifer/data/masks/_fsl_mask_warper.py +3 -3
- junifer/data/masks/_masks.py +6 -9
- junifer/data/masks/tests/test_masks.py +4 -4
- junifer/data/parcellations/_ants_parcellation_warper.py +3 -3
- junifer/data/parcellations/_fsl_parcellation_warper.py +3 -3
- junifer/data/parcellations/_parcellations.py +19 -19
- junifer/data/parcellations/tests/test_parcellations.py +1 -2
- junifer/data/pipeline_data_registry_base.py +3 -2
- junifer/data/template_spaces.py +3 -3
- junifer/data/tests/test_data_utils.py +1 -2
- junifer/data/utils.py +4 -3
- junifer/datagrabber/aomic/id1000.py +2 -2
- junifer/datagrabber/aomic/piop1.py +5 -5
- junifer/datagrabber/aomic/piop2.py +5 -5
- junifer/datagrabber/aomic/tests/test_id1000.py +3 -3
- junifer/datagrabber/aomic/tests/test_piop1.py +4 -4
- junifer/datagrabber/aomic/tests/test_piop2.py +4 -4
- junifer/datagrabber/base.py +12 -11
- junifer/datagrabber/datalad_base.py +3 -3
- junifer/datagrabber/dmcc13_benchmark.py +8 -8
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
- junifer/datagrabber/hcp1200/hcp1200.py +8 -8
- junifer/datagrabber/hcp1200/tests/test_hcp1200.py +2 -1
- junifer/datagrabber/multiple.py +7 -7
- junifer/datagrabber/pattern.py +10 -10
- junifer/datagrabber/pattern_validation_mixin.py +10 -10
- junifer/datagrabber/tests/test_datalad_base.py +7 -8
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +2 -2
- junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
- junifer/datareader/default.py +6 -6
- junifer/external/nilearn/junifer_connectivity_measure.py +2 -2
- junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
- junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +15 -15
- junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
- junifer/markers/base.py +8 -8
- junifer/markers/brainprint.py +7 -9
- junifer/markers/complexity/complexity_base.py +6 -8
- junifer/markers/complexity/hurst_exponent.py +5 -5
- junifer/markers/complexity/multiscale_entropy_auc.py +5 -5
- junifer/markers/complexity/perm_entropy.py +5 -5
- junifer/markers/complexity/range_entropy.py +5 -5
- junifer/markers/complexity/range_entropy_auc.py +5 -5
- junifer/markers/complexity/sample_entropy.py +5 -5
- junifer/markers/complexity/weighted_perm_entropy.py +5 -5
- junifer/markers/ets_rss.py +7 -7
- junifer/markers/falff/_afni_falff.py +1 -2
- junifer/markers/falff/_junifer_falff.py +1 -2
- junifer/markers/falff/falff_base.py +2 -4
- junifer/markers/falff/falff_parcels.py +7 -7
- junifer/markers/falff/falff_spheres.py +6 -6
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +6 -6
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +7 -7
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +6 -6
- junifer/markers/functional_connectivity/functional_connectivity_base.py +10 -10
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +7 -7
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +6 -6
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +3 -3
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +3 -3
- junifer/markers/parcel_aggregation.py +8 -8
- junifer/markers/reho/_afni_reho.py +1 -2
- junifer/markers/reho/_junifer_reho.py +1 -2
- junifer/markers/reho/reho_base.py +2 -4
- junifer/markers/reho/reho_parcels.py +8 -8
- junifer/markers/reho/reho_spheres.py +7 -7
- junifer/markers/sphere_aggregation.py +8 -8
- junifer/markers/temporal_snr/temporal_snr_base.py +8 -8
- junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -6
- junifer/markers/temporal_snr/temporal_snr_spheres.py +5 -5
- junifer/markers/utils.py +3 -3
- junifer/onthefly/_brainprint.py +2 -2
- junifer/onthefly/read_transform.py +3 -3
- junifer/pipeline/marker_collection.py +4 -4
- junifer/pipeline/pipeline_component_registry.py +5 -4
- junifer/pipeline/pipeline_step_mixin.py +7 -7
- junifer/pipeline/tests/test_pipeline_component_registry.py +2 -3
- junifer/pipeline/tests/test_pipeline_step_mixin.py +19 -19
- junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
- junifer/pipeline/update_meta_mixin.py +2 -2
- junifer/pipeline/utils.py +5 -5
- junifer/preprocess/base.py +10 -10
- junifer/preprocess/confounds/fmriprep_confound_remover.py +11 -14
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +1 -2
- junifer/preprocess/smoothing/smoothing.py +7 -7
- junifer/preprocess/warping/_ants_warper.py +3 -4
- junifer/preprocess/warping/_fsl_warper.py +3 -4
- junifer/preprocess/warping/space_warper.py +6 -6
- junifer/preprocess/warping/tests/test_space_warper.py +3 -4
- junifer/stats.py +4 -4
- junifer/storage/base.py +14 -13
- junifer/storage/hdf5.py +21 -20
- junifer/storage/pandas_base.py +12 -11
- junifer/storage/sqlite.py +11 -11
- junifer/storage/tests/test_hdf5.py +1 -2
- junifer/storage/tests/test_sqlite.py +2 -2
- junifer/storage/tests/test_utils.py +8 -7
- junifer/storage/utils.py +7 -7
- junifer/testing/datagrabbers.py +9 -10
- junifer/tests/test_stats.py +2 -2
- junifer/typing/_typing.py +6 -9
- junifer/utils/helpers.py +2 -3
- junifer/utils/logging.py +5 -5
- junifer/utils/singleton.py +3 -3
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/METADATA +2 -2
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/RECORD +131 -131
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/WHEEL +0 -0
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev252.dist-info}/top_level.txt +0 -0
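Nearly every hunk rendered below applies the same modernization: the deprecated typing aliases (Dict, List, Tuple, Type) are dropped from imports and annotations in favour of the built-in generics standardized by PEP 585 (Python 3.9+), while Optional and Union stay, because the X | Y union syntax only arrived with PEP 604 in Python 3.10. A minimal before/after sketch of the pattern (illustrative names, not taken from junifer):

    # Before: typing aliases, deprecated since Python 3.9
    from typing import Dict, List, Optional

    def pick(items: List[str], table: Optional[Dict[str, int]] = None) -> List[str]:
        return [i for i in items if table is None or i in table]

    # After: built-in generics (PEP 585); Optional/Union still come from typing
    from typing import Optional

    def pick(items: list[str], table: Optional[dict[str, int]] = None) -> list[str]:
        return [i for i in items if table is None or i in table]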
junifer/onthefly/read_transform.py
CHANGED
@@ -4,7 +4,7 @@
 # License: AGPL


-from typing import
+from typing import Optional

 import pandas as pd

@@ -20,8 +20,8 @@ def read_transform(
     transform: str,
     feature_name: Optional[str] = None,
     feature_md5: Optional[str] = None,
-    transform_args: Optional[
-    transform_kw_args: Optional[
+    transform_args: Optional[tuple] = None,
+    transform_kw_args: Optional[dict] = None,
 ) -> pd.DataFrame:
     """Read stored feature and transform to specific statistical output.

junifer/pipeline/marker_collection.py
CHANGED
@@ -5,7 +5,7 @@
 # License: AGPL

 from collections import Counter
-from typing import
+from typing import Optional

 from ..datareader import DefaultDataReader
 from ..pipeline import PipelineStepMixin, WorkDirManager
@@ -39,9 +39,9 @@ class MarkerCollection:

     def __init__(
         self,
-        markers:
+        markers: list[MarkerLike],
         datareader: Optional[PipelineStepMixin] = None,
-        preprocessors: Optional[
+        preprocessors: Optional[list[PreprocessorLike]] = None,
         storage: Optional[StorageLike] = None,
     ):
         # Check that the markers have different names
@@ -59,7 +59,7 @@ class MarkerCollection:
         self._preprocessors = preprocessors
         self._storage = storage

-    def fit(self, input:
+    def fit(self, input: dict[str, dict]) -> Optional[dict]:
         """Fit the pipeline.

         Parameters
junifer/pipeline/pipeline_component_registry.py
CHANGED
@@ -6,7 +6,8 @@
 # License: AGPL

 import importlib
-from
+from collections.abc import Mapping
+from typing import Optional, Union

 from ..typing import DataGrabberLike, MarkerLike, PreprocessorLike, StorageLike
 from ..utils import logger, raise_error
@@ -111,7 +112,7 @@ class PipelineComponentRegistry(metaclass=Singleton):
             raise_error(msg=f"Invalid step: {step}", klass=ValueError)

     @property
-    def steps(self) ->
+    def steps(self) -> list[str]:
         """Get valid pipeline steps."""
         return self._steps

@@ -166,7 +167,7 @@ class PipelineComponentRegistry(metaclass=Singleton):
         logger.info(f"De-registering {name} in {step}")
         _ = self._components[step].pop(name)

-    def step_components(self, step: str) ->
+    def step_components(self, step: str) -> list[str]:
         """Get registered components for ``step``.

         Parameters
@@ -235,7 +236,7 @@ class PipelineComponentRegistry(metaclass=Singleton):
         step: str,
         name: str,
         baseclass: type,
-        init_params: Optional[
+        init_params: Optional[dict] = None,
     ) -> Union[DataGrabberLike, PreprocessorLike, MarkerLike, StorageLike]:
         """Build an instance of class registered as ``name``.

junifer/pipeline/pipeline_step_mixin.py
CHANGED
@@ -15,7 +15,7 @@ else:

 from importlib.util import find_spec
 from itertools import chain
-from typing import Any
+from typing import Any

 from ..utils import raise_error
 from .utils import check_ext_dependencies
@@ -27,7 +27,7 @@ __all__ = ["PipelineStepMixin"]
 class PipelineStepMixin:
     """Mixin class for a pipeline step."""

-    def validate_input(self, input:
+    def validate_input(self, input: list[str]) -> list[str]:
         """Validate the input to the pipeline step.

         Parameters
@@ -74,8 +74,8 @@ class PipelineStepMixin:

     def _fit_transform(
         self,
-        input:
-    ) ->
+        input: dict[str, dict],
+    ) -> dict[str, dict]:
         """Fit and transform.

         Parameters
@@ -94,7 +94,7 @@ class PipelineStepMixin:
             klass=NotImplementedError,
         ) # pragma: no cover

-    def validate(self, input:
+    def validate(self, input: list[str]) -> list[str]:
         """Validate the the pipeline step.

         Parameters
@@ -228,8 +228,8 @@ class PipelineStepMixin:
         return outputs

     def fit_transform(
-        self, input:
-    ) ->
+        self, input: dict[str, dict], **kwargs: Any
+    ) -> dict[str, dict]:
         """Fit and transform.

         Parameters
junifer/pipeline/tests/test_pipeline_component_registry.py
CHANGED
@@ -7,7 +7,6 @@

 import logging
 from abc import ABC
-from typing import Type

 import pytest

@@ -61,7 +60,7 @@ def test_pipeline_component_registry_components():
     ],
 )
 def test_pipeline_component_registry_register(
-    caplog: pytest.LogCaptureFixture, step: str, klass:
+    caplog: pytest.LogCaptureFixture, step: str, klass: type
 ) -> None:
     """Test register for PipelineComponentRegistry.

@@ -90,7 +89,7 @@ def test_pipeline_component_registry_register(
     ],
 )
 def test_pipeline_component_registry_deregister(
-    caplog: pytest.LogCaptureFixture, step: str, klass:
+    caplog: pytest.LogCaptureFixture, step: str, klass: type
 ) -> None:
     """Test de-register for PipelineComponentRegistry.

junifer/pipeline/tests/test_pipeline_step_mixin.py
CHANGED
@@ -5,7 +5,7 @@
 # License: AGPL

 import warnings
-from typing import ClassVar
+from typing import ClassVar

 import pytest

@@ -26,13 +26,13 @@ def test_PipelineStepMixin_correct_dependencies() -> None:

         _DEPENDENCIES: ClassVar[Dependencies] = {"math"}

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
@@ -47,13 +47,13 @@ def test_PipelineStepMixin_incorrect_dependencies() -> None:

         _DEPENDENCIES: ClassVar[Dependencies] = {"foobar"}

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = IncorrectMixer()
@@ -72,13 +72,13 @@ def test_PipelineStepMixin_correct_ext_dependencies() -> None:

         _EXT_DEPENDENCIES: ClassVar[ExternalDependencies] = [{"name": "afni"}]

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
@@ -98,13 +98,13 @@ def test_PipelineStepMixin_ext_deps_correct_commands() -> None:
             {"name": "afni", "commands": ["3dReHo"]}
         ]

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
@@ -126,13 +126,13 @@ def test_PipelineStepMixin_ext_deps_incorrect_commands() -> None:
             {"name": "afni", "commands": ["3d"]}
         ]

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
@@ -150,13 +150,13 @@ def test_PipelineStepMixin_incorrect_ext_dependencies() -> None:
             {"name": "foobar", "optional": True}
         ]

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = IncorrectMixer()
@@ -182,13 +182,13 @@ def test_PipelineStepMixin_correct_conditional_dependencies() -> None:

         using = "math"

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
@@ -211,13 +211,13 @@ def test_PipelineStepMixin_incorrect_conditional_dependencies() -> None:
             },
         ]

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = IncorrectMixer()
@@ -246,13 +246,13 @@ def test_PipelineStepMixin_correct_conditional_ext_dependencies() -> None:

         using = "afni"

-        def validate_input(self, input:
+        def validate_input(self, input: list[str]) -> list[str]:
             return input

         def get_output_type(self, input_type: str) -> str:
             return input_type

-        def _fit_transform(self, input:
+        def _fit_transform(self, input: dict[str, dict]) -> dict[str, dict]:
             return {"input": input}

     mixer = CorrectMixer()
junifer/pipeline/tests/test_update_meta_mixin.py
CHANGED
@@ -4,7 +4,7 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import
+from typing import Union

 import pytest

@@ -21,10 +21,10 @@ from junifer.pipeline.update_meta_mixin import UpdateMetaMixin
     ],
 )
 def test_UpdateMetaMixin(
-    input:
+    input: dict,
     step_name: str,
-    dependencies: Union[
-    expected:
+    dependencies: Union[set, list, str, None],
+    expected: set,
 ) -> None:
     """Test UpdateMetaMixin.

junifer/pipeline/update_meta_mixin.py
CHANGED
@@ -4,7 +4,7 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import
+from typing import Union


 __all__ = ["UpdateMetaMixin"]
@@ -15,7 +15,7 @@ class UpdateMetaMixin:

     def update_meta(
         self,
-        input: Union[
+        input: Union[dict, list[dict]],
         step_name: str,
     ) -> None:
         """Update metadata.
junifer/pipeline/utils.py
CHANGED
@@ -5,7 +5,7 @@
 # License: AGPL

 import subprocess
-from typing import Any,
+from typing import Any, Optional

 from junifer.utils.logging import raise_error, warn_with_log

@@ -71,7 +71,7 @@ def check_ext_dependencies(
     return found


-def _check_afni(commands: Optional[
+def _check_afni(commands: Optional[list[str]] = None) -> bool:
     """Check if AFNI is present in the system.

     Parameters
@@ -131,7 +131,7 @@ def _check_afni(commands: Optional[List[str]] = None) -> bool:
     return afni_found


-def _check_fsl(commands: Optional[
+def _check_fsl(commands: Optional[list[str]] = None) -> bool:
     """Check if FSL is present in the system.

     Parameters
@@ -193,7 +193,7 @@ def _check_fsl(commands: Optional[List[str]] = None) -> bool:
     return fsl_found


-def _check_ants(commands: Optional[
+def _check_ants(commands: Optional[list[str]] = None) -> bool:
     """Check if ANTs is present in the system.

     Parameters
@@ -253,7 +253,7 @@ def _check_ants(commands: Optional[List[str]] = None) -> bool:
     return ants_found


-def _check_freesurfer(commands: Optional[
+def _check_freesurfer(commands: Optional[list[str]] = None) -> bool:
     """Check if FreeSurfer is present in the system.

     Parameters
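All four checker helpers above end up with the same shape after this change: an optional list[str] of commands and a bool return. A minimal usage sketch, based only on the signatures and the "3dReHo" test command visible elsewhere in this diff (these are private helpers, normally reached through check_ext_dependencies, so treat the direct import as illustrative):

    from junifer.pipeline.utils import _check_afni, _check_fsl

    # No commands: just check that the tool itself is available.
    afni_available = _check_afni()
    # With commands, the listed commands are checked too; "3dReHo" is the
    # command used in the AFNI test hunks further up in this diff.
    reho_available = _check_afni(commands=["3dReHo"])
    fsl_available = _check_fsl()
    print(afni_available, reho_available, fsl_available)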
junifer/preprocess/base.py
CHANGED
@@ -5,7 +5,7 @@
 # License: AGPL

 from abc import ABC, abstractmethod
-from typing import Any,
+from typing import Any, Optional, Union

 from ..pipeline import PipelineStepMixin, UpdateMetaMixin
 from ..utils import logger, raise_error
@@ -38,8 +38,8 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):

     def __init__(
         self,
-        on: Optional[Union[
-        required_data_types: Optional[Union[
+        on: Optional[Union[list[str], str]] = None,
+        required_data_types: Optional[Union[list[str], str]] = None,
     ) -> None:
         """Initialize the class."""
         # Use all data types if not provided
@@ -60,7 +60,7 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
         else:
             self._required_data_types = required_data_types

-    def validate_input(self, input:
+    def validate_input(self, input: list[str]) -> list[str]:
         """Validate input.

         Parameters
@@ -90,7 +90,7 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
         return [x for x in self._on if x in input]

     @abstractmethod
-    def get_valid_inputs(self) ->
+    def get_valid_inputs(self) -> list[str]:
         """Get valid data types for input.

         Returns
@@ -128,9 +128,9 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
     @abstractmethod
     def preprocess(
         self,
-        input:
-        extra_input: Optional[
-    ) ->
+        input: dict[str, Any],
+        extra_input: Optional[dict[str, Any]] = None,
+    ) -> tuple[dict[str, Any], Optional[dict[str, dict[str, Any]]]]:
         """Preprocess.

         Parameters
@@ -160,8 +160,8 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):

     def _fit_transform(
         self,
-        input:
-    ) ->
+        input: dict[str, dict],
+    ) -> dict:
         """Fit and transform.

         Parameters
junifer/preprocess/confounds/fmriprep_confound_remover.py
CHANGED
@@ -8,10 +8,7 @@
 from typing import (
     Any,
     ClassVar,
-    Dict,
-    List,
     Optional,
-    Tuple,
     Union,
 )

@@ -156,14 +153,14 @@ class fMRIPrepConfoundRemover(BasePreprocessor):

     def __init__(
         self,
-        strategy: Optional[
+        strategy: Optional[dict[str, str]] = None,
         spike: Optional[float] = None,
         detrend: bool = True,
         standardize: bool = True,
         low_pass: Optional[float] = None,
         high_pass: Optional[float] = None,
         t_r: Optional[float] = None,
-        masks: Union[str,
+        masks: Union[str, dict, list[Union[dict, str]], None] = None,
     ) -> None:
         """Initialize the class."""
         if strategy is None:
@@ -211,7 +208,7 @@ class fMRIPrepConfoundRemover(BasePreprocessor):
             )
         super().__init__(on="BOLD", required_data_types=["BOLD"])

-    def get_valid_inputs(self) ->
+    def get_valid_inputs(self) -> list[str]:
         """Get valid data types for input.

         Returns
@@ -240,7 +237,7 @@ class fMRIPrepConfoundRemover(BasePreprocessor):
         # Does not add any new keys
         return input_type

-    def _map_adhoc_to_fmriprep(self, input:
+    def _map_adhoc_to_fmriprep(self, input: dict[str, Any]) -> None:
         """Map the adhoc format to the fmpriprep format spec.

         Based on the spec, map the column names to match the fmriprep format.
@@ -270,8 +267,8 @@ class fMRIPrepConfoundRemover(BasePreprocessor):
         confounds_df.rename(columns=confounds_mapping, inplace=True)

     def _process_fmriprep_spec(
-        self, input:
-    ) ->
+        self, input: dict[str, Any]
+    ) -> tuple[list[str], dict[str, str], dict[str, str], str]:
         """Process the fmpriprep format spec from the specified file.

         Based on the strategy, find the relevant column names in the dataframe,
@@ -359,7 +356,7 @@ class fMRIPrepConfoundRemover(BasePreprocessor):
         out = to_select, squares_to_compute, derivatives_to_compute, spike_name
         return out

-    def _pick_confounds(self, input:
+    def _pick_confounds(self, input: dict[str, Any]) -> pd.DataFrame:
         """Select relevant confounds from the specified file.

         Parameters
@@ -418,7 +415,7 @@ class fMRIPrepConfoundRemover(BasePreprocessor):

     def _validate_data(
         self,
-        input:
+        input: dict[str, Any],
     ) -> None:
         """Validate input data.

@@ -506,9 +503,9 @@ class fMRIPrepConfoundRemover(BasePreprocessor):

     def preprocess(
         self,
-        input:
-        extra_input: Optional[
-    ) ->
+        input: dict[str, Any],
+        extra_input: Optional[dict[str, Any]] = None,
+    ) -> tuple[dict[str, Any], Optional[dict[str, dict[str, Any]]]]:
         """Preprocess.

         Parameters
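Every constructor parameter touched by this file is visible in the __init__ hunk above, so a small construction sketch can be grounded in it; the values below are arbitrary examples and the module path simply mirrors the file path from the summary (not necessarily the preferred public import):

    from junifer.preprocess.confounds.fmriprep_confound_remover import (
        fMRIPrepConfoundRemover,
    )

    # Parameter names come from the __init__ hunk above; values are examples.
    remover = fMRIPrepConfoundRemover(
        detrend=True,
        standardize=True,
        low_pass=0.08,   # example filter cutoffs
        high_pass=0.01,
        t_r=2.0,         # example repetition time
    )
    # strategy (Optional[dict[str, str]]) and masks
    # (Union[str, dict, list[Union[dict, str]], None]) are left at their defaults.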
junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py
CHANGED
@@ -5,7 +5,6 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import List

 import numpy as np
 import pandas as pd
@@ -47,7 +46,7 @@ def test_fMRIPrepConfoundRemover_init() -> None:
         ["T1w", "BOLD"],
     ],
 )
-def test_fMRIPrepConfoundRemover_validate_input(input_:
+def test_fMRIPrepConfoundRemover_validate_input(input_: list[str]) -> None:
     """Test fMRIPrepConfoundRemover validate_input.

     Parameters
junifer/preprocess/smoothing/smoothing.py
CHANGED
@@ -3,7 +3,7 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import Any, ClassVar,
+from typing import Any, ClassVar, Optional, Union

 from ...api.decorators import register_preprocessor
 from ...typing import ConditionalDependencies
@@ -86,8 +86,8 @@ class Smoothing(BasePreprocessor):
     def __init__(
         self,
         using: str,
-        on: Union[
-        smoothing_params: Optional[
+        on: Union[list[str], str],
+        smoothing_params: Optional[dict] = None,
     ) -> None:
         """Initialize the class."""
         # Validate `using` parameter
@@ -102,7 +102,7 @@ class Smoothing(BasePreprocessor):
             )
         super().__init__(on=on)

-    def get_valid_inputs(self) ->
+    def get_valid_inputs(self) -> list[str]:
         """Get valid data types for input.

         Returns
@@ -133,9 +133,9 @@ class Smoothing(BasePreprocessor):

     def preprocess(
         self,
-        input:
-        extra_input: Optional[
-    ) ->
+        input: dict[str, Any],
+        extra_input: Optional[dict[str, Any]] = None,
+    ) -> tuple[dict[str, Any], Optional[dict[str, dict[str, Any]]]]:
         """Preprocess.

         Parameters
junifer/preprocess/warping/_ants_warper.py
CHANGED
@@ -6,7 +6,6 @@
 from typing import (
     Any,
     ClassVar,
-    Dict,
 )

 import nibabel as nib
@@ -40,10 +39,10 @@ class ANTsWarper:

     def preprocess(
         self,
-        input:
-        extra_input:
+        input: dict[str, Any],
+        extra_input: dict[str, Any],
         reference: str,
-    ) ->
+    ) -> dict[str, Any]:
         """Preprocess using ANTs.

         Parameters
junifer/preprocess/warping/_fsl_warper.py
CHANGED
@@ -6,7 +6,6 @@
 from typing import (
     Any,
     ClassVar,
-    Dict,
 )

 import nibabel as nib
@@ -39,9 +38,9 @@ class FSLWarper:

     def preprocess(
         self,
-        input:
-        extra_input:
-    ) ->
+        input: dict[str, Any],
+        extra_input: dict[str, Any],
+    ) -> dict[str, Any]:
         """Preprocess using FSL.

         Parameters