junifer 0.0.3.dev188__py3-none-any.whl → 0.0.4__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/_version.py +14 -2
- junifer/api/cli.py +162 -17
- junifer/api/functions.py +87 -419
- junifer/api/parser.py +24 -0
- junifer/api/queue_context/__init__.py +8 -0
- junifer/api/queue_context/gnu_parallel_local_adapter.py +258 -0
- junifer/api/queue_context/htcondor_adapter.py +365 -0
- junifer/api/queue_context/queue_context_adapter.py +60 -0
- junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +192 -0
- junifer/api/queue_context/tests/test_htcondor_adapter.py +257 -0
- junifer/api/res/afni/run_afni_docker.sh +6 -6
- junifer/api/res/ants/ResampleImage +3 -0
- junifer/api/res/ants/antsApplyTransforms +3 -0
- junifer/api/res/ants/antsApplyTransformsToPoints +3 -0
- junifer/api/res/ants/run_ants_docker.sh +39 -0
- junifer/api/res/fsl/applywarp +3 -0
- junifer/api/res/fsl/flirt +3 -0
- junifer/api/res/fsl/img2imgcoord +3 -0
- junifer/api/res/fsl/run_fsl_docker.sh +39 -0
- junifer/api/res/fsl/std2imgcoord +3 -0
- junifer/api/res/run_conda.sh +4 -4
- junifer/api/res/run_venv.sh +22 -0
- junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
- junifer/api/tests/test_api_utils.py +21 -3
- junifer/api/tests/test_cli.py +232 -9
- junifer/api/tests/test_functions.py +211 -439
- junifer/api/tests/test_parser.py +1 -1
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +6 -1
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +6 -1
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +6 -1
- junifer/configs/juseless/datagrabbers/tests/test_ucla.py +8 -8
- junifer/configs/juseless/datagrabbers/ucla.py +44 -26
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +6 -1
- junifer/data/VOIs/meta/AutobiographicalMemory_VOIs.txt +23 -0
- junifer/data/VOIs/meta/Power2013_MNI_VOIs.tsv +264 -0
- junifer/data/__init__.py +4 -0
- junifer/data/coordinates.py +298 -31
- junifer/data/masks.py +360 -28
- junifer/data/parcellations.py +621 -188
- junifer/data/template_spaces.py +190 -0
- junifer/data/tests/test_coordinates.py +34 -3
- junifer/data/tests/test_data_utils.py +1 -0
- junifer/data/tests/test_masks.py +202 -86
- junifer/data/tests/test_parcellations.py +266 -55
- junifer/data/tests/test_template_spaces.py +104 -0
- junifer/data/utils.py +4 -2
- junifer/datagrabber/__init__.py +1 -0
- junifer/datagrabber/aomic/id1000.py +111 -70
- junifer/datagrabber/aomic/piop1.py +116 -53
- junifer/datagrabber/aomic/piop2.py +116 -53
- junifer/datagrabber/aomic/tests/test_id1000.py +27 -27
- junifer/datagrabber/aomic/tests/test_piop1.py +27 -27
- junifer/datagrabber/aomic/tests/test_piop2.py +27 -27
- junifer/datagrabber/base.py +62 -10
- junifer/datagrabber/datalad_base.py +0 -2
- junifer/datagrabber/dmcc13_benchmark.py +372 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +5 -0
- junifer/datagrabber/hcp1200/hcp1200.py +30 -13
- junifer/datagrabber/pattern.py +133 -27
- junifer/datagrabber/pattern_datalad.py +111 -13
- junifer/datagrabber/tests/test_base.py +57 -6
- junifer/datagrabber/tests/test_datagrabber_utils.py +204 -76
- junifer/datagrabber/tests/test_datalad_base.py +0 -6
- junifer/datagrabber/tests/test_dmcc13_benchmark.py +256 -0
- junifer/datagrabber/tests/test_multiple.py +43 -10
- junifer/datagrabber/tests/test_pattern.py +125 -178
- junifer/datagrabber/tests/test_pattern_datalad.py +44 -25
- junifer/datagrabber/utils.py +151 -16
- junifer/datareader/default.py +36 -10
- junifer/external/nilearn/junifer_nifti_spheres_masker.py +6 -0
- junifer/markers/base.py +25 -16
- junifer/markers/collection.py +35 -16
- junifer/markers/complexity/__init__.py +27 -0
- junifer/markers/complexity/complexity_base.py +149 -0
- junifer/markers/complexity/hurst_exponent.py +136 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +140 -0
- junifer/markers/complexity/perm_entropy.py +132 -0
- junifer/markers/complexity/range_entropy.py +136 -0
- junifer/markers/complexity/range_entropy_auc.py +145 -0
- junifer/markers/complexity/sample_entropy.py +134 -0
- junifer/markers/complexity/tests/test_complexity_base.py +19 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +69 -0
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +68 -0
- junifer/markers/complexity/tests/test_perm_entropy.py +68 -0
- junifer/markers/complexity/tests/test_range_entropy.py +69 -0
- junifer/markers/complexity/tests/test_range_entropy_auc.py +69 -0
- junifer/markers/complexity/tests/test_sample_entropy.py +68 -0
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +68 -0
- junifer/markers/complexity/weighted_perm_entropy.py +133 -0
- junifer/markers/falff/_afni_falff.py +153 -0
- junifer/markers/falff/_junifer_falff.py +142 -0
- junifer/markers/falff/falff_base.py +91 -84
- junifer/markers/falff/falff_parcels.py +61 -45
- junifer/markers/falff/falff_spheres.py +64 -48
- junifer/markers/falff/tests/test_falff_parcels.py +89 -121
- junifer/markers/falff/tests/test_falff_spheres.py +92 -127
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +1 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -0
- junifer/markers/functional_connectivity/functional_connectivity_base.py +1 -0
- junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -44
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -39
- junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -52
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -70
- junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -85
- junifer/markers/parcel_aggregation.py +60 -38
- junifer/markers/reho/_afni_reho.py +192 -0
- junifer/markers/reho/_junifer_reho.py +281 -0
- junifer/markers/reho/reho_base.py +69 -34
- junifer/markers/reho/reho_parcels.py +26 -16
- junifer/markers/reho/reho_spheres.py +23 -9
- junifer/markers/reho/tests/test_reho_parcels.py +93 -92
- junifer/markers/reho/tests/test_reho_spheres.py +88 -86
- junifer/markers/sphere_aggregation.py +54 -9
- junifer/markers/temporal_snr/temporal_snr_base.py +1 -0
- junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
- junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
- junifer/markers/tests/test_collection.py +43 -42
- junifer/markers/tests/test_ets_rss.py +29 -37
- junifer/markers/tests/test_parcel_aggregation.py +587 -468
- junifer/markers/tests/test_sphere_aggregation.py +209 -157
- junifer/markers/utils.py +2 -40
- junifer/onthefly/read_transform.py +13 -6
- junifer/pipeline/__init__.py +1 -0
- junifer/pipeline/pipeline_step_mixin.py +105 -41
- junifer/pipeline/registry.py +17 -0
- junifer/pipeline/singleton.py +45 -0
- junifer/pipeline/tests/test_pipeline_step_mixin.py +139 -51
- junifer/pipeline/tests/test_update_meta_mixin.py +1 -0
- junifer/pipeline/tests/test_workdir_manager.py +104 -0
- junifer/pipeline/update_meta_mixin.py +8 -2
- junifer/pipeline/utils.py +154 -15
- junifer/pipeline/workdir_manager.py +246 -0
- junifer/preprocess/__init__.py +3 -0
- junifer/preprocess/ants/__init__.py +4 -0
- junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
- junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
- junifer/preprocess/base.py +96 -69
- junifer/preprocess/bold_warper.py +265 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +91 -134
- junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +106 -111
- junifer/preprocess/fsl/__init__.py +4 -0
- junifer/preprocess/fsl/apply_warper.py +179 -0
- junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
- junifer/preprocess/tests/test_bold_warper.py +159 -0
- junifer/preprocess/tests/test_preprocess_base.py +6 -6
- junifer/preprocess/warping/__init__.py +6 -0
- junifer/preprocess/warping/_ants_warper.py +167 -0
- junifer/preprocess/warping/_fsl_warper.py +109 -0
- junifer/preprocess/warping/space_warper.py +213 -0
- junifer/preprocess/warping/tests/test_space_warper.py +198 -0
- junifer/stats.py +18 -4
- junifer/storage/base.py +9 -1
- junifer/storage/hdf5.py +8 -3
- junifer/storage/pandas_base.py +2 -1
- junifer/storage/sqlite.py +1 -0
- junifer/storage/tests/test_hdf5.py +2 -1
- junifer/storage/tests/test_sqlite.py +8 -8
- junifer/storage/tests/test_utils.py +6 -6
- junifer/storage/utils.py +1 -0
- junifer/testing/datagrabbers.py +11 -7
- junifer/testing/utils.py +1 -0
- junifer/tests/test_stats.py +2 -0
- junifer/utils/__init__.py +1 -0
- junifer/utils/helpers.py +53 -0
- junifer/utils/logging.py +14 -3
- junifer/utils/tests/test_helpers.py +35 -0
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/METADATA +59 -28
- junifer-0.0.4.dist-info/RECORD +257 -0
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/WHEEL +1 -1
- junifer/markers/falff/falff_estimator.py +0 -334
- junifer/markers/falff/tests/test_falff_estimator.py +0 -238
- junifer/markers/reho/reho_estimator.py +0 -515
- junifer/markers/reho/tests/test_reho_estimator.py +0 -260
- junifer-0.0.3.dev188.dist-info/RECORD +0 -199
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/top_level.txt +0 -0
junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py
ADDED
@@ -0,0 +1,56 @@
+"""Provide tests for AntsApplyTransformsWarper."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+import socket
+
+import pytest
+
+from junifer.datagrabber import DMCC13Benchmark
+from junifer.datareader import DefaultDataReader
+from junifer.pipeline.utils import _check_ants
+from junifer.preprocess.ants.ants_apply_transforms_warper import (
+    _AntsApplyTransformsWarper,
+)
+
+
+def test_AntsApplyTransformsWarper_init() -> None:
+    """Test AntsApplyTransformsWarper init."""
+    ants_apply_transforms_warper = _AntsApplyTransformsWarper(
+        reference="T1w", on="BOLD"
+    )
+    assert ants_apply_transforms_warper.ref == "T1w"
+    assert ants_apply_transforms_warper.on == "BOLD"
+
+
+@pytest.mark.skipif(
+    _check_ants() is False, reason="requires ANTs to be in PATH"
+)
+@pytest.mark.skipif(
+    socket.gethostname() != "juseless",
+    reason="only for juseless",
+)
+def test_AntsApplyTransformsWarper_preprocess() -> None:
+    """Test AntsApplyTransformsWarper preprocess."""
+    with DMCC13Benchmark(
+        types=["BOLD", "T1w", "Warp"],
+        sessions=["ses-wave1bas"],
+        tasks=["Rest"],
+        phase_encodings=["AP"],
+        runs=["1"],
+        native_t1w=True,
+    ) as dg:
+        # Read data
+        element_data = DefaultDataReader().fit_transform(
+            dg[("sub-f9057kp", "ses-wave1bas", "Rest", "AP", "1")]
+        )
+        # Preprocess data
+        data_type, data = _AntsApplyTransformsWarper(
+            reference="T1w", on="BOLD"
+        ).preprocess(
+            input=element_data["BOLD"],
+            extra_input=element_data,
+        )
+        assert isinstance(data_type, str)
+        assert isinstance(data, dict)
junifer/preprocess/base.py
CHANGED
@@ -12,29 +12,50 @@ from ..utils import logger, raise_error
 
 
 class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
-    """
+    """Abstract base class for all preprocessors.
+
+    For every interface that is required, one needs to provide a concrete
+    implementation of this abstract class.
 
     Parameters
     ----------
-    on : str or list of str, optional
-        The
-        will work on all available data (default None).
+    on : str or list of str or None, optional
+        The data type to apply the preprocessor on. If None,
+        will work on all available data types (default None).
+    required_data_types : str or list of str, optional
+        The data types needed for computation. If None,
+        will be equal to ``on`` (default None).
+
+    Raises
+    ------
+    ValueError
+        If required input data type(s) is(are) not found.
+
     """
 
     def __init__(
         self,
         on: Optional[Union[List[str], str]] = None,
+        required_data_types: Optional[Union[List[str], str]] = None,
     ) -> None:
+        """Initialize the class."""
+        # Use all data types if not provided
         if on is None:
             on = self.get_valid_inputs()
+        # Convert data types to list
         if not isinstance(on, list):
             on = [on]
-
+        # Check if required inputs are found
         if any(x not in self.get_valid_inputs() for x in on):
             name = self.__class__.__name__
             wrong_on = [x for x in on if x not in self.get_valid_inputs()]
-
+            raise_error(f"{name} cannot be computed on {wrong_on}")
         self._on = on
+        # Set required data types for validation
+        if required_data_types is None:
+            self._required_data_types = on
+        else:
+            self._required_data_types = required_data_types
 
     def validate_input(self, input: List[str]) -> List[str]:
         """Validate input.
@@ -55,30 +76,45 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
         ------
         ValueError
             If the input does not have the required data.
+
         """
-        if
+        if any(x not in input for x in self._required_data_types):
             raise_error(
                 "Input does not have the required data."
                 f"\t Input: {input}"
-                f"\t Required (
+                f"\t Required (all of): {self._required_data_types}"
             )
         return [x for x in self._on if x in input]
 
     @abstractmethod
-    def
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this
+            preprocessor.
+
+        """
+        raise_error(
+            msg="Concrete classes need to implement get_valid_inputs().",
+            klass=NotImplementedError,
+        )
+
+    @abstractmethod
+    def get_output_type(self, input_type: str) -> str:
         """Get output type.
 
         Parameters
         ----------
-
-            The input to the preprocessor.
-            available Junifer Data dictionary keys.
+        input_type : str
+            The data type input to the preprocessor.
 
         Returns
         -------
-
-            The
-            the pipeline step.
+        str
+            The data type output by the preprocessor.
 
         """
         raise_error(
@@ -87,17 +123,35 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
         )
 
     @abstractmethod
-    def
-
+    def preprocess(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Tuple[Dict[str, Any], Optional[Dict[str, Dict[str, Any]]]]:
+        """Preprocess.
+
+        Parameters
+        ----------
+        input : dict
+            A single input from the Junifer Data object to preprocess.
+        extra_input : dict, optional
+            The other fields in the Junifer Data object. Useful for accessing
+            other data kind that needs to be used in the computation. For
+            example, the confound removers can make use of the
+            confounds if available (default None).
 
         Returns
        -------
-
-            The
-
+        dict
+            The computed result as dictionary.
+        dict or None
+            Extra "helper" data types as dictionary to add to the Junifer Data
+            object. For example, computed BOLD mask can be passed via this.
+            If no new "helper" data types is created, None is to be passed.
+
         """
         raise_error(
-            msg="Concrete classes need to implement
+            msg="Concrete classes need to implement preprocess().",
             klass=NotImplementedError,
         )
 
@@ -118,63 +172,36 @@ class BasePreprocessor(ABC, PipelineStepMixin, UpdateMetaMixin):
             The processed output as a dictionary.
 
         """
-
+        # Copy input to not modify the original
+        out = input.copy()
+        # For each data type, run preprocessing
         for type_ in self._on:
+            # Check if data type is available
             if type_ in input.keys():
                 logger.info(f"Preprocessing {type_}")
+                # Get data dict for data type
                 t_input = input[type_]
-
                 # Pass the other data types as extra input, removing
                 # the current type
-                extra_input = input
+                extra_input = input.copy()
                 extra_input.pop(type_)
                 logger.debug(
-                    f"Extra
+                    f"Extra data type for preprocess: {extra_input.keys()}"
                 )
-
+                # Preprocess data
+                t_out, t_extra_input = self.preprocess(
                    input=t_input, extra_input=extra_input
                )
-
-
-
-
-
-
-
-
-
-
-
+                # Set output to the Junifer Data object
+                logger.debug(f"Adding {type_} to output")
+                out[type_] = t_out
+                # Check if helper data types are to be added
+                if t_extra_input is not None:
+                    logger.debug(
+                        f"Adding helper data types: {t_extra_input.keys()} "
+                        "to output"
+                    )
+                    out.update(t_extra_input)
+                # Update metadata for step
+                self.update_meta(out[type_], "preprocess")
         return out
-
-    @abstractmethod
-    def preprocess(
-        self,
-        input: Dict[str, Any],
-        extra_input: Optional[Dict[str, Any]] = None,
-    ) -> Tuple[str, Dict[str, Any]]:
-        """Preprocess.
-
-        Parameters
-        ----------
-        input : dict
-            A single input from the Junifer Data object to preprocess.
-        extra_input : dict, optional
-            The other fields in the Junifer Data object. Useful for accessing
-            other data kind that needs to be used in the computation. For
-            example, the confound removers can make use of the
-            confounds if available (default None).
-
-        Returns
-        -------
-        key : str
-            The key to store the output in the Junifer Data object.
-        object : dict
-            The computed result as dictionary. This will be stored in the
-            Junifer Data object under the key 'key'.
-
-        """
-        raise_error(
-            msg="Concrete classes need to implement preprocess().",
-            klass=NotImplementedError,
-        )
junifer/preprocess/bold_warper.py
ADDED
@@ -0,0 +1,265 @@
+"""Provide class for warping BOLD to other template spaces."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import (
+    Any,
+    ClassVar,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
+
+import nibabel as nib
+from templateflow import api as tflow
+
+from ..api.decorators import register_preprocessor
+from ..data import get_template, get_xfm
+from ..pipeline import WorkDirManager
+from ..utils import logger, raise_error, run_ext_cmd
+from .ants.ants_apply_transforms_warper import _AntsApplyTransformsWarper
+from .base import BasePreprocessor
+from .fsl.apply_warper import _ApplyWarper
+
+
+@register_preprocessor
+class BOLDWarper(BasePreprocessor):
+    """Class for warping BOLD NIfTI images.
+
+    .. deprecated:: 0.0.3
+        `BOLDWarper` will be removed in v0.0.4, it is replaced by
+        `SpaceWarper` because the latter works also with T1w data.
+
+    Parameters
+    ----------
+    using : {"fsl", "ants"}
+        Implementation to use for warping:
+
+        * "fsl" : Use FSL's ``applywarp``
+        * "afni" : Use ANTs' ``antsApplyTransforms``
+
+    reference : str
+        The data type to use as reference for warping, can be either a data
+        type like "T1w" or a template space like "MNI152NLin2009cAsym".
+
+    Raises
+    ------
+    ValueError
+        If ``using`` is invalid or
+        if ``reference`` is invalid.
+
+    Notes
+    -----
+    If you are setting ``reference`` to a template space like
+    "MNI152NLin2009cAsym", make sure ANTs is available for the
+    transformation else it will fail during runtime. It is tricky to validate
+    this beforehand and difficult to enforce this as a requirement, hence the
+    heads-up.
+
+    """
+
+    _CONDITIONAL_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, Type]]]] = [
+        {
+            "using": "fsl",
+            "depends_on": _ApplyWarper,
+        },
+        {
+            "using": "ants",
+            "depends_on": _AntsApplyTransformsWarper,
+        },
+    ]
+
+    def __init__(self, using: str, reference: str) -> None:
+        """Initialize the class."""
+        # Validate `using` parameter
+        valid_using = [dep["using"] for dep in self._CONDITIONAL_DEPENDENCIES]
+        if using not in valid_using:
+            raise_error(
+                f"Invalid value for `using`, should be one of: {valid_using}"
+            )
+        self.using = using
+        self.ref = reference
+        # Initialize superclass based on reference
+        if self.ref == "T1w":
+            super().__init__(
+                on="BOLD", required_data_types=["BOLD", self.ref, "Warp"]
+            )
+        elif self.ref in tflow.templates():
+            super().__init__(on="BOLD", required_data_types=["BOLD"])
+        else:
+            raise_error(f"Unknown reference: {self.ref}")
+
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this
+            preprocessor.
+
+        """
+        return ["BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the preprocessor.
+
+        Returns
+        -------
+        str
+            The data type output by the preprocessor.
+
+        """
+        # Does not add any new keys
+        return input_type
+
+    def preprocess(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Tuple[Dict[str, Any], Optional[Dict[str, Dict[str, Any]]]]:
+        """Preprocess.
+
+        Parameters
+        ----------
+        input : dict
+            The BOLD input from the Junifer Data object.
+        extra_input : dict, optional
+            The other fields in the Junifer Data object. Must include the
+            ``Warp`` and ``ref`` value's keys if native space transformation is
+            needed.
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary.
+        None
+            Extra "helper" data types as dictionary to add to the Junifer Data
+            object.
+
+        Raises
+        ------
+        ValueError
+            If ``extra_input`` is None when transforming to native space
+            i.e., using "T1w" as reference.
+        RuntimeError
+            If warp / transformation file extension is not ".mat" or ".h5"
+            when transforming to native space or
+            if the BOLD data is in the correct space and does not require
+            warping.
+
+        """
+        logger.info(f"Warping BOLD to {self.ref} space using BOLDWarper")
+        # Transform to native space
+        if self.ref == "T1w":
+            # Check for extra inputs
+            if extra_input is None:
+                raise_error(
+                    "No extra input provided, requires `Warp` and "
+                    f"`{self.ref}` data types in particular."
+                )
+            # Check for warp file type to use correct tool
+            warp_file_ext = extra_input["Warp"]["path"].suffix
+            if warp_file_ext == ".mat":
+                logger.debug("Using FSL with BOLDWarper")
+                # Initialize ApplyWarper for computation
+                apply_warper = _ApplyWarper(reference=self.ref, on="BOLD")
+                # Replace original BOLD data with warped BOLD data
+                _, input = apply_warper.preprocess(
+                    input=input,
+                    extra_input=extra_input,
+                )
+            elif warp_file_ext == ".h5":
+                logger.debug("Using ANTs with BOLDWarper")
+                # Initialize AntsApplyTransformsWarper for computation
+                ants_apply_transforms_warper = _AntsApplyTransformsWarper(
+                    reference=self.ref, on="BOLD"
+                )
+                # Replace original BOLD data with warped BOLD data
+                _, input = ants_apply_transforms_warper.preprocess(
+                    input=input,
+                    extra_input=extra_input,
+                )
+            else:
+                raise_error(
+                    msg=(
+                        "Unknown warp / transformation file extension: "
+                        f"{warp_file_ext}"
+                    ),
+                    klass=RuntimeError,
+                )
+        # Transform to template space
+        else:
+            # Check pre-requirements for space manipulation
+            if self.ref == input["space"]:
+                raise_error(
+                    (
+                        f"Skipped warping as the BOLD data is in {self.ref} "
+                        "space which would mean that you can remove the "
+                        "BOLDWarper from the preprocess step."
+                    ),
+                    klass=RuntimeError,
+                )
+            else:
+                # Get xfm file
+                xfm_file_path = get_xfm(src=input["space"], dst=self.ref)
+                # Get template space image
+                template_space_img = get_template(
+                    space=self.ref,
+                    target_data=input,
+                    extra_input=None,
+                )
+
+                # Create component-scoped tempdir
+                tempdir = WorkDirManager().get_tempdir(prefix="boldwarper")
+                # Create element-scoped tempdir so that warped BOLD is
+                # available later as nibabel stores file path reference for
+                # loading on computation
+                element_tempdir = WorkDirManager().get_element_tempdir(
+                    prefix="boldwarper"
+                )
+
+                # Save template
+                template_space_img_path = tempdir / f"{self.ref}_T1w.nii.gz"
+                nib.save(template_space_img, template_space_img_path)
+
+                # Create a tempfile for warped output
+                warped_bold_path = (
+                    element_tempdir
+                    / f"bold_warped_from_{input['space']}_to_{self.ref}.nii.gz"
+                )
+
+                logger.debug(
+                    f"Using ANTs to warp BOLD "
+                    f"from {input['space']} to {self.ref}"
+                )
+                # Set antsApplyTransforms command
+                apply_transforms_cmd = [
+                    "antsApplyTransforms",
+                    "-d 3",
+                    "-e 3",
+                    "-n LanczosWindowedSinc",
+                    f"-i {input['path'].resolve()}",
+                    f"-r {template_space_img_path.resolve()}",
+                    f"-t {xfm_file_path.resolve()}",
+                    f"-o {warped_bold_path.resolve()}",
+                ]
+                # Call antsApplyTransforms
+                run_ext_cmd(
+                    name="antsApplyTransforms", cmd=apply_transforms_cmd
+                )
+
+                # Modify target data
+                input["data"] = nib.load(warped_bold_path)
+                input["space"] = self.ref
+
+        return input, None