junifer 0.0.3.dev188__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. junifer/_version.py +14 -2
  2. junifer/api/cli.py +162 -17
  3. junifer/api/functions.py +87 -419
  4. junifer/api/parser.py +24 -0
  5. junifer/api/queue_context/__init__.py +8 -0
  6. junifer/api/queue_context/gnu_parallel_local_adapter.py +258 -0
  7. junifer/api/queue_context/htcondor_adapter.py +365 -0
  8. junifer/api/queue_context/queue_context_adapter.py +60 -0
  9. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +192 -0
  10. junifer/api/queue_context/tests/test_htcondor_adapter.py +257 -0
  11. junifer/api/res/afni/run_afni_docker.sh +6 -6
  12. junifer/api/res/ants/ResampleImage +3 -0
  13. junifer/api/res/ants/antsApplyTransforms +3 -0
  14. junifer/api/res/ants/antsApplyTransformsToPoints +3 -0
  15. junifer/api/res/ants/run_ants_docker.sh +39 -0
  16. junifer/api/res/fsl/applywarp +3 -0
  17. junifer/api/res/fsl/flirt +3 -0
  18. junifer/api/res/fsl/img2imgcoord +3 -0
  19. junifer/api/res/fsl/run_fsl_docker.sh +39 -0
  20. junifer/api/res/fsl/std2imgcoord +3 -0
  21. junifer/api/res/run_conda.sh +4 -4
  22. junifer/api/res/run_venv.sh +22 -0
  23. junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
  24. junifer/api/tests/test_api_utils.py +21 -3
  25. junifer/api/tests/test_cli.py +232 -9
  26. junifer/api/tests/test_functions.py +211 -439
  27. junifer/api/tests/test_parser.py +1 -1
  28. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +6 -1
  29. junifer/configs/juseless/datagrabbers/camcan_vbm.py +6 -1
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +6 -1
  31. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +8 -8
  32. junifer/configs/juseless/datagrabbers/ucla.py +44 -26
  33. junifer/configs/juseless/datagrabbers/ukb_vbm.py +6 -1
  34. junifer/data/VOIs/meta/AutobiographicalMemory_VOIs.txt +23 -0
  35. junifer/data/VOIs/meta/Power2013_MNI_VOIs.tsv +264 -0
  36. junifer/data/__init__.py +4 -0
  37. junifer/data/coordinates.py +298 -31
  38. junifer/data/masks.py +360 -28
  39. junifer/data/parcellations.py +621 -188
  40. junifer/data/template_spaces.py +190 -0
  41. junifer/data/tests/test_coordinates.py +34 -3
  42. junifer/data/tests/test_data_utils.py +1 -0
  43. junifer/data/tests/test_masks.py +202 -86
  44. junifer/data/tests/test_parcellations.py +266 -55
  45. junifer/data/tests/test_template_spaces.py +104 -0
  46. junifer/data/utils.py +4 -2
  47. junifer/datagrabber/__init__.py +1 -0
  48. junifer/datagrabber/aomic/id1000.py +111 -70
  49. junifer/datagrabber/aomic/piop1.py +116 -53
  50. junifer/datagrabber/aomic/piop2.py +116 -53
  51. junifer/datagrabber/aomic/tests/test_id1000.py +27 -27
  52. junifer/datagrabber/aomic/tests/test_piop1.py +27 -27
  53. junifer/datagrabber/aomic/tests/test_piop2.py +27 -27
  54. junifer/datagrabber/base.py +62 -10
  55. junifer/datagrabber/datalad_base.py +0 -2
  56. junifer/datagrabber/dmcc13_benchmark.py +372 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +5 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +30 -13
  59. junifer/datagrabber/pattern.py +133 -27
  60. junifer/datagrabber/pattern_datalad.py +111 -13
  61. junifer/datagrabber/tests/test_base.py +57 -6
  62. junifer/datagrabber/tests/test_datagrabber_utils.py +204 -76
  63. junifer/datagrabber/tests/test_datalad_base.py +0 -6
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +256 -0
  65. junifer/datagrabber/tests/test_multiple.py +43 -10
  66. junifer/datagrabber/tests/test_pattern.py +125 -178
  67. junifer/datagrabber/tests/test_pattern_datalad.py +44 -25
  68. junifer/datagrabber/utils.py +151 -16
  69. junifer/datareader/default.py +36 -10
  70. junifer/external/nilearn/junifer_nifti_spheres_masker.py +6 -0
  71. junifer/markers/base.py +25 -16
  72. junifer/markers/collection.py +35 -16
  73. junifer/markers/complexity/__init__.py +27 -0
  74. junifer/markers/complexity/complexity_base.py +149 -0
  75. junifer/markers/complexity/hurst_exponent.py +136 -0
  76. junifer/markers/complexity/multiscale_entropy_auc.py +140 -0
  77. junifer/markers/complexity/perm_entropy.py +132 -0
  78. junifer/markers/complexity/range_entropy.py +136 -0
  79. junifer/markers/complexity/range_entropy_auc.py +145 -0
  80. junifer/markers/complexity/sample_entropy.py +134 -0
  81. junifer/markers/complexity/tests/test_complexity_base.py +19 -0
  82. junifer/markers/complexity/tests/test_hurst_exponent.py +69 -0
  83. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +68 -0
  84. junifer/markers/complexity/tests/test_perm_entropy.py +68 -0
  85. junifer/markers/complexity/tests/test_range_entropy.py +69 -0
  86. junifer/markers/complexity/tests/test_range_entropy_auc.py +69 -0
  87. junifer/markers/complexity/tests/test_sample_entropy.py +68 -0
  88. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +68 -0
  89. junifer/markers/complexity/weighted_perm_entropy.py +133 -0
  90. junifer/markers/falff/_afni_falff.py +153 -0
  91. junifer/markers/falff/_junifer_falff.py +142 -0
  92. junifer/markers/falff/falff_base.py +91 -84
  93. junifer/markers/falff/falff_parcels.py +61 -45
  94. junifer/markers/falff/falff_spheres.py +64 -48
  95. junifer/markers/falff/tests/test_falff_parcels.py +89 -121
  96. junifer/markers/falff/tests/test_falff_spheres.py +92 -127
  97. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +1 -0
  98. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -0
  99. junifer/markers/functional_connectivity/functional_connectivity_base.py +1 -0
  100. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -44
  101. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -39
  102. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -52
  103. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -70
  104. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -85
  105. junifer/markers/parcel_aggregation.py +60 -38
  106. junifer/markers/reho/_afni_reho.py +192 -0
  107. junifer/markers/reho/_junifer_reho.py +281 -0
  108. junifer/markers/reho/reho_base.py +69 -34
  109. junifer/markers/reho/reho_parcels.py +26 -16
  110. junifer/markers/reho/reho_spheres.py +23 -9
  111. junifer/markers/reho/tests/test_reho_parcels.py +93 -92
  112. junifer/markers/reho/tests/test_reho_spheres.py +88 -86
  113. junifer/markers/sphere_aggregation.py +54 -9
  114. junifer/markers/temporal_snr/temporal_snr_base.py +1 -0
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
  117. junifer/markers/tests/test_collection.py +43 -42
  118. junifer/markers/tests/test_ets_rss.py +29 -37
  119. junifer/markers/tests/test_parcel_aggregation.py +587 -468
  120. junifer/markers/tests/test_sphere_aggregation.py +209 -157
  121. junifer/markers/utils.py +2 -40
  122. junifer/onthefly/read_transform.py +13 -6
  123. junifer/pipeline/__init__.py +1 -0
  124. junifer/pipeline/pipeline_step_mixin.py +105 -41
  125. junifer/pipeline/registry.py +17 -0
  126. junifer/pipeline/singleton.py +45 -0
  127. junifer/pipeline/tests/test_pipeline_step_mixin.py +139 -51
  128. junifer/pipeline/tests/test_update_meta_mixin.py +1 -0
  129. junifer/pipeline/tests/test_workdir_manager.py +104 -0
  130. junifer/pipeline/update_meta_mixin.py +8 -2
  131. junifer/pipeline/utils.py +154 -15
  132. junifer/pipeline/workdir_manager.py +246 -0
  133. junifer/preprocess/__init__.py +3 -0
  134. junifer/preprocess/ants/__init__.py +4 -0
  135. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  136. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  137. junifer/preprocess/base.py +96 -69
  138. junifer/preprocess/bold_warper.py +265 -0
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +91 -134
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +106 -111
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/tests/test_bold_warper.py +159 -0
  145. junifer/preprocess/tests/test_preprocess_base.py +6 -6
  146. junifer/preprocess/warping/__init__.py +6 -0
  147. junifer/preprocess/warping/_ants_warper.py +167 -0
  148. junifer/preprocess/warping/_fsl_warper.py +109 -0
  149. junifer/preprocess/warping/space_warper.py +213 -0
  150. junifer/preprocess/warping/tests/test_space_warper.py +198 -0
  151. junifer/stats.py +18 -4
  152. junifer/storage/base.py +9 -1
  153. junifer/storage/hdf5.py +8 -3
  154. junifer/storage/pandas_base.py +2 -1
  155. junifer/storage/sqlite.py +1 -0
  156. junifer/storage/tests/test_hdf5.py +2 -1
  157. junifer/storage/tests/test_sqlite.py +8 -8
  158. junifer/storage/tests/test_utils.py +6 -6
  159. junifer/storage/utils.py +1 -0
  160. junifer/testing/datagrabbers.py +11 -7
  161. junifer/testing/utils.py +1 -0
  162. junifer/tests/test_stats.py +2 -0
  163. junifer/utils/__init__.py +1 -0
  164. junifer/utils/helpers.py +53 -0
  165. junifer/utils/logging.py +14 -3
  166. junifer/utils/tests/test_helpers.py +35 -0
  167. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/METADATA +59 -28
  168. junifer-0.0.4.dist-info/RECORD +257 -0
  169. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/WHEEL +1 -1
  170. junifer/markers/falff/falff_estimator.py +0 -334
  171. junifer/markers/falff/tests/test_falff_estimator.py +0 -238
  172. junifer/markers/reho/reho_estimator.py +0 -515
  173. junifer/markers/reho/tests/test_reho_estimator.py +0 -260
  174. junifer-0.0.3.dev188.dist-info/RECORD +0 -199
  175. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/AUTHORS.rst +0 -0
  176. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/LICENSE.md +0 -0
  177. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/entry_points.txt +0 -0
  178. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/top_level.txt +0 -0
junifer/markers/falff/falff_base.py

@@ -1,16 +1,32 @@
-"""Provide abstract class for computing fALFF."""
+"""Provide base class for ALFF / fALFF."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 #          Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
 #          Kaustubh R. Patil <k.patil@fz-juelich.de>
+#          Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-from abc import abstractmethod
-from typing import ClassVar, Dict, List, Optional, Union
-
-from ...utils.logging import raise_error
+from pathlib import Path
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    List,
+    Optional,
+    Tuple,
+    Type,
+    Union,
+)
+
+from ...utils.logging import logger, raise_error
 from ..base import BaseMarker
-from .falff_estimator import ALFFEstimator
+from ._afni_falff import AFNIALFF
+from ._junifer_falff import JuniferALFF
+
+
+if TYPE_CHECKING:
+    from nibabel import Nifti1Image
 
 
 class ALFFBase(BaseMarker):
@@ -24,33 +40,45 @@ class ALFFBase(BaseMarker):
         Highpass cutoff frequency.
     lowpass : positive float
         Lowpass cutoff frequency.
+    using : {"junifer", "afni"}
+        Implementation to use for computing ALFF:
+
+        * "junifer" : Use ``junifer``'s own ALFF implementation
+        * "afni" : Use AFNI's ``3dRSFC``
+
     tr : positive float, optional
         The Repetition Time of the BOLD data. If None, will extract
-        the TR from NIFTI header (default None).
-    use_afni : bool, optional
-        Whether to use AFNI for computing. If None, will use AFNI only
-        if available (default None).
+        the TR from NIfTI header (default None).
     name : str, optional
         The name of the marker. If None, it will use the class name
         (default None).
 
     Notes
     -----
-    The `tr` parameter is crucial for the correctness of fALFF/ALFF
-    computation. If a dataset is correctly preprocessed, the TR should be
-    extracted from the NIFTI without any issue. However, it has been
-    reported that some preprocessed data might not have the correct TR in
-    the NIFTI header.
+    The ``tr`` parameter is crucial for the correctness of fALFF/ALFF
+    computation. If a dataset is correctly preprocessed, the ``tr`` should be
+    extracted from the NIfTI without any issue. However, it has been
+    reported that some preprocessed data might not have the correct ``tr`` in
+    the NIfTI header.
+
+    Raises
+    ------
+    ValueError
+        If ``highpass`` is not positive or zero or
+        if ``lowpass`` is not positive or
+        if ``highpass`` is higher than ``lowpass`` or
+        if ``using`` is invalid.
 
     """
 
-    _EXT_DEPENDENCIES: ClassVar[
-        List[Dict[str, Union[str, bool, List[str]]]]
-    ] = [
+    _CONDITIONAL_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, Type]]]] = [
+        {
+            "using": "afni",
+            "depends_on": AFNIALFF,
+        },
         {
-            "name": "afni",
-            "optional": True,
-            "commands": ["3dRSFC", "3dAFNItoNIFTI"],
+            "using": "junifer",
+            "depends_on": JuniferALFF,
         },
     ]
 
@@ -59,8 +87,8 @@
         fractional: bool,
         highpass: float,
         lowpass: float,
+        using: str,
         tr: Optional[float] = None,
-        use_afni: Optional[bool] = None,
        name: Optional[str] = None,
     ) -> None:
         if highpass < 0:
@@ -71,8 +99,14 @@ class ALFFBase(BaseMarker):
             raise_error("Highpass must be lower than lowpass")
         self.highpass = highpass
         self.lowpass = lowpass
+        # Validate `using` parameter
+        valid_using = [dep["using"] for dep in self._CONDITIONAL_DEPENDENCIES]
+        if using not in valid_using:
+            raise_error(
+                f"Invalid value for `using`, should be one of: {valid_using}"
+            )
+        self.using = using
         self.tr = tr
-        self.use_afni = use_afni
         self.fractional = fractional
 
         # Create a name based on the class name if none is provided
@@ -108,79 +142,52 @@ class ALFFBase(BaseMarker):
         """
         return "vector"
 
-    def compute(
+    def _compute(
         self,
-        input: Dict[str, Dict],
-        extra_input: Optional[Dict] = None,
-    ) -> Dict:
-        """Compute.
+        input_data: Dict[str, Any],
+    ) -> Tuple["Nifti1Image", Path]:
+        """Compute ALFF and fALFF.
 
         Parameters
         ----------
-        input : dict
-            A single input from the pipeline data object in which to compute
-            the marker.
+        input_data : dict
+            The input to the marker.
         extra_input : dict, optional
-            The other fields in the pipeline data object. Useful for accessing
-            other data kind that needs to be used in the computation. For
-            example, the functional connectivity markers can make use of the
-            confounds if available (default None).
+            The other fields in the pipeline data object (default None).
 
         Returns
         -------
-        dict
-            The computed result as dictionary. This will be either returned
-            to the user or stored in the storage by calling the store method
-            with this as a parameter. The dictionary has the following keys:
-
-            * ``data`` : the actual computed values as a numpy.ndarray
-            * ``col_names`` : the column labels for the computed values as list
+        Niimg-like object
+            The ALFF / fALFF as NIfTI.
+        pathlib.Path
+            The path to the ALFF / fALFF as NIfTI.
 
         """
-        if self.use_afni is None:
-            raise_error(
-                "Parameter `use_afni` must be set to True or False in order "
-                "to compute this marker. It is currently set to None (default "
-                "behaviour). This is intended to be for auto-detection. In "
-                "order for that to happen, please call the `validate` method "
-                "before calling the `compute` method."
-            )
-
-        estimator = ALFFEstimator()
-
-        alff, falff = estimator.fit_transform(
-            use_afni=self.use_afni,
-            input_data=input,
+        logger.debug("Calculating ALFF and fALFF")
+
+        # Conditional estimator
+        if self.using == "afni":
+            estimator = AFNIALFF()
+        elif self.using == "junifer":
+            estimator = JuniferALFF()
+        # Compute ALFF + fALFF
+        alff, falff, alff_path, falff_path = estimator.compute(  # type: ignore
+            data=input_data["data"],
             highpass=self.highpass,
             lowpass=self.lowpass,
             tr=self.tr,
         )
-        post_data = falff if self.fractional else alff
-
-        post_input = dict(input.items())
-        post_input["data"] = post_data
-        post_input["path"] = None
-
-        out = self._postprocess(post_input, extra_input=extra_input)
-
-        return out
-
-    @abstractmethod
-    def _postprocess(
-        self, input: Dict, extra_input: Optional[Dict] = None
-    ) -> Dict:
-        """Postprocess the output of the estimator.
 
-        Parameters
-        ----------
-        input : dict
-            The output of the estimator. It must have the following
-        extra_input : dict, optional
-            The other fields in the pipeline data object. Useful for accessing
-            other data kind that needs to be used in the computation. For
-            example, the functional connectivity markers can make use of the
-            confounds if available (default None).
-        """
-        raise_error(
-            "_postprocess must be implemented", klass=NotImplementedError
-        )
+        # If the input data space is native already, the original path should
+        # be propagated down as it might be required for transforming
+        # parcellation / coordinates to native space, else the
+        # path should be passed for use later if required.
+        # TODO(synchon): will be taken care in #292
+        if input_data["space"] == "native" and self.fractional:
+            return falff, input_data["path"]
+        elif input_data["space"] == "native" and not self.fractional:
+            return alff, input_data["path"]
+        elif input_data["space"] != "native" and self.fractional:
+            return falff, falff_path
+        else:
+            return alff, alff_path
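
The falff_base.py change above replaces the implicit `use_afni` auto-detection with an explicit `using` argument that is validated against the class-level `_CONDITIONAL_DEPENDENCIES` mapping and then used to pick the estimator. Below is a minimal standalone sketch of that pattern, not the junifer source: the class name and the two stand-in estimator classes are hypothetical placeholders for AFNIALFF and JuniferALFF.

from typing import ClassVar, Dict, List, Type, Union


class _StubAFNIALFF:
    """Hypothetical stand-in for junifer's AFNIALFF estimator."""


class _StubJuniferALFF:
    """Hypothetical stand-in for junifer's JuniferALFF estimator."""


class DispatchSketch:
    # Mirrors the _CONDITIONAL_DEPENDENCIES mapping introduced in the diff:
    # each entry ties a `using` value to the estimator class it depends on.
    _CONDITIONAL_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, Type]]]] = [
        {"using": "afni", "depends_on": _StubAFNIALFF},
        {"using": "junifer", "depends_on": _StubJuniferALFF},
    ]

    def __init__(self, using: str) -> None:
        # Same validation idea as ALFFBase.__init__: reject unknown backends early
        valid_using = [dep["using"] for dep in self._CONDITIONAL_DEPENDENCIES]
        if using not in valid_using:
            raise ValueError(
                f"Invalid value for `using`, should be one of: {valid_using}"
            )
        self.using = using

    def make_estimator(self) -> object:
        # Same conditional dispatch idea as ALFFBase._compute
        for dep in self._CONDITIONAL_DEPENDENCIES:
            if dep["using"] == self.using:
                return dep["depends_on"]()  # type: ignore[operator]
        raise RuntimeError("unreachable: validated in __init__")


# Selecting "afni" yields the AFNI-backed stand-in; "junifer" the pure-Python one.
print(type(DispatchSketch(using="afni").make_estimator()).__name__)
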
junifer/markers/falff/falff_parcels.py

@@ -1,20 +1,22 @@
-"""Provide class for computing fALFF on parcels."""
+"""Provide class for ALFF / fALFF on parcels."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 #          Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
 #          Kaustubh R. Patil <k.patil@fz-juelich.de>
+#          Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-from typing import Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 from ...api.decorators import register_marker
-from .. import ParcelAggregation
+from ...utils import logger
+from ..parcel_aggregation import ParcelAggregation
 from .falff_base import ALFFBase
 
 
 @register_marker
 class ALFFParcels(ALFFBase):
-    """Class for computing fALFF/ALFF on parcels.
+    """Class for ALFF / fALFF on parcels.
 
     Parameters
     ----------
@@ -23,6 +25,12 @@ class ALFFParcels(ALFFBase):
         :func:`.list_parcellations`.
     fractional : bool
         Whether to compute fractional ALFF.
+    using : {"junifer", "afni"}
+        Implementation to use for computing ALFF:
+
+        * "junifer" : Use ``junifer``'s own ALFF implementation
+        * "afni" : Use AFNI's ``3dRSFC``
+
     highpass : positive float, optional
         The highpass cutoff frequency for the bandpass filter. If 0,
         it will not apply a highpass filter (default 0.01).
@@ -30,20 +38,17 @@
         The lowpass cutoff frequency for the bandpass filter (default 0.1).
     tr : positive float, optional
         The Repetition Time of the BOLD data. If None, will extract
-        the TR from NIFTI header (default None).
-    use_afni : bool, optional
-        Whether to use AFNI for computing. If None, will use AFNI only
-        if available (default None).
+        the TR from NIfTI header (default None).
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`.get_aggfunc_by_name` (default None).
     masks : str, dict or list of dict or str, optional
         The specification of the masks to apply to regions before extracting
         signals. Check :ref:`Using Masks <using_masks>` for more details.
         If None, will not apply any mask (default None).
-    method : str, optional
-        The method to perform aggregation using. Check valid options in
-        :func:`.get_aggfunc_by_name` (default "mean").
-    method_params : dict, optional
-        Parameters to pass to the aggregation function. Check valid options in
-        :func:`.get_aggfunc_by_name`.
     name : str, optional
         The name of the marker. If None, will use the class name (default
         None).
@@ -51,77 +56,88 @@ class ALFFParcels(ALFFBase):
     Notes
     -----
     The ``tr`` parameter is crucial for the correctness of fALFF/ALFF
-    computation. If a dataset is correctly preprocessed, the TR should be
-    extracted from the NIFTI without any issue. However, it has been
-    reported that some preprocessed data might not have the correct TR in
-    the NIFTI header.
+    computation. If a dataset is correctly preprocessed, the ``tr`` should be
+    extracted from the NIfTI without any issue. However, it has been
+    reported that some preprocessed data might not have the correct ``tr`` in
+    the NIfTI header.
 
     ALFF/fALFF are computed using a bandpass butterworth filter. See
     :func:`scipy.signal.butter` and :func:`scipy.signal.filtfilt` for more
     details.
+
     """
 
     def __init__(
         self,
         parcellation: Union[str, List[str]],
         fractional: bool,
+        using: str,
         highpass: float = 0.01,
         lowpass: float = 0.1,
         tr: Optional[float] = None,
-        use_afni: Optional[bool] = None,
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
         masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
-        method: str = "mean",
-        method_params: Optional[Dict] = None,
        name: Optional[str] = None,
     ) -> None:
-        self.parcellation = parcellation
-        self.masks = masks
-        self.method = method
-        self.method_params = method_params
+        # Superclass init first to validate `using` parameter
        super().__init__(
            fractional=fractional,
            highpass=highpass,
            lowpass=lowpass,
+            using=using,
            tr=tr,
            name=name,
-            use_afni=use_afni,
        )
+        self.parcellation = parcellation
+        self.agg_method = agg_method
+        self.agg_method_params = agg_method_params
+        self.masks = masks
 
-    def _postprocess(
-        self, input: Dict, extra_input: Optional[Dict] = None
-    ) -> Dict:
-        """Compute ALFF and fALFF.
+    def compute(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        """Compute.
 
         Parameters
         ----------
         input : dict
-            A single input from the pipeline data object in which to compute
-            the marker.
+            The BOLD data as dictionary.
         extra_input : dict, optional
-            The other fields in the pipeline data object. Useful for accessing
-            other data kind that needs to be used in the computation. For
-            example, the functional connectivity markers can make use of the
-            confounds if available (default None).
+            The other fields in the pipeline data object (default None).
 
         Returns
         -------
         dict
-            The computed ALFF as dictionary. The dictionary has the following
+            The computed result as dictionary. The dictionary has the following
            keys:
 
            * ``data`` : the actual computed values as a numpy.ndarray
            * ``col_names`` : the column labels for the computed values as list
 
        """
-        pa = ParcelAggregation(
+        logger.info("Calculating ALFF / fALFF for parcels")
+
+        # Compute ALFF / fALFF
+        output_data, output_file_path = self._compute(input_data=input)
+
+        # Initialize parcel aggregation
+        parcel_aggregation = ParcelAggregation(
            parcellation=self.parcellation,
-            method=self.method,
-            method_params=self.method_params,
+            method=self.agg_method,
+            method_params=self.agg_method_params,
            masks=self.masks,
-            on="fALFF",
+            on="BOLD",
+        )
+        # Perform aggregation on ALFF / fALFF
+        parcel_aggregation_input = dict(input.items())
+        parcel_aggregation_input["data"] = output_data
+        parcel_aggregation_input["path"] = output_file_path
+        output = parcel_aggregation.compute(
+            input=parcel_aggregation_input,
+            extra_input=extra_input,
        )
 
-        # get the 2D timeseries after parcel aggregation
-        out = pa.compute(input, extra_input=extra_input)
-
-        return out
+        return output
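
With the falff_parcels.py changes above, the marker is configured with `using` and the renamed `agg_method` / `agg_method_params` arguments instead of `use_afni`, `method` and `method_params`. A minimal construction sketch under that assumption follows; the import path and the parcellation name are illustrative guesses, not taken from this diff (valid names come from junifer's `list_parcellations()`).

from junifer.markers import ALFFParcels  # assumed public import path

marker = ALFFParcels(
    parcellation="Schaefer100x17",  # hypothetical example parcellation name
    fractional=True,                # compute fALFF rather than ALFF
    using="junifer",                # or "afni" to delegate to AFNI's 3dRSFC
    highpass=0.01,                  # bandpass cutoffs, defaults as in the diff
    lowpass=0.1,
    tr=None,                        # None: read the repetition time from the NIfTI header
    agg_method="mean",              # replaces the old `method` parameter
    agg_method_params=None,         # replaces the old `method_params` parameter
)
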
junifer/markers/falff/falff_spheres.py

@@ -1,26 +1,36 @@
-"""Provide class for computing fALFF on spheres."""
+"""Provide class for ALFF / fALFF on spheres."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 #          Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
 #          Kaustubh R. Patil <k.patil@fz-juelich.de>
+#          Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-from typing import Dict, List, Optional, Union
+from typing import Any, Dict, List, Optional, Union
 
 from ...api.decorators import register_marker
-from .. import SphereAggregation
+from ...utils import logger
+from ..sphere_aggregation import SphereAggregation
 from .falff_base import ALFFBase
 
 
 @register_marker
 class ALFFSpheres(ALFFBase):
-    """Class for computing fALFF/ALFF on spheres.
+    """Class for computing ALFF / fALFF on spheres.
 
     Parameters
     ----------
     coords : str
         The name of the coordinates list to use. See
         :func:`.list_coordinates` for options.
+    fractional : bool
+        Whether to compute fractional ALFF.
+    using : {"junifer", "afni"}
+        Implementation to use for computing ALFF:
+
+        * "junifer" : Use ``junifer``'s own ALFF implementation
+        * "afni" : Use AFNI's ``3dRSFC``
+
     radius : float, optional
         The radius of the sphere in mm. If None, the signal will be extracted
         from a single voxel. See :class:`nilearn.maskers.NiftiSpheresMasker`
@@ -28,8 +38,6 @@ class ALFFSpheres(ALFFBase):
     allow_overlap : bool, optional
         Whether to allow overlapping spheres. If False, an error is raised if
         the spheres overlap (default is False).
-    fractional : bool
-        Whether to compute fractional ALFF.
     highpass : positive float, optional
         The highpass cutoff frequency for the bandpass filter. If 0,
         it will not apply a highpass filter (default 0.01).
@@ -37,20 +45,17 @@
         The lowpass cutoff frequency for the bandpass filter (default 0.1).
     tr : positive float, optional
         The Repetition Time of the BOLD data. If None, will extract
-        the TR from NIFTI header (default None).
-    use_afni : bool, optional
-        Whether to use AFNI for computing. If None, will use AFNI only
-        if available (default None).
-    masks : str, dict or list of dict or str, optional
-        The specification of the masks to apply to regions before extracting
-        signals. Check :ref:`Using Masks <using_masks>` for more details.
-        If None, will not apply any mask (default None).
-    method : str, optional
+        the TR from NIfTI header (default None).
+    agg_method : str, optional
         The method to perform aggregation using. Check valid options in
         :func:`.get_aggfunc_by_name` (default "mean").
-    method_params : dict, optional
+    agg_method_params : dict, optional
         Parameters to pass to the aggregation function. Check valid options in
         :func:`.get_aggfunc_by_name`.
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
     name : str, optional
         The name of the marker. If None, will use the class name (default
         None).
@@ -58,83 +63,94 @@ class ALFFSpheres(ALFFBase):
     Notes
     -----
     The ``tr`` parameter is crucial for the correctness of fALFF/ALFF
-    computation. If a dataset is correctly preprocessed, the TR should be
-    extracted from the NIFTI without any issue. However, it has been
-    reported that some preprocessed data might not have the correct TR in
+    computation. If a dataset is correctly preprocessed, the ``tr`` should be
+    extracted from the NIfTI without any issue. However, it has been
+    reported that some preprocessed data might not have the correct ``tr`` in
     the NIFTI header.
 
     ALFF/fALFF are computed using a bandpass butterworth filter. See
     :func:`scipy.signal.butter` and :func:`scipy.signal.filtfilt` for more
     details.
+
     """
 
     def __init__(
         self,
         coords: str,
         fractional: bool,
+        using: str,
         radius: Optional[float] = None,
         allow_overlap: bool = False,
         highpass: float = 0.01,
         lowpass: float = 0.1,
         tr: Optional[float] = None,
-        use_afni: Optional[bool] = None,
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
-        method: str = "mean",
-        method_params: Optional[Dict] = None,
        name: Optional[str] = None,
     ) -> None:
-        self.coords = coords
-        self.radius = radius
-        self.allow_overlap = allow_overlap
-        self.masks = masks
-        self.method = method
-        self.method_params = method_params
+        # Superclass init first to validate `using` parameter
        super().__init__(
            fractional=fractional,
            highpass=highpass,
            lowpass=lowpass,
+            using=using,
            tr=tr,
            name=name,
-            use_afni=use_afni,
        )
+        self.coords = coords
+        self.radius = radius
+        self.allow_overlap = allow_overlap
+        self.agg_method = agg_method
+        self.agg_method_params = agg_method_params
+        self.masks = masks
 
-    def _postprocess(
-        self, input: Dict, extra_input: Optional[Dict] = None
-    ) -> Dict:
-        """Compute ALFF and fALFF.
+    def compute(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        """Compute.
 
         Parameters
         ----------
         input : dict
-            A single input from the pipeline data object in which to compute
-            the marker.
+            The BOLD data as dictionary.
        extra_input : dict, optional
-            The other fields in the pipeline data object. Useful for accessing
-            other data kind that needs to be used in the computation. For
-            example, the functional connectivity markers can make use of the
-            confounds if available (default None).
+            The other fields in the pipeline data object (default None).
 
         Returns
         -------
         dict
-            The computed ALFF as dictionary. The dictionary has the following
+            The computed result as dictionary. The dictionary has the following
            keys:
 
            * ``data`` : the actual computed values as a numpy.ndarray
            * ``col_names`` : the column labels for the computed values as list
 
        """
-        pa = SphereAggregation(
+        logger.info("Calculating ALFF / fALFF for spheres")
+
+        # Compute ALFF / fALFF
+        output_data, output_file_path = self._compute(input_data=input)
+
+        # Initialize sphere aggregation
+        sphere_aggregation = SphereAggregation(
            coords=self.coords,
            radius=self.radius,
            allow_overlap=self.allow_overlap,
-            method=self.method,
-            method_params=self.method_params,
+            method=self.agg_method,
+            method_params=self.agg_method_params,
            masks=self.masks,
-            on="fALFF",
+            on="BOLD",
+        )
+        # Perform aggregation on ALFF / fALFF
+        sphere_aggregation_input = dict(input.items())
+        sphere_aggregation_input["data"] = output_data
+        sphere_aggregation_input["path"] = output_file_path
+        output = sphere_aggregation.compute(
+            input=sphere_aggregation_input,
+            extra_input=extra_input,
        )
 
-        # get the 2D timeseries after sphere aggregation
-        out = pa.compute(input, extra_input=extra_input)
-
-        return out
+        return output
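
The falff_spheres.py changes mirror the parcels variant: an explicit `using` backend, `agg_method` / `agg_method_params` in place of `method` / `method_params`, and no `use_afni` flag. A matching construction sketch, again with an assumed import path and a hypothetical coordinates name (valid names come from junifer's `list_coordinates()`):

from junifer.markers import ALFFSpheres  # assumed public import path

marker = ALFFSpheres(
    coords="DMNBuckner",   # hypothetical example coordinates list
    fractional=False,      # plain ALFF
    using="afni",          # requires AFNI's 3dRSFC to be available
    radius=5.0,            # sphere radius in mm
    allow_overlap=False,
    agg_method="mean",
    agg_method_params=None,
)
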