junifer 0.0.5__py3-none-any.whl → 0.0.5.dev24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (194)
  1. junifer/__init__.py +0 -17
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +1 -4
  4. junifer/api/cli.py +1 -91
  5. junifer/api/decorators.py +0 -9
  6. junifer/api/functions.py +10 -56
  7. junifer/api/parser.py +0 -3
  8. junifer/api/queue_context/__init__.py +1 -4
  9. junifer/api/res/afni/run_afni_docker.sh +1 -1
  10. junifer/api/res/ants/run_ants_docker.sh +1 -1
  11. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  12. junifer/api/tests/test_api_utils.py +2 -4
  13. junifer/api/tests/test_cli.py +0 -83
  14. junifer/api/tests/test_functions.py +2 -27
  15. junifer/configs/__init__.py +1 -1
  16. junifer/configs/juseless/__init__.py +1 -4
  17. junifer/configs/juseless/datagrabbers/__init__.py +1 -10
  18. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +0 -3
  19. junifer/configs/juseless/datagrabbers/camcan_vbm.py +0 -3
  20. junifer/configs/juseless/datagrabbers/ixi_vbm.py +0 -3
  21. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +3 -1
  22. junifer/configs/juseless/datagrabbers/ucla.py +9 -12
  23. junifer/configs/juseless/datagrabbers/ukb_vbm.py +0 -3
  24. junifer/data/__init__.py +1 -21
  25. junifer/data/coordinates.py +19 -10
  26. junifer/data/masks.py +87 -58
  27. junifer/data/parcellations.py +3 -14
  28. junifer/data/template_spaces.py +1 -4
  29. junifer/data/tests/test_masks.py +37 -26
  30. junifer/data/utils.py +0 -3
  31. junifer/datagrabber/__init__.py +1 -18
  32. junifer/datagrabber/aomic/__init__.py +0 -3
  33. junifer/datagrabber/aomic/id1000.py +37 -70
  34. junifer/datagrabber/aomic/piop1.py +36 -69
  35. junifer/datagrabber/aomic/piop2.py +38 -71
  36. junifer/datagrabber/aomic/tests/test_id1000.py +99 -44
  37. junifer/datagrabber/aomic/tests/test_piop1.py +108 -65
  38. junifer/datagrabber/aomic/tests/test_piop2.py +102 -45
  39. junifer/datagrabber/base.py +6 -13
  40. junifer/datagrabber/datalad_base.py +1 -13
  41. junifer/datagrabber/dmcc13_benchmark.py +53 -36
  42. junifer/datagrabber/hcp1200/__init__.py +0 -3
  43. junifer/datagrabber/hcp1200/datalad_hcp1200.py +0 -3
  44. junifer/datagrabber/hcp1200/hcp1200.py +1 -4
  45. junifer/datagrabber/multiple.py +6 -45
  46. junifer/datagrabber/pattern.py +62 -170
  47. junifer/datagrabber/pattern_datalad.py +12 -25
  48. junifer/datagrabber/tests/test_datagrabber_utils.py +218 -0
  49. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  50. junifer/datagrabber/tests/test_dmcc13_benchmark.py +19 -46
  51. junifer/datagrabber/tests/test_multiple.py +84 -161
  52. junifer/datagrabber/tests/test_pattern.py +0 -45
  53. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  54. junifer/datagrabber/utils.py +230 -0
  55. junifer/datareader/__init__.py +1 -4
  56. junifer/datareader/default.py +43 -95
  57. junifer/external/__init__.py +1 -1
  58. junifer/external/nilearn/__init__.py +1 -5
  59. junifer/external/nilearn/junifer_nifti_spheres_masker.py +9 -23
  60. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +1 -76
  61. junifer/markers/__init__.py +1 -23
  62. junifer/markers/base.py +28 -68
  63. junifer/markers/collection.py +2 -10
  64. junifer/markers/complexity/__init__.py +0 -10
  65. junifer/markers/complexity/complexity_base.py +43 -26
  66. junifer/markers/complexity/hurst_exponent.py +0 -3
  67. junifer/markers/complexity/multiscale_entropy_auc.py +0 -3
  68. junifer/markers/complexity/perm_entropy.py +0 -3
  69. junifer/markers/complexity/range_entropy.py +0 -3
  70. junifer/markers/complexity/range_entropy_auc.py +0 -3
  71. junifer/markers/complexity/sample_entropy.py +0 -3
  72. junifer/markers/complexity/tests/test_hurst_exponent.py +3 -11
  73. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +3 -11
  74. junifer/markers/complexity/tests/test_perm_entropy.py +3 -11
  75. junifer/markers/complexity/tests/test_range_entropy.py +3 -11
  76. junifer/markers/complexity/tests/test_range_entropy_auc.py +3 -11
  77. junifer/markers/complexity/tests/test_sample_entropy.py +3 -11
  78. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +3 -11
  79. junifer/markers/complexity/weighted_perm_entropy.py +0 -3
  80. junifer/markers/ets_rss.py +42 -27
  81. junifer/markers/falff/__init__.py +0 -3
  82. junifer/markers/falff/_afni_falff.py +2 -5
  83. junifer/markers/falff/_junifer_falff.py +0 -3
  84. junifer/markers/falff/falff_base.py +46 -20
  85. junifer/markers/falff/falff_parcels.py +27 -56
  86. junifer/markers/falff/falff_spheres.py +29 -60
  87. junifer/markers/falff/tests/test_falff_parcels.py +23 -39
  88. junifer/markers/falff/tests/test_falff_spheres.py +23 -39
  89. junifer/markers/functional_connectivity/__init__.py +0 -9
  90. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +60 -63
  91. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +32 -45
  92. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +36 -49
  93. junifer/markers/functional_connectivity/functional_connectivity_base.py +70 -71
  94. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +25 -34
  95. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +30 -40
  96. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +7 -11
  97. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +7 -27
  98. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +12 -28
  99. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +11 -35
  100. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +62 -36
  101. junifer/markers/parcel_aggregation.py +61 -47
  102. junifer/markers/reho/__init__.py +0 -3
  103. junifer/markers/reho/_afni_reho.py +2 -5
  104. junifer/markers/reho/_junifer_reho.py +1 -4
  105. junifer/markers/reho/reho_base.py +27 -8
  106. junifer/markers/reho/reho_parcels.py +17 -28
  107. junifer/markers/reho/reho_spheres.py +18 -27
  108. junifer/markers/reho/tests/test_reho_parcels.py +3 -8
  109. junifer/markers/reho/tests/test_reho_spheres.py +3 -8
  110. junifer/markers/sphere_aggregation.py +59 -43
  111. junifer/markers/temporal_snr/__init__.py +0 -3
  112. junifer/markers/temporal_snr/temporal_snr_base.py +32 -23
  113. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -9
  114. junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -9
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +3 -6
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +3 -6
  117. junifer/markers/tests/test_collection.py +8 -9
  118. junifer/markers/tests/test_ets_rss.py +9 -15
  119. junifer/markers/tests/test_markers_base.py +18 -17
  120. junifer/markers/tests/test_parcel_aggregation.py +32 -93
  121. junifer/markers/tests/test_sphere_aggregation.py +19 -72
  122. junifer/onthefly/__init__.py +1 -4
  123. junifer/onthefly/read_transform.py +0 -3
  124. junifer/pipeline/__init__.py +1 -9
  125. junifer/pipeline/pipeline_step_mixin.py +4 -21
  126. junifer/pipeline/registry.py +0 -3
  127. junifer/pipeline/singleton.py +0 -3
  128. junifer/pipeline/tests/test_registry.py +1 -1
  129. junifer/pipeline/update_meta_mixin.py +0 -3
  130. junifer/pipeline/utils.py +1 -67
  131. junifer/pipeline/workdir_manager.py +0 -3
  132. junifer/preprocess/__init__.py +2 -9
  133. junifer/preprocess/ants/__init__.py +4 -0
  134. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  135. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  136. junifer/preprocess/base.py +3 -6
  137. junifer/preprocess/bold_warper.py +265 -0
  138. junifer/preprocess/confounds/__init__.py +0 -3
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +60 -47
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +113 -72
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/smoothing/__init__.py +0 -3
  145. junifer/preprocess/smoothing/_afni_smoothing.py +1 -1
  146. junifer/preprocess/tests/test_bold_warper.py +159 -0
  147. junifer/preprocess/warping/__init__.py +0 -3
  148. junifer/preprocess/warping/_ants_warper.py +0 -3
  149. junifer/preprocess/warping/_fsl_warper.py +0 -3
  150. junifer/stats.py +1 -4
  151. junifer/storage/__init__.py +1 -9
  152. junifer/storage/base.py +1 -40
  153. junifer/storage/hdf5.py +9 -71
  154. junifer/storage/pandas_base.py +0 -3
  155. junifer/storage/sqlite.py +0 -3
  156. junifer/storage/tests/test_hdf5.py +10 -82
  157. junifer/storage/utils.py +0 -9
  158. junifer/testing/__init__.py +1 -4
  159. junifer/testing/datagrabbers.py +6 -13
  160. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  161. junifer/testing/utils.py +0 -3
  162. junifer/utils/__init__.py +2 -13
  163. junifer/utils/fs.py +0 -3
  164. junifer/utils/helpers.py +1 -32
  165. junifer/utils/logging.py +4 -33
  166. junifer/utils/tests/test_logging.py +0 -8
  167. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/METADATA +16 -17
  168. junifer-0.0.5.dev24.dist-info/RECORD +265 -0
  169. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/WHEEL +1 -1
  170. junifer/api/res/freesurfer/mri_binarize +0 -3
  171. junifer/api/res/freesurfer/mri_mc +0 -3
  172. junifer/api/res/freesurfer/mri_pretess +0 -3
  173. junifer/api/res/freesurfer/mris_convert +0 -3
  174. junifer/api/res/freesurfer/run_freesurfer_docker.sh +0 -61
  175. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  176. junifer/datagrabber/pattern_validation_mixin.py +0 -388
  177. junifer/datagrabber/tests/test_pattern_validation_mixin.py +0 -249
  178. junifer/external/BrainPrint/brainprint/__init__.py +0 -4
  179. junifer/external/BrainPrint/brainprint/_version.py +0 -3
  180. junifer/external/BrainPrint/brainprint/asymmetry.py +0 -91
  181. junifer/external/BrainPrint/brainprint/brainprint.py +0 -441
  182. junifer/external/BrainPrint/brainprint/surfaces.py +0 -258
  183. junifer/external/BrainPrint/brainprint/utils/__init__.py +0 -1
  184. junifer/external/BrainPrint/brainprint/utils/_config.py +0 -112
  185. junifer/external/BrainPrint/brainprint/utils/utils.py +0 -188
  186. junifer/external/nilearn/junifer_connectivity_measure.py +0 -483
  187. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +0 -1089
  188. junifer/markers/brainprint.py +0 -459
  189. junifer/markers/tests/test_brainprint.py +0 -58
  190. junifer-0.0.5.dist-info/RECORD +0 -275
  191. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/AUTHORS.rst +0 -0
  192. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/LICENSE.md +0 -0
  193. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/entry_points.txt +0 -0
  194. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/top_level.txt +0 -0
@@ -42,11 +42,6 @@ def test_ReHoParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
42
42
  parcellation="TianxS1x3TxMNInonlinear2009cAsym",
43
43
  using="junifer",
44
44
  )
45
- # Check correct output
46
- assert "vector" == marker.get_output_type(
47
- input_type="BOLD", output_feature="reho"
48
- )
49
-
50
45
  # Fit transform marker on data
51
46
  output = marker.fit_transform(element_data)
52
47
 
@@ -54,7 +49,7 @@ def test_ReHoParcels(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
54
49
 
55
50
  # Get BOLD output
56
51
  assert "BOLD" in output
57
- output_bold = output["BOLD"]["reho"]
52
+ output_bold = output["BOLD"]
58
53
  # Assert BOLD output keys
59
54
  assert "data" in output_bold
60
55
  assert "col_names" in output_bold
@@ -107,14 +102,14 @@ def test_ReHoParcels_comparison(tmp_path: Path) -> None:
107
102
  # Fit transform marker on data
108
103
  junifer_output = junifer_marker.fit_transform(element_data)
109
104
  # Get BOLD output
110
- junifer_output_bold = junifer_output["BOLD"]["reho"]
105
+ junifer_output_bold = junifer_output["BOLD"]
111
106
 
112
107
  # Initialize marker
113
108
  afni_marker = ReHoParcels(parcellation="Schaefer100x7", using="afni")
114
109
  # Fit transform marker on data
115
110
  afni_output = afni_marker.fit_transform(element_data)
116
111
  # Get BOLD output
117
- afni_output_bold = afni_output["BOLD"]["reho"]
112
+ afni_output_bold = afni_output["BOLD"]
118
113
 
119
114
  # Check for Pearson correlation coefficient
120
115
  r, _ = sp.stats.pearsonr(
@@ -40,11 +40,6 @@ def test_ReHoSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
40
40
  marker = ReHoSpheres(
41
41
  coords=COORDINATES, using="junifer", radius=10.0
42
42
  )
43
- # Check correct output
44
- assert "vector" == marker.get_output_type(
45
- input_type="BOLD", output_feature="reho"
46
- )
47
-
48
43
  # Fit transform marker on data
49
44
  output = marker.fit_transform(element_data)
50
45
 
@@ -52,7 +47,7 @@ def test_ReHoSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
52
47
 
53
48
  # Get BOLD output
54
49
  assert "BOLD" in output
55
- output_bold = output["BOLD"]["reho"]
50
+ output_bold = output["BOLD"]
56
51
  # Assert BOLD output keys
57
52
  assert "data" in output_bold
58
53
  assert "col_names" in output_bold
@@ -104,7 +99,7 @@ def test_ReHoSpheres_comparison(tmp_path: Path) -> None:
104
99
  # Fit transform marker on data
105
100
  junifer_output = junifer_marker.fit_transform(element_data)
106
101
  # Get BOLD output
107
- junifer_output_bold = junifer_output["BOLD"]["reho"]
102
+ junifer_output_bold = junifer_output["BOLD"]
108
103
 
109
104
  # Initialize marker
110
105
  afni_marker = ReHoSpheres(
@@ -115,7 +110,7 @@ def test_ReHoSpheres_comparison(tmp_path: Path) -> None:
115
110
  # Fit transform marker on data
116
111
  afni_output = afni_marker.fit_transform(element_data)
117
112
  # Get BOLD output
118
- afni_output_bold = afni_output["BOLD"]["reho"]
113
+ afni_output_bold = afni_output["BOLD"]
119
114
 
120
115
  # Check for Pearson correlation coefficient
121
116
  r, _ = sp.stats.pearsonr(
@@ -14,9 +14,6 @@ from ..utils import logger, raise_error, warn_with_log
14
14
  from .base import BaseMarker
15
15
 
16
16
 
17
- __all__ = ["SphereAggregation"]
18
-
19
-
20
17
  @register_marker
21
18
  class SphereAggregation(BaseMarker):
22
19
  """Class for sphere aggregation.
@@ -68,36 +65,6 @@ class SphereAggregation(BaseMarker):
68
65
 
69
66
  _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "numpy"}
70
67
 
71
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
72
- "T1w": {
73
- "aggregation": "vector",
74
- },
75
- "T2w": {
76
- "aggregation": "vector",
77
- },
78
- "BOLD": {
79
- "aggregation": "timeseries",
80
- },
81
- "VBM_GM": {
82
- "aggregation": "vector",
83
- },
84
- "VBM_WM": {
85
- "aggregation": "vector",
86
- },
87
- "VBM_CSF": {
88
- "aggregation": "vector",
89
- },
90
- "fALFF": {
91
- "aggregation": "vector",
92
- },
93
- "GCOR": {
94
- "aggregation": "vector",
95
- },
96
- "LCOR": {
97
- "aggregation": "vector",
98
- },
99
- }
100
-
101
68
  def __init__(
102
69
  self,
103
70
  coords: str,
@@ -133,6 +100,61 @@ class SphereAggregation(BaseMarker):
133
100
  self.time_method = time_method
134
101
  self.time_method_params = time_method_params or {}
135
102
 
103
+ def get_valid_inputs(self) -> List[str]:
104
+ """Get valid data types for input.
105
+
106
+ Returns
107
+ -------
108
+ list of str
109
+ The list of data types that can be used as input for this marker.
110
+
111
+ """
112
+ return [
113
+ "T1w",
114
+ "T2w",
115
+ "BOLD",
116
+ "VBM_GM",
117
+ "VBM_WM",
118
+ "VBM_CSF",
119
+ "fALFF",
120
+ "GCOR",
121
+ "LCOR",
122
+ ]
123
+
124
+ def get_output_type(self, input_type: str) -> str:
125
+ """Get output type.
126
+
127
+ Parameters
128
+ ----------
129
+ input_type : str
130
+ The data type input to the marker.
131
+
132
+ Returns
133
+ -------
134
+ str
135
+ The storage type output by the marker.
136
+
137
+ Raises
138
+ ------
139
+ ValueError
140
+ If the ``input_type`` is invalid.
141
+
142
+ """
143
+
144
+ if input_type in [
145
+ "VBM_GM",
146
+ "VBM_WM",
147
+ "VBM_CSF",
148
+ "fALFF",
149
+ "GCOR",
150
+ "LCOR",
151
+ ]:
152
+ return "vector"
153
+ elif input_type == "BOLD":
154
+ return "timeseries"
155
+ else:
156
+ raise_error(f"Unknown input kind for {input_type}")
157
+
136
158
  def compute(
137
159
  self,
138
160
  input: Dict[str, Any],
@@ -158,10 +180,8 @@ class SphereAggregation(BaseMarker):
158
180
  to the user or stored in the storage by calling the store method
159
181
  with this as a parameter. The dictionary has the following keys:
160
182
 
161
- * ``aggregation`` : dictionary with the following keys:
162
-
163
- - ``data`` : ROI values as ``numpy.ndarray``
164
- - ``col_names`` : ROI labels as list of str
183
+ * ``data`` : the actual computed values as a numpy.ndarray
184
+ * ``col_names`` : the column labels for the computed values as list
165
185
 
166
186
  Warns
167
187
  -----
@@ -218,9 +238,5 @@ class SphereAggregation(BaseMarker):
218
238
  "available."
219
239
  )
220
240
  # Format the output
221
- return {
222
- "aggregation": {
223
- "data": out_values,
224
- "col_names": labels,
225
- },
226
- }
241
+ out = {"data": out_values, "col_names": labels}
242
+ return out
@@ -5,6 +5,3 @@
5
5
 
6
6
  from .temporal_snr_parcels import TemporalSNRParcels
7
7
  from .temporal_snr_spheres import TemporalSNRSpheres
8
-
9
-
10
- __all__ = ["TemporalSNRParcels", "TemporalSNRSpheres"]
@@ -13,9 +13,6 @@ from ...utils import raise_error
13
13
  from ..base import BaseMarker
14
14
 
15
15
 
16
- __all__ = ["TemporalSNRBase"]
17
-
18
-
19
16
  class TemporalSNRBase(BaseMarker):
20
17
  """Abstract base class for temporal SNR markers.
21
18
 
@@ -39,12 +36,6 @@ class TemporalSNRBase(BaseMarker):
39
36
 
40
37
  _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
41
38
 
42
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
43
- "BOLD": {
44
- "tsnr": "vector",
45
- },
46
- }
47
-
48
39
  def __init__(
49
40
  self,
50
41
  agg_method: str = "mean",
@@ -67,6 +58,33 @@ class TemporalSNRBase(BaseMarker):
67
58
  klass=NotImplementedError,
68
59
  )
69
60
 
61
+ def get_valid_inputs(self) -> List[str]:
62
+ """Get valid data types for input.
63
+
64
+ Returns
65
+ -------
66
+ list of str
67
+ The list of data types that can be used as input for this marker.
68
+
69
+ """
70
+ return ["BOLD"]
71
+
72
+ def get_output_type(self, input_type: str) -> str:
73
+ """Get output type.
74
+
75
+ Parameters
76
+ ----------
77
+ input_type : str
78
+ The data type input to the marker.
79
+
80
+ Returns
81
+ -------
82
+ str
83
+ The storage type output by the marker.
84
+
85
+ """
86
+ return "vector"
87
+
70
88
  def compute(
71
89
  self,
72
90
  input: Dict[str, Any],
@@ -86,14 +104,11 @@ class TemporalSNRBase(BaseMarker):
86
104
  Returns
87
105
  -------
88
106
  dict
89
- The computed result as dictionary. This will be either returned
90
- to the user or stored in the storage by calling the store method
91
- with this as a parameter. The dictionary has the following keys:
92
-
93
- * ``tsnr`` : dictionary with the following keys:
107
+ The computed result as dictionary. The following keys will be
108
+ included in the dictionary:
94
109
 
95
- - ``data`` : computed tSNR as ``numpy.ndarray``
96
- - ``col_names`` : ROI labels as list of str
110
+ * ``data`` : the computed values as a ``numpy.ndarray``
111
+ * ``col_names`` : the column labels for the computed values as list
97
112
 
98
113
  """
99
114
  # Calculate voxelwise temporal signal-to-noise ratio in an image
@@ -111,10 +126,4 @@ class TemporalSNRBase(BaseMarker):
111
126
  mask_img=mask_img,
112
127
  )
113
128
  # Perform necessary aggregation and return
114
- return {
115
- "tsnr": {
116
- **self.aggregate(input=input, extra_input=extra_input)[
117
- "aggregation"
118
- ]
119
- }
120
- }
129
+ return self.aggregate(input=input, extra_input=extra_input)
@@ -10,9 +10,6 @@ from ..parcel_aggregation import ParcelAggregation
10
10
  from .temporal_snr_base import TemporalSNRBase
11
11
 
12
12
 
13
- __all__ = ["TemporalSNRParcels"]
14
-
15
-
16
13
  @register_marker
17
14
  class TemporalSNRParcels(TemporalSNRBase):
18
15
  """Class for temporal signal-to-noise ratio using parcellations.
@@ -77,16 +74,16 @@ class TemporalSNRParcels(TemporalSNRBase):
77
74
  to the user or stored in the storage by calling the store method
78
75
  with this as a parameter. The dictionary has the following keys:
79
76
 
80
- * ``aggregation`` : dictionary with the following keys:
81
-
82
- - ``data`` : ROI-wise tSNR values as ``numpy.ndarray``
83
- - ``col_names`` : ROI labels as list of str
77
+ * ``data`` : ROI-wise temporal SNR as a ``numpy.ndarray``
78
+ * ``col_names`` : the ROI labels for the computed values as list
84
79
 
85
80
  """
86
- return ParcelAggregation(
81
+ parcel_aggregation = ParcelAggregation(
87
82
  parcellation=self.parcellation,
88
83
  method=self.agg_method,
89
84
  method_params=self.agg_method_params,
90
85
  masks=self.masks,
91
86
  on="BOLD",
92
- ).compute(input=input, extra_input=extra_input)
87
+ )
88
+ # Return the 2D timeseries after parcel aggregation
89
+ return parcel_aggregation.compute(input=input, extra_input=extra_input)
@@ -11,9 +11,6 @@ from ..utils import raise_error
11
11
  from .temporal_snr_base import TemporalSNRBase
12
12
 
13
13
 
14
- __all__ = ["TemporalSNRSpheres"]
15
-
16
-
17
14
  @register_marker
18
15
  class TemporalSNRSpheres(TemporalSNRBase):
19
16
  """Class for temporal signal-to-noise ratio using coordinates (spheres).
@@ -92,13 +89,11 @@ class TemporalSNRSpheres(TemporalSNRBase):
92
89
  to the user or stored in the storage by calling the store method
93
90
  with this as a parameter. The dictionary has the following keys:
94
91
 
95
- * ``aggregation`` : dictionary with the following keys:
96
-
97
- - ``data`` : ROI-wise tSNR values as ``numpy.ndarray``
98
- - ``col_names`` : ROI labels as list of str
92
+ * ``data`` : VOI-wise temporal SNR as a ``numpy.ndarray``
93
+ * ``col_names`` : the VOI labels for the computed values as list
99
94
 
100
95
  """
101
- return SphereAggregation(
96
+ sphere_aggregation = SphereAggregation(
102
97
  coords=self.coords,
103
98
  radius=self.radius,
104
99
  allow_overlap=self.allow_overlap,
@@ -106,4 +101,6 @@ class TemporalSNRSpheres(TemporalSNRBase):
106
101
  method_params=self.agg_method_params,
107
102
  masks=self.masks,
108
103
  on="BOLD",
109
- ).compute(input=input, extra_input=extra_input)
104
+ )
105
+ # Return the 2D timeseries after sphere aggregation
106
+ return sphere_aggregation.compute(input=input, extra_input=extra_input)
@@ -20,13 +20,11 @@ def test_TemporalSNRParcels_computation() -> None:
20
20
  parcellation="TianxS1x3TxMNInonlinear2009cAsym"
21
21
  )
22
22
  # Check correct output
23
- assert "vector" == marker.get_output_type(
24
- input_type="BOLD", output_feature="tsnr"
25
- )
23
+ assert marker.get_output_type("BOLD") == "vector"
26
24
 
27
25
  # Fit-transform the data
28
26
  tsnr_parcels = marker.fit_transform(element_data)
29
- tsnr_parcels_bold = tsnr_parcels["BOLD"]["tsnr"]
27
+ tsnr_parcels_bold = tsnr_parcels["BOLD"]
30
28
 
31
29
  assert "data" in tsnr_parcels_bold
32
30
  assert "col_names" in tsnr_parcels_bold
@@ -53,6 +51,5 @@ def test_TemporalSNRParcels_storage(tmp_path: Path) -> None:
53
51
  marker.fit_transform(input=element_data, storage=storage)
54
52
  features = storage.list_features()
55
53
  assert any(
56
- x["name"] == "BOLD_TemporalSNRParcels_tsnr"
57
- for x in features.values()
54
+ x["name"] == "BOLD_TemporalSNRParcels" for x in features.values()
58
55
  )
@@ -20,13 +20,11 @@ def test_TemporalSNRSpheres_computation() -> None:
20
20
  element_data = DefaultDataReader().fit_transform(dg["sub001"])
21
21
  marker = TemporalSNRSpheres(coords="DMNBuckner", radius=5.0)
22
22
  # Check correct output
23
- assert "vector" == marker.get_output_type(
24
- input_type="BOLD", output_feature="tsnr"
25
- )
23
+ assert marker.get_output_type("BOLD") == "vector"
26
24
 
27
25
  # Fit-transform the data
28
26
  tsnr_spheres = marker.fit_transform(element_data)
29
- tsnr_spheres_bold = tsnr_spheres["BOLD"]["tsnr"]
27
+ tsnr_spheres_bold = tsnr_spheres["BOLD"]
30
28
 
31
29
  assert "data" in tsnr_spheres_bold
32
30
  assert "col_names" in tsnr_spheres_bold
@@ -51,8 +49,7 @@ def test_TemporalSNRSpheres_storage(tmp_path: Path) -> None:
51
49
  marker.fit_transform(input=element_data, storage=storage)
52
50
  features = storage.list_features()
53
51
  assert any(
54
- x["name"] == "BOLD_TemporalSNRSpheres_tsnr"
55
- for x in features.values()
52
+ x["name"] == "BOLD_TemporalSNRSpheres" for x in features.values()
56
53
  )
57
54
 
58
55
 
@@ -84,7 +84,7 @@ def test_marker_collection() -> None:
84
84
  for t_marker in markers:
85
85
  t_name = t_marker.name
86
86
  assert "BOLD" in out[t_name]
87
- t_bold = out[t_name]["BOLD"]["aggregation"]
87
+ t_bold = out[t_name]["BOLD"]
88
88
  assert "data" in t_bold
89
89
  assert "col_names" in t_bold
90
90
  assert "meta" in t_bold
@@ -107,8 +107,7 @@ def test_marker_collection() -> None:
107
107
  for t_marker in markers:
108
108
  t_name = t_marker.name
109
109
  assert_array_equal(
110
- out[t_name]["BOLD"]["aggregation"]["data"],
111
- out2[t_name]["BOLD"]["aggregation"]["data"],
110
+ out[t_name]["BOLD"]["data"], out2[t_name]["BOLD"]["data"]
112
111
  )
113
112
 
114
113
 
@@ -202,20 +201,20 @@ def test_marker_collection_storage(tmp_path: Path) -> None:
202
201
  feature_md5 = next(iter(features.keys()))
203
202
  t_feature = storage.read_df(feature_md5=feature_md5)
204
203
  fname = "tian_mean"
205
- t_data = out[fname]["BOLD"]["aggregation"]["data"] # type: ignore
206
- cols = out[fname]["BOLD"]["aggregation"]["col_names"] # type: ignore
204
+ t_data = out[fname]["BOLD"]["data"] # type: ignore
205
+ cols = out[fname]["BOLD"]["col_names"] # type: ignore
207
206
  assert_array_equal(t_feature[cols].values, t_data) # type: ignore
208
207
 
209
208
  feature_md5 = list(features.keys())[1]
210
209
  t_feature = storage.read_df(feature_md5=feature_md5)
211
210
  fname = "tian_std"
212
- t_data = out[fname]["BOLD"]["aggregation"]["data"] # type: ignore
213
- cols = out[fname]["BOLD"]["aggregation"]["col_names"] # type: ignore
211
+ t_data = out[fname]["BOLD"]["data"] # type: ignore
212
+ cols = out[fname]["BOLD"]["col_names"] # type: ignore
214
213
  assert_array_equal(t_feature[cols].values, t_data) # type: ignore
215
214
 
216
215
  feature_md5 = list(features.keys())[2]
217
216
  t_feature = storage.read_df(feature_md5=feature_md5)
218
217
  fname = "tian_trim_mean90"
219
- t_data = out[fname]["BOLD"]["aggregation"]["data"] # type: ignore
220
- cols = out[fname]["BOLD"]["aggregation"]["col_names"] # type: ignore
218
+ t_data = out[fname]["BOLD"]["data"] # type: ignore
219
+ cols = out[fname]["BOLD"]["col_names"] # type: ignore
221
220
  assert_array_equal(t_feature[cols].values, t_data) # type: ignore
@@ -26,9 +26,8 @@ def test_compute() -> None:
26
26
  with PartlyCloudyTestingDataGrabber() as dg:
27
27
  element_data = DefaultDataReader().fit_transform(dg["sub-01"])
28
28
  # Compute the RSSETSMarker
29
- rss_ets = RSSETSMarker(parcellation=PARCELLATION).compute(
30
- element_data["BOLD"]
31
- )
29
+ marker = RSSETSMarker(parcellation=PARCELLATION)
30
+ rss_ets = marker.compute(element_data["BOLD"])
32
31
 
33
32
  # Compare with nilearn
34
33
  # Load testing parcellation
@@ -42,14 +41,14 @@ def test_compute() -> None:
42
41
  element_data["BOLD"]["data"]
43
42
  )
44
43
  # Assert the dimension of timeseries
45
- assert extacted_timeseries.shape[0] == len(rss_ets["rss_ets"]["data"])
44
+ assert extacted_timeseries.shape[0] == len(rss_ets["data"])
46
45
 
47
46
 
48
47
  def test_get_output_type() -> None:
49
48
  """Test RSS ETS get_output_type()."""
50
49
  assert "timeseries" == RSSETSMarker(
51
50
  parcellation=PARCELLATION
52
- ).get_output_type(input_type="BOLD", output_feature="rss_ets")
51
+ ).get_output_type("BOLD")
53
52
 
54
53
 
55
54
  def test_store(tmp_path: Path) -> None:
@@ -62,17 +61,12 @@ def test_store(tmp_path: Path) -> None:
62
61
 
63
62
  """
64
63
  with PartlyCloudyTestingDataGrabber() as dg:
65
- # Get element data
66
64
  element_data = DefaultDataReader().fit_transform(dg["sub-01"])
65
+ # Compute the RSSETSMarker
66
+ marker = RSSETSMarker(parcellation=PARCELLATION)
67
67
  # Create storage
68
68
  storage = SQLiteFeatureStorage(tmp_path / "test_rss_ets.sqlite")
69
- # Compute the RSSETSMarker and store
70
- _ = RSSETSMarker(parcellation=PARCELLATION).fit_transform(
71
- input=element_data, storage=storage
72
- )
73
- # Retrieve features
69
+ # Store
70
+ marker.fit_transform(input=element_data, storage=storage)
74
71
  features = storage.list_features()
75
- # Check marker name
76
- assert any(
77
- x["name"] == "BOLD_RSSETSMarker_rss_ets" for x in features.values()
78
- )
72
+ assert any(x["name"] == "BOLD_RSSETSMarker" for x in features.values())
@@ -20,27 +20,27 @@ def test_base_marker_subclassing() -> None:
20
20
 
21
21
  # Create concrete class
22
22
  class MyBaseMarker(BaseMarker):
23
-
24
- _MARKER_INOUT_MAPPINGS = { # noqa: RUF012
25
- "BOLD": {
26
- "feat_1": "timeseries",
27
- },
28
- }
29
-
30
23
  def __init__(self, on, name=None) -> None:
31
24
  self.parameter = 1
32
25
  super().__init__(on, name)
33
26
 
27
+ def get_valid_inputs(self):
28
+ return ["BOLD", "T1w"]
29
+
30
+ def get_output_type(self, input):
31
+ if input == "BOLD":
32
+ return "timeseries"
33
+ raise ValueError(f"Cannot compute output type for {input}")
34
+
34
35
  def compute(self, input, extra_input):
35
36
  return {
36
- "feat_1": {
37
- "data": "data",
38
- "col_names": ["columns"],
39
- },
37
+ "data": "data",
38
+ "columns": "columns",
39
+ "row_names": "row_names",
40
40
  }
41
41
 
42
- with pytest.raises(ValueError, match=r"cannot be computed on \['T1w'\]"):
43
- MyBaseMarker(on=["BOLD", "T1w"])
42
+ with pytest.raises(ValueError, match=r"cannot be computed on \['T2w'\]"):
43
+ MyBaseMarker(on=["BOLD", "T2w"])
44
44
 
45
45
  # Create input for marker
46
46
  input_ = {
@@ -64,11 +64,12 @@ def test_base_marker_subclassing() -> None:
64
64
  output = marker.fit_transform(input=input_) # process
65
65
  # Check output
66
66
  assert "BOLD" in output
67
- assert "data" in output["BOLD"]["feat_1"]
68
- assert "col_names" in output["BOLD"]["feat_1"]
67
+ assert "data" in output["BOLD"]
68
+ assert "columns" in output["BOLD"]
69
+ assert "row_names" in output["BOLD"]
69
70
 
70
- assert "meta" in output["BOLD"]["feat_1"]
71
- meta = output["BOLD"]["feat_1"]["meta"]
71
+ assert "meta" in output["BOLD"]
72
+ meta = output["BOLD"]["meta"]
72
73
  assert "datagrabber" in meta
73
74
  assert "element" in meta
74
75
  assert "datareader" in meta