junifer 0.0.4.dev829__py3-none-any.whl → 0.0.5__py3-none-any.whl

This diff shows the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (206)
  1. junifer/__init__.py +17 -0
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +4 -1
  4. junifer/api/cli.py +91 -1
  5. junifer/api/decorators.py +9 -0
  6. junifer/api/functions.py +56 -10
  7. junifer/api/parser.py +3 -0
  8. junifer/api/queue_context/__init__.py +4 -1
  9. junifer/api/queue_context/gnu_parallel_local_adapter.py +16 -6
  10. junifer/api/queue_context/htcondor_adapter.py +16 -5
  11. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +41 -12
  12. junifer/api/queue_context/tests/test_htcondor_adapter.py +48 -15
  13. junifer/api/res/afni/run_afni_docker.sh +1 -1
  14. junifer/api/res/ants/run_ants_docker.sh +1 -1
  15. junifer/api/res/freesurfer/mri_binarize +3 -0
  16. junifer/api/res/freesurfer/mri_mc +3 -0
  17. junifer/api/res/freesurfer/mri_pretess +3 -0
  18. junifer/api/res/freesurfer/mris_convert +3 -0
  19. junifer/api/res/freesurfer/run_freesurfer_docker.sh +61 -0
  20. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  21. junifer/api/res/{run_conda.sh → run_conda.bash} +1 -1
  22. junifer/api/res/run_conda.zsh +23 -0
  23. junifer/api/res/run_venv.bash +22 -0
  24. junifer/api/res/{run_venv.sh → run_venv.zsh} +1 -1
  25. junifer/api/tests/test_api_utils.py +4 -2
  26. junifer/api/tests/test_cli.py +83 -0
  27. junifer/api/tests/test_functions.py +27 -2
  28. junifer/configs/__init__.py +1 -1
  29. junifer/configs/juseless/__init__.py +4 -1
  30. junifer/configs/juseless/datagrabbers/__init__.py +10 -1
  31. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +4 -3
  32. junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
  33. junifer/configs/juseless/datagrabbers/ixi_vbm.py +4 -3
  34. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +1 -3
  35. junifer/configs/juseless/datagrabbers/ucla.py +12 -9
  36. junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
  37. junifer/data/__init__.py +21 -1
  38. junifer/data/coordinates.py +10 -19
  39. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  40. junifer/data/masks.py +58 -87
  41. junifer/data/parcellations.py +14 -3
  42. junifer/data/template_spaces.py +4 -1
  43. junifer/data/tests/test_masks.py +26 -37
  44. junifer/data/utils.py +3 -0
  45. junifer/datagrabber/__init__.py +18 -1
  46. junifer/datagrabber/aomic/__init__.py +3 -0
  47. junifer/datagrabber/aomic/id1000.py +70 -37
  48. junifer/datagrabber/aomic/piop1.py +69 -36
  49. junifer/datagrabber/aomic/piop2.py +71 -38
  50. junifer/datagrabber/aomic/tests/test_id1000.py +44 -100
  51. junifer/datagrabber/aomic/tests/test_piop1.py +65 -108
  52. junifer/datagrabber/aomic/tests/test_piop2.py +45 -102
  53. junifer/datagrabber/base.py +13 -6
  54. junifer/datagrabber/datalad_base.py +13 -1
  55. junifer/datagrabber/dmcc13_benchmark.py +36 -53
  56. junifer/datagrabber/hcp1200/__init__.py +3 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +4 -1
  59. junifer/datagrabber/multiple.py +45 -6
  60. junifer/datagrabber/pattern.py +170 -62
  61. junifer/datagrabber/pattern_datalad.py +25 -12
  62. junifer/datagrabber/pattern_validation_mixin.py +388 -0
  63. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +46 -19
  65. junifer/datagrabber/tests/test_multiple.py +161 -84
  66. junifer/datagrabber/tests/test_pattern.py +45 -0
  67. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  68. junifer/datagrabber/tests/test_pattern_validation_mixin.py +249 -0
  69. junifer/datareader/__init__.py +4 -1
  70. junifer/datareader/default.py +95 -43
  71. junifer/external/BrainPrint/brainprint/__init__.py +4 -0
  72. junifer/external/BrainPrint/brainprint/_version.py +3 -0
  73. junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
  74. junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
  75. junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
  76. junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
  77. junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
  78. junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
  79. junifer/external/__init__.py +1 -1
  80. junifer/external/nilearn/__init__.py +5 -1
  81. junifer/external/nilearn/junifer_connectivity_measure.py +483 -0
  82. junifer/external/nilearn/junifer_nifti_spheres_masker.py +23 -9
  83. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +1089 -0
  84. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +76 -1
  85. junifer/markers/__init__.py +23 -1
  86. junifer/markers/base.py +68 -28
  87. junifer/markers/brainprint.py +459 -0
  88. junifer/markers/collection.py +10 -2
  89. junifer/markers/complexity/__init__.py +10 -0
  90. junifer/markers/complexity/complexity_base.py +26 -43
  91. junifer/markers/complexity/hurst_exponent.py +3 -0
  92. junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
  93. junifer/markers/complexity/perm_entropy.py +3 -0
  94. junifer/markers/complexity/range_entropy.py +3 -0
  95. junifer/markers/complexity/range_entropy_auc.py +3 -0
  96. junifer/markers/complexity/sample_entropy.py +3 -0
  97. junifer/markers/complexity/tests/test_hurst_exponent.py +11 -3
  98. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +11 -3
  99. junifer/markers/complexity/tests/test_perm_entropy.py +11 -3
  100. junifer/markers/complexity/tests/test_range_entropy.py +11 -3
  101. junifer/markers/complexity/tests/test_range_entropy_auc.py +11 -3
  102. junifer/markers/complexity/tests/test_sample_entropy.py +11 -3
  103. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +11 -3
  104. junifer/markers/complexity/weighted_perm_entropy.py +3 -0
  105. junifer/markers/ets_rss.py +27 -42
  106. junifer/markers/falff/__init__.py +3 -0
  107. junifer/markers/falff/_afni_falff.py +5 -2
  108. junifer/markers/falff/_junifer_falff.py +3 -0
  109. junifer/markers/falff/falff_base.py +20 -46
  110. junifer/markers/falff/falff_parcels.py +56 -27
  111. junifer/markers/falff/falff_spheres.py +60 -29
  112. junifer/markers/falff/tests/test_falff_parcels.py +39 -23
  113. junifer/markers/falff/tests/test_falff_spheres.py +39 -23
  114. junifer/markers/functional_connectivity/__init__.py +9 -0
  115. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +63 -60
  116. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +45 -32
  117. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +49 -36
  118. junifer/markers/functional_connectivity/functional_connectivity_base.py +71 -70
  119. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +34 -25
  120. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +40 -30
  121. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +11 -7
  122. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +27 -7
  123. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +28 -12
  124. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +35 -11
  125. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +36 -62
  126. junifer/markers/parcel_aggregation.py +47 -61
  127. junifer/markers/reho/__init__.py +3 -0
  128. junifer/markers/reho/_afni_reho.py +5 -2
  129. junifer/markers/reho/_junifer_reho.py +4 -1
  130. junifer/markers/reho/reho_base.py +8 -27
  131. junifer/markers/reho/reho_parcels.py +28 -17
  132. junifer/markers/reho/reho_spheres.py +27 -18
  133. junifer/markers/reho/tests/test_reho_parcels.py +8 -3
  134. junifer/markers/reho/tests/test_reho_spheres.py +8 -3
  135. junifer/markers/sphere_aggregation.py +43 -59
  136. junifer/markers/temporal_snr/__init__.py +3 -0
  137. junifer/markers/temporal_snr/temporal_snr_base.py +23 -32
  138. junifer/markers/temporal_snr/temporal_snr_parcels.py +9 -6
  139. junifer/markers/temporal_snr/temporal_snr_spheres.py +9 -6
  140. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +6 -3
  141. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +6 -3
  142. junifer/markers/tests/test_brainprint.py +58 -0
  143. junifer/markers/tests/test_collection.py +9 -8
  144. junifer/markers/tests/test_ets_rss.py +15 -9
  145. junifer/markers/tests/test_markers_base.py +17 -18
  146. junifer/markers/tests/test_parcel_aggregation.py +93 -32
  147. junifer/markers/tests/test_sphere_aggregation.py +72 -19
  148. junifer/onthefly/__init__.py +4 -1
  149. junifer/onthefly/read_transform.py +3 -0
  150. junifer/pipeline/__init__.py +9 -1
  151. junifer/pipeline/pipeline_step_mixin.py +21 -4
  152. junifer/pipeline/registry.py +3 -0
  153. junifer/pipeline/singleton.py +3 -0
  154. junifer/pipeline/tests/test_registry.py +1 -1
  155. junifer/pipeline/update_meta_mixin.py +3 -0
  156. junifer/pipeline/utils.py +67 -1
  157. junifer/pipeline/workdir_manager.py +3 -0
  158. junifer/preprocess/__init__.py +10 -2
  159. junifer/preprocess/base.py +6 -3
  160. junifer/preprocess/confounds/__init__.py +3 -0
  161. junifer/preprocess/confounds/fmriprep_confound_remover.py +47 -60
  162. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +72 -113
  163. junifer/preprocess/smoothing/__init__.py +9 -0
  164. junifer/preprocess/smoothing/_afni_smoothing.py +119 -0
  165. junifer/preprocess/smoothing/_fsl_smoothing.py +116 -0
  166. junifer/preprocess/smoothing/_nilearn_smoothing.py +69 -0
  167. junifer/preprocess/smoothing/smoothing.py +174 -0
  168. junifer/preprocess/smoothing/tests/test_smoothing.py +94 -0
  169. junifer/preprocess/warping/__init__.py +3 -0
  170. junifer/preprocess/warping/_ants_warper.py +3 -0
  171. junifer/preprocess/warping/_fsl_warper.py +3 -0
  172. junifer/stats.py +4 -1
  173. junifer/storage/__init__.py +9 -1
  174. junifer/storage/base.py +40 -1
  175. junifer/storage/hdf5.py +71 -9
  176. junifer/storage/pandas_base.py +3 -0
  177. junifer/storage/sqlite.py +3 -0
  178. junifer/storage/tests/test_hdf5.py +82 -10
  179. junifer/storage/utils.py +9 -0
  180. junifer/testing/__init__.py +4 -1
  181. junifer/testing/datagrabbers.py +13 -6
  182. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  183. junifer/testing/utils.py +3 -0
  184. junifer/utils/__init__.py +13 -2
  185. junifer/utils/fs.py +3 -0
  186. junifer/utils/helpers.py +32 -1
  187. junifer/utils/logging.py +33 -4
  188. junifer/utils/tests/test_logging.py +8 -0
  189. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/METADATA +17 -16
  190. junifer-0.0.5.dist-info/RECORD +275 -0
  191. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/WHEEL +1 -1
  192. junifer/datagrabber/tests/test_datagrabber_utils.py +0 -218
  193. junifer/datagrabber/utils.py +0 -230
  194. junifer/preprocess/ants/__init__.py +0 -4
  195. junifer/preprocess/ants/ants_apply_transforms_warper.py +0 -185
  196. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +0 -56
  197. junifer/preprocess/bold_warper.py +0 -265
  198. junifer/preprocess/fsl/__init__.py +0 -4
  199. junifer/preprocess/fsl/apply_warper.py +0 -179
  200. junifer/preprocess/fsl/tests/test_apply_warper.py +0 -45
  201. junifer/preprocess/tests/test_bold_warper.py +0 -159
  202. junifer-0.0.4.dev829.dist-info/RECORD +0 -257
  203. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/AUTHORS.rst +0 -0
  204. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/LICENSE.md +0 -0
  205. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/entry_points.txt +0 -0
  206. {junifer-0.0.4.dev829.dist-info → junifer-0.0.5.dist-info}/top_level.txt +0 -0

--- a/junifer/markers/temporal_snr/temporal_snr_base.py
+++ b/junifer/markers/temporal_snr/temporal_snr_base.py
@@ -13,6 +13,9 @@ from ...utils import raise_error
 from ..base import BaseMarker
 
 
+__all__ = ["TemporalSNRBase"]
+
+
 class TemporalSNRBase(BaseMarker):
     """Abstract base class for temporal SNR markers.
 
@@ -36,6 +39,12 @@ class TemporalSNRBase(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "tsnr": "vector",
+        },
+    }
+
     def __init__(
         self,
         agg_method: str = "mean",
@@ -58,33 +67,6 @@ class TemporalSNRBase(BaseMarker):
             klass=NotImplementedError,
         )
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker.
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "vector"
-
     def compute(
         self,
         input: Dict[str, Any],
@@ -104,11 +86,14 @@ class TemporalSNRBase(BaseMarker):
         Returns
         -------
         dict
-            The computed result as dictionary. The following keys will be
-            included in the dictionary:
+            The computed result as dictionary. This will be either returned
+            to the user or stored in the storage by calling the store method
+            with this as a parameter. The dictionary has the following keys:
+
+            * ``tsnr`` : dictionary with the following keys:
 
-            * ``data`` : the computed values as a ``numpy.ndarray``
-            * ``col_names`` : the column labels for the computed values as list
+              - ``data`` : computed tSNR as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
 
         """
         # Calculate voxelwise temporal signal-to-noise ratio in an image
@@ -126,4 +111,10 @@ class TemporalSNRBase(BaseMarker):
             mask_img=mask_img,
        )
         # Perform necessary aggregation and return
-        return self.aggregate(input=input, extra_input=extra_input)
+        return {
+            "tsnr": {
+                **self.aggregate(input=input, extra_input=extra_input)[
+                    "aggregation"
+                ]
+            }
+        }
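
Migration note: the hunks above replace the per-marker get_valid_inputs() and get_output_type() methods with a declarative _MARKER_INOUT_MAPPINGS table, and compute() now nests its results under a feature name. A minimal sketch of the new contract (MyTSNRLike is a hypothetical subclass, not part of this diff; the 2D input layout is an assumption for illustration):

    from typing import Any, ClassVar, Dict, Optional

    import numpy as np

    from junifer.markers import BaseMarker


    class MyTSNRLike(BaseMarker):
        """Hypothetical marker following the 0.0.5 output contract."""

        # data type -> {output feature name -> storage type}
        _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
            "BOLD": {
                "tsnr": "vector",
            },
        }

        def compute(
            self, input: Dict[str, Any], extra_input: Optional[Dict] = None
        ) -> Dict:
            ts = input["data"]  # assumed (timepoints, ROIs) array
            tsnr = ts.mean(axis=0) / ts.std(axis=0)
            # Results are keyed by feature name; each feature carries
            # its own ``data`` and ``col_names``.
            return {
                "tsnr": {
                    "data": tsnr.reshape(1, -1),
                    "col_names": [f"roi_{i}" for i in range(ts.shape[1])],
                },
            }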

--- a/junifer/markers/temporal_snr/temporal_snr_parcels.py
+++ b/junifer/markers/temporal_snr/temporal_snr_parcels.py
@@ -10,6 +10,9 @@ from ..parcel_aggregation import ParcelAggregation
 from .temporal_snr_base import TemporalSNRBase
 
 
+__all__ = ["TemporalSNRParcels"]
+
+
 @register_marker
 class TemporalSNRParcels(TemporalSNRBase):
     """Class for temporal signal-to-noise ratio using parcellations.
@@ -74,16 +77,16 @@ class TemporalSNRParcels(TemporalSNRBase):
             to the user or stored in the storage by calling the store method
             with this as a parameter. The dictionary has the following keys:
 
-            * ``data`` : ROI-wise temporal SNR as a ``numpy.ndarray``
-            * ``col_names`` : the ROI labels for the computed values as list
+            * ``aggregation`` : dictionary with the following keys:
+
+              - ``data`` : ROI-wise tSNR values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
 
         """
-        parcel_aggregation = ParcelAggregation(
+        return ParcelAggregation(
            parcellation=self.parcellation,
            method=self.agg_method,
            method_params=self.agg_method_params,
            masks=self.masks,
            on="BOLD",
-        )
-        # Return the 2D timeseries after parcel aggregation
-        return parcel_aggregation.compute(input=input, extra_input=extra_input)
+        ).compute(input=input, extra_input=extra_input)

--- a/junifer/markers/temporal_snr/temporal_snr_spheres.py
+++ b/junifer/markers/temporal_snr/temporal_snr_spheres.py
@@ -11,6 +11,9 @@ from ..utils import raise_error
 from .temporal_snr_base import TemporalSNRBase
 
 
+__all__ = ["TemporalSNRSpheres"]
+
+
 @register_marker
 class TemporalSNRSpheres(TemporalSNRBase):
     """Class for temporal signal-to-noise ratio using coordinates (spheres).
@@ -89,11 +92,13 @@ class TemporalSNRSpheres(TemporalSNRBase):
             to the user or stored in the storage by calling the store method
             with this as a parameter. The dictionary has the following keys:
 
-            * ``data`` : VOI-wise temporal SNR as a ``numpy.ndarray``
-            * ``col_names`` : the VOI labels for the computed values as list
+            * ``aggregation`` : dictionary with the following keys:
+
+              - ``data`` : ROI-wise tSNR values as ``numpy.ndarray``
+              - ``col_names`` : ROI labels as list of str
 
         """
-        sphere_aggregation = SphereAggregation(
+        return SphereAggregation(
            coords=self.coords,
            radius=self.radius,
            allow_overlap=self.allow_overlap,
@@ -101,6 +106,4 @@ class TemporalSNRSpheres(TemporalSNRBase):
            method_params=self.agg_method_params,
            masks=self.masks,
            on="BOLD",
-        )
-        # Return the 2D timeseries after sphere aggregation
-        return sphere_aggregation.compute(input=input, extra_input=extra_input)
+        ).compute(input=input, extra_input=extra_input)

--- a/junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py
+++ b/junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py
@@ -20,11 +20,13 @@ def test_TemporalSNRParcels_computation() -> None:
        parcellation="TianxS1x3TxMNInonlinear2009cAsym"
    )
    # Check correct output
-    assert marker.get_output_type("BOLD") == "vector"
+    assert "vector" == marker.get_output_type(
+        input_type="BOLD", output_feature="tsnr"
+    )
 
    # Fit-transform the data
    tsnr_parcels = marker.fit_transform(element_data)
-    tsnr_parcels_bold = tsnr_parcels["BOLD"]
+    tsnr_parcels_bold = tsnr_parcels["BOLD"]["tsnr"]
 
    assert "data" in tsnr_parcels_bold
    assert "col_names" in tsnr_parcels_bold
@@ -51,5 +53,6 @@ def test_TemporalSNRParcels_storage(tmp_path: Path) -> None:
    marker.fit_transform(input=element_data, storage=storage)
    features = storage.list_features()
    assert any(
-        x["name"] == "BOLD_TemporalSNRParcels" for x in features.values()
+        x["name"] == "BOLD_TemporalSNRParcels_tsnr"
+        for x in features.values()
    )

--- a/junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py
+++ b/junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py
@@ -20,11 +20,13 @@ def test_TemporalSNRSpheres_computation() -> None:
    element_data = DefaultDataReader().fit_transform(dg["sub001"])
    marker = TemporalSNRSpheres(coords="DMNBuckner", radius=5.0)
    # Check correct output
-    assert marker.get_output_type("BOLD") == "vector"
+    assert "vector" == marker.get_output_type(
+        input_type="BOLD", output_feature="tsnr"
+    )
 
    # Fit-transform the data
    tsnr_spheres = marker.fit_transform(element_data)
-    tsnr_spheres_bold = tsnr_spheres["BOLD"]
+    tsnr_spheres_bold = tsnr_spheres["BOLD"]["tsnr"]
 
    assert "data" in tsnr_spheres_bold
    assert "col_names" in tsnr_spheres_bold
@@ -49,7 +51,8 @@ def test_TemporalSNRSpheres_storage(tmp_path: Path) -> None:
    marker.fit_transform(input=element_data, storage=storage)
    features = storage.list_features()
    assert any(
-        x["name"] == "BOLD_TemporalSNRSpheres" for x in features.values()
+        x["name"] == "BOLD_TemporalSNRSpheres_tsnr"
+        for x in features.values()
    )
 
 
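
As these storage tests show, stored feature names now carry the output feature as a suffix (data type, marker name, feature). A short sketch of listing them after a fit_transform with storage, based only on the list_features() usage above (the file name is illustrative):

    from junifer.storage import SQLiteFeatureStorage

    storage = SQLiteFeatureStorage("tsnr.sqlite")
    # ... marker.fit_transform(input=element_data, storage=storage) ...
    for md5, feature in storage.list_features().items():
        print(md5, feature["name"])  # e.g. "BOLD_TemporalSNRParcels_tsnr"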

--- /dev/null
+++ b/junifer/markers/tests/test_brainprint.py
@@ -0,0 +1,58 @@
+"""Provide tests for BrainPrint."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+import socket
+
+import pytest
+
+from junifer.datagrabber import DataladAOMICID1000
+from junifer.datareader import DefaultDataReader
+from junifer.markers import BrainPrint
+from junifer.pipeline.utils import _check_freesurfer
+
+
+@pytest.mark.parametrize(
+    "feature, storage_type",
+    [
+        ("eigenvalues", "scalar_table"),
+        ("areas", "vector"),
+        ("volumes", "vector"),
+        ("distances", "vector"),
+    ],
+)
+def test_get_output_type(feature: str, storage_type: str) -> None:
+    """Test BrainPrint get_output_type().
+
+    Parameters
+    ----------
+    feature : str
+        The parametrized feature name.
+    storage_type : str
+        The parametrized storage type.
+
+    """
+    assert storage_type == BrainPrint().get_output_type(
+        input_type="FreeSurfer", output_feature=feature
+    )
+
+
+@pytest.mark.skipif(
+    _check_freesurfer() is False, reason="requires FreeSurfer to be in PATH"
+)
+@pytest.mark.skipif(
+    socket.gethostname() != "juseless",
+    reason="only for juseless",
+)
+def test_compute() -> None:
+    """Test BrainPrint compute()."""
+    with DataladAOMICID1000(types="FreeSurfer") as dg:
+        # Fetch element
+        element = dg["sub-0001"]
+        # Fetch element data
+        element_data = DefaultDataReader().fit_transform(element)
+        # Compute marker
+        feature_map = BrainPrint().fit_transform(element_data)
+        # Assert the output keys
+        assert {"eigenvalues", "areas", "volumes"} == set(feature_map.keys())
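
The compute() test above is gated on FreeSurfer being available; _check_freesurfer() is junifer's internal check. For running similarly gated code locally, a rough stand-in could look like the following sketch (an assumption-level substitute, not the junifer implementation):

    import shutil

    def freesurfer_available() -> bool:
        # Stand-in: only checks for a FreeSurfer binary on PATH;
        # junifer's _check_freesurfer() performs its own, richer check.
        return shutil.which("recon-all") is not None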

--- a/junifer/markers/tests/test_collection.py
+++ b/junifer/markers/tests/test_collection.py
@@ -84,7 +84,7 @@ def test_marker_collection() -> None:
    for t_marker in markers:
        t_name = t_marker.name
        assert "BOLD" in out[t_name]
-        t_bold = out[t_name]["BOLD"]
+        t_bold = out[t_name]["BOLD"]["aggregation"]
        assert "data" in t_bold
        assert "col_names" in t_bold
        assert "meta" in t_bold
@@ -107,7 +107,8 @@ def test_marker_collection() -> None:
    for t_marker in markers:
        t_name = t_marker.name
        assert_array_equal(
-            out[t_name]["BOLD"]["data"], out2[t_name]["BOLD"]["data"]
+            out[t_name]["BOLD"]["aggregation"]["data"],
+            out2[t_name]["BOLD"]["aggregation"]["data"],
        )
 
 
@@ -201,20 +202,20 @@ def test_marker_collection_storage(tmp_path: Path) -> None:
    feature_md5 = next(iter(features.keys()))
    t_feature = storage.read_df(feature_md5=feature_md5)
    fname = "tian_mean"
-    t_data = out[fname]["BOLD"]["data"]  # type: ignore
-    cols = out[fname]["BOLD"]["col_names"]  # type: ignore
+    t_data = out[fname]["BOLD"]["aggregation"]["data"]  # type: ignore
+    cols = out[fname]["BOLD"]["aggregation"]["col_names"]  # type: ignore
    assert_array_equal(t_feature[cols].values, t_data)  # type: ignore
 
    feature_md5 = list(features.keys())[1]
    t_feature = storage.read_df(feature_md5=feature_md5)
    fname = "tian_std"
-    t_data = out[fname]["BOLD"]["data"]  # type: ignore
-    cols = out[fname]["BOLD"]["col_names"]  # type: ignore
+    t_data = out[fname]["BOLD"]["aggregation"]["data"]  # type: ignore
+    cols = out[fname]["BOLD"]["aggregation"]["col_names"]  # type: ignore
    assert_array_equal(t_feature[cols].values, t_data)  # type: ignore
 
    feature_md5 = list(features.keys())[2]
    t_feature = storage.read_df(feature_md5=feature_md5)
    fname = "tian_trim_mean90"
-    t_data = out[fname]["BOLD"]["data"]  # type: ignore
-    cols = out[fname]["BOLD"]["col_names"]  # type: ignore
+    t_data = out[fname]["BOLD"]["aggregation"]["data"]  # type: ignore
+    cols = out[fname]["BOLD"]["aggregation"]["col_names"]  # type: ignore
    assert_array_equal(t_feature[cols].values, t_data)  # type: ignore
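
Migration note: code indexing MarkerCollection output needs one extra key now. A before/after sketch using the "tian_mean" marker name from the test above:

    # junifer 0.0.4.dev: nested directly under the data type
    #   data = out["tian_mean"]["BOLD"]["data"]
    # junifer 0.0.5: nested under the output feature as well
    agg = out["tian_mean"]["BOLD"]["aggregation"]
    data, col_names = agg["data"], agg["col_names"]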

--- a/junifer/markers/tests/test_ets_rss.py
+++ b/junifer/markers/tests/test_ets_rss.py
@@ -26,8 +26,9 @@ def test_compute() -> None:
    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        # Compute the RSSETSMarker
-        marker = RSSETSMarker(parcellation=PARCELLATION)
-        rss_ets = marker.compute(element_data["BOLD"])
+        rss_ets = RSSETSMarker(parcellation=PARCELLATION).compute(
+            element_data["BOLD"]
+        )
 
    # Compare with nilearn
    # Load testing parcellation
@@ -41,14 +42,14 @@ def test_compute() -> None:
        element_data["BOLD"]["data"]
    )
    # Assert the dimension of timeseries
-    assert extacted_timeseries.shape[0] == len(rss_ets["data"])
+    assert extacted_timeseries.shape[0] == len(rss_ets["rss_ets"]["data"])
 
 
 def test_get_output_type() -> None:
    """Test RSS ETS get_output_type()."""
    assert "timeseries" == RSSETSMarker(
        parcellation=PARCELLATION
-    ).get_output_type("BOLD")
+    ).get_output_type(input_type="BOLD", output_feature="rss_ets")
 
 
 def test_store(tmp_path: Path) -> None:
@@ -61,12 +62,17 @@ def test_store(tmp_path: Path) -> None:
 
    """
    with PartlyCloudyTestingDataGrabber() as dg:
+        # Get element data
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
-        # Compute the RSSETSMarker
-        marker = RSSETSMarker(parcellation=PARCELLATION)
        # Create storage
        storage = SQLiteFeatureStorage(tmp_path / "test_rss_ets.sqlite")
-        # Store
-        marker.fit_transform(input=element_data, storage=storage)
+        # Compute the RSSETSMarker and store
+        _ = RSSETSMarker(parcellation=PARCELLATION).fit_transform(
+            input=element_data, storage=storage
+        )
+        # Retrieve features
        features = storage.list_features()
-    assert any(x["name"] == "BOLD_RSSETSMarker" for x in features.values())
+        # Check marker name
+        assert any(
+            x["name"] == "BOLD_RSSETSMarker_rss_ets" for x in features.values()
+        )
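
Across these tests the stored feature name follows one pattern: data type, marker name, and output feature joined by underscores. A hedged sketch of that naming, inferred from the assertions in this diff (not a junifer public helper):

    def expected_feature_name(data_type: str, marker: str, feature: str) -> str:
        # Inferred from "BOLD_RSSETSMarker_rss_ets" and
        # "BOLD_TemporalSNRParcels_tsnr" above; not a junifer API.
        return f"{data_type}_{marker}_{feature}"

    assert (
        expected_feature_name("BOLD", "RSSETSMarker", "rss_ets")
        == "BOLD_RSSETSMarker_rss_ets"
    )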

--- a/junifer/markers/tests/test_markers_base.py
+++ b/junifer/markers/tests/test_markers_base.py
@@ -20,27 +20,27 @@ def test_base_marker_subclassing() -> None:
 
    # Create concrete class
    class MyBaseMarker(BaseMarker):
+
+        _MARKER_INOUT_MAPPINGS = {  # noqa: RUF012
+            "BOLD": {
+                "feat_1": "timeseries",
+            },
+        }
+
        def __init__(self, on, name=None) -> None:
            self.parameter = 1
            super().__init__(on, name)
 
-        def get_valid_inputs(self):
-            return ["BOLD", "T1w"]
-
-        def get_output_type(self, input):
-            if input == "BOLD":
-                return "timeseries"
-            raise ValueError(f"Cannot compute output type for {input}")
-
        def compute(self, input, extra_input):
            return {
-                "data": "data",
-                "columns": "columns",
-                "row_names": "row_names",
+                "feat_1": {
+                    "data": "data",
+                    "col_names": ["columns"],
+                },
            }
 
-    with pytest.raises(ValueError, match=r"cannot be computed on \['T2w'\]"):
-        MyBaseMarker(on=["BOLD", "T2w"])
+    with pytest.raises(ValueError, match=r"cannot be computed on \['T1w'\]"):
+        MyBaseMarker(on=["BOLD", "T1w"])
 
    # Create input for marker
    input_ = {
@@ -64,12 +64,11 @@ def test_base_marker_subclassing() -> None:
    output = marker.fit_transform(input=input_)  # process
    # Check output
    assert "BOLD" in output
-    assert "data" in output["BOLD"]
-    assert "columns" in output["BOLD"]
-    assert "row_names" in output["BOLD"]
+    assert "data" in output["BOLD"]["feat_1"]
+    assert "col_names" in output["BOLD"]["feat_1"]
 
-    assert "meta" in output["BOLD"]
-    meta = output["BOLD"]["meta"]
+    assert "meta" in output["BOLD"]["feat_1"]
+    meta = output["BOLD"]["feat_1"]["meta"]
    assert "datagrabber" in meta
    assert "element" in meta
    assert "datareader" in meta
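
The subclassing test pins down the new validation: on= is checked against the keys of _MARKER_INOUT_MAPPINGS instead of a get_valid_inputs() override, and get_output_type() is resolved from the same table. A short sketch exercising both with the test's MyBaseMarker:

    # Rejected: "T1w" is not declared in _MARKER_INOUT_MAPPINGS.
    try:
        MyBaseMarker(on=["BOLD", "T1w"])
    except ValueError as err:
        print(err)  # ... cannot be computed on ['T1w'] ...

    # Accepted, and the output type comes from the mapping table.
    marker = MyBaseMarker(on=["BOLD"])
    assert (
        marker.get_output_type(input_type="BOLD", output_feature="feat_1")
        == "timeseries"
    )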

--- a/junifer/markers/tests/test_parcel_aggregation.py
+++ b/junifer/markers/tests/test_parcel_aggregation.py
@@ -23,16 +23,63 @@ from junifer.storage import SQLiteFeatureStorage
 from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
 
 
-def test_ParcelAggregation_input_output() -> None:
-    """Test ParcelAggregation input and output types."""
-    marker = ParcelAggregation(
-        parcellation="Schaefer100x7", method="mean", on="VBM_GM"
-    )
-    for in_, out_ in [("VBM_GM", "vector"), ("BOLD", "timeseries")]:
-        assert marker.get_output_type(in_) == out_
+@pytest.mark.parametrize(
+    "input_type, storage_type",
+    [
+        (
+            "T1w",
+            "vector",
+        ),
+        (
+            "T2w",
+            "vector",
+        ),
+        (
+            "BOLD",
+            "timeseries",
+        ),
+        (
+            "VBM_GM",
+            "vector",
+        ),
+        (
+            "VBM_WM",
+            "vector",
+        ),
+        (
+            "VBM_CSF",
+            "vector",
+        ),
+        (
+            "fALFF",
+            "vector",
+        ),
+        (
+            "GCOR",
+            "vector",
+        ),
+        (
+            "LCOR",
+            "vector",
+        ),
+    ],
+)
+def test_ParcelAggregation_input_output(
+    input_type: str, storage_type: str
+) -> None:
+    """Test ParcelAggregation input and output types.
 
-    with pytest.raises(ValueError, match="Unknown input"):
-        marker.get_output_type("unknown")
+    Parameters
+    ----------
+    input_type : str
+        The parametrized input type.
+    storage_type : str
+        The parametrized storage type.
+
+    """
+    assert storage_type == ParcelAggregation(
+        parcellation="Schaefer100x7", method="mean", on=input_type
+    ).get_output_type(input_type=input_type, output_feature="aggregation")
 
 
 def test_ParcelAggregation_3D() -> None:
@@ -85,8 +132,8 @@ def test_ParcelAggregation_3D() -> None:
    )
 
    parcel_agg_mean_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
    # Check that arrays are almost equal
    assert_array_equal(parcel_agg_mean_bold_data, manual)
    assert_array_almost_equal(nifti_labels_masked_bold, manual)
@@ -113,8 +160,8 @@ def test_ParcelAggregation_3D() -> None:
        on="BOLD",
    )
    parcel_agg_std_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
    assert parcel_agg_std_bold_data.ndim == 2
    assert parcel_agg_std_bold_data.shape[0] == 1
    assert_array_equal(parcel_agg_std_bold_data, manual)
@@ -139,7 +186,7 @@ def test_ParcelAggregation_3D() -> None:
    )
    parcel_agg_trim_mean_bold_data = marker.fit_transform(element_data)[
        "BOLD"
-    ]["data"]
+    ]["aggregation"]["data"]
    assert parcel_agg_trim_mean_bold_data.ndim == 2
    assert parcel_agg_trim_mean_bold_data.shape[0] == 1
    assert_array_equal(parcel_agg_trim_mean_bold_data, manual)
@@ -154,8 +201,8 @@ def test_ParcelAggregation_4D():
        parcellation="TianxS1x3TxMNInonlinear2009cAsym", method="mean"
    )
    parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
 
    # Compare with nilearn
    # Load testing parcellation
@@ -204,7 +251,8 @@ def test_ParcelAggregation_storage(tmp_path: Path) -> None:
    marker.fit_transform(input=element_data, storage=storage)
    features = storage.list_features()
    assert any(
-        x["name"] == "BOLD_ParcelAggregation" for x in features.values()
+        x["name"] == "BOLD_ParcelAggregation_aggregation"
+        for x in features.values()
    )
 
    # Store 4D
@@ -221,7 +269,8 @@ def test_ParcelAggregation_storage(tmp_path: Path) -> None:
    marker.fit_transform(input=element_data, storage=storage)
    features = storage.list_features()
    assert any(
-        x["name"] == "BOLD_ParcelAggregation" for x in features.values()
+        x["name"] == "BOLD_ParcelAggregation_aggregation"
+        for x in features.values()
    )
 
 
@@ -241,8 +290,8 @@ def test_ParcelAggregation_3D_mask() -> None:
        ..., 0:1
    ]
    parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
 
    # Compare with nilearn
    # Load testing parcellation
@@ -316,8 +365,8 @@ def test_ParcelAggregation_3D_mask_computed() -> None:
        on="BOLD",
    )
    parcel_agg_mean_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
 
    assert parcel_agg_mean_bold_data.ndim == 2
    assert parcel_agg_mean_bold_data.shape[0] == 1
@@ -397,7 +446,9 @@ def test_ParcelAggregation_3D_multiple_non_overlapping(tmp_path: Path) -> None:
        name="tian_mean",
        on="BOLD",
    )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
+        "aggregation"
+    ]
 
    orig_mean_data = orig_mean["data"]
    assert orig_mean_data.ndim == 2
@@ -417,7 +468,9 @@ def test_ParcelAggregation_3D_multiple_non_overlapping(tmp_path: Path) -> None:
    # No warnings should be raised
    with warnings.catch_warnings():
        warnings.simplefilter("error", category=UserWarning)
-        split_mean = marker_split.fit_transform(element_data)["BOLD"]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"][
+            "aggregation"
+        ]
 
    split_mean_data = split_mean["data"]
 
@@ -497,7 +550,9 @@ def test_ParcelAggregation_3D_multiple_overlapping(tmp_path: Path) -> None:
        name="tian_mean",
        on="BOLD",
    )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
+        "aggregation"
+    ]
 
    orig_mean_data = orig_mean["data"]
    assert orig_mean_data.ndim == 2
@@ -515,7 +570,9 @@ def test_ParcelAggregation_3D_multiple_overlapping(tmp_path: Path) -> None:
    )
    # Warning should be raised
    with pytest.warns(RuntimeWarning, match="overlapping voxels"):
-        split_mean = marker_split.fit_transform(element_data)["BOLD"]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"][
+            "aggregation"
+        ]
 
    split_mean_data = split_mean["data"]
 
@@ -602,7 +659,9 @@ def test_ParcelAggregation_3D_multiple_duplicated_labels(
        name="tian_mean",
        on="BOLD",
    )
-    orig_mean = marker_original.fit_transform(element_data)["BOLD"]
+    orig_mean = marker_original.fit_transform(element_data)["BOLD"][
+        "aggregation"
+    ]
 
    orig_mean_data = orig_mean["data"]
    assert orig_mean_data.ndim == 2
@@ -621,7 +680,9 @@ def test_ParcelAggregation_3D_multiple_duplicated_labels(
 
    # Warning should be raised
    with pytest.warns(RuntimeWarning, match="duplicated labels."):
-        split_mean = marker_split.fit_transform(element_data)["BOLD"]
+        split_mean = marker_split.fit_transform(element_data)["BOLD"][
+            "aggregation"
+        ]
 
    split_mean_data = split_mean["data"]
 
@@ -653,8 +714,8 @@ def test_ParcelAggregation_4D_agg_time():
        on="BOLD",
    )
    parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
 
    # Compare with nilearn
    # Loading testing parcellation
@@ -689,8 +750,8 @@ def test_ParcelAggregation_4D_agg_time():
        on="BOLD",
    )
    parcel_agg_bold_data = marker.fit_transform(element_data)["BOLD"][
-        "data"
-    ]
+        "aggregation"
+    ]["data"]
 
    assert parcel_agg_bold_data.ndim == 2
    assert_array_equal(
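
Taken together, the ParcelAggregation updates give the 0.0.5 access pattern for aggregation results. A usage sketch mirroring the tests above (assumes the testing dataset can be fetched in your environment):

    from junifer.datareader import DefaultDataReader
    from junifer.markers import ParcelAggregation
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        marker = ParcelAggregation(
            parcellation="TianxS1x3TxMNInonlinear2009cAsym", method="mean"
        )
        # junifer 0.0.5 nests results under the output feature name
        agg = marker.fit_transform(element_data)["BOLD"]["aggregation"]
        print(agg["data"].shape, len(agg["col_names"]))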