junifer 0.0.5__py3-none-any.whl → 0.0.5.dev11__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (198)
  1. junifer/__init__.py +0 -17
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +1 -4
  4. junifer/api/cli.py +1 -91
  5. junifer/api/decorators.py +0 -9
  6. junifer/api/functions.py +10 -56
  7. junifer/api/parser.py +0 -3
  8. junifer/api/queue_context/__init__.py +1 -4
  9. junifer/api/res/afni/run_afni_docker.sh +1 -1
  10. junifer/api/res/ants/run_ants_docker.sh +1 -1
  11. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  12. junifer/api/tests/test_api_utils.py +2 -4
  13. junifer/api/tests/test_cli.py +0 -83
  14. junifer/api/tests/test_functions.py +2 -27
  15. junifer/configs/__init__.py +1 -1
  16. junifer/configs/juseless/__init__.py +1 -4
  17. junifer/configs/juseless/datagrabbers/__init__.py +1 -10
  18. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +0 -3
  19. junifer/configs/juseless/datagrabbers/camcan_vbm.py +0 -3
  20. junifer/configs/juseless/datagrabbers/ixi_vbm.py +0 -3
  21. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +3 -1
  22. junifer/configs/juseless/datagrabbers/ucla.py +9 -12
  23. junifer/configs/juseless/datagrabbers/ukb_vbm.py +0 -3
  24. junifer/data/__init__.py +1 -21
  25. junifer/data/coordinates.py +19 -10
  26. junifer/data/masks.py +87 -58
  27. junifer/data/parcellations.py +3 -14
  28. junifer/data/template_spaces.py +1 -4
  29. junifer/data/tests/test_masks.py +37 -26
  30. junifer/data/utils.py +0 -3
  31. junifer/datagrabber/__init__.py +1 -18
  32. junifer/datagrabber/aomic/__init__.py +0 -3
  33. junifer/datagrabber/aomic/id1000.py +37 -70
  34. junifer/datagrabber/aomic/piop1.py +36 -69
  35. junifer/datagrabber/aomic/piop2.py +38 -71
  36. junifer/datagrabber/aomic/tests/test_id1000.py +99 -44
  37. junifer/datagrabber/aomic/tests/test_piop1.py +108 -65
  38. junifer/datagrabber/aomic/tests/test_piop2.py +102 -45
  39. junifer/datagrabber/base.py +6 -13
  40. junifer/datagrabber/datalad_base.py +1 -13
  41. junifer/datagrabber/dmcc13_benchmark.py +53 -36
  42. junifer/datagrabber/hcp1200/__init__.py +0 -3
  43. junifer/datagrabber/hcp1200/datalad_hcp1200.py +0 -3
  44. junifer/datagrabber/hcp1200/hcp1200.py +1 -4
  45. junifer/datagrabber/multiple.py +6 -45
  46. junifer/datagrabber/pattern.py +62 -170
  47. junifer/datagrabber/pattern_datalad.py +12 -25
  48. junifer/datagrabber/tests/test_datagrabber_utils.py +218 -0
  49. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  50. junifer/datagrabber/tests/test_dmcc13_benchmark.py +19 -46
  51. junifer/datagrabber/tests/test_multiple.py +84 -161
  52. junifer/datagrabber/tests/test_pattern.py +0 -45
  53. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  54. junifer/datagrabber/utils.py +230 -0
  55. junifer/datareader/__init__.py +1 -4
  56. junifer/datareader/default.py +43 -95
  57. junifer/external/__init__.py +1 -1
  58. junifer/external/nilearn/__init__.py +1 -5
  59. junifer/external/nilearn/junifer_nifti_spheres_masker.py +9 -23
  60. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +1 -76
  61. junifer/markers/__init__.py +1 -23
  62. junifer/markers/base.py +28 -68
  63. junifer/markers/collection.py +2 -10
  64. junifer/markers/complexity/__init__.py +0 -10
  65. junifer/markers/complexity/complexity_base.py +43 -26
  66. junifer/markers/complexity/hurst_exponent.py +0 -3
  67. junifer/markers/complexity/multiscale_entropy_auc.py +0 -3
  68. junifer/markers/complexity/perm_entropy.py +0 -3
  69. junifer/markers/complexity/range_entropy.py +0 -3
  70. junifer/markers/complexity/range_entropy_auc.py +0 -3
  71. junifer/markers/complexity/sample_entropy.py +0 -3
  72. junifer/markers/complexity/tests/test_hurst_exponent.py +3 -11
  73. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +3 -11
  74. junifer/markers/complexity/tests/test_perm_entropy.py +3 -11
  75. junifer/markers/complexity/tests/test_range_entropy.py +3 -11
  76. junifer/markers/complexity/tests/test_range_entropy_auc.py +3 -11
  77. junifer/markers/complexity/tests/test_sample_entropy.py +3 -11
  78. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +3 -11
  79. junifer/markers/complexity/weighted_perm_entropy.py +0 -3
  80. junifer/markers/ets_rss.py +42 -27
  81. junifer/markers/falff/__init__.py +0 -3
  82. junifer/markers/falff/_afni_falff.py +2 -5
  83. junifer/markers/falff/_junifer_falff.py +0 -3
  84. junifer/markers/falff/falff_base.py +46 -20
  85. junifer/markers/falff/falff_parcels.py +27 -56
  86. junifer/markers/falff/falff_spheres.py +29 -60
  87. junifer/markers/falff/tests/test_falff_parcels.py +23 -39
  88. junifer/markers/falff/tests/test_falff_spheres.py +23 -39
  89. junifer/markers/functional_connectivity/__init__.py +0 -9
  90. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +60 -63
  91. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +32 -45
  92. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +36 -49
  93. junifer/markers/functional_connectivity/functional_connectivity_base.py +70 -71
  94. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +25 -34
  95. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +30 -40
  96. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +7 -11
  97. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +7 -27
  98. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +12 -28
  99. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +11 -35
  100. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +62 -36
  101. junifer/markers/parcel_aggregation.py +61 -47
  102. junifer/markers/reho/__init__.py +0 -3
  103. junifer/markers/reho/_afni_reho.py +2 -5
  104. junifer/markers/reho/_junifer_reho.py +1 -4
  105. junifer/markers/reho/reho_base.py +27 -8
  106. junifer/markers/reho/reho_parcels.py +17 -28
  107. junifer/markers/reho/reho_spheres.py +18 -27
  108. junifer/markers/reho/tests/test_reho_parcels.py +3 -8
  109. junifer/markers/reho/tests/test_reho_spheres.py +3 -8
  110. junifer/markers/sphere_aggregation.py +59 -43
  111. junifer/markers/temporal_snr/__init__.py +0 -3
  112. junifer/markers/temporal_snr/temporal_snr_base.py +32 -23
  113. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -9
  114. junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -9
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +3 -6
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +3 -6
  117. junifer/markers/tests/test_collection.py +8 -9
  118. junifer/markers/tests/test_ets_rss.py +9 -15
  119. junifer/markers/tests/test_markers_base.py +18 -17
  120. junifer/markers/tests/test_parcel_aggregation.py +32 -93
  121. junifer/markers/tests/test_sphere_aggregation.py +19 -72
  122. junifer/onthefly/__init__.py +1 -4
  123. junifer/onthefly/read_transform.py +0 -3
  124. junifer/pipeline/__init__.py +1 -9
  125. junifer/pipeline/pipeline_step_mixin.py +4 -21
  126. junifer/pipeline/registry.py +0 -3
  127. junifer/pipeline/singleton.py +0 -3
  128. junifer/pipeline/tests/test_registry.py +1 -1
  129. junifer/pipeline/update_meta_mixin.py +0 -3
  130. junifer/pipeline/utils.py +1 -67
  131. junifer/pipeline/workdir_manager.py +0 -3
  132. junifer/preprocess/__init__.py +2 -10
  133. junifer/preprocess/ants/__init__.py +4 -0
  134. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  135. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  136. junifer/preprocess/base.py +3 -6
  137. junifer/preprocess/bold_warper.py +265 -0
  138. junifer/preprocess/confounds/__init__.py +0 -3
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +60 -47
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +113 -72
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/tests/test_bold_warper.py +159 -0
  145. junifer/preprocess/warping/__init__.py +0 -3
  146. junifer/preprocess/warping/_ants_warper.py +0 -3
  147. junifer/preprocess/warping/_fsl_warper.py +0 -3
  148. junifer/stats.py +1 -4
  149. junifer/storage/__init__.py +1 -9
  150. junifer/storage/base.py +1 -40
  151. junifer/storage/hdf5.py +9 -71
  152. junifer/storage/pandas_base.py +0 -3
  153. junifer/storage/sqlite.py +0 -3
  154. junifer/storage/tests/test_hdf5.py +10 -82
  155. junifer/storage/utils.py +0 -9
  156. junifer/testing/__init__.py +1 -4
  157. junifer/testing/datagrabbers.py +6 -13
  158. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  159. junifer/testing/utils.py +0 -3
  160. junifer/utils/__init__.py +2 -13
  161. junifer/utils/fs.py +0 -3
  162. junifer/utils/helpers.py +1 -32
  163. junifer/utils/logging.py +4 -33
  164. junifer/utils/tests/test_logging.py +0 -8
  165. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/METADATA +16 -17
  166. junifer-0.0.5.dev11.dist-info/RECORD +259 -0
  167. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/WHEEL +1 -1
  168. junifer/api/res/freesurfer/mri_binarize +0 -3
  169. junifer/api/res/freesurfer/mri_mc +0 -3
  170. junifer/api/res/freesurfer/mri_pretess +0 -3
  171. junifer/api/res/freesurfer/mris_convert +0 -3
  172. junifer/api/res/freesurfer/run_freesurfer_docker.sh +0 -61
  173. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  174. junifer/datagrabber/pattern_validation_mixin.py +0 -388
  175. junifer/datagrabber/tests/test_pattern_validation_mixin.py +0 -249
  176. junifer/external/BrainPrint/brainprint/__init__.py +0 -4
  177. junifer/external/BrainPrint/brainprint/_version.py +0 -3
  178. junifer/external/BrainPrint/brainprint/asymmetry.py +0 -91
  179. junifer/external/BrainPrint/brainprint/brainprint.py +0 -441
  180. junifer/external/BrainPrint/brainprint/surfaces.py +0 -258
  181. junifer/external/BrainPrint/brainprint/utils/__init__.py +0 -1
  182. junifer/external/BrainPrint/brainprint/utils/_config.py +0 -112
  183. junifer/external/BrainPrint/brainprint/utils/utils.py +0 -188
  184. junifer/external/nilearn/junifer_connectivity_measure.py +0 -483
  185. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +0 -1089
  186. junifer/markers/brainprint.py +0 -459
  187. junifer/markers/tests/test_brainprint.py +0 -58
  188. junifer/preprocess/smoothing/__init__.py +0 -9
  189. junifer/preprocess/smoothing/_afni_smoothing.py +0 -119
  190. junifer/preprocess/smoothing/_fsl_smoothing.py +0 -116
  191. junifer/preprocess/smoothing/_nilearn_smoothing.py +0 -69
  192. junifer/preprocess/smoothing/smoothing.py +0 -174
  193. junifer/preprocess/smoothing/tests/test_smoothing.py +0 -94
  194. junifer-0.0.5.dist-info/RECORD +0 -275
  195. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/AUTHORS.rst +0 -0
  196. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/LICENSE.md +0 -0
  197. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/entry_points.txt +0 -0
  198. {junifer-0.0.5.dist-info → junifer-0.0.5.dev11.dist-info}/top_level.txt +0 -0
junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py
@@ -6,13 +6,10 @@
 # License: AGPL
 
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, Type
 
-import pytest
 from nilearn.connectome import ConnectivityMeasure
 from nilearn.maskers import NiftiLabelsMasker
 from numpy.testing import assert_array_almost_equal
-from sklearn.covariance import EmpiricalCovariance, LedoitWolf
 
 from junifer.data import get_parcellation
 from junifer.datareader import DefaultDataReader
@@ -23,51 +20,26 @@ from junifer.storage import SQLiteFeatureStorage
 from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
 
 
-if TYPE_CHECKING:
-    from sklearn.base import BaseEstimator
-
-
-@pytest.mark.parametrize(
-    "conn_method_params, cov_estimator",
-    [
-        ({"empirical": False}, LedoitWolf(store_precision=False)),
-        ({"empirical": True}, EmpiricalCovariance(store_precision=False)),
-    ],
-)
-def test_FunctionalConnectivityParcels(
-    tmp_path: Path,
-    conn_method_params: Dict[str, bool],
-    cov_estimator: Type["BaseEstimator"],
-) -> None:
+def test_FunctionalConnectivityParcels(tmp_path: Path) -> None:
     """Test FunctionalConnectivityParcels.
 
     Parameters
     ----------
     tmp_path : pathlib.Path
         The path to the test directory.
-    conn_method_params : dict
-        The parametrized parameters to connectivity measure method.
-    cov_estimator : estimator object
-        The parametrized covariance estimator.
 
     """
     with PartlyCloudyTestingDataGrabber() as dg:
-        # Get element data
         element_data = DefaultDataReader().fit_transform(dg["sub-01"])
-        # Setup marker
         marker = FunctionalConnectivityParcels(
-            parcellation="TianxS1x3TxMNInonlinear2009cAsym",
-            conn_method="correlation",
-            conn_method_params=conn_method_params,
+            parcellation="TianxS1x3TxMNInonlinear2009cAsym"
         )
         # Check correct output
-        assert "matrix" == marker.get_output_type(
-            input_type="BOLD", output_feature="functional_connectivity"
-        )
+        assert marker.get_output_type("BOLD") == "matrix"
 
         # Fit-transform the data
         fc = marker.fit_transform(element_data)
-        fc_bold = fc["BOLD"]["functional_connectivity"]
+        fc_bold = fc["BOLD"]
 
         assert "data" in fc_bold
         assert "row_names" in fc_bold
@@ -91,7 +63,7 @@ def test_FunctionalConnectivityParcels(
         )
         # Compute the connectivity measure
         connectivity_measure = ConnectivityMeasure(
-            cov_estimator=cov_estimator, kind="correlation"  # type: ignore
+            kind="covariance"
        ).fit_transform([extracted_timeseries])[0]
 
         # Check that FC are almost equal
@@ -99,6 +71,11 @@
             connectivity_measure, fc_bold["data"], decimal=3
         )
 
+        # Check empirical correlation method parameters
+        marker = FunctionalConnectivityParcels(
+            parcellation="TianxS1x3TxMNInonlinear2009cAsym",
+            cor_method_params={"empirical": True},
+        )
         # Store
         storage = SQLiteFeatureStorage(
             uri=tmp_path / "test_fc_parcels.sqlite", upsert="ignore"
@@ -106,7 +83,6 @@
         marker.fit_transform(input=element_data, storage=storage)
         features = storage.list_features()
         assert any(
-            x["name"]
-            == "BOLD_FunctionalConnectivityParcels_functional_connectivity"
+            x["name"] == "BOLD_FunctionalConnectivityParcels"
             for x in features.values()
         )
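Note: the hunks above capture the main API difference between the two versions. In 0.0.5, a marker's output type is queried per (input type, feature) pair and results are nested under the feature name; in 0.0.5.dev11 both lookups are flat and the constructor parameters are named ``cor_method``/``cor_method_params`` instead of ``conn_method``/``conn_method_params``. A minimal sketch of the 0.0.5 call pattern, assembled from the test code above (import paths follow the package layout in the file list; the dev11 equivalents are shown as comments):

    from junifer.datareader import DefaultDataReader
    from junifer.markers.functional_connectivity import (
        FunctionalConnectivityParcels,
    )
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])
        marker = FunctionalConnectivityParcels(
            parcellation="TianxS1x3TxMNInonlinear2009cAsym",
            conn_method="correlation",
        )
        # 0.0.5: output type is keyed by input type and feature name ...
        assert "matrix" == marker.get_output_type(
            input_type="BOLD", output_feature="functional_connectivity"
        )
        # ... and the computed result is nested under the feature name.
        fc_bold = marker.fit_transform(element_data)["BOLD"][
            "functional_connectivity"
        ]
        # 0.0.5.dev11 equivalents:
        #   assert marker.get_output_type("BOLD") == "matrix"
        #   fc_bold = marker.fit_transform(element_data)["BOLD"]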
junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py
@@ -7,13 +7,12 @@
 # License: AGPL
 
 from pathlib import Path
-from typing import TYPE_CHECKING, Dict, Type
 
 import pytest
 from nilearn.connectome import ConnectivityMeasure
 from nilearn.maskers import NiftiSpheresMasker
 from numpy.testing import assert_array_almost_equal
-from sklearn.covariance import EmpiricalCovariance, LedoitWolf
+from sklearn.covariance import EmpiricalCovariance
 
 from junifer.data import get_coordinates
 from junifer.datareader import DefaultDataReader
@@ -24,52 +23,26 @@ from junifer.storage import SQLiteFeatureStorage
 from junifer.testing.datagrabbers import SPMAuditoryTestingDataGrabber
 
 
-if TYPE_CHECKING:
-    from sklearn.base import BaseEstimator
-
-
-@pytest.mark.parametrize(
-    "conn_method_params, cov_estimator",
-    [
-        ({"empirical": False}, LedoitWolf(store_precision=False)),
-        ({"empirical": True}, EmpiricalCovariance(store_precision=False)),
-    ],
-)
-def test_FunctionalConnectivitySpheres(
-    tmp_path: Path,
-    conn_method_params: Dict[str, bool],
-    cov_estimator: Type["BaseEstimator"],
-) -> None:
+def test_FunctionalConnectivitySpheres(tmp_path: Path) -> None:
     """Test FunctionalConnectivitySpheres.
 
     Parameters
     ----------
     tmp_path : pathlib.Path
         The path to the test directory.
-    conn_method_params : dict
-        The parametrized parameters to connectivity measure method.
-    cov_estimator : estimator object
-        The parametrized covariance estimator.
 
     """
     with SPMAuditoryTestingDataGrabber() as dg:
-        # Get element data
         element_data = DefaultDataReader().fit_transform(dg["sub001"])
-        # Setup marker
         marker = FunctionalConnectivitySpheres(
-            coords="DMNBuckner",
-            radius=5.0,
-            conn_method="correlation",
-            conn_method_params=conn_method_params,
+            coords="DMNBuckner", radius=5.0, cor_method="correlation"
         )
         # Check correct output
-        assert "matrix" == marker.get_output_type(
-            input_type="BOLD", output_feature="functional_connectivity"
-        )
+        assert marker.get_output_type("BOLD") == "matrix"
 
         # Fit-transform the data
         fc = marker.fit_transform(element_data)
-        fc_bold = fc["BOLD"]["functional_connectivity"]
+        fc_bold = fc["BOLD"]
 
         assert "data" in fc_bold
         assert "row_names" in fc_bold
@@ -92,7 +65,7 @@ def test_FunctionalConnectivitySpheres(
         )
         # Compute the connectivity measure
         connectivity_measure = ConnectivityMeasure(
-            cov_estimator=cov_estimator, kind="correlation"  # type: ignore
+            kind="correlation"
         ).fit_transform([extracted_timeseries])[0]
 
         # Check that FC are almost equal
@@ -107,15 +80,68 @@
         marker.fit_transform(input=element_data, storage=storage)
         features = storage.list_features()
         assert any(
-            x["name"]
-            == "BOLD_FunctionalConnectivitySpheres_functional_connectivity"
+            x["name"] == "BOLD_FunctionalConnectivitySpheres"
             for x in features.values()
         )
 
 
+def test_FunctionalConnectivitySpheres_empirical(tmp_path: Path) -> None:
+    """Test FunctionalConnectivitySpheres with empirical covariance.
+
+    Parameters
+    ----------
+    tmp_path : pathlib.Path
+        The path to the test directory.
+
+    """
+    with SPMAuditoryTestingDataGrabber() as dg:
+        element_data = DefaultDataReader().fit_transform(dg["sub001"])
+        marker = FunctionalConnectivitySpheres(
+            coords="DMNBuckner",
+            radius=5.0,
+            cor_method="correlation",
+            cor_method_params={"empirical": True},
+        )
+        # Check correct output
+        assert marker.get_output_type("BOLD") == "matrix"
+
+        # Fit-transform the data
+        fc = marker.fit_transform(element_data)
+        fc_bold = fc["BOLD"]
+
+        assert "data" in fc_bold
+        assert "row_names" in fc_bold
+        assert "col_names" in fc_bold
+        assert fc_bold["data"].shape == (6, 6)
+        assert len(set(fc_bold["row_names"])) == 6
+        assert len(set(fc_bold["col_names"])) == 6
+
+        # Compare with nilearn
+        # Load testing coordinates for the target data
+        testing_coords, _ = get_coordinates(
+            coords="DMNBuckner", target_data=element_data["BOLD"]
+        )
+        # Extract timeseries
+        nifti_spheres_masker = NiftiSpheresMasker(
+            seeds=testing_coords, radius=5.0
+        )
+        extracted_timeseries = nifti_spheres_masker.fit_transform(
+            element_data["BOLD"]["data"]
+        )
+        # Compute the connectivity measure
+        connectivity_measure = ConnectivityMeasure(
+            cov_estimator=EmpiricalCovariance(), kind="correlation"  # type: ignore
+        ).fit_transform([extracted_timeseries])[0]
+
+        # Check that FC are almost equal
+        assert_array_almost_equal(
+            connectivity_measure, fc_bold["data"], decimal=3
+        )
+
+
 def test_FunctionalConnectivitySpheres_error() -> None:
     """Test FunctionalConnectivitySpheres errors."""
     with pytest.raises(ValueError, match="radius should be > 0"):
         FunctionalConnectivitySpheres(
-            coords="DMNBuckner", radius=-0.1, conn_method="correlation"
+            coords="DMNBuckner", radius=-0.1, cor_method="correlation"
         )
junifer/markers/parcel_aggregation.py
@@ -17,9 +17,6 @@ from ..utils import logger, raise_error, warn_with_log
 from .base import BaseMarker
 
 
-__all__ = ["ParcelAggregation"]
-
-
 @register_marker
 class ParcelAggregation(BaseMarker):
     """Class for parcel aggregation.
@@ -63,36 +60,6 @@ class ParcelAggregation(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "numpy"}
 
-    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
-        "T1w": {
-            "aggregation": "vector",
-        },
-        "T2w": {
-            "aggregation": "vector",
-        },
-        "BOLD": {
-            "aggregation": "timeseries",
-        },
-        "VBM_GM": {
-            "aggregation": "vector",
-        },
-        "VBM_WM": {
-            "aggregation": "vector",
-        },
-        "VBM_CSF": {
-            "aggregation": "vector",
-        },
-        "fALFF": {
-            "aggregation": "vector",
-        },
-        "GCOR": {
-            "aggregation": "vector",
-        },
-        "LCOR": {
-            "aggregation": "vector",
-        },
-    }
-
     def __init__(
         self,
         parcellation: Union[str, List[str]],
@@ -126,6 +93,61 @@
         self.time_method = time_method
         self.time_method_params = time_method_params or {}
 
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this marker.
+
+        """
+        return [
+            "T1w",
+            "T2w",
+            "BOLD",
+            "VBM_GM",
+            "VBM_WM",
+            "VBM_CSF",
+            "fALFF",
+            "GCOR",
+            "LCOR",
+        ]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the marker.
+
+        Returns
+        -------
+        str
+            The storage type output by the marker.
+
+        Raises
+        ------
+        ValueError
+            If the ``input_type`` is invalid.
+
+        """
+
+        if input_type in [
+            "VBM_GM",
+            "VBM_WM",
+            "VBM_CSF",
+            "fALFF",
+            "GCOR",
+            "LCOR",
+        ]:
+            return "vector"
+        elif input_type == "BOLD":
+            return "timeseries"
+        else:
+            raise_error(f"Unknown input kind for {input_type}")
+
     def compute(
         self, input: Dict[str, Any], extra_input: Optional[Dict] = None
     ) -> Dict:
@@ -149,10 +171,8 @@
             to the user or stored in the storage by calling the store method
             with this as a parameter. The dictionary has the following keys:
 
-            * ``aggregation`` : dictionary with the following keys:
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            * ``data`` : the actual computed values as a numpy.ndarray
+            * ``col_names`` : the column labels for the computed values as list
 
         Warns
         -----
@@ -175,9 +195,7 @@
         )
 
         # Get binarized parcellation image for masking
-        parcellation_bin = math_img(
-            "np.squeeze(img) != 0", img=parcellation_img
-        )
+        parcellation_bin = math_img("img != 0", img=parcellation_img)
 
         # Load mask
         if self.masks is not None:
@@ -188,7 +206,7 @@
             )
             # Get "logical and" version of parcellation and mask
             parcellation_bin = math_img(
-                "np.logical_and(img, np.squeeze(mask))",
+                "np.logical_and(img, mask)",
                 img=parcellation_bin,
                 mask=mask_img,
             )
@@ -230,9 +248,5 @@
                 "available."
             )
         # Format the output
-        return {
-            "aggregation": {
-                "data": out_values,
-                "col_names": labels,
-            },
-        }
+        out = {"data": out_values, "col_names": labels}
+        return out
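The last hunk changes the shape of ParcelAggregation.compute()'s return value: 0.0.5 nests the result under an ``aggregation`` key (matching the feature suffix in the stored names seen earlier), while 0.0.5.dev11 returns the ``data``/``col_names`` pair directly. A small runnable sketch of the two shapes, using placeholder values:

    import numpy as np

    out_values = np.zeros((2, 3))  # placeholder aggregated values
    labels = ["roi-1", "roi-2", "roi-3"]  # placeholder ROI labels

    # 0.0.5.dev11: flat output
    out_dev = {"data": out_values, "col_names": labels}
    # 0.0.5: nested under the feature name
    out_release = {"aggregation": {"data": out_values, "col_names": labels}}

    assert out_release["aggregation"]["data"] is out_dev["data"]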
junifer/markers/reho/__init__.py
@@ -5,6 +5,3 @@
 
 from .reho_parcels import ReHoParcels
 from .reho_spheres import ReHoSpheres
-
-
-__all__ = ["ReHoParcels", "ReHoSpheres"]
junifer/markers/reho/_afni_reho.py
@@ -26,9 +26,6 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
-__all__ = ["AFNIReHo"]
-
-
 @singleton
 class AFNIReHo:
     """Class for computing ReHo using AFNI.
@@ -75,7 +72,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             * 7 : for facewise neighbours only
-            * 19 : for face- and edge-wise neighbours
+            * 19 : for face- and edge-wise nieghbours
             * 27 : for face-, edge-, and node-wise neighbors
 
             (default 27).
@@ -181,7 +178,7 @@
         convert_cmd = [
             "3dAFNItoNIFTI",
             f"-prefix {reho_afni_to_nifti_out_path.resolve()}",
-            f"{reho_out_path_prefix}+orig.BRIK",
+            f"{reho_out_path_prefix}+tlrc.BRIK",
         ]
         # Call 3dAFNItoNIFTI
         run_ext_cmd(name="3dAFNItoNIFTI", cmd=convert_cmd)
junifer/markers/reho/_junifer_reho.py
@@ -28,9 +28,6 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
-__all__ = ["JuniferReHo"]
-
-
 @singleton
 class JuniferReHo:
     """Class for computing ReHo using junifer.
@@ -63,7 +60,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             * 7 : for facewise neighbours only
-            * 19 : for face- and edge-wise neighbours
+            * 19 : for face- and edge-wise nieghbours
             * 27 : for face-, edge-, and node-wise neighbors
             * 125 : for 5x5 cuboidal volume
 
junifer/markers/reho/reho_base.py
@@ -26,8 +26,6 @@ from ._junifer_reho import JuniferReHo
 if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
-__all__ = ["ReHoBase"]
-
 
 class ReHoBase(BaseMarker):
     """Base class for regional homogeneity computation.
@@ -62,12 +60,6 @@ class ReHoBase(BaseMarker):
         },
     ]
 
-    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
-        "BOLD": {
-            "reho": "vector",
-        },
-    }
-
     def __init__(
         self,
         using: str,
@@ -82,6 +74,33 @@
         self.using = using
         super().__init__(on="BOLD", name=name)
 
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this marker.
+
+        """
+        return ["BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the marker.
+
+        Returns
+        -------
+        str
+            The storage type output by the marker.
+
+        """
+        return "vector"
+
     def _compute(
         self,
         input_data: Dict[str, Any],
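The reho_base.py hunks show the same refactoring pattern at the class level: 0.0.5.dev11 declares marker I/O through overridable ``get_valid_inputs()``/``get_output_type()`` methods, whereas 0.0.5 replaces both with a single declarative class variable. A reduced sketch of the two styles (the class names here are illustrative, not part of either version):

    from typing import ClassVar, Dict, List

    class RehoIODeclarative:  # 0.0.5 style, cf. _MARKER_INOUT_MAPPINGS above
        _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
            "BOLD": {"reho": "vector"},
        }

    class RehoIOImperative:  # 0.0.5.dev11 style
        def get_valid_inputs(self) -> List[str]:
            return ["BOLD"]

        def get_output_type(self, input_type: str) -> str:
            return "vector"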
junifer/markers/reho/reho_parcels.py
@@ -14,9 +14,6 @@ from ..parcel_aggregation import ParcelAggregation
 from .reho_base import ReHoBase
 
 
-__all__ = ["ReHoParcels"]
-
-
 @register_marker
 class ReHoParcels(ReHoBase):
     """Class for regional homogeneity on parcels.
@@ -40,7 +37,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             - 7 : for facewise neighbours only
-            - 19 : for face- and edge-wise neighbours
+            - 19 : for face- and edge-wise nieghbours
             - 27 : for face-, edge-, and node-wise neighbors
 
         * ``neigh_rad`` : positive float, optional
@@ -70,7 +67,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             * 7 : for facewise neighbours only
-            * 19 : for face- and edge-wise neighbours
+            * 19 : for face- and edge-wise nieghbours
             * 27 : for face-, edge-, and node-wise neighbors
             * 125 : for 5x5 cuboidal volume
 
@@ -125,14 +122,11 @@
         Returns
         -------
         dict
-            The computed result as dictionary. This will be either returned
-            to the user or stored in the storage by calling the store method
-            with this as a parameter. The dictionary has the following keys:
+            The computed result as dictionary. The dictionary has the following
+            keys:
 
-            * ``reho`` : dictionary with the following keys:
-
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            * ``data`` : the actual computed values as a 1D numpy.ndarray
+            * ``col_names`` : the column labels for the parcels as a list
 
         """
         logger.info("Calculating ReHo for parcels")
@@ -148,27 +142,22 @@
         else:
             reho_map, reho_file_path = self._compute(input_data=input)
 
-        # Perform aggregation on reho map
-        aggregation_input = dict(input.items())
-        aggregation_input["data"] = reho_map
-        aggregation_input["path"] = reho_file_path
+        # Initialize parcel aggregation
         parcel_aggregation = ParcelAggregation(
             parcellation=self.parcellation,
             method=self.agg_method,
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        ).compute(
-            input=aggregation_input,
+        )
+        # Perform aggregation on reho map
+        parcel_aggregation_input = dict(input.items())
+        parcel_aggregation_input["data"] = reho_map
+        parcel_aggregation_input["path"] = reho_file_path
+        output = parcel_aggregation.compute(
+            input=parcel_aggregation_input,
             extra_input=extra_input,
         )
-
-        return {
-            "reho": {
-                # Only use the first row and expand row dimension
-                "data": parcel_aggregation["aggregation"]["data"][0][
-                    np.newaxis, :
-                ],
-                "col_names": parcel_aggregation["aggregation"]["col_names"],
-            }
-        }
+        # Only use the first row and expand row dimension
+        output["data"] = output["data"][0][np.newaxis, :]
+        return output
junifer/markers/reho/reho_spheres.py
@@ -14,9 +14,6 @@ from ..sphere_aggregation import SphereAggregation
 from .reho_base import ReHoBase
 
 
-__all__ = ["ReHoSpheres"]
-
-
 @register_marker
 class ReHoSpheres(ReHoBase):
     """Class for regional homogeneity on spheres.
@@ -51,7 +48,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             - 7 : for facewise neighbours only
-            - 19 : for face- and edge-wise neighbours
+            - 19 : for face- and edge-wise nieghbours
             - 27 : for face-, edge-, and node-wise neighbors
 
         * ``neigh_rad`` : positive float, optional
@@ -81,7 +78,7 @@
             Number of voxels in the neighbourhood, inclusive. Can be:
 
             * 7 : for facewise neighbours only
-            * 19 : for face- and edge-wise neighbours
+            * 19 : for face- and edge-wise nieghbours
             * 27 : for face-, edge-, and node-wise neighbors
             * 125 : for 5x5 cuboidal volume
 
@@ -140,14 +137,11 @@
         Returns
         -------
         dict
-            The computed result as dictionary. This will be either returned
-            to the user or stored in the storage by calling the store method
-            with this as a parameter. The dictionary has the following keys:
-
-            * ``reho`` : dictionary with the following keys:
+            The computed result as dictionary. The dictionary has the following
+            keys:
 
-              - ``data`` : ROI values as ``numpy.ndarray``
-              - ``col_names`` : ROI labels as list of str
+            * ``data`` : the actual computed values as a 1D numpy.ndarray
+            * ``col_names`` : the column labels for the spheres as a list
 
         """
         logger.info("Calculating ReHo for spheres")
@@ -163,10 +157,7 @@
         else:
             reho_map, reho_file_path = self._compute(input_data=input)
 
-        # Perform aggregation on reho map
-        aggregation_input = dict(input.items())
-        aggregation_input["data"] = reho_map
-        aggregation_input["path"] = reho_file_path
+        # Initialize sphere aggregation
         sphere_aggregation = SphereAggregation(
             coords=self.coords,
             radius=self.radius,
@@ -175,14 +166,14 @@
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        ).compute(input=aggregation_input, extra_input=extra_input)
-
-        return {
-            "reho": {
-                # Only use the first row and expand row dimension
-                "data": sphere_aggregation["aggregation"]["data"][0][
-                    np.newaxis, :
-                ],
-                "col_names": sphere_aggregation["aggregation"]["col_names"],
-            }
-        }
+        )
+        # Perform aggregation on reho map
+        sphere_aggregation_input = dict(input.items())
+        sphere_aggregation_input["data"] = reho_map
+        sphere_aggregation_input["path"] = reho_file_path
+        output = sphere_aggregation.compute(
+            input=sphere_aggregation_input, extra_input=extra_input
+        )
+        # Only use the first row and expand row dimension
+        output["data"] = output["data"][0][np.newaxis, :]
+        return output