junifer 0.0.5.dev183__py3-none-any.whl → 0.0.5.dev202__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the versions as they appear in their public registry.
Files changed (57)
  1. junifer/_version.py +2 -2
  2. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  3. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  4. junifer/markers/base.py +49 -23
  5. junifer/markers/brainprint.py +56 -265
  6. junifer/markers/complexity/complexity_base.py +23 -43
  7. junifer/markers/complexity/tests/test_hurst_exponent.py +4 -3
  8. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +4 -3
  9. junifer/markers/complexity/tests/test_perm_entropy.py +4 -3
  10. junifer/markers/complexity/tests/test_range_entropy.py +4 -3
  11. junifer/markers/complexity/tests/test_range_entropy_auc.py +4 -3
  12. junifer/markers/complexity/tests/test_sample_entropy.py +4 -3
  13. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +4 -3
  14. junifer/markers/ets_rss.py +24 -42
  15. junifer/markers/falff/falff_base.py +17 -46
  16. junifer/markers/falff/falff_parcels.py +53 -27
  17. junifer/markers/falff/falff_spheres.py +57 -29
  18. junifer/markers/falff/tests/test_falff_parcels.py +39 -23
  19. junifer/markers/falff/tests/test_falff_spheres.py +39 -23
  20. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +32 -48
  21. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +16 -10
  22. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +13 -9
  23. junifer/markers/functional_connectivity/functional_connectivity_base.py +26 -40
  24. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +6 -6
  25. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +6 -6
  26. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +8 -4
  27. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +6 -3
  28. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +6 -3
  29. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +6 -3
  30. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +10 -5
  31. junifer/markers/parcel_aggregation.py +40 -59
  32. junifer/markers/reho/reho_base.py +6 -27
  33. junifer/markers/reho/reho_parcels.py +23 -15
  34. junifer/markers/reho/reho_spheres.py +22 -16
  35. junifer/markers/reho/tests/test_reho_parcels.py +8 -3
  36. junifer/markers/reho/tests/test_reho_spheres.py +8 -3
  37. junifer/markers/sphere_aggregation.py +40 -59
  38. junifer/markers/temporal_snr/temporal_snr_base.py +20 -32
  39. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -6
  40. junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -6
  41. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +6 -3
  42. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +6 -3
  43. junifer/markers/tests/test_brainprint.py +23 -12
  44. junifer/markers/tests/test_collection.py +9 -8
  45. junifer/markers/tests/test_ets_rss.py +15 -9
  46. junifer/markers/tests/test_markers_base.py +17 -18
  47. junifer/markers/tests/test_parcel_aggregation.py +93 -32
  48. junifer/markers/tests/test_sphere_aggregation.py +72 -19
  49. junifer/pipeline/pipeline_step_mixin.py +11 -1
  50. junifer/pipeline/tests/test_registry.py +1 -1
  51. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/METADATA +1 -1
  52. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/RECORD +57 -57
  53. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/WHEEL +1 -1
  54. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/AUTHORS.rst +0 -0
  55. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/LICENSE.md +0 -0
  56. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/entry_points.txt +0 -0
  57. {junifer-0.0.5.dev183.dist-info → junifer-0.0.5.dev202.dist-info}/top_level.txt +0 -0
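Every substantive hunk below implements one refactor of the marker API: markers now declare their outputs in a class-level `_MARKER_INOUT_MAPPINGS` table (input data type -> output feature -> storage type), `get_output_type()` takes an `output_feature` argument alongside `input_type`, `compute()` returns one sub-dictionary per feature instead of a flat `data`/`col_names` dictionary, and stored feature names gain a `_<feature>` suffix (e.g. `BOLD_CrossParcellationFC_functional_connectivity`). A minimal sketch of the new calling convention, pieced together from the test changes below; the data-loading lines assume junifer's testing utilities behave as used in these tests:

    from junifer.datareader import DefaultDataReader
    from junifer.markers import FunctionalConnectivitySpheres
    from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber

    # Load one element of testing data (the pattern used throughout the tests).
    with PartlyCloudyTestingDataGrabber() as dg:
        element_data = DefaultDataReader().fit_transform(dg["sub-01"])

    marker = FunctionalConnectivitySpheres(coords="DMNBuckner", radius=5.0)

    # Output types are now queried per (input type, output feature) pair.
    assert "matrix" == marker.get_output_type(
        input_type="BOLD", output_feature="functional_connectivity"
    )

    # Results are nested one level deeper, keyed by feature name;
    # previously this sub-dictionary was output["BOLD"] itself.
    output = marker.fit_transform(element_data)
    fc = output["BOLD"]["functional_connectivity"]
    print(fc["data"].shape, len(fc["row_names"]), len(fc["col_names"]))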
--- a/junifer/markers/falff/tests/test_falff_spheres.py
+++ b/junifer/markers/falff/tests/test_falff_spheres.py
@@ -21,6 +21,28 @@ from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
 COORDINATES = "DMNBuckner"
 
 
+@pytest.mark.parametrize(
+    "feature",
+    [
+        "alff",
+        "falff",
+    ],
+)
+def test_ALFFSpheres_get_output_type(feature: str) -> None:
+    """Test ALFFSpheres get_output_type().
+
+    Parameters
+    ----------
+    feature : str
+        The parametrized feature name.
+
+    """
+    assert "vector" == ALFFSpheres(
+        coords=COORDINATES,
+        using="junifer",
+    ).get_output_type(input_type="BOLD", output_feature=feature)
+
+
 def test_ALFFSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
     """Test ALFFSpheres.
 
@@ -41,7 +63,6 @@ def test_ALFFSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
     # Initialize marker
     marker = ALFFSpheres(
         coords=COORDINATES,
-        fractional=False,
         using="junifer",
         radius=5.0,
     )
@@ -52,15 +73,16 @@ def test_ALFFSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
 
     # Get BOLD output
     assert "BOLD" in output
-    output_bold = output["BOLD"]
-    # Assert BOLD output keys
-    assert "data" in output_bold
-    assert "col_names" in output_bold
+    for feature in output["BOLD"].keys():
+        output_bold = output["BOLD"][feature]
+        # Assert BOLD output keys
+        assert "data" in output_bold
+        assert "col_names" in output_bold
 
-    output_bold_data = output_bold["data"]
-    # Assert BOLD output data dimension
-    assert output_bold_data.ndim == 2
-    assert output_bold_data.shape == (1, 6)
+        output_bold_data = output_bold["data"]
+        # Assert BOLD output data dimension
+        assert output_bold_data.ndim == 2
+        assert output_bold_data.shape == (1, 6)
 
     # Reset log capture
     caplog.clear()
@@ -78,18 +100,13 @@ def test_ALFFSpheres(caplog: pytest.LogCaptureFixture, tmp_path: Path) -> None:
 @pytest.mark.skipif(
     _check_afni() is False, reason="requires AFNI to be in PATH"
 )
-@pytest.mark.parametrize(
-    "fractional", [True, False], ids=["fractional", "non-fractional"]
-)
-def test_ALFFSpheres_comparison(tmp_path: Path, fractional: bool) -> None:
+def test_ALFFSpheres_comparison(tmp_path: Path) -> None:
     """Test ALFFSpheres implementation comparison.
 
     Parameters
     ----------
     tmp_path : pathlib.Path
         The path to the test directory.
-    fractional : bool
-        Whether to compute fractional ALFF or not.
 
     """
     with PartlyCloudyTestingDataGrabber() as dg:
@@ -100,7 +117,6 @@ def test_ALFFSpheres_comparison(tmp_path: Path, fractional: bool) -> None:
     # Initialize marker
     junifer_marker = ALFFSpheres(
         coords=COORDINATES,
-        fractional=fractional,
         using="junifer",
         radius=5.0,
     )
@@ -112,7 +128,6 @@ def test_ALFFSpheres_comparison(tmp_path: Path, fractional: bool) -> None:
     # Initialize marker
     afni_marker = ALFFSpheres(
         coords=COORDINATES,
-        fractional=fractional,
         using="afni",
         radius=5.0,
     )
@@ -121,9 +136,10 @@ def test_ALFFSpheres_comparison(tmp_path: Path, fractional: bool) -> None:
     # Get BOLD output
     afni_output_bold = afni_output["BOLD"]
 
-    # Check for Pearson correlation coefficient
-    r, _ = sp.stats.pearsonr(
-        junifer_output_bold["data"][0],
-        afni_output_bold["data"][0],
-    )
-    assert r > 0.99
+    for feature in afni_output_bold.keys():
+        # Check for Pearson correlation coefficient
+        r, _ = sp.stats.pearsonr(
+            junifer_output_bold[feature]["data"][0],
+            afni_output_bold[feature]["data"][0],
+        )
+        assert r > 0.99
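The hunks above also track an ALFF-specific change: the `fractional` switch is gone, and one ALFFSpheres run now yields both an "alff" and a "falff" feature, which is why the test loops over `output["BOLD"]` and parametrizes `get_output_type()` over both names. A sketch of the resulting access pattern, reusing the `marker` and testing data set up in the test above (shapes per its assertions):

    # Sketch: `marker` is ALFFSpheres(coords=COORDINATES, using="junifer",
    # radius=5.0) and `element_data` comes from PartlyCloudyTestingDataGrabber.
    output = marker.fit_transform(element_data)
    for feature in ("alff", "falff"):  # both computed in a single pass now
        feature_output = output["BOLD"][feature]
        assert feature_output["data"].shape == (1, 6)  # 6 DMNBuckner spheres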
--- a/junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py
+++ b/junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py
@@ -45,6 +45,12 @@ class CrossParcellationFC(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "functional_connectivity": "matrix",
+        },
+    }
+
     def __init__(
         self,
         parcellation_one: str,
@@ -65,33 +71,6 @@ class CrossParcellationFC(BaseMarker):
         self.masks = masks
         super().__init__(on=["BOLD"], name=name)
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "matrix"
-
     def compute(
         self,
         input: Dict[str, Any],
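The deleted `get_valid_inputs()`/`get_output_type()` pair was boilerplate repeated in every marker; it is superseded by the declarative `_MARKER_INOUT_MAPPINGS` table added above, which the reworked `junifer/markers/base.py` (+49 -23 in the file list) presumably resolves at query time. A rough sketch of how such a lookup could work; an illustration, not junifer's actual BaseMarker code:

    from typing import ClassVar, Dict, List

    class BaseMarkerSketch:
        """Illustrative stand-in for junifer's BaseMarker."""

        # Input data type -> output feature name -> storage type.
        _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {}

        def get_valid_inputs(self) -> List[str]:
            # Valid input data types fall out of the mapping's top-level keys.
            return list(self._MARKER_INOUT_MAPPINGS.keys())

        def get_output_type(self, input_type: str, output_feature: str) -> str:
            # One lookup replaces the per-marker boilerplate removed above.
            return self._MARKER_INOUT_MAPPINGS[input_type][output_feature]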
@@ -118,10 +97,14 @@ class CrossParcellationFC(BaseMarker):
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

-           * ``data`` : the correlation values between the two parcellations
-             as a numpy.ndarray
-           * ``col_names`` : the ROIs for first parcellation as a list
-           * ``row_names`` : the ROIs for second parcellation as a list
+           * ``functional_connectivity`` : dictionary with the following keys:
+
+             - ``data`` : correlation between the two parcellations as
+               ``numpy.ndarray``
+             - ``col_names`` : ROI labels for first parcellation as list of
+               str
+             - ``row_names`` : ROI labels for second parcellation as list of
+               str

        """
        logger.debug(
@@ -129,31 +112,32 @@ class CrossParcellationFC(BaseMarker):
             f" {self.parcellation_one} and "
             f"{self.parcellation_two} parcellations."
         )
-        # Initialize a ParcelAggregation
-        parcellation_one_dict = ParcelAggregation(
+        # Perform aggregation using two parcellations
+        aggregation_parcellation_one = ParcelAggregation(
             parcellation=self.parcellation_one,
             method=self.aggregation_method,
             masks=self.masks,
         ).compute(input, extra_input=extra_input)
-        parcellation_two_dict = ParcelAggregation(
+        aggregation_parcellation_two = ParcelAggregation(
             parcellation=self.parcellation_two,
             method=self.aggregation_method,
             masks=self.masks,
         ).compute(input, extra_input=extra_input)
 
-        parcellated_ts_one = parcellation_one_dict["data"]
-        parcellated_ts_two = parcellation_two_dict["data"]
-        # columns should be named after parcellation 1
-        # rows should be named after parcellation 2
-
-        result = _correlate_dataframes(
-            pd.DataFrame(parcellated_ts_one),
-            pd.DataFrame(parcellated_ts_two),
-            method=self.correlation_method,
-        ).values
-
         return {
-            "data": result,
-            "col_names": parcellation_one_dict["col_names"],
-            "row_names": parcellation_two_dict["col_names"],
+            "functional_connectivity": {
+                "data": _correlate_dataframes(
+                    pd.DataFrame(
+                        aggregation_parcellation_one["aggregation"]["data"]
+                    ),
+                    pd.DataFrame(
+                        aggregation_parcellation_two["aggregation"]["data"]
+                    ),
+                    method=self.correlation_method,
+                ).values,
+                # Columns should be named after parcellation 1
+                "col_names": aggregation_parcellation_one["col_names"],
+                # Rows should be named after parcellation 2
+                "row_names": aggregation_parcellation_two["col_names"],
+            },
         }
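`_correlate_dataframes` is junifer's internal helper; the tests below pin down its output shape (100 columns named after parcellation one, 200 rows named after parcellation two). A hedged pandas sketch of the operation it plausibly performs, correlating every column of one timeseries table with every column of the other:

    import numpy as np
    import pandas as pd

    rng = np.random.default_rng(0)
    ts_one = pd.DataFrame(rng.standard_normal((50, 100)))  # parcellation 1
    ts_two = pd.DataFrame(rng.standard_normal((50, 200)))  # parcellation 2
    # Correlate every ROI of one table with every ROI of the other.
    both = pd.concat([ts_one, ts_two], axis=1, keys=["one", "two"])
    cross = both.corr(method="spearman").loc["two", "one"]  # shape (200, 100)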
--- a/junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py
+++ b/junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py
@@ -98,23 +98,29 @@ class EdgeCentricFCParcels(FunctionalConnectivityBase):
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

-           * ``data`` : the actual computed values as a numpy.ndarray
-           * ``col_names`` : the column labels for the computed values as list
+           * ``aggregation`` : dictionary with the following keys:
+
+             - ``data`` : ROI values as ``numpy.ndarray``
+             - ``col_names`` : ROI labels as list of str

        """
-        parcel_aggregation = ParcelAggregation(
+        # Perform aggregation
+        aggregation = ParcelAggregation(
             parcellation=self.parcellation,
             method=self.agg_method,
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        )
-
-        bold_aggregated = parcel_aggregation.compute(
-            input, extra_input=extra_input
-        )
+        ).compute(input, extra_input=extra_input)
+        # Compute edgewise timeseries
         ets, edge_names = _ets(
-            bold_aggregated["data"], bold_aggregated["col_names"]
+            bold_ts=aggregation["aggregation"]["data"],
+            roi_names=aggregation["aggregation"]["col_names"],
         )
 
-        return {"data": ets, "col_names": edge_names}
+        return {
+            "aggregation": {
+                "data": ets,
+                "col_names": edge_names,
+            },
+        }
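For n ROIs, `_ets()` produces n * (n - 1) / 2 edge time series, as the tests further below assert. A hedged sketch of the standard edge-time-series construction (element-wise products of z-scored ROI signals); not necessarily junifer's exact `_ets` implementation:

    import numpy as np
    from scipy.stats import zscore

    def ets_sketch(bold_ts: np.ndarray, roi_names: list) -> tuple:
        """Edge time series as products of z-scored ROI pairs (sketch)."""
        z = zscore(bold_ts, axis=0)                # timepoints x ROIs
        iu, ju = np.triu_indices(z.shape[1], k=1)  # n * (n - 1) / 2 pairs
        ets = z[:, iu] * z[:, ju]                  # timepoints x edges
        edge_names = [
            f"{roi_names[i]}~{roi_names[j]}" for i, j in zip(iu, ju)
        ]
        return ets, edge_names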
--- a/junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py
+++ b/junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py
@@ -110,11 +110,14 @@ class EdgeCentricFCSpheres(FunctionalConnectivityBase):
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

-           * ``data`` : the actual computed values as a numpy.ndarray
-           * ``col_names`` : the column labels for the computed values as list
+           * ``aggregation`` : dictionary with the following keys:
+
+             - ``data`` : ROI values as ``numpy.ndarray``
+             - ``col_names`` : ROI labels as list of str

        """
-        sphere_aggregation = SphereAggregation(
+        # Perform aggregation
+        aggregation = SphereAggregation(
             coords=self.coords,
             radius=self.radius,
             allow_overlap=self.allow_overlap,
@@ -122,12 +125,13 @@ class EdgeCentricFCSpheres(FunctionalConnectivityBase):
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        )
-        bold_aggregated = sphere_aggregation.compute(
-            input, extra_input=extra_input
-        )
+        ).compute(input, extra_input=extra_input)
+        # Compute edgewise timeseries
         ets, edge_names = _ets(
-            bold_aggregated["data"], bold_aggregated["col_names"]
+            bold_ts=aggregation["aggregation"]["data"],
+            roi_names=aggregation["aggregation"]["col_names"],
         )
 
-        return {"data": ets, "col_names": edge_names}
+        return {
+            "aggregation": {"data": ets, "col_names": edge_names},
+        }
--- a/junifer/markers/functional_connectivity/functional_connectivity_base.py
+++ b/junifer/markers/functional_connectivity/functional_connectivity_base.py
@@ -47,6 +47,12 @@ class FunctionalConnectivityBase(BaseMarker):
 
     _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "scikit-learn"}
 
+    _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+        "BOLD": {
+            "functional_connectivity": "matrix",
+        },
+    }
+
     def __init__(
         self,
         agg_method: str = "mean",
@@ -80,33 +86,6 @@ class FunctionalConnectivityBase(BaseMarker):
             klass=NotImplementedError,
         )
 
-    def get_valid_inputs(self) -> List[str]:
-        """Get valid data types for input.
-
-        Returns
-        -------
-        list of str
-            The list of data types that can be used as input for this marker.
-
-        """
-        return ["BOLD"]
-
-    def get_output_type(self, input_type: str) -> str:
-        """Get output type.
-
-        Parameters
-        ----------
-        input_type : str
-            The data type input to the marker.
-
-        Returns
-        -------
-        str
-            The storage type output by the marker.
-
-        """
-        return "matrix"
-
     def compute(
         self,
         input: Dict[str, Any],
@@ -128,13 +107,16 @@ class FunctionalConnectivityBase(BaseMarker):
        Returns
        -------
        dict
-           The computed result as dictionary. The following keys will be
-           included in the dictionary:
+           The computed result as dictionary. This will be either returned
+           to the user or stored in the storage by calling the store method
+           with this as a parameter. The dictionary has the following keys:
+
+           * ``functional_connectivity`` : dictionary with the following keys:

-           * ``data`` : functional connectivity matrix as a ``numpy.ndarray``.
-           * ``row_names`` : row names as a list
-           * ``col_names`` : column names as a list
-           * ``matrix_kind`` : the kind of matrix (tril, triu or full)
+             - ``data`` : functional connectivity matrix as ``numpy.ndarray``
+             - ``row_names`` : ROI labels as list of str
+             - ``col_names`` : ROI labels as list of str
+             - ``matrix_kind`` : the kind of matrix (tril, triu or full)

        """
        # Perform necessary aggregation
@@ -148,10 +130,14 @@ class FunctionalConnectivityBase(BaseMarker):
         else:
             connectivity = ConnectivityMeasure(kind=self.cor_method)
         # Create dictionary for output
-        out = {}
-        out["data"] = connectivity.fit_transform([aggregation["data"]])[0]
-        # Create column names
-        out["row_names"] = aggregation["col_names"]
-        out["col_names"] = aggregation["col_names"]
-        out["matrix_kind"] = "tril"
-        return out
+        return {
+            "functional_connectivity": {
+                "data": connectivity.fit_transform(
+                    [aggregation["aggregation"]["data"]]
+                )[0],
+                # Create column names
+                "row_names": aggregation["aggregation"]["col_names"],
+                "col_names": aggregation["aggregation"]["col_names"],
+                "matrix_kind": "tril",
+            },
+        }
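Note the double indexing in the rewritten `compute()`: `aggregate()` delegates to ParcelAggregation or SphereAggregation, whose own `compute()` now nests its result under an "aggregation" feature key, hence `aggregation["aggregation"]["data"]`. A small sketch of the shape being consumed (values hypothetical):

    import numpy as np

    # What self.aggregate(input, extra_input) returns under the new contract:
    aggregation = {
        "aggregation": {
            "data": np.zeros((100, 6)),                   # timepoints x ROIs
            "col_names": [f"ROI_{i}" for i in range(6)],  # ROI labels
        },
    }
    bold_ts = aggregation["aggregation"]["data"]
    roi_names = aggregation["aggregation"]["col_names"]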
--- a/junifer/markers/functional_connectivity/functional_connectivity_parcels.py
+++ b/junifer/markers/functional_connectivity/functional_connectivity_parcels.py
@@ -90,16 +90,16 @@ class FunctionalConnectivityParcels(FunctionalConnectivityBase):
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

-           * ``data`` : the actual computed values as a numpy.ndarray
-           * ``col_names`` : the column labels for the computed values as list
+           * ``aggregation`` : dictionary with the following keys:
+
+             - ``data`` : ROI values as ``numpy.ndarray``
+             - ``col_names`` : ROI labels as list of str

        """
-        parcel_aggregation = ParcelAggregation(
+        return ParcelAggregation(
             parcellation=self.parcellation,
             method=self.agg_method,
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        )
-        # Return the 2D timeseries after parcel aggregation
-        return parcel_aggregation.compute(input, extra_input=extra_input)
+        ).compute(input=input, extra_input=extra_input)
--- a/junifer/markers/functional_connectivity/functional_connectivity_spheres.py
+++ b/junifer/markers/functional_connectivity/functional_connectivity_spheres.py
@@ -104,11 +104,13 @@ class FunctionalConnectivitySpheres(FunctionalConnectivityBase):
            to the user or stored in the storage by calling the store method
            with this as a parameter. The dictionary has the following keys:

-           * ``data`` : the actual computed values as a numpy.ndarray
-           * ``col_names`` : the column labels for the computed values as list
+           * ``aggregation`` : dictionary with the following keys:
+
+             - ``data`` : ROI values as ``numpy.ndarray``
+             - ``col_names`` : ROI labels as list of str

        """
-        sphere_aggregation = SphereAggregation(
+        return SphereAggregation(
             coords=self.coords,
             radius=self.radius,
             allow_overlap=self.allow_overlap,
@@ -116,6 +118,4 @@ class FunctionalConnectivitySpheres(FunctionalConnectivityBase):
             method_params=self.agg_method_params,
             masks=self.masks,
             on="BOLD",
-        )
-        # Return the 2D timeseries after sphere aggregation
-        return sphere_aggregation.compute(input, extra_input=extra_input)
+        ).compute(input=input, extra_input=extra_input)
--- a/junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py
+++ b/junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py
@@ -33,10 +33,11 @@ def test_init() -> None:
 
 def test_get_output_type() -> None:
     """Test CrossParcellationFC get_output_type()."""
-    crossparcellation = CrossParcellationFC(
+    assert "matrix" == CrossParcellationFC(
         parcellation_one=parcellation_one, parcellation_two=parcellation_two
+    ).get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
     )
-    assert "matrix" == crossparcellation.get_output_type("BOLD")
 
 
 @pytest.mark.skipif(
@@ -59,7 +60,9 @@ def test_compute(tmp_path: Path) -> None:
         parcellation_two=parcellation_two,
         correlation_method="spearman",
     )
-    out = crossparcellation.compute(element_data["BOLD"])
+    out = crossparcellation.compute(element_data["BOLD"])[
+        "functional_connectivity"
+    ]
     assert out["data"].shape == (200, 100)
     assert len(out["col_names"]) == 100
     assert len(out["row_names"]) == 200
@@ -92,5 +95,6 @@ def test_store(tmp_path: Path) -> None:
     crossparcellation.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_CrossParcellationFC" for x in features.values()
+        x["name"] == "BOLD_CrossParcellationFC_functional_connectivity"
+        for x in features.values()
     )
--- a/junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py
+++ b/junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py
@@ -28,11 +28,13 @@ def test_EdgeCentricFCParcels(tmp_path: Path) -> None:
         cor_method_params={"empirical": True},
     )
     # Check correct output
-    assert marker.get_output_type("BOLD") == "matrix"
+    assert "matrix" == marker.get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
+    )
 
     # Fit-transform the data
     edge_fc = marker.fit_transform(element_data)
-    edge_fc_bold = edge_fc["BOLD"]
+    edge_fc_bold = edge_fc["BOLD"]["functional_connectivity"]
 
     # For 16 ROIs we should get (16 * (16 -1) / 2) edges in the ETS
     n_edges = int(16 * (16 - 1) / 2)
@@ -51,5 +53,6 @@ def test_EdgeCentricFCParcels(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_EdgeCentricFCParcels" for x in features.values()
+        x["name"] == "BOLD_EdgeCentricFCParcels_functional_connectivity"
+        for x in features.values()
     )
--- a/junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py
+++ b/junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py
@@ -27,11 +27,13 @@ def test_EdgeCentricFCSpheres(tmp_path: Path) -> None:
         coords="DMNBuckner", radius=5.0, cor_method="correlation"
     )
     # Check correct output
-    assert marker.get_output_type("BOLD") == "matrix"
+    assert "matrix" == marker.get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
+    )
 
     # Fit-transform the data
     edge_fc = marker.fit_transform(element_data)
-    edge_fc_bold = edge_fc["BOLD"]
+    edge_fc_bold = edge_fc["BOLD"]["functional_connectivity"]
 
     # There are six DMNBuckner coordinates, so
     # for 6 ROIs we should get (6 * (6 -1) / 2) edges in the ETS
@@ -57,5 +59,6 @@ def test_EdgeCentricFCSpheres(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_EdgeCentricFCSpheres" for x in features.values()
+        x["name"] == "BOLD_EdgeCentricFCSpheres_functional_connectivity"
+        for x in features.values()
     )
--- a/junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py
+++ b/junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py
@@ -35,11 +35,13 @@ def test_FunctionalConnectivityParcels(tmp_path: Path) -> None:
         parcellation="TianxS1x3TxMNInonlinear2009cAsym"
     )
     # Check correct output
-    assert marker.get_output_type("BOLD") == "matrix"
+    assert "matrix" == marker.get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
+    )
 
     # Fit-transform the data
     fc = marker.fit_transform(element_data)
-    fc_bold = fc["BOLD"]
+    fc_bold = fc["BOLD"]["functional_connectivity"]
 
     assert "data" in fc_bold
     assert "row_names" in fc_bold
@@ -83,6 +85,7 @@ def test_FunctionalConnectivityParcels(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_FunctionalConnectivityParcels"
+        x["name"]
+        == "BOLD_FunctionalConnectivityParcels_functional_connectivity"
         for x in features.values()
     )
--- a/junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py
+++ b/junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py
@@ -38,11 +38,13 @@ def test_FunctionalConnectivitySpheres(tmp_path: Path) -> None:
         coords="DMNBuckner", radius=5.0, cor_method="correlation"
     )
     # Check correct output
-    assert marker.get_output_type("BOLD") == "matrix"
+    assert "matrix" == marker.get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
+    )
 
     # Fit-transform the data
     fc = marker.fit_transform(element_data)
-    fc_bold = fc["BOLD"]
+    fc_bold = fc["BOLD"]["functional_connectivity"]
 
     assert "data" in fc_bold
     assert "row_names" in fc_bold
@@ -80,7 +82,8 @@ def test_FunctionalConnectivitySpheres(tmp_path: Path) -> None:
     marker.fit_transform(input=element_data, storage=storage)
     features = storage.list_features()
     assert any(
-        x["name"] == "BOLD_FunctionalConnectivitySpheres"
+        x["name"]
+        == "BOLD_FunctionalConnectivitySpheres_functional_connectivity"
         for x in features.values()
     )
 
@@ -103,11 +106,13 @@ def test_FunctionalConnectivitySpheres_empirical(tmp_path: Path) -> None:
         cor_method_params={"empirical": True},
     )
     # Check correct output
-    assert marker.get_output_type("BOLD") == "matrix"
+    assert "matrix" == marker.get_output_type(
+        input_type="BOLD", output_feature="functional_connectivity"
+    )
 
     # Fit-transform the data
     fc = marker.fit_transform(element_data)
-    fc_bold = fc["BOLD"]
+    fc_bold = fc["BOLD"]["functional_connectivity"]
 
     assert "data" in fc_bold
     assert "row_names" in fc_bold