junifer 0.0.3.dev188__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff compares two publicly released versions of the package as published to their public registry and is provided for informational purposes only.
Files changed (178)
  1. junifer/_version.py +14 -2
  2. junifer/api/cli.py +162 -17
  3. junifer/api/functions.py +87 -419
  4. junifer/api/parser.py +24 -0
  5. junifer/api/queue_context/__init__.py +8 -0
  6. junifer/api/queue_context/gnu_parallel_local_adapter.py +258 -0
  7. junifer/api/queue_context/htcondor_adapter.py +365 -0
  8. junifer/api/queue_context/queue_context_adapter.py +60 -0
  9. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +192 -0
  10. junifer/api/queue_context/tests/test_htcondor_adapter.py +257 -0
  11. junifer/api/res/afni/run_afni_docker.sh +6 -6
  12. junifer/api/res/ants/ResampleImage +3 -0
  13. junifer/api/res/ants/antsApplyTransforms +3 -0
  14. junifer/api/res/ants/antsApplyTransformsToPoints +3 -0
  15. junifer/api/res/ants/run_ants_docker.sh +39 -0
  16. junifer/api/res/fsl/applywarp +3 -0
  17. junifer/api/res/fsl/flirt +3 -0
  18. junifer/api/res/fsl/img2imgcoord +3 -0
  19. junifer/api/res/fsl/run_fsl_docker.sh +39 -0
  20. junifer/api/res/fsl/std2imgcoord +3 -0
  21. junifer/api/res/run_conda.sh +4 -4
  22. junifer/api/res/run_venv.sh +22 -0
  23. junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
  24. junifer/api/tests/test_api_utils.py +21 -3
  25. junifer/api/tests/test_cli.py +232 -9
  26. junifer/api/tests/test_functions.py +211 -439
  27. junifer/api/tests/test_parser.py +1 -1
  28. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +6 -1
  29. junifer/configs/juseless/datagrabbers/camcan_vbm.py +6 -1
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +6 -1
  31. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +8 -8
  32. junifer/configs/juseless/datagrabbers/ucla.py +44 -26
  33. junifer/configs/juseless/datagrabbers/ukb_vbm.py +6 -1
  34. junifer/data/VOIs/meta/AutobiographicalMemory_VOIs.txt +23 -0
  35. junifer/data/VOIs/meta/Power2013_MNI_VOIs.tsv +264 -0
  36. junifer/data/__init__.py +4 -0
  37. junifer/data/coordinates.py +298 -31
  38. junifer/data/masks.py +360 -28
  39. junifer/data/parcellations.py +621 -188
  40. junifer/data/template_spaces.py +190 -0
  41. junifer/data/tests/test_coordinates.py +34 -3
  42. junifer/data/tests/test_data_utils.py +1 -0
  43. junifer/data/tests/test_masks.py +202 -86
  44. junifer/data/tests/test_parcellations.py +266 -55
  45. junifer/data/tests/test_template_spaces.py +104 -0
  46. junifer/data/utils.py +4 -2
  47. junifer/datagrabber/__init__.py +1 -0
  48. junifer/datagrabber/aomic/id1000.py +111 -70
  49. junifer/datagrabber/aomic/piop1.py +116 -53
  50. junifer/datagrabber/aomic/piop2.py +116 -53
  51. junifer/datagrabber/aomic/tests/test_id1000.py +27 -27
  52. junifer/datagrabber/aomic/tests/test_piop1.py +27 -27
  53. junifer/datagrabber/aomic/tests/test_piop2.py +27 -27
  54. junifer/datagrabber/base.py +62 -10
  55. junifer/datagrabber/datalad_base.py +0 -2
  56. junifer/datagrabber/dmcc13_benchmark.py +372 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +5 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +30 -13
  59. junifer/datagrabber/pattern.py +133 -27
  60. junifer/datagrabber/pattern_datalad.py +111 -13
  61. junifer/datagrabber/tests/test_base.py +57 -6
  62. junifer/datagrabber/tests/test_datagrabber_utils.py +204 -76
  63. junifer/datagrabber/tests/test_datalad_base.py +0 -6
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +256 -0
  65. junifer/datagrabber/tests/test_multiple.py +43 -10
  66. junifer/datagrabber/tests/test_pattern.py +125 -178
  67. junifer/datagrabber/tests/test_pattern_datalad.py +44 -25
  68. junifer/datagrabber/utils.py +151 -16
  69. junifer/datareader/default.py +36 -10
  70. junifer/external/nilearn/junifer_nifti_spheres_masker.py +6 -0
  71. junifer/markers/base.py +25 -16
  72. junifer/markers/collection.py +35 -16
  73. junifer/markers/complexity/__init__.py +27 -0
  74. junifer/markers/complexity/complexity_base.py +149 -0
  75. junifer/markers/complexity/hurst_exponent.py +136 -0
  76. junifer/markers/complexity/multiscale_entropy_auc.py +140 -0
  77. junifer/markers/complexity/perm_entropy.py +132 -0
  78. junifer/markers/complexity/range_entropy.py +136 -0
  79. junifer/markers/complexity/range_entropy_auc.py +145 -0
  80. junifer/markers/complexity/sample_entropy.py +134 -0
  81. junifer/markers/complexity/tests/test_complexity_base.py +19 -0
  82. junifer/markers/complexity/tests/test_hurst_exponent.py +69 -0
  83. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +68 -0
  84. junifer/markers/complexity/tests/test_perm_entropy.py +68 -0
  85. junifer/markers/complexity/tests/test_range_entropy.py +69 -0
  86. junifer/markers/complexity/tests/test_range_entropy_auc.py +69 -0
  87. junifer/markers/complexity/tests/test_sample_entropy.py +68 -0
  88. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +68 -0
  89. junifer/markers/complexity/weighted_perm_entropy.py +133 -0
  90. junifer/markers/falff/_afni_falff.py +153 -0
  91. junifer/markers/falff/_junifer_falff.py +142 -0
  92. junifer/markers/falff/falff_base.py +91 -84
  93. junifer/markers/falff/falff_parcels.py +61 -45
  94. junifer/markers/falff/falff_spheres.py +64 -48
  95. junifer/markers/falff/tests/test_falff_parcels.py +89 -121
  96. junifer/markers/falff/tests/test_falff_spheres.py +92 -127
  97. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +1 -0
  98. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -0
  99. junifer/markers/functional_connectivity/functional_connectivity_base.py +1 -0
  100. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -44
  101. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -39
  102. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -52
  103. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -70
  104. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -85
  105. junifer/markers/parcel_aggregation.py +60 -38
  106. junifer/markers/reho/_afni_reho.py +192 -0
  107. junifer/markers/reho/_junifer_reho.py +281 -0
  108. junifer/markers/reho/reho_base.py +69 -34
  109. junifer/markers/reho/reho_parcels.py +26 -16
  110. junifer/markers/reho/reho_spheres.py +23 -9
  111. junifer/markers/reho/tests/test_reho_parcels.py +93 -92
  112. junifer/markers/reho/tests/test_reho_spheres.py +88 -86
  113. junifer/markers/sphere_aggregation.py +54 -9
  114. junifer/markers/temporal_snr/temporal_snr_base.py +1 -0
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
  117. junifer/markers/tests/test_collection.py +43 -42
  118. junifer/markers/tests/test_ets_rss.py +29 -37
  119. junifer/markers/tests/test_parcel_aggregation.py +587 -468
  120. junifer/markers/tests/test_sphere_aggregation.py +209 -157
  121. junifer/markers/utils.py +2 -40
  122. junifer/onthefly/read_transform.py +13 -6
  123. junifer/pipeline/__init__.py +1 -0
  124. junifer/pipeline/pipeline_step_mixin.py +105 -41
  125. junifer/pipeline/registry.py +17 -0
  126. junifer/pipeline/singleton.py +45 -0
  127. junifer/pipeline/tests/test_pipeline_step_mixin.py +139 -51
  128. junifer/pipeline/tests/test_update_meta_mixin.py +1 -0
  129. junifer/pipeline/tests/test_workdir_manager.py +104 -0
  130. junifer/pipeline/update_meta_mixin.py +8 -2
  131. junifer/pipeline/utils.py +154 -15
  132. junifer/pipeline/workdir_manager.py +246 -0
  133. junifer/preprocess/__init__.py +3 -0
  134. junifer/preprocess/ants/__init__.py +4 -0
  135. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  136. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  137. junifer/preprocess/base.py +96 -69
  138. junifer/preprocess/bold_warper.py +265 -0
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +91 -134
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +106 -111
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/tests/test_bold_warper.py +159 -0
  145. junifer/preprocess/tests/test_preprocess_base.py +6 -6
  146. junifer/preprocess/warping/__init__.py +6 -0
  147. junifer/preprocess/warping/_ants_warper.py +167 -0
  148. junifer/preprocess/warping/_fsl_warper.py +109 -0
  149. junifer/preprocess/warping/space_warper.py +213 -0
  150. junifer/preprocess/warping/tests/test_space_warper.py +198 -0
  151. junifer/stats.py +18 -4
  152. junifer/storage/base.py +9 -1
  153. junifer/storage/hdf5.py +8 -3
  154. junifer/storage/pandas_base.py +2 -1
  155. junifer/storage/sqlite.py +1 -0
  156. junifer/storage/tests/test_hdf5.py +2 -1
  157. junifer/storage/tests/test_sqlite.py +8 -8
  158. junifer/storage/tests/test_utils.py +6 -6
  159. junifer/storage/utils.py +1 -0
  160. junifer/testing/datagrabbers.py +11 -7
  161. junifer/testing/utils.py +1 -0
  162. junifer/tests/test_stats.py +2 -0
  163. junifer/utils/__init__.py +1 -0
  164. junifer/utils/helpers.py +53 -0
  165. junifer/utils/logging.py +14 -3
  166. junifer/utils/tests/test_helpers.py +35 -0
  167. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/METADATA +59 -28
  168. junifer-0.0.4.dist-info/RECORD +257 -0
  169. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/WHEEL +1 -1
  170. junifer/markers/falff/falff_estimator.py +0 -334
  171. junifer/markers/falff/tests/test_falff_estimator.py +0 -238
  172. junifer/markers/reho/reho_estimator.py +0 -515
  173. junifer/markers/reho/tests/test_reho_estimator.py +0 -260
  174. junifer-0.0.3.dev188.dist-info/RECORD +0 -199
  175. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/AUTHORS.rst +0 -0
  176. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/LICENSE.md +0 -0
  177. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/entry_points.txt +0 -0
  178. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/top_level.txt +0 -0
junifer/markers/complexity/complexity_base.py
@@ -0,0 +1,149 @@
+"""Provide base class for complexity."""
+
+# Authors: Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
+# License: AGPL
+
+from abc import abstractmethod
+from typing import (
+    TYPE_CHECKING,
+    Any,
+    ClassVar,
+    Dict,
+    List,
+    Optional,
+    Set,
+    Union,
+)
+
+from ...utils import raise_error
+from ..base import BaseMarker
+from ..parcel_aggregation import ParcelAggregation
+
+
+if TYPE_CHECKING:
+    import numpy as np
+
+
+class ComplexityBase(BaseMarker):
+    """Base class for complexity computation.
+
+    Parameters
+    ----------
+    parcellation : str or list of str
+        The name(s) of the parcellation(s). Check valid options by calling
+        :func:`junifer.data.parcellations.list_parcellations`.
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    name : str, optional
+        The name of the marker. If None, it will use the class name
+        (default None).
+
+    """
+
+    _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "neurokit2"}
+
+    def __init__(
+        self,
+        parcellation: Union[str, List[str]],
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
+        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        self.parcellation = parcellation
+        self.agg_method = agg_method
+        self.agg_method_params = agg_method_params
+        self.masks = masks
+        super().__init__(on="BOLD", name=name)
+
+    @abstractmethod
+    def compute_complexity(
+        self,
+        extracted_bold_values: "np.ndarray",
+    ) -> "np.ndarray":
+        """Compute complexity measure."""
+        raise_error(
+            msg="Concrete classes need to implement compute_complexity().",
+            klass=NotImplementedError,
+        )
+
+    def get_valid_inputs(self) -> List[str]:
+        """Get valid data types for input.
+
+        Returns
+        -------
+        list of str
+            The list of data types that can be used as input for this marker.
+
+        """
+        return ["BOLD"]
+
+    def get_output_type(self, input_type: str) -> str:
+        """Get output type.
+
+        Parameters
+        ----------
+        input_type : str
+            The data type input to the marker.
+
+        Returns
+        -------
+        str
+            The storage type output by the marker.
+
+        """
+        return "vector"
+
+    def compute(
+        self,
+        input: Dict[str, Any],
+        extra_input: Optional[Dict[str, Any]] = None,
+    ) -> Dict[str, Any]:
+        """Compute.
+
+        Parameters
+        ----------
+        input : dict
+            A single input from the pipeline data object in which to compute
+            the marker.
+        extra_input : dict, optional
+            The other fields in the pipeline data object. Useful for accessing
+            other data kind that needs to be used in the computation.
+
+        Returns
+        -------
+        dict
+            The computed result as dictionary. The following keys will be
+            included in the dictionary:
+
+            * ``data`` : ROI-wise complexity measures as ``numpy.ndarray``
+            * ``col_names`` : ROI labels for the complexity measures as list
+
+        """
+        # Initialize a ParcelAggregation
+        parcel_aggregation = ParcelAggregation(
+            parcellation=self.parcellation,
+            method=self.agg_method,
+            method_params=self.agg_method_params,
+            masks=self.masks,
+            on="BOLD",
+        )
+        # Extract the 2D time series using parcel aggregation
+        parcel_aggregation_map = parcel_aggregation.compute(
+            input=input, extra_input=extra_input
+        )
+
+        # Compute complexity measure
+        parcel_aggregation_map["data"] = self.compute_complexity(
+            parcel_aggregation_map["data"]
+        )
+
+        return parcel_aggregation_map
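
The base class above routes ``compute()`` through ``ParcelAggregation`` and leaves only ``compute_complexity()`` for concrete markers to implement. A minimal sketch of such a subclass, assuming junifer 0.0.4 is installed; the class and its std-based "measure" are purely illustrative and not part of the package:

    import numpy as np

    from junifer.markers.complexity.complexity_base import ComplexityBase


    class TimeseriesStd(ComplexityBase):
        """Toy marker: per-ROI standard deviation of the aggregated BOLD."""

        def compute_complexity(
            self, extracted_bold_values: np.ndarray
        ) -> np.ndarray:
            # Input is (n_timepoints, n_roi); return a 1 x n_roi row vector,
            # matching the contract of the shipped complexity markers.
            return np.std(extracted_bold_values, axis=0, keepdims=True)
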
junifer/markers/complexity/hurst_exponent.py
@@ -0,0 +1,136 @@
+"""Provide class for Hurst exponent of a time series."""
+
+# Authors: Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
+#          Leonard Sasse <l.sasse@fz-juelich.de>
+# License: AGPL
+
+from typing import Dict, List, Optional, Union
+
+import neurokit2 as nk
+import numpy as np
+
+from ...api.decorators import register_marker
+from ...utils import logger, warn_with_log
+from .complexity_base import ComplexityBase
+
+
+@register_marker
+class HurstExponent(ComplexityBase):
+    """Class for Hurst exponent of a time series.
+
+    Parameters
+    ----------
+    parcellation : str or list of str
+        The name(s) of the parcellation(s). Check valid options by calling
+        :func:`junifer.data.parcellations.list_parcellations`.
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    params : dict, optional
+        Parameters to pass to the Hurst exponent calculation function. For more
+        information, check out ``junifer.markers.utils._hurst_exponent``.
+        If None, value is set to {"method": "dfa"} (default None).
+    name : str, optional
+        The name of the marker. If None, it will use the class name
+        (default None).
+
+    Warnings
+    --------
+    This class is not automatically imported by junifer and requires you to
+    import it explicitly. You can do it programmatically by
+    ``from junifer.markers.complexity import HurstExponent`` or in the YAML by
+    ``with: junifer.markers.complexity``.
+
+    """
+
+    def __init__(
+        self,
+        parcellation: Union[str, List[str]],
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
+        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
+        params: Optional[Dict] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        super().__init__(
+            parcellation=parcellation,
+            agg_method=agg_method,
+            agg_method_params=agg_method_params,
+            masks=masks,
+            name=name,
+        )
+        if params is None:
+            self.params = {"method": "dfa"}
+        else:
+            self.params = params
+
+    def compute_complexity(
+        self,
+        extracted_bold_values: np.ndarray,
+    ) -> np.ndarray:
+        """Compute complexity measure.
+
+        Take a timeseries of brain areas, and calculate Hurst exponent using
+        the detrended fluctuation analysis method assuming the data is
+        monofractal [1].
+
+        Parameters
+        ----------
+        extracted_bold_values : numpy.ndarray
+            The BOLD values extracted via parcel aggregation.
+
+        Returns
+        -------
+        numpy.ndarray
+            The values after computing complexity measure.
+
+        References
+        ----------
+        .. [1] Peng, C.; Havlin, S.; Stanley, H.E.; Goldberger, A.L.
+               Quantification of scaling exponents and crossover phenomena in
+               nonstationary heartbeat time series.
+               Chaos Interdiscip. J. Nonlinear Sci., 5, 82-87, 1995.
+
+        See Also
+        --------
+        neurokit2.fractal_dfa
+
+        """
+        logger.info(f"Calculating Hurst exponent ({self.params['method']}).")
+
+        _, n_roi = extracted_bold_values.shape
+        hurst_roi = np.zeros((n_roi, 1))
+
+        if self.params["method"] == "dfa":
+            for idx_roi in range(n_roi):
+                sig = extracted_bold_values[:, idx_roi]
+                tmp = nk.fractal_dfa(
+                    sig,
+                    scale="default",
+                    overlap=True,
+                    integrate=True,
+                    order=1,
+                    multifractal=False,
+                    q="default",  # q = 2 for monofractal Hurst exponent
+                    maxdfa=False,
+                    show=False,
+                )
+
+                hurst_roi[idx_roi] = tmp[0]
+
+        else:
+            hurst_roi = np.empty((n_roi, 1))
+            hurst_roi[:] = np.nan
+            warn_with_log("The DFA method is available only!")
+
+        if np.isnan(np.sum(hurst_roi)):
+            warn_with_log("There is NaN in the Hurst exponent values!")
+
+        return hurst_roi.T  # 1 X n_roi
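
Since ``compute_complexity()`` only reads ``self.params``, the estimator can be exercised on synthetic data without running the full pipeline. A rough sketch under that assumption, with a neurokit2 version compatible with junifer 0.0.4; the parcellation name and random signal are illustrative only:

    import numpy as np

    from junifer.markers.complexity import HurstExponent

    rng = np.random.default_rng(seed=0)
    bold = rng.standard_normal((200, 5))  # 200 timepoints, 5 ROIs

    marker = HurstExponent(parcellation="Schaefer100x17")  # illustrative name
    hurst = marker.compute_complexity(bold)
    print(hurst.shape)  # (1, 5), one Hurst exponent per ROI
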
junifer/markers/complexity/multiscale_entropy_auc.py
@@ -0,0 +1,140 @@
+"""Provide class for the AUC of multiscale entropy of a time series."""
+
+# Authors: Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
+#          Leonard Sasse <l.sasse@fz-juelich.de>
+# License: AGPL
+
+from typing import Dict, List, Optional, Union
+
+import neurokit2 as nk
+import numpy as np
+
+from ...api.decorators import register_marker
+from ...utils import logger, warn_with_log
+from .complexity_base import ComplexityBase
+
+
+@register_marker
+class MultiscaleEntropyAUC(ComplexityBase):
+    """Class for AUC of multiscale entropy of a time series.
+
+    Parameters
+    ----------
+    parcellation : str or list of str
+        The name(s) of the parcellation(s). Check valid options by calling
+        :func:`junifer.data.parcellations.list_parcellations`.
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    params : dict, optional
+        Parameters to pass to the AUC of multiscale entropy calculation
+        function. For more information, check out
+        ``junifer.markers.utils._multiscale_entropy_auc``. If None, value
+        is set to {"m": 2, "tol": 0.5, "scale": 10} (default None).
+    name : str, optional
+        The name of the marker. If None, it will use the class name
+        (default None).
+
+    Warnings
+    --------
+    This class is not automatically imported by junifer and requires you to
+    import it explicitly. You can do it programmatically by
+    ``from junifer.markers.complexity import MultiscaleEntropyAUC`` or in the
+    YAML by ``with: junifer.markers.complexity``.
+
+    """
+
+    def __init__(
+        self,
+        parcellation: Union[str, List[str]],
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
+        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
+        params: Optional[Dict] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        super().__init__(
+            parcellation=parcellation,
+            agg_method=agg_method,
+            agg_method_params=agg_method_params,
+            masks=masks,
+            name=name,
+        )
+        if params is None:
+            self.params = {"m": 2, "tol": 0.5, "scale": 10}
+        else:
+            self.params = params
+
+    def compute_complexity(
+        self,
+        extracted_bold_values: np.ndarray,
+    ) -> np.ndarray:
+        """Compute complexity measure.
+
+        Take a timeseries of brain areas, calculate multiscale entropy for each
+        region and calculate the AUC of the entropy curves leading to a
+        region-wise map of the brain [1].
+
+        Parameters
+        ----------
+        extracted_bold_values : numpy.ndarray
+            The BOLD values extracted via parcel aggregation.
+
+        Returns
+        -------
+        numpy.ndarray
+            The values after computing complexity measure.
+
+        References
+        ----------
+        .. [1] Costa, M., Goldberger, A. L., & Peng, C. K.
+               Multiscale entropy analysis of complex physiologic time series.
+               Physical review letters, 89(6), 068102, 2002.
+
+        See Also
+        --------
+        neurokit2.entropy_multiscale
+
+        """
+        logger.info("Calculating AUC of multiscale entropy.")
+
+        emb_dim = self.params["m"]
+        tol = self.params["tol"]
+        scale = self.params["scale"]
+
+        assert isinstance(emb_dim, int), "Embedding dimension must be integer."
+        assert isinstance(scale, int), "Scale must be integer."
+        assert isinstance(
+            tol, float
+        ), "Tolerance must be a positive float number."
+
+        _, n_roi = extracted_bold_values.shape
+        MSEn_auc_roi = np.zeros((n_roi, 1))
+        for idx_roi in range(n_roi):
+            sig = extracted_bold_values[:, idx_roi]
+            tol_corrected = tol * np.std(sig)
+            tmp = nk.entropy_multiscale(
+                sig,
+                scale=scale,
+                dimension=emb_dim,
+                tolerance=tol_corrected,
+                method="MSEn",
+            )
+
+            MSEn_auc_roi[idx_roi] = tmp[0]
+
+        if np.isnan(np.sum(MSEn_auc_roi)):
+            warn_with_log(
+                "There is NaN in the entropy values, likely due "
+                "to too short data length. A possible solution "
+                "may be to choose a smaller value for 'scale'."
+            )
+
+        return MSEn_auc_roi.T  # 1 X n_roi
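
The NaN warning above relates to the coarse-graining step: at ``scale=10`` the coarsest series keeps only a tenth of the time points, which can be too short for a stable sample entropy estimate. A hedged sketch of overriding the defaults accordingly; the parcellation name is illustrative:

    from junifer.markers.complexity import MultiscaleEntropyAUC

    marker = MultiscaleEntropyAUC(
        parcellation="Schaefer100x17",  # illustrative name
        params={"m": 2, "tol": 0.5, "scale": 5},  # halve the default scale
    )
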
junifer/markers/complexity/perm_entropy.py
@@ -0,0 +1,132 @@
+"""Provide class for permutation entropy of a time series."""
+
+# Authors: Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
+#          Leonard Sasse <l.sasse@fz-juelich.de>
+# License: AGPL
+
+from typing import Dict, List, Optional, Union
+
+import neurokit2 as nk
+import numpy as np
+
+from ...api.decorators import register_marker
+from ...utils import logger, warn_with_log
+from .complexity_base import ComplexityBase
+
+
+@register_marker
+class PermEntropy(ComplexityBase):
+    """Class for permutation entropy of a time series.
+
+    Parameters
+    ----------
+    parcellation : str or list of str
+        The name(s) of the parcellation(s). Check valid options by calling
+        :func:`junifer.data.parcellations.list_parcellations`.
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    params : dict, optional
+        Parameters to pass to the permutation entropy calculation function.
+        For more information, check out
+        ``junifer.markers.utils._perm_entropy``. If None, value is set to
+        {"m": 2, "delay": 1} (default None).
+    name : str, optional
+        The name of the marker. If None, it will use the class name
+        (default None).
+
+    Warnings
+    --------
+    This class is not automatically imported by junifer and requires you to
+    import it explicitly. You can do it programmatically by
+    ``from junifer.markers.complexity import PermEntropy`` or in the YAML by
+    ``with: junifer.markers.complexity``.
+
+    """
+
+    def __init__(
+        self,
+        parcellation: Union[str, List[str]],
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
+        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
+        params: Optional[Dict] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        super().__init__(
+            parcellation=parcellation,
+            agg_method=agg_method,
+            agg_method_params=agg_method_params,
+            masks=masks,
+            name=name,
+        )
+        if params is None:
+            self.params = {"m": 4, "delay": 1}
+        else:
+            self.params = params
+
+    def compute_complexity(
+        self,
+        extracted_bold_values: np.ndarray,
+    ) -> np.ndarray:
+        """Compute complexity measure.
+
+        Take a timeseries of brain areas, and calculate permutation entropy
+        according to the method outlined in [1].
+
+        Parameters
+        ----------
+        extracted_bold_values : numpy.ndarray
+            The BOLD values extracted via parcel aggregation.
+
+        Returns
+        -------
+        numpy.ndarray
+            The values after computing complexity measure.
+
+        References
+        ----------
+        .. [1] Bandt, C., & Pompe, B. (2002)
+               Permutation entropy: a natural complexity measure for time
+               series.
+               Physical review letters, 88(17), 174102.
+
+        See Also
+        --------
+        neurokit2.entropy_permutation
+
+        """
+        logger.info("Calculating permutation entropy.")
+
+        emb_dim = self.params["m"]
+        delay = self.params["delay"]
+
+        assert isinstance(emb_dim, int), "Embedding dimension must be integer."
+        assert isinstance(delay, int), "Delay must be integer."
+
+        _, n_roi = extracted_bold_values.shape
+        perm_en_roi = np.zeros((n_roi, 1))
+
+        for idx_roi in range(n_roi):
+            sig = extracted_bold_values[:, idx_roi]
+            tmp = nk.entropy_permutation(
+                sig,
+                dimension=emb_dim,
+                delay=delay,
+                weighted=False,  # PE, not wPE
+                corrected=True,  # Normalized PE
+            )
+
+            perm_en_roi[idx_roi] = tmp[0]
+
+        if np.isnan(np.sum(perm_en_roi)):
+            warn_with_log("There is NaN in the permutation entropy values!")
+
+        return perm_en_roi.T  # 1 X n_roi
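
The call above fixes ``weighted=False``; the weighted variant ships as a separate marker (``weighted_perm_entropy.py`` in the file list). A rough sketch of selecting either one, assuming both classes are exported from ``junifer.markers.complexity`` as the docstrings state and that ``WeightedPermEntropy`` follows the same constructor signature; the parcellation name is illustrative:

    from junifer.markers.complexity import PermEntropy, WeightedPermEntropy

    markers = [
        PermEntropy(parcellation="Schaefer100x17"),          # weighted=False
        WeightedPermEntropy(parcellation="Schaefer100x17"),  # weighted=True
    ]
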
junifer/markers/complexity/range_entropy.py
@@ -0,0 +1,136 @@
+"""Provide class for range entropy of a time series."""
+
+# Authors: Amir Omidvarnia <a.omidvarnia@fz-juelich.de>
+#          Leonard Sasse <l.sasse@fz-juelich.de>
+# License: AGPL
+
+from typing import Dict, List, Optional, Union
+
+import neurokit2 as nk
+import numpy as np
+
+from ...api.decorators import register_marker
+from ...utils import logger, warn_with_log
+from .complexity_base import ComplexityBase
+
+
+@register_marker
+class RangeEntropy(ComplexityBase):
+    """Class for range entropy of a time series.
+
+    Parameters
+    ----------
+    parcellation : str or list of str
+        The name(s) of the parcellation(s). Check valid options by calling
+        :func:`junifer.data.parcellations.list_parcellations`.
+    agg_method : str, optional
+        The method to perform aggregation using. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default "mean").
+    agg_method_params : dict, optional
+        Parameters to pass to the aggregation function. Check valid options in
+        :func:`junifer.stats.get_aggfunc_by_name` (default None).
+    masks : str, dict or list of dict or str, optional
+        The specification of the masks to apply to regions before extracting
+        signals. Check :ref:`Using Masks <using_masks>` for more details.
+        If None, will not apply any mask (default None).
+    params : dict, optional
+        Parameters to pass to the range entropy calculation function. For more
+        information, check out ``junifer.markers.utils._range_entropy``.
+        If None, value is set to {"m": 2, "tol": 0.5, "delay": 1}
+        (default None).
+    name : str, optional
+        The name of the marker. If None, it will use the class name
+        (default None).
+
+    Warnings
+    --------
+    This class is not automatically imported by junifer and requires you to
+    import it explicitly. You can do it programmatically by
+    ``from junifer.markers.complexity import RangeEntropy`` or in the YAML by
+    ``with: junifer.markers.complexity``.
+
+    """
+
+    def __init__(
+        self,
+        parcellation: Union[str, List[str]],
+        agg_method: str = "mean",
+        agg_method_params: Optional[Dict] = None,
+        masks: Union[str, Dict, List[Union[Dict, str]], None] = None,
+        params: Optional[Dict] = None,
+        name: Optional[str] = None,
+    ) -> None:
+        super().__init__(
+            parcellation=parcellation,
+            agg_method=agg_method,
+            agg_method_params=agg_method_params,
+            masks=masks,
+            name=name,
+        )
+        if params is None:
+            self.params = {"m": 2, "tol": 0.5, "delay": 1}
+        else:
+            self.params = params
+
+    def compute_complexity(
+        self,
+        extracted_bold_values: np.ndarray,
+    ) -> np.ndarray:
+        """Compute complexity measure.
+
+        Take a timeseries of brain areas, and calculate
+        range entropy according to the method outlined in [1].
+
+        Parameters
+        ----------
+        extracted_bold_values : numpy.ndarray
+            The BOLD values extracted via parcel aggregation.
+
+        Returns
+        -------
+        numpy.ndarray
+            The values after computing complexity measure.
+
+        References
+        ----------
+        .. [1] A. Omidvarnia et al. (2018)
+               Range Entropy: A Bridge between Signal Complexity and
+               Self-Similarity.
+               Entropy, vol. 20, no. 12, p. 962, 2018.
+
+        See Also
+        --------
+        neurokit2.entropy_range
+
+        """
+        logger.info("Calculating range entropy.")
+
+        emb_dim = self.params["m"]
+        delay = self.params["delay"]
+        tolerance = self.params["tol"]
+
+        assert isinstance(emb_dim, int), "Embedding dimension must be integer."
+        assert isinstance(delay, int), "Delay must be integer."
+        assert isinstance(
+            tolerance, float
+        ), "Tolerance must be a float number between 0 and 1."
+
+        _, n_roi = extracted_bold_values.shape
+        range_en_roi = np.zeros((n_roi, 1))
+
+        for idx_roi in range(n_roi):
+            sig = extracted_bold_values[:, idx_roi]
+            tmp = nk.entropy_range(
+                sig,
+                dimension=emb_dim,
+                delay=delay,
+                tolerance=tolerance,
+                method="mSampEn",  # RangeEn B
+            )
+
+            range_en_roi[idx_roi] = tmp[0]
+
+        if np.isnan(np.sum(range_en_roi)):
+            warn_with_log("There is NaN in the range entropy values!")
+
+        return range_en_roi.T  # 1 X n_roi
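
As with the other markers, ``params`` replaces the defaults wholesale, and the tolerance is asserted to be a float between 0 and 1. A rough usage sketch with illustrative values, again assuming a neurokit2 version compatible with junifer 0.0.4:

    import numpy as np

    from junifer.markers.complexity import RangeEntropy

    marker = RangeEntropy(
        parcellation="Schaefer100x17",  # illustrative name
        params={"m": 2, "tol": 0.5, "delay": 1},
    )
    bold = np.random.default_rng(seed=1).standard_normal((150, 3))
    print(marker.compute_complexity(bold).shape)  # (1, 3)
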