junifer 0.0.3.dev188__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178)
  1. junifer/_version.py +14 -2
  2. junifer/api/cli.py +162 -17
  3. junifer/api/functions.py +87 -419
  4. junifer/api/parser.py +24 -0
  5. junifer/api/queue_context/__init__.py +8 -0
  6. junifer/api/queue_context/gnu_parallel_local_adapter.py +258 -0
  7. junifer/api/queue_context/htcondor_adapter.py +365 -0
  8. junifer/api/queue_context/queue_context_adapter.py +60 -0
  9. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +192 -0
  10. junifer/api/queue_context/tests/test_htcondor_adapter.py +257 -0
  11. junifer/api/res/afni/run_afni_docker.sh +6 -6
  12. junifer/api/res/ants/ResampleImage +3 -0
  13. junifer/api/res/ants/antsApplyTransforms +3 -0
  14. junifer/api/res/ants/antsApplyTransformsToPoints +3 -0
  15. junifer/api/res/ants/run_ants_docker.sh +39 -0
  16. junifer/api/res/fsl/applywarp +3 -0
  17. junifer/api/res/fsl/flirt +3 -0
  18. junifer/api/res/fsl/img2imgcoord +3 -0
  19. junifer/api/res/fsl/run_fsl_docker.sh +39 -0
  20. junifer/api/res/fsl/std2imgcoord +3 -0
  21. junifer/api/res/run_conda.sh +4 -4
  22. junifer/api/res/run_venv.sh +22 -0
  23. junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
  24. junifer/api/tests/test_api_utils.py +21 -3
  25. junifer/api/tests/test_cli.py +232 -9
  26. junifer/api/tests/test_functions.py +211 -439
  27. junifer/api/tests/test_parser.py +1 -1
  28. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +6 -1
  29. junifer/configs/juseless/datagrabbers/camcan_vbm.py +6 -1
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +6 -1
  31. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +8 -8
  32. junifer/configs/juseless/datagrabbers/ucla.py +44 -26
  33. junifer/configs/juseless/datagrabbers/ukb_vbm.py +6 -1
  34. junifer/data/VOIs/meta/AutobiographicalMemory_VOIs.txt +23 -0
  35. junifer/data/VOIs/meta/Power2013_MNI_VOIs.tsv +264 -0
  36. junifer/data/__init__.py +4 -0
  37. junifer/data/coordinates.py +298 -31
  38. junifer/data/masks.py +360 -28
  39. junifer/data/parcellations.py +621 -188
  40. junifer/data/template_spaces.py +190 -0
  41. junifer/data/tests/test_coordinates.py +34 -3
  42. junifer/data/tests/test_data_utils.py +1 -0
  43. junifer/data/tests/test_masks.py +202 -86
  44. junifer/data/tests/test_parcellations.py +266 -55
  45. junifer/data/tests/test_template_spaces.py +104 -0
  46. junifer/data/utils.py +4 -2
  47. junifer/datagrabber/__init__.py +1 -0
  48. junifer/datagrabber/aomic/id1000.py +111 -70
  49. junifer/datagrabber/aomic/piop1.py +116 -53
  50. junifer/datagrabber/aomic/piop2.py +116 -53
  51. junifer/datagrabber/aomic/tests/test_id1000.py +27 -27
  52. junifer/datagrabber/aomic/tests/test_piop1.py +27 -27
  53. junifer/datagrabber/aomic/tests/test_piop2.py +27 -27
  54. junifer/datagrabber/base.py +62 -10
  55. junifer/datagrabber/datalad_base.py +0 -2
  56. junifer/datagrabber/dmcc13_benchmark.py +372 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +5 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +30 -13
  59. junifer/datagrabber/pattern.py +133 -27
  60. junifer/datagrabber/pattern_datalad.py +111 -13
  61. junifer/datagrabber/tests/test_base.py +57 -6
  62. junifer/datagrabber/tests/test_datagrabber_utils.py +204 -76
  63. junifer/datagrabber/tests/test_datalad_base.py +0 -6
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +256 -0
  65. junifer/datagrabber/tests/test_multiple.py +43 -10
  66. junifer/datagrabber/tests/test_pattern.py +125 -178
  67. junifer/datagrabber/tests/test_pattern_datalad.py +44 -25
  68. junifer/datagrabber/utils.py +151 -16
  69. junifer/datareader/default.py +36 -10
  70. junifer/external/nilearn/junifer_nifti_spheres_masker.py +6 -0
  71. junifer/markers/base.py +25 -16
  72. junifer/markers/collection.py +35 -16
  73. junifer/markers/complexity/__init__.py +27 -0
  74. junifer/markers/complexity/complexity_base.py +149 -0
  75. junifer/markers/complexity/hurst_exponent.py +136 -0
  76. junifer/markers/complexity/multiscale_entropy_auc.py +140 -0
  77. junifer/markers/complexity/perm_entropy.py +132 -0
  78. junifer/markers/complexity/range_entropy.py +136 -0
  79. junifer/markers/complexity/range_entropy_auc.py +145 -0
  80. junifer/markers/complexity/sample_entropy.py +134 -0
  81. junifer/markers/complexity/tests/test_complexity_base.py +19 -0
  82. junifer/markers/complexity/tests/test_hurst_exponent.py +69 -0
  83. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +68 -0
  84. junifer/markers/complexity/tests/test_perm_entropy.py +68 -0
  85. junifer/markers/complexity/tests/test_range_entropy.py +69 -0
  86. junifer/markers/complexity/tests/test_range_entropy_auc.py +69 -0
  87. junifer/markers/complexity/tests/test_sample_entropy.py +68 -0
  88. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +68 -0
  89. junifer/markers/complexity/weighted_perm_entropy.py +133 -0
  90. junifer/markers/falff/_afni_falff.py +153 -0
  91. junifer/markers/falff/_junifer_falff.py +142 -0
  92. junifer/markers/falff/falff_base.py +91 -84
  93. junifer/markers/falff/falff_parcels.py +61 -45
  94. junifer/markers/falff/falff_spheres.py +64 -48
  95. junifer/markers/falff/tests/test_falff_parcels.py +89 -121
  96. junifer/markers/falff/tests/test_falff_spheres.py +92 -127
  97. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +1 -0
  98. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -0
  99. junifer/markers/functional_connectivity/functional_connectivity_base.py +1 -0
  100. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -44
  101. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -39
  102. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -52
  103. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -70
  104. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -85
  105. junifer/markers/parcel_aggregation.py +60 -38
  106. junifer/markers/reho/_afni_reho.py +192 -0
  107. junifer/markers/reho/_junifer_reho.py +281 -0
  108. junifer/markers/reho/reho_base.py +69 -34
  109. junifer/markers/reho/reho_parcels.py +26 -16
  110. junifer/markers/reho/reho_spheres.py +23 -9
  111. junifer/markers/reho/tests/test_reho_parcels.py +93 -92
  112. junifer/markers/reho/tests/test_reho_spheres.py +88 -86
  113. junifer/markers/sphere_aggregation.py +54 -9
  114. junifer/markers/temporal_snr/temporal_snr_base.py +1 -0
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
  117. junifer/markers/tests/test_collection.py +43 -42
  118. junifer/markers/tests/test_ets_rss.py +29 -37
  119. junifer/markers/tests/test_parcel_aggregation.py +587 -468
  120. junifer/markers/tests/test_sphere_aggregation.py +209 -157
  121. junifer/markers/utils.py +2 -40
  122. junifer/onthefly/read_transform.py +13 -6
  123. junifer/pipeline/__init__.py +1 -0
  124. junifer/pipeline/pipeline_step_mixin.py +105 -41
  125. junifer/pipeline/registry.py +17 -0
  126. junifer/pipeline/singleton.py +45 -0
  127. junifer/pipeline/tests/test_pipeline_step_mixin.py +139 -51
  128. junifer/pipeline/tests/test_update_meta_mixin.py +1 -0
  129. junifer/pipeline/tests/test_workdir_manager.py +104 -0
  130. junifer/pipeline/update_meta_mixin.py +8 -2
  131. junifer/pipeline/utils.py +154 -15
  132. junifer/pipeline/workdir_manager.py +246 -0
  133. junifer/preprocess/__init__.py +3 -0
  134. junifer/preprocess/ants/__init__.py +4 -0
  135. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  136. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  137. junifer/preprocess/base.py +96 -69
  138. junifer/preprocess/bold_warper.py +265 -0
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +91 -134
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +106 -111
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/tests/test_bold_warper.py +159 -0
  145. junifer/preprocess/tests/test_preprocess_base.py +6 -6
  146. junifer/preprocess/warping/__init__.py +6 -0
  147. junifer/preprocess/warping/_ants_warper.py +167 -0
  148. junifer/preprocess/warping/_fsl_warper.py +109 -0
  149. junifer/preprocess/warping/space_warper.py +213 -0
  150. junifer/preprocess/warping/tests/test_space_warper.py +198 -0
  151. junifer/stats.py +18 -4
  152. junifer/storage/base.py +9 -1
  153. junifer/storage/hdf5.py +8 -3
  154. junifer/storage/pandas_base.py +2 -1
  155. junifer/storage/sqlite.py +1 -0
  156. junifer/storage/tests/test_hdf5.py +2 -1
  157. junifer/storage/tests/test_sqlite.py +8 -8
  158. junifer/storage/tests/test_utils.py +6 -6
  159. junifer/storage/utils.py +1 -0
  160. junifer/testing/datagrabbers.py +11 -7
  161. junifer/testing/utils.py +1 -0
  162. junifer/tests/test_stats.py +2 -0
  163. junifer/utils/__init__.py +1 -0
  164. junifer/utils/helpers.py +53 -0
  165. junifer/utils/logging.py +14 -3
  166. junifer/utils/tests/test_helpers.py +35 -0
  167. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/METADATA +59 -28
  168. junifer-0.0.4.dist-info/RECORD +257 -0
  169. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/WHEEL +1 -1
  170. junifer/markers/falff/falff_estimator.py +0 -334
  171. junifer/markers/falff/tests/test_falff_estimator.py +0 -238
  172. junifer/markers/reho/reho_estimator.py +0 -515
  173. junifer/markers/reho/tests/test_reho_estimator.py +0 -260
  174. junifer-0.0.3.dev188.dist-info/RECORD +0 -199
  175. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/AUTHORS.rst +0 -0
  176. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/LICENSE.md +0 -0
  177. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/entry_points.txt +0 -0
  178. {junifer-0.0.3.dev188.dist-info → junifer-0.0.4.dist-info}/top_level.txt +0 -0
junifer/preprocess/warping/tests/test_space_warper.py ADDED
@@ -0,0 +1,198 @@
+ """Provide tests for SpaceWarper."""
+
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+ # License: AGPL
+
+ import socket
+ from typing import TYPE_CHECKING, Tuple, Type
+
+ import pytest
+ from numpy.testing import assert_array_equal, assert_raises
+
+ from junifer.datagrabber import DataladHCP1200, DMCC13Benchmark
+ from junifer.datareader import DefaultDataReader
+ from junifer.pipeline.utils import _check_ants, _check_fsl
+ from junifer.preprocess import SpaceWarper
+ from junifer.testing.datagrabbers import PartlyCloudyTestingDataGrabber
+
+
+ if TYPE_CHECKING:
+     from junifer.datagrabber import BaseDataGrabber
+
+
+ @pytest.mark.parametrize(
+     "using, reference, error_type, error_msg",
+     [
+         ("jam", "T1w", ValueError, "`using`"),
+         ("ants", "juice", ValueError, "reference"),
+         ("ants", "MNI152NLin2009cAsym", RuntimeError, "remove"),
+         ("fsl", "MNI152NLin2009cAsym", RuntimeError, "ANTs"),
+     ],
+ )
+ def test_SpaceWarper_errors(
+     using: str,
+     reference: str,
+     error_type: Type[Exception],
+     error_msg: str,
+ ) -> None:
+     """Test SpaceWarper errors.
+
+     Parameters
+     ----------
+     using : str
+         The parametrized implementation method.
+     reference : str
+         The parametrized reference to use.
+     error_type : Exception-like object
+         The parametrized exception to check.
+     error_msg : str
+         The parametrized exception message to check.
+
+     """
+     with PartlyCloudyTestingDataGrabber() as dg:
+         # Read data
+         element_data = DefaultDataReader().fit_transform(dg["sub-01"])
+         # Preprocess data
+         with pytest.raises(error_type, match=error_msg):
+             SpaceWarper(
+                 using=using,
+                 reference=reference,
+                 on="BOLD",
+             ).preprocess(
+                 input=element_data["BOLD"],
+                 extra_input=element_data,
+             )
+
+
+ @pytest.mark.parametrize(
+     "datagrabber, element, using",
+     [
+         [
+             DMCC13Benchmark(
+                 types=["BOLD", "T1w", "Warp"],
+                 sessions=["ses-wave1bas"],
+                 tasks=["Rest"],
+                 phase_encodings=["AP"],
+                 runs=["1"],
+                 native_t1w=True,
+             ),
+             ("sub-f9057kp", "ses-wave1bas", "Rest", "AP", "1"),
+             "ants",
+         ],
+         [
+             DataladHCP1200(
+                 tasks=["REST1"],
+                 phase_encodings=["LR"],
+                 ica_fix=True,
+             ),
+             ("100206", "REST1", "LR"),
+             "fsl",
+         ],
+     ],
+ )
+ @pytest.mark.skipif(_check_fsl() is False, reason="requires FSL to be in PATH")
+ @pytest.mark.skipif(
+     _check_ants() is False, reason="requires ANTs to be in PATH"
+ )
+ @pytest.mark.skipif(
+     socket.gethostname() != "juseless",
+     reason="only for juseless",
+ )
+ def test_SpaceWarper_native(
+     datagrabber: "BaseDataGrabber", element: Tuple[str, ...], using: str
+ ) -> None:
+     """Test SpaceWarper for native space warping.
+
+     Parameters
+     ----------
+     datagrabber : DataGrabber-like object
+         The parametrized DataGrabber objects.
+     element : tuple of str
+         The parametrized elements.
+     using : str
+         The parametrized implementation method.
+
+     """
+     with datagrabber as dg:
+         # Read data
+         element_data = DefaultDataReader().fit_transform(dg[element])
+         # Preprocess data
+         output, _ = SpaceWarper(
+             using=using,
+             reference="T1w",
+             on="BOLD",
+         ).preprocess(
+             input=element_data["BOLD"],
+             extra_input=element_data,
+         )
+         # Check
+         assert isinstance(output, dict)
+
+
+ @pytest.mark.parametrize(
+     "datagrabber, element, space",
+     [
+         [
+             DMCC13Benchmark(
+                 types=["T1w"],
+                 sessions=["ses-wave1bas"],
+                 tasks=["Rest"],
+                 phase_encodings=["AP"],
+                 runs=["1"],
+                 native_t1w=False,
+             ),
+             ("sub-f9057kp", "ses-wave1bas", "Rest", "AP", "1"),
+             "MNI152NLin2009aAsym",
+         ],
+         [
+             DMCC13Benchmark(
+                 types=["T1w"],
+                 sessions=["ses-wave1bas"],
+                 tasks=["Rest"],
+                 phase_encodings=["AP"],
+                 runs=["1"],
+                 native_t1w=False,
+             ),
+             ("sub-f9057kp", "ses-wave1bas", "Rest", "AP", "1"),
+             "MNI152NLin6Asym",
+         ],
+     ],
+ )
+ @pytest.mark.skipif(
+     _check_ants() is False, reason="requires ANTs to be in PATH"
+ )
+ def test_SpaceWarper_multi_mni(
+     datagrabber: "BaseDataGrabber",
+     element: Tuple[str, ...],
+     space: str,
+ ) -> None:
+     """Test SpaceWarper for MNI space warping.
+
+     Parameters
+     ----------
+     datagrabber : DataGrabber-like object
+         The parametrized DataGrabber objects.
+     element : tuple of str
+         The parametrized elements.
+     space : str
+         The parametrized template space to transform to.
+
+     """
+     with datagrabber as dg:
+         # Read data
+         element_data = DefaultDataReader().fit_transform(dg[element])
+         pre_xfm_data = element_data["T1w"]["data"].get_fdata().copy()
+         # Preprocess data
+         output, _ = SpaceWarper(
+             using="ants",
+             reference=space,
+             on=["T1w"],
+         ).preprocess(
+             input=element_data["T1w"],
+             extra_input=element_data,
+         )
+         # Checks
+         assert isinstance(output, dict)
+         assert output["space"] == space
+         with assert_raises(AssertionError):
+             assert_array_equal(pre_xfm_data, output["data"])
junifer/stats.py CHANGED
@@ -7,7 +7,7 @@
  from typing import Any, Callable, Dict, List, Optional
 
  import numpy as np
- from scipy.stats import trim_mean
+ from scipy.stats import mode, trim_mean
  from scipy.stats.mstats import winsorize
 
  from .utils import logger, raise_error
@@ -24,10 +24,11 @@ def get_aggfunc_by_name(
          Name to identify the function. Currently supported names and
          corresponding functions are:
 
-         * ``winsorized_mean`` -> :func:`scipy.stats.mstats.winsorize`
          * ``mean`` -> :func:`numpy.mean`
-         * ``std`` -> :func:`numpy.std`
+         * ``winsorized_mean`` -> :func:`scipy.stats.mstats.winsorize`
          * ``trim_mean`` -> :func:`scipy.stats.trim_mean`
+         * ``mode`` -> :func:`scipy.stats.mode`
+         * ``std`` -> :func:`numpy.std`
          * ``count`` -> :func:`.count`
          * ``select`` -> :func:`.select`
 
@@ -40,6 +41,7 @@
      -------
      function
          Respective function with ``func_params`` parameter set.
+
      """
      from functools import partial  # local import to avoid sphinx error
 
@@ -51,6 +53,7 @@
          "trim_mean",
          "count",
          "select",
+         "mode",
      }
      if func_params is None:
          func_params = {}
@@ -93,6 +96,8 @@
          elif pick is not None and drop is not None:
              raise_error("Either pick or drop must be specified, not both.")
          func = partial(select, **func_params)
+     elif name == "mode":
+         func = partial(mode, **func_params)
      else:
          raise_error(
              f"Function {name} unknown. Please provide any of "
@@ -115,6 +120,7 @@ def count(data: np.ndarray, axis: int = 0) -> np.ndarray:
      -------
      numpy.ndarray
          Number of elements along the given axis.
+
      """
      ax_size = data.shape[axis]
      if axis < 0:
@@ -137,7 +143,7 @@
          The axis to calculate winsorized mean on (default None).
      **win_params : dict
          Dictionary containing the keyword arguments for the winsorize function.
-         E.g. ``{'limits': [0.1, 0.1]}``.
+         E.g., ``{'limits': [0.1, 0.1]}``.
 
      Returns
      -------
@@ -149,6 +155,7 @@
      --------
      scipy.stats.mstats.winsorize :
          The winsorize function used in this function.
+
      """
      win_dat = winsorize(data, axis=axis, **win_params)
      win_mean = win_dat.mean(axis=axis)
@@ -180,6 +187,13 @@
      numpy.ndarray
          Subset of the inputted data with the select settings
          applied as specified in ``select_params``.
+
+     Raises
+     ------
+     ValueError
+         If both ``pick`` and ``drop`` are None or
+         if both ``pick`` and ``drop`` are not None.
+
      """
 
      if pick is None and drop is None:
junifer/storage/base.py CHANGED
@@ -30,6 +30,11 @@ class BaseFeatureStorage(ABC):
      single_output : bool, optional
          Whether to have single output (default True).
 
+     Raises
+     ------
+     ValueError
+         If required storage type(s) is(are) missing from ``storage_types``.
+
      """
 
      def __init__(
@@ -39,13 +44,15 @@ class BaseFeatureStorage(ABC):
          single_output: bool = True,
      ) -> None:
          self.uri = uri
+         # Convert storage_types to list
          if not isinstance(storage_types, list):
              storage_types = [storage_types]
+         # Check if required inputs are found
          if any(x not in self.get_valid_inputs() for x in storage_types):
              wrong_storage_types = [
                  x for x in storage_types if x not in self.get_valid_inputs()
              ]
-             raise ValueError(
+             raise_error(
                  f"{self.__class__.__name__} cannot store {wrong_storage_types}"
              )
          self._valid_inputs = storage_types
@@ -170,6 +177,7 @@ class BaseFeatureStorage(ABC):
              The element as a dictionary.
          meta : dict
              The metadata as a dictionary.
+
          """
          raise_error(
              msg="Concrete classes need to implement store_metadata().",
junifer/storage/hdf5.py CHANGED
@@ -57,6 +57,7 @@
      ------
      ValueError
          If `kind` is not one of ['vector', 'matrix', 'timeseries'].
+
      """
      if kind in ["vector", "matrix"]:
          features_data = np.concatenate(chunk_data, axis=-1)
@@ -678,9 +679,13 @@ class HDF5FeatureStorage(BaseFeatureStorage):
          elif isinstance(data, list):
              if self.force_float32:
                  data = [
-                     x.astype(dtype=np.dtype("float32"), casting="same_kind")
-                     if x.dtype == np.dtype("float64")
-                     else x
+                     (
+                         x.astype(
+                             dtype=np.dtype("float32"), casting="same_kind"
+                         )
+                         if x.dtype == np.dtype("float64")
+                         else x
+                     )
                      for x in data
                  ]
          # Handle cases for existing and new entry
junifer/storage/pandas_base.py CHANGED
@@ -114,7 +114,8 @@ class PandasBaseFeatureStorage(BaseFeatureStorage):
          if len(elem_idx) == 1:
              # Create normal index for vector
              index = pd.Index(
-                 data=list(elem_idx.values())[0], name=list(elem_idx.keys())[0]
+                 data=next(iter(elem_idx.values())),
+                 name=next(iter(elem_idx.keys())),
              )
          else:
              # Create multiindex for timeseries
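
This replacement, repeated across the storage tests below, swaps list(d.keys())[0] for next(iter(d.keys())): both return the first key of a dictionary, but the latter does so without building an intermediate list. A one-line illustration with a hypothetical mapping:

    elem_idx = {"subject": ["sub-01"]}  # hypothetical index mapping
    first_key = next(iter(elem_idx.keys()))  # "subject", no intermediate list built
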
junifer/storage/sqlite.py CHANGED
@@ -366,6 +366,7 @@ class SQLiteFeatureStorage(PandasBaseFeatureStorage):
              The element as a dictionary.
          meta : dict
              The metadata as a dictionary.
+
          """
          # Get sqlalchemy engine
          engine = self.get_engine(element=element)
junifer/storage/tests/test_hdf5.py CHANGED
@@ -175,7 +175,7 @@ def test_store_metadata_and_list_features(tmp_path: Path) -> None:
      # List the stored features
      features = storage.list_features()
      # Get the first MD5
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      # Check the MD5
      assert meta_md5 == feature_md5
 
@@ -838,6 +838,7 @@ def _create_data_to_store(n_elements: int, kind: str) -> Tuple[str, Dict]:
          The meta md5.
      dict
          The data to store.
+
      """
      all_data = []
      t_md5 = None
junifer/storage/tests/test_sqlite.py CHANGED
@@ -335,7 +335,7 @@ def test_store_df_and_read_df(tmp_path: Path) -> None:
      with pytest.raises(ValueError, match="Only one"):
          storage.read_df(feature_name="wrong_name", feature_md5="wrong_md5")
      # Get MD5 hash of features
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "feature_md5" == feature_md5
      # Check for key
      assert "BOLD_markername" == features[feature_md5]["name"]
@@ -373,7 +373,7 @@ def test_store_metadata(tmp_path: Path) -> None:
          meta_md5=meta_md5, element=element_to_store, meta=meta_to_store
      )
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert meta_md5 == feature_md5
 
 
@@ -474,7 +474,7 @@ def test_store_matrix(tmp_path: Path) -> None:
      stored_names = [f"{i}~{j}" for i in row_names for j in col_names]
 
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
 
      read_df = storage.read_df(feature_md5=feature_md5)
@@ -498,7 +498,7 @@ def test_store_matrix(tmp_path: Path) -> None:
          for j in range(data.shape[1])
      ]
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
      read_df = storage.read_df(feature_md5=feature_md5)
      assert list(read_df.columns) == stored_names
@@ -563,7 +563,7 @@ def test_store_matrix(tmp_path: Path) -> None:
      ]
 
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
      read_df = storage.read_df(feature_md5=feature_md5)
      assert list(read_df.columns) == stored_names
@@ -595,7 +595,7 @@ def test_store_matrix(tmp_path: Path) -> None:
      ]
 
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
      read_df = storage.read_df(feature_md5=feature_md5)
      assert list(read_df.columns) == stored_names
@@ -632,7 +632,7 @@ def test_store_matrix(tmp_path: Path) -> None:
      ]
 
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
      read_df = storage.read_df(feature_md5=feature_md5)
      assert list(read_df.columns) == stored_names
@@ -663,7 +663,7 @@ def test_store_matrix(tmp_path: Path) -> None:
      ]
 
      features = storage.list_features()
-     feature_md5 = list(features.keys())[0]
+     feature_md5 = next(iter(features.keys()))
      assert "BOLD_fc" == features[feature_md5]["name"]
      read_df = storage.read_df(feature_md5=feature_md5)
      assert list(read_df.columns) == stored_names
junifer/storage/tests/test_utils.py CHANGED
@@ -23,12 +23,12 @@ from junifer.storage.utils import (
      "dependency, max_version",
      [
          ("click", "8.2"),
-         ("numpy", "1.26"),
-         ("datalad", "0.19"),
-         ("pandas", "1.6"),
-         ("nibabel", "4.1"),
-         ("nilearn", "0.10.0"),
-         ("sqlalchemy", "1.5.0"),
+         ("numpy", "1.27"),
+         ("datalad", "0.20"),
+         ("pandas", "2.2"),
+         ("nibabel", "5.11"),
+         ("nilearn", "0.11.0"),
+         ("sqlalchemy", "2.1.0"),
          ("ruamel.yaml", "0.18.0"),
      ],
  )
junifer/storage/utils.py CHANGED
@@ -131,6 +131,7 @@ def element_to_prefix(element: Dict) -> str:
      -------
      str
          The element converted to prefix.
+
      """
      logger.debug(f"Converting element {element} to prefix.")
      prefix = "element"
junifer/testing/datagrabbers.py CHANGED
@@ -55,7 +55,10 @@ class OasisVBMTestingDataGrabber(BaseDataGrabber):
          """
          out = {}
          i_sub = int(subject.split("-")[1]) - 1
-         out["VBM_GM"] = {"path": Path(self._dataset.gray_matter_maps[i_sub])}
+         out["VBM_GM"] = {
+             "path": Path(self._dataset.gray_matter_maps[i_sub]),
+             "space": "MNI152Lin",
+         }
 
          return out
 
@@ -141,8 +144,8 @@ class SPMAuditoryTestingDataGrabber(BaseDataGrabber):
          anat_fname = self.datadir / f"{subject}_T1w.nii.gz"
          nib.save(fmri_img, fmri_fname)
          nib.save(anat_img, anat_fname)
-         out["BOLD"] = {"path": fmri_fname}
-         out["T1w"] = {"path": anat_fname}
+         out["BOLD"] = {"path": fmri_fname, "space": "MNI152Lin"}
+         out["T1w"] = {"path": anat_fname, "space": "native"}
          return out
 
 
@@ -236,12 +239,13 @@ class PartlyCloudyTestingDataGrabber(BaseDataGrabber):
          """
          out = {}
          i_sub = int(subject.split("-")[1]) - 1
-         out["BOLD"] = {"path": Path(self._dataset["func"][i_sub])}
-         conf_format = "fmriprep"
-
+         out["BOLD"] = {
+             "path": Path(self._dataset["func"][i_sub]),
+             "space": "MNI152NLin2009cAsym",
+         }
          out["BOLD_confounds"] = {
              "path": Path(self._dataset["confounds"][i_sub]),
-             "format": conf_format,
+             "format": "fmriprep",
          }
 
          return out
junifer/testing/utils.py CHANGED
@@ -18,6 +18,7 @@ def get_testing_data(fname: str) -> Path:
      -------
      pathlib.Path
          The absolute path to the file.
+
      """
      t_path = Path(__file__).parent / "data" / fname
      if not t_path.exists():
junifer/tests/test_stats.py CHANGED
@@ -21,6 +21,8 @@ from junifer.stats import count, get_aggfunc_by_name, select, winsorized_mean
          ("count", None),
          ("trim_mean", None),
          ("trim_mean", {"proportiontocut": 0.1}),
+         ("mode", None),
+         ("mode", {"keepdims": True}),
      ],
  )
  def test_get_aggfunc_by_name(name: str, params: Optional[Dict]) -> None:
junifer/utils/__init__.py CHANGED
@@ -6,3 +6,4 @@
 
  from .fs import make_executable
  from .logging import configure_logging, logger, raise_error, warn_with_log
+ from .helpers import run_ext_cmd
junifer/utils/helpers.py ADDED
@@ -0,0 +1,53 @@
+ """Provide helper functions for the package."""
+
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+ # License: AGPL
+
+ import subprocess
+ from typing import List
+
+ from .logging import logger, raise_error
+
+
+ def run_ext_cmd(name: str, cmd: List[str]) -> None:
+     """Run external command via subprocess.
+
+     Parameters
+     ----------
+     name : str
+         The name of the command.
+     cmd : list of str
+         The command to run as list of string.
+
+     Raises
+     ------
+     RuntimeError
+         If command fails.
+
+     """
+     # Convert list to str
+     cmd_str = " ".join(cmd)
+     logger.info(f"{name} command to be executed:\n{cmd_str}")
+     # Run command via subprocess
+     process = subprocess.run(
+         cmd_str,  # string needed with shell=True
+         stdin=subprocess.DEVNULL,
+         stdout=subprocess.PIPE,
+         stderr=subprocess.STDOUT,
+         shell=True,  # needed for respecting $PATH
+         check=False,
+     )
+     # Check for success or failure
+     if process.returncode == 0:
+         logger.info(
+             f"{name} command succeeded with the following output:\n"
+             f"{process.stdout}"
+         )
+     else:
+         raise_error(
+             msg=(
+                 f"{name} command failed with the following error:\n"
+                 f"{process.stdout}"
+             ),
+             klass=RuntimeError,
+         )
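
The new run_ext_cmd helper joins the command tokens, runs them through the shell, logs the output, and raises RuntimeError on a non-zero exit code. A minimal sketch mirroring the tests in junifer/utils/tests/test_helpers.py at the end of this diff:

    from junifer.utils.helpers import run_ext_cmd

    # Logs the command and its output at INFO level on success
    run_ext_cmd(name="pwd", cmd=["pwd"])

    # A missing executable exits non-zero, so the call below would raise RuntimeError
    # run_ext_cmd(name="flymetothemoon", cmd=["flymetothemoon"])
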
junifer/utils/logging.py CHANGED
@@ -4,9 +4,13 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL
 
+ try:
+     from distutils.version import LooseVersion
+ except ImportError:  # pragma: no cover
+     from looseversion import LooseVersion
+
  import logging
  import sys
- from distutils.version import LooseVersion
  from pathlib import Path
  from subprocess import PIPE, Popen, TimeoutExpired
  from typing import Dict, NoReturn, Optional, Type, Union
@@ -60,9 +64,16 @@ def _get_git_head(path: Path) -> str:
          Empty string if timeout expired for subprocess command execution else
          git HEAD information.
 
+     Raises
+     ------
+     FileNotFoundError
+         If ``path`` is invalid.
+
      """
      if not path.exists():
-         raise ValueError(f"This path does not exist: {path}")
+         raise_error(
+             msg=f"This path does not exist: {path}", klass=FileNotFoundError
+         )
      command = f"cd {path}; git rev-parse --verify HEAD"
      process = Popen(
          args=command,
@@ -88,7 +99,7 @@ def get_versions() -> Dict:
 
      """
      module_versions = {}
-     for name, module in sys.modules.items():
+     for name, module in sys.modules.copy().items():
          # Bypassing sub-modules of packages and
          # allowing ruamel.yaml
          if "." in name and name != "ruamel.yaml":
junifer/utils/tests/test_helpers.py ADDED
@@ -0,0 +1,35 @@
+ """Provide tests for helper functions."""
+
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+ # License: AGPL
+
+ import logging
+
+ import pytest
+
+ from junifer.utils.helpers import run_ext_cmd
+
+
+ def test_run_ext_cmd_success(caplog: pytest.LogCaptureFixture) -> None:
+     """Test external command run success.
+
+     Parameters
+     ----------
+     caplog : pytest.LogCaptureFixture
+         The pytest.LogCaptureFixture object.
+
+     """
+     # Set log capturing at INFO
+     with caplog.at_level(logging.INFO):
+         # Run external command
+         run_ext_cmd(name="pwd", cmd=["pwd"])
+         # Check logging message
+         assert "executed" in caplog.text
+         assert "succeeded" in caplog.text
+
+
+ def test_run_ext_cmd_failure() -> None:
+     """Test external command run failure."""
+     with pytest.raises(RuntimeError, match="failed"):
+         # Run external command
+         run_ext_cmd(name="flymetothemoon", cmd=["flymetothemoon"])