junifer 0.0.5__py3-none-any.whl → 0.0.5.dev24__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (194)
  1. junifer/__init__.py +0 -17
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +1 -4
  4. junifer/api/cli.py +1 -91
  5. junifer/api/decorators.py +0 -9
  6. junifer/api/functions.py +10 -56
  7. junifer/api/parser.py +0 -3
  8. junifer/api/queue_context/__init__.py +1 -4
  9. junifer/api/res/afni/run_afni_docker.sh +1 -1
  10. junifer/api/res/ants/run_ants_docker.sh +1 -1
  11. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  12. junifer/api/tests/test_api_utils.py +2 -4
  13. junifer/api/tests/test_cli.py +0 -83
  14. junifer/api/tests/test_functions.py +2 -27
  15. junifer/configs/__init__.py +1 -1
  16. junifer/configs/juseless/__init__.py +1 -4
  17. junifer/configs/juseless/datagrabbers/__init__.py +1 -10
  18. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +0 -3
  19. junifer/configs/juseless/datagrabbers/camcan_vbm.py +0 -3
  20. junifer/configs/juseless/datagrabbers/ixi_vbm.py +0 -3
  21. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +3 -1
  22. junifer/configs/juseless/datagrabbers/ucla.py +9 -12
  23. junifer/configs/juseless/datagrabbers/ukb_vbm.py +0 -3
  24. junifer/data/__init__.py +1 -21
  25. junifer/data/coordinates.py +19 -10
  26. junifer/data/masks.py +87 -58
  27. junifer/data/parcellations.py +3 -14
  28. junifer/data/template_spaces.py +1 -4
  29. junifer/data/tests/test_masks.py +37 -26
  30. junifer/data/utils.py +0 -3
  31. junifer/datagrabber/__init__.py +1 -18
  32. junifer/datagrabber/aomic/__init__.py +0 -3
  33. junifer/datagrabber/aomic/id1000.py +37 -70
  34. junifer/datagrabber/aomic/piop1.py +36 -69
  35. junifer/datagrabber/aomic/piop2.py +38 -71
  36. junifer/datagrabber/aomic/tests/test_id1000.py +99 -44
  37. junifer/datagrabber/aomic/tests/test_piop1.py +108 -65
  38. junifer/datagrabber/aomic/tests/test_piop2.py +102 -45
  39. junifer/datagrabber/base.py +6 -13
  40. junifer/datagrabber/datalad_base.py +1 -13
  41. junifer/datagrabber/dmcc13_benchmark.py +53 -36
  42. junifer/datagrabber/hcp1200/__init__.py +0 -3
  43. junifer/datagrabber/hcp1200/datalad_hcp1200.py +0 -3
  44. junifer/datagrabber/hcp1200/hcp1200.py +1 -4
  45. junifer/datagrabber/multiple.py +6 -45
  46. junifer/datagrabber/pattern.py +62 -170
  47. junifer/datagrabber/pattern_datalad.py +12 -25
  48. junifer/datagrabber/tests/test_datagrabber_utils.py +218 -0
  49. junifer/datagrabber/tests/test_datalad_base.py +4 -4
  50. junifer/datagrabber/tests/test_dmcc13_benchmark.py +19 -46
  51. junifer/datagrabber/tests/test_multiple.py +84 -161
  52. junifer/datagrabber/tests/test_pattern.py +0 -45
  53. junifer/datagrabber/tests/test_pattern_datalad.py +4 -4
  54. junifer/datagrabber/utils.py +230 -0
  55. junifer/datareader/__init__.py +1 -4
  56. junifer/datareader/default.py +43 -95
  57. junifer/external/__init__.py +1 -1
  58. junifer/external/nilearn/__init__.py +1 -5
  59. junifer/external/nilearn/junifer_nifti_spheres_masker.py +9 -23
  60. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +1 -76
  61. junifer/markers/__init__.py +1 -23
  62. junifer/markers/base.py +28 -68
  63. junifer/markers/collection.py +2 -10
  64. junifer/markers/complexity/__init__.py +0 -10
  65. junifer/markers/complexity/complexity_base.py +43 -26
  66. junifer/markers/complexity/hurst_exponent.py +0 -3
  67. junifer/markers/complexity/multiscale_entropy_auc.py +0 -3
  68. junifer/markers/complexity/perm_entropy.py +0 -3
  69. junifer/markers/complexity/range_entropy.py +0 -3
  70. junifer/markers/complexity/range_entropy_auc.py +0 -3
  71. junifer/markers/complexity/sample_entropy.py +0 -3
  72. junifer/markers/complexity/tests/test_hurst_exponent.py +3 -11
  73. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +3 -11
  74. junifer/markers/complexity/tests/test_perm_entropy.py +3 -11
  75. junifer/markers/complexity/tests/test_range_entropy.py +3 -11
  76. junifer/markers/complexity/tests/test_range_entropy_auc.py +3 -11
  77. junifer/markers/complexity/tests/test_sample_entropy.py +3 -11
  78. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +3 -11
  79. junifer/markers/complexity/weighted_perm_entropy.py +0 -3
  80. junifer/markers/ets_rss.py +42 -27
  81. junifer/markers/falff/__init__.py +0 -3
  82. junifer/markers/falff/_afni_falff.py +2 -5
  83. junifer/markers/falff/_junifer_falff.py +0 -3
  84. junifer/markers/falff/falff_base.py +46 -20
  85. junifer/markers/falff/falff_parcels.py +27 -56
  86. junifer/markers/falff/falff_spheres.py +29 -60
  87. junifer/markers/falff/tests/test_falff_parcels.py +23 -39
  88. junifer/markers/falff/tests/test_falff_spheres.py +23 -39
  89. junifer/markers/functional_connectivity/__init__.py +0 -9
  90. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +60 -63
  91. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +32 -45
  92. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +36 -49
  93. junifer/markers/functional_connectivity/functional_connectivity_base.py +70 -71
  94. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +25 -34
  95. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +30 -40
  96. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +7 -11
  97. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +7 -27
  98. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +12 -28
  99. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +11 -35
  100. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +62 -36
  101. junifer/markers/parcel_aggregation.py +61 -47
  102. junifer/markers/reho/__init__.py +0 -3
  103. junifer/markers/reho/_afni_reho.py +2 -5
  104. junifer/markers/reho/_junifer_reho.py +1 -4
  105. junifer/markers/reho/reho_base.py +27 -8
  106. junifer/markers/reho/reho_parcels.py +17 -28
  107. junifer/markers/reho/reho_spheres.py +18 -27
  108. junifer/markers/reho/tests/test_reho_parcels.py +3 -8
  109. junifer/markers/reho/tests/test_reho_spheres.py +3 -8
  110. junifer/markers/sphere_aggregation.py +59 -43
  111. junifer/markers/temporal_snr/__init__.py +0 -3
  112. junifer/markers/temporal_snr/temporal_snr_base.py +32 -23
  113. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -9
  114. junifer/markers/temporal_snr/temporal_snr_spheres.py +6 -9
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +3 -6
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +3 -6
  117. junifer/markers/tests/test_collection.py +8 -9
  118. junifer/markers/tests/test_ets_rss.py +9 -15
  119. junifer/markers/tests/test_markers_base.py +18 -17
  120. junifer/markers/tests/test_parcel_aggregation.py +32 -93
  121. junifer/markers/tests/test_sphere_aggregation.py +19 -72
  122. junifer/onthefly/__init__.py +1 -4
  123. junifer/onthefly/read_transform.py +0 -3
  124. junifer/pipeline/__init__.py +1 -9
  125. junifer/pipeline/pipeline_step_mixin.py +4 -21
  126. junifer/pipeline/registry.py +0 -3
  127. junifer/pipeline/singleton.py +0 -3
  128. junifer/pipeline/tests/test_registry.py +1 -1
  129. junifer/pipeline/update_meta_mixin.py +0 -3
  130. junifer/pipeline/utils.py +1 -67
  131. junifer/pipeline/workdir_manager.py +0 -3
  132. junifer/preprocess/__init__.py +2 -9
  133. junifer/preprocess/ants/__init__.py +4 -0
  134. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  135. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  136. junifer/preprocess/base.py +3 -6
  137. junifer/preprocess/bold_warper.py +265 -0
  138. junifer/preprocess/confounds/__init__.py +0 -3
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +60 -47
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +113 -72
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/smoothing/__init__.py +0 -3
  145. junifer/preprocess/smoothing/_afni_smoothing.py +1 -1
  146. junifer/preprocess/tests/test_bold_warper.py +159 -0
  147. junifer/preprocess/warping/__init__.py +0 -3
  148. junifer/preprocess/warping/_ants_warper.py +0 -3
  149. junifer/preprocess/warping/_fsl_warper.py +0 -3
  150. junifer/stats.py +1 -4
  151. junifer/storage/__init__.py +1 -9
  152. junifer/storage/base.py +1 -40
  153. junifer/storage/hdf5.py +9 -71
  154. junifer/storage/pandas_base.py +0 -3
  155. junifer/storage/sqlite.py +0 -3
  156. junifer/storage/tests/test_hdf5.py +10 -82
  157. junifer/storage/utils.py +0 -9
  158. junifer/testing/__init__.py +1 -4
  159. junifer/testing/datagrabbers.py +6 -13
  160. junifer/testing/tests/test_partlycloudytesting_datagrabber.py +7 -7
  161. junifer/testing/utils.py +0 -3
  162. junifer/utils/__init__.py +2 -13
  163. junifer/utils/fs.py +0 -3
  164. junifer/utils/helpers.py +1 -32
  165. junifer/utils/logging.py +4 -33
  166. junifer/utils/tests/test_logging.py +0 -8
  167. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/METADATA +16 -17
  168. junifer-0.0.5.dev24.dist-info/RECORD +265 -0
  169. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/WHEEL +1 -1
  170. junifer/api/res/freesurfer/mri_binarize +0 -3
  171. junifer/api/res/freesurfer/mri_mc +0 -3
  172. junifer/api/res/freesurfer/mri_pretess +0 -3
  173. junifer/api/res/freesurfer/mris_convert +0 -3
  174. junifer/api/res/freesurfer/run_freesurfer_docker.sh +0 -61
  175. junifer/data/masks/ukb/UKB_15K_GM_template.nii.gz +0 -0
  176. junifer/datagrabber/pattern_validation_mixin.py +0 -388
  177. junifer/datagrabber/tests/test_pattern_validation_mixin.py +0 -249
  178. junifer/external/BrainPrint/brainprint/__init__.py +0 -4
  179. junifer/external/BrainPrint/brainprint/_version.py +0 -3
  180. junifer/external/BrainPrint/brainprint/asymmetry.py +0 -91
  181. junifer/external/BrainPrint/brainprint/brainprint.py +0 -441
  182. junifer/external/BrainPrint/brainprint/surfaces.py +0 -258
  183. junifer/external/BrainPrint/brainprint/utils/__init__.py +0 -1
  184. junifer/external/BrainPrint/brainprint/utils/_config.py +0 -112
  185. junifer/external/BrainPrint/brainprint/utils/utils.py +0 -188
  186. junifer/external/nilearn/junifer_connectivity_measure.py +0 -483
  187. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +0 -1089
  188. junifer/markers/brainprint.py +0 -459
  189. junifer/markers/tests/test_brainprint.py +0 -58
  190. junifer-0.0.5.dist-info/RECORD +0 -275
  191. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/AUTHORS.rst +0 -0
  192. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/LICENSE.md +0 -0
  193. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/entry_points.txt +0 -0
  194. {junifer-0.0.5.dist-info → junifer-0.0.5.dev24.dist-info}/top_level.txt +0 -0
@@ -236,51 +236,6 @@ def test_PatternDataGrabber(tmp_path: Path) -> None:
     assert out1["VBM_GM"]["path"] != out2["VBM_GM"]["path"]
 
 
-def test_PatternDataGrabber_unix_path_expansion(tmp_path: Path) -> None:
-    """Test PatterDataGrabber for patterns with unix path expansion.
-
-    Parameters
-    ----------
-    tmp_path : pathlib.Path
-        The path to the test directory.
-
-    """
-    # Create test data root dir
-    freesurfer_dir = tmp_path / "derivatives" / "freesurfer"
-    freesurfer_dir.mkdir(parents=True, exist_ok=True)
-    # Create test data sub dirs and files
-    for dir_name in ["fsaverage", "sub-0001"]:
-        mri_dir = freesurfer_dir / dir_name / "mri"
-        mri_dir.mkdir(parents=True, exist_ok=True)
-        # Create files
-        (mri_dir / "T1.mgz").touch(exist_ok=True)
-        (mri_dir / "aseg.mgz").touch(exist_ok=True)
-    # Create datagrabber
-    dg = PatternDataGrabber(
-        datadir=tmp_path,
-        types=["FreeSurfer"],
-        patterns={
-            "FreeSurfer": {
-                "pattern": "derivatives/freesurfer/[!f]{subject}/mri/T1.mg[z]",
-                "aseg": {
-                    "pattern": (
-                        "derivatives/freesurfer/[!f]{subject}/mri/aseg.mg[z]"
-                    )
-                },
-            },
-        },
-        replacements=["subject"],
-    )
-    # Check that "fsaverage" is filtered
-    elements = dg.get_elements()
-    assert elements == ["sub-0001"]
-    # Fetch data
-    out = dg["sub-0001"]
-    # Check paths are found
-    assert set(out["FreeSurfer"].keys()) == {"path", "aseg", "meta"}
-    assert list(out["FreeSurfer"]["aseg"].keys()) == ["path"]
-
-
 def test_PatternDataGrabber_confounds_format_error_on_init() -> None:
     """Test PatterDataGrabber confounds format error on initialisation."""
     with pytest.raises(
@@ -15,13 +15,13 @@ from junifer.datagrabber import PatternDataladDataGrabber
 _testing_dataset = {
     "example_bids": {
         "uri": "https://gin.g-node.org/juaml/datalad-example-bids",
-        "commit": "b87897cbe51bf0ee5514becaa5c7dd76491db5ad",
-        "id": "8fddff30-6993-420a-9d1e-b5b028c59468",
+        "commit": "522dfb203afcd2cd55799bf347f9b211919a7338",
+        "id": "fec92475-d9c0-4409-92ba-f041b6a12c40",
     },
     "example_bids_ses": {
         "uri": "https://gin.g-node.org/juaml/datalad-example-bids-ses",
-        "commit": "6b163aa98af76a9eac0272273c27e14127850181",
-        "id": "715c17cf-a1b9-42d6-9af8-9f74c1a4a724",
+        "commit": "3d08d55d1faad4f12ab64ac9497544a0d924d47a",
+        "id": "c83500d0-532f-45be-baf1-0dab703bdc2a",
     },
 }
 
@@ -0,0 +1,230 @@
+"""Provide utility functions for the datagrabber sub-package."""
+
+# Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
+#          Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from typing import Dict, List
+
+from ..utils import logger, raise_error
+
+
+# Define schema for pattern-based datagrabber's patterns
+PATTERNS_SCHEMA = {
+    "T1w": {
+        "mandatory": ["pattern", "space"],
+        "optional": ["mask_item"],
+    },
+    "T1w_mask": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "T2w": {
+        "mandatory": ["pattern", "space"],
+        "optional": ["mask_item"],
+    },
+    "T2w_mask": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "BOLD": {
+        "mandatory": ["pattern", "space"],
+        "optional": ["mask_item"],
+    },
+    "BOLD_confounds": {
+        "mandatory": ["pattern", "format"],
+        "optional": [],
+    },
+    "BOLD_mask": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "Warp": {
+        "mandatory": ["pattern", "src", "dst"],
+        "optional": [],
+    },
+    "VBM_GM": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "VBM_WM": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "VBM_CSF": {
+        "mandatory": ["pattern", "space"],
+        "optional": [],
+    },
+    "DWI": {
+        "mandatory": ["pattern"],
+        "optional": [],
+    },
+}
+
+
+def validate_types(types: List[str]) -> None:
+    """Validate the types.
+
+    Parameters
+    ----------
+    types : list of str
+        The object to validate.
+
+    Raises
+    ------
+    TypeError
+        If ``types`` is not a list or if the values are not string.
+
+    """
+    if not isinstance(types, list):
+        raise_error(msg="`types` must be a list", klass=TypeError)
+    if any(not isinstance(x, str) for x in types):
+        raise_error(msg="`types` must be a list of strings", klass=TypeError)
+
+
+def validate_replacements(
+    replacements: List[str], patterns: Dict[str, Dict[str, str]]
+) -> None:
+    """Validate the replacements.
+
+    Parameters
+    ----------
+    replacements : list of str
+        The object to validate.
+    patterns : dict
+        The patterns to validate against.
+
+    Raises
+    ------
+    TypeError
+        If ``replacements`` is not a list or if the values are not string.
+    ValueError
+        If a value in ``replacements`` is not part of a data type pattern or
+        if no data type patterns contain all values in ``replacements``.
+
+    """
+    if not isinstance(replacements, list):
+        raise_error(msg="`replacements` must be a list.", klass=TypeError)
+
+    if any(not isinstance(x, str) for x in replacements):
+        raise_error(
+            msg="`replacements` must be a list of strings.", klass=TypeError
+        )
+
+    for x in replacements:
+        if all(
+            x not in y
+            for y in [
+                data_type_val["pattern"] for data_type_val in patterns.values()
+            ]
+        ):
+            raise_error(msg=f"Replacement: {x} is not part of any pattern.")
+
+    # Check that at least one pattern has all the replacements
+    at_least_one = False
+    for data_type_val in patterns.values():
+        if all(x in data_type_val["pattern"] for x in replacements):
+            at_least_one = True
+    if at_least_one is False:
+        raise_error(msg="At least one pattern must contain all replacements.")
+
+
+def validate_patterns(
+    types: List[str], patterns: Dict[str, Dict[str, str]]
+) -> None:
+    """Validate the patterns.
+
+    Parameters
+    ----------
+    types : list of str
+        The types list.
+    patterns : dict
+        The object to validate.
+
+    Raises
+    ------
+    KeyError
+        If any mandatory key is missing for a data type.
+    RuntimeError
+        If an unknown key is found for a data type.
+    TypeError
+        If ``patterns`` is not a dictionary.
+    ValueError
+        If length of ``types`` and ``patterns`` are different or
+        if ``patterns`` is missing entries from ``types`` or
+        if unknown data type is found in ``patterns`` or
+        if data type pattern key contains '*' as value.
+
+    """
+    # Validate the types
+    validate_types(types)
+    if not isinstance(patterns, dict):
+        raise_error(msg="`patterns` must be a dict.", klass=TypeError)
+    # Unequal length of objects
+    if len(types) > len(patterns):
+        raise_error(
+            msg="Length of `types` more than that of `patterns`.",
+            klass=ValueError,
+        )
+    # Missing type in patterns
+    if any(x not in patterns for x in types):
+        raise_error(
+            msg="`patterns` must contain all `types`", klass=ValueError
+        )
+    # Check against schema
+    for data_type_key, data_type_val in patterns.items():
+        # Check if valid data type is provided
+        if data_type_key not in PATTERNS_SCHEMA:
+            raise_error(
+                f"Unknown data type: {data_type_key}, "
+                f"should be one of: {list(PATTERNS_SCHEMA.keys())}"
+            )
+        # Check mandatory keys for data type
+        for mandatory_key in PATTERNS_SCHEMA[data_type_key]["mandatory"]:
+            if mandatory_key not in data_type_val:
+                raise_error(
+                    msg=(
+                        f"Mandatory key: `{mandatory_key}` missing for "
+                        f"{data_type_key}"
+                    ),
+                    klass=KeyError,
+                )
+            else:
+                logger.debug(
+                    f"Mandatory key: `{mandatory_key}` found for "
+                    f"{data_type_key}"
+                )
+        # Check optional keys for data type
+        for optional_key in PATTERNS_SCHEMA[data_type_key]["optional"]:
+            if optional_key not in data_type_val:
+                logger.debug(
+                    f"Optional key: `{optional_key}` missing for "
+                    f"{data_type_key}"
+                )
+            else:
+                logger.debug(
+                    f"Optional key: `{optional_key}` found for "
+                    f"{data_type_key}"
+                )
+        # Check stray key for data type
+        for key in data_type_val.keys():
+            if key not in (
+                PATTERNS_SCHEMA[data_type_key]["mandatory"]
+                + PATTERNS_SCHEMA[data_type_key]["optional"]
+            ):
+                raise_error(
+                    msg=(
+                        f"Key: {key} not accepted for {data_type_key} "
+                        "pattern, remove it to proceed"
+                    ),
+                    klass=RuntimeError,
+                )
+        # Wildcard check in patterns
+        if "}*" in data_type_val["pattern"]:
+            raise_error(
+                msg=(
+                    f"`{data_type_key}.pattern` must not contain `*` "
+                    "following a replacement"
+                ),
+                klass=ValueError,
+            )
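For context, the validators added above are plain functions keyed on PATTERNS_SCHEMA. The following is a hypothetical usage sketch, not part of either package version: it assumes the dev wheel exposes the functions at junifer.datagrabber.utils (the path of the new file in this diff), and the BIDS-like pattern dict is made up so that it satisfies the schema.

# Hypothetical usage sketch; module path inferred from this diff, pattern
# values invented to satisfy PATTERNS_SCHEMA.
from junifer.datagrabber.utils import validate_patterns, validate_types

types = ["BOLD", "T1w"]
patterns = {
    "BOLD": {
        "pattern": "{subject}/func/{subject}_task-rest_bold.nii.gz",
        "space": "MNI152NLin2009cAsym",
    },
    "T1w": {
        "pattern": "{subject}/anat/{subject}_T1w.nii.gz",
        "space": "native",
    },
}

validate_types(types)  # raises TypeError if not a list of str
validate_patterns(types, patterns)  # raises on missing/stray schema keys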
@@ -1,4 +1,4 @@
-"""DataReaders for datasets' data loading."""
+"""Provide imports for datareader sub-package."""
 
 # Authors: Federico Raimondo <f.raimondo@fz-juelich.de>
 # Leonard Sasse <l.sasse@fz-juelich.de>
@@ -6,6 +6,3 @@
 # License: AGPL
 
 from .default import DefaultDataReader
-
-
-__all__ = ["DefaultDataReader"]
@@ -5,7 +5,7 @@
 # License: AGPL
 
 from pathlib import Path
-from typing import Dict, List, Optional, Union
+from typing import Dict, List, Optional
 
 import nibabel as nib
 import pandas as pd
@@ -15,9 +15,6 @@ from ..pipeline import PipelineStepMixin, UpdateMetaMixin
 from ..utils.logging import logger, warn_with_log
 
 
-__all__ = ["DefaultDataReader"]
-
-
 # Map each file extension to a type
 _extensions = {
     ".nii": "NIFTI",
@@ -105,106 +102,57 @@ class DefaultDataReader(PipelineStepMixin, UpdateMetaMixin):
         if params is None:
             params = {}
         # For each type of data, try to read it
-        for type_key, type_val in input.items():
-            # Skip Warp and FreeSurfer data type
-            if type_key in ["Warp", "FreeSurfer"]:
+        for type_ in input.keys():
+            # Skip Warp data type
+            if type_ == "Warp":
                 continue
 
             # Check for malformed datagrabber specification
-            if "path" not in type_val:
+            if "path" not in input[type_]:
                 warn_with_log(
-                    f"Input type {type_key} does not provide a path. Skipping."
+                    f"Input type {type_} does not provide a path. Skipping."
                 )
                 continue
 
-            # Iterate to check for nested "types" like mask;
-            # need to copy to avoid runtime error for changing dict size
-            for k, v in type_val.copy().items():
-                # Read data for base data type
-                if k == "path":
-                    # Convert str to Path
-                    if not isinstance(v, Path):
-                        v = Path(v)
-                    # Update path
-                    out[type_key]["path"] = v
-                    logger.info(f"Reading {type_key} from {v.absolute()!s}")
-                    # Retrieve loading params for the data type
-                    t_params = params.get(type_key, {})
-                    # Read data
-                    out[type_key]["data"] = _read_data(
-                        data_type=type_key, path=v, read_params=t_params
-                    )
-                # Read data for nested data type
-                if isinstance(v, dict) and "path" in v:
-                    # Set path
-                    nested_path = v["path"]
-                    # Convert str to Path
-                    if not isinstance(nested_path, Path):
-                        nested_path = Path(nested_path)
-                    # Update path
-                    out[type_key][k]["path"] = nested_path
-                    # Set nested type key for easier access
-                    nested_type = f"{type_key}.{k}"
-                    logger.info(
-                        f"Reading {nested_type} from "
-                        f"{nested_path.absolute()!s}"
-                    )
-                    # Retrieve loading params for the nested data type
-                    nested_params = params.get(nested_type, {})
+            # Retrieve actual path
+            t_path = input[type_]["path"]
+            # Retrieve loading params for the data type
+            t_params = params.get(type_, {})
+
+            # Convert str to Path
+            if not isinstance(t_path, Path):
+                t_path = Path(t_path)
+            out[type_]["path"] = t_path
+
+            logger.info(f"Reading {type_} from {t_path.as_posix()}")
+            # Initialize variable for file data
+            fread = None
+            # Lowercase path
+            fname = t_path.name.lower()
+            # Loop through extensions to find the correct one
+            for ext, ftype in _extensions.items():
+                if fname.endswith(ext):
+                    logger.info(f"{type_} is type {ftype}")
+                    # Retrieve reader function
+                    reader_func = _readers[ftype]["func"]
+                    # Retrieve reader function params
+                    reader_params = _readers[ftype]["params"]
+                    # Update reader function params
+                    if reader_params is not None:
+                        t_params.update(reader_params)
+                    logger.debug(f"Calling {reader_func} with {t_params}")
                     # Read data
-                    out[type_key][k]["data"] = _read_data(
-                        data_type=nested_type,
-                        path=nested_path,
-                        read_params=nested_params,
-                    )
+                    fread = reader_func(t_path, **t_params)
+                    break
+            # If no file data is found due to unknown extension
+            if fread is None:
+                logger.info(
+                    f"Unknown file type {t_path.as_posix()}, skipping reading"
+                )
 
+            # Set file data for output
+            out[type_]["data"] = fread
             # Update metadata for step
-            self.update_meta(out[type_key], "datareader")
+            self.update_meta(out[type_], "datareader")
 
         return out
-
-
-def _read_data(
-    data_type: str, path: Path, read_params: Dict
-) -> Union[nib.Nifti1Image, pd.DataFrame, None]:
-    """Read data for data type.
-
-    Parameters
-    ----------
-    data_type : str
-        The data type being read.
-    path : pathlib.Path
-        The path to read data from.
-    read_params : dict
-        Parameters for reader function.
-
-    Returns
-    -------
-    nibabel.Nifti1Image or pandas.DataFrame or pandas.TextFileReader or None
-        The data loaded in memory if file type is known else None.
-
-    """
-    # Initialize variable for file data
-    fread = None
-    # Lowercase path
-    fname = path.name.lower()
-    # Loop through extensions to find the correct one
-    for ext, ftype in _extensions.items():
-        if fname.endswith(ext):
-            logger.info(f"{data_type} is of type {ftype}")
-            # Retrieve reader function
-            reader_func = _readers[ftype]["func"]
-            # Retrieve reader function params
-            reader_params = _readers[ftype]["params"]
-            # Update reader function params
-            if reader_params is not None:
-                read_params.update(reader_params)
-            logger.debug(f"Calling {reader_func!s} with {read_params}")
-            # Read data
-            fread = reader_func(path, **read_params)
-            break
-    # If no file data is found due to unknown extension
-    if fread is None:
-        logger.info(f"Unknown file type {path.absolute()!s}, skipping reading")
-
-    return fread
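The rewritten loop above folds the module-level _read_data helper back into the reading step, but the dispatch is the same on both sides of the diff: match the file suffix against _extensions, look up the reader in _readers, and fall back to None for unknown extensions. The sketch below illustrates that dispatch in isolation; only the ".nii" entry is visible in this hunk, so the other extensions and reader parameters are assumptions for illustration, not taken from the package.

# Standalone sketch of the suffix-based dispatch used by DefaultDataReader.
# Table entries other than ".nii" are illustrative assumptions.
from pathlib import Path
from typing import Dict, Optional

import nibabel as nib
import pandas as pd

_extensions = {".nii": "NIFTI", ".nii.gz": "NIFTI", ".csv": "CSV", ".tsv": "TSV"}
_readers = {
    "NIFTI": {"func": nib.load, "params": None},
    "CSV": {"func": pd.read_csv, "params": None},
    "TSV": {"func": pd.read_csv, "params": {"sep": "\t"}},
}


def read_one(path: Path, params: Optional[Dict] = None):
    """Read a single file by extension, returning None if the type is unknown."""
    params = dict(params or {})
    fname = path.name.lower()
    for ext, ftype in _extensions.items():
        if fname.endswith(ext):
            reader = _readers[ftype]
            # Reader-specific defaults override caller-supplied params
            if reader["params"] is not None:
                params.update(reader["params"])
            return reader["func"](path, **params)
    return None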
@@ -1,4 +1,4 @@
-"""External tools adapted for junifer."""
+"""Provide imports for external sub-package."""
 
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
@@ -1,10 +1,6 @@
-"""Custom objects adapted from nilearn."""
+"""Provide imports for custom nilearn objects sub-package."""
 
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
 from .junifer_nifti_spheres_masker import JuniferNiftiSpheresMasker
-from .junifer_connectivity_measure import JuniferConnectivityMeasure
-
-
-__all__ = ["JuniferNiftiSpheresMasker", "JuniferConnectivityMeasure"]
@@ -10,9 +10,9 @@ from nilearn import image, masking
 from nilearn._utils.class_inspect import get_params
 from nilearn._utils.niimg import img_data_dtype
 from nilearn._utils.niimg_conversions import (
+    _safe_get_data,
     check_niimg_3d,
     check_niimg_4d,
-    safe_get_data,
 )
 from nilearn.maskers import NiftiSpheresMasker
 from nilearn.maskers.base_masker import _filter_and_extract
@@ -29,12 +29,9 @@ if TYPE_CHECKING:
     from pandas import DataFrame
 
 
-__all__ = ["JuniferNiftiSpheresMasker"]
-
-
 # New BSD License
 
-# Copyright (c) The nilearn developers.
+# Copyright (c) 2007 - 2022 The nilearn developers.
 # All rights reserved.
 
 
@@ -102,23 +99,12 @@ def _apply_mask_and_get_affinity(
         Contains the boolean indices for each sphere.
         shape: (number of seeds, number of voxels)
 
-    Raises
-    ------
-    ValueError
-        If ``niimg`` and ``mask_img`` are both provided or
-        if overlap is detected between spheres.
-
-    Warns
-    -----
-    RuntimeWarning
-        If the provided images contain NaN, they will be converted to zeroes.
-
     """
     seeds = list(seeds)
 
     # Compute world coordinates of all in-mask voxels.
     if niimg is None:
-        mask, affine = masking.load_mask_img(mask_img)
+        mask, affine = masking._load_mask_img(mask_img)
         # Get coordinate for all voxels inside of mask
         mask_coords = np.asarray(np.nonzero(mask)).T.tolist()
         X = None
@@ -132,21 +118,21 @@ def _apply_mask_and_get_affinity(
             target_shape=niimg.shape[:3],
             interpolation="nearest",
         )
-        mask, _ = masking.load_mask_img(mask_img)
+        mask, _ = masking._load_mask_img(mask_img)
         mask_coords = list(zip(*np.where(mask != 0)))
 
-        X = masking.apply_mask_fmri(niimg, mask_img)
+        X = masking._apply_mask_fmri(niimg, mask_img)
 
     elif niimg is not None:
         affine = niimg.affine
-        if np.isnan(np.sum(safe_get_data(niimg))):
+        if np.isnan(np.sum(_safe_get_data(niimg))):
             warn_with_log(
                 "The imgs you have fed into fit_transform() contains NaN "
                 "values which will be converted to zeroes."
             )
-            X = safe_get_data(niimg, True).reshape([-1, niimg.shape[3]]).T
+            X = _safe_get_data(niimg, True).reshape([-1, niimg.shape[3]]).T
         else:
-            X = safe_get_data(niimg).reshape([-1, niimg.shape[3]]).T
+            X = _safe_get_data(niimg).reshape([-1, niimg.shape[3]]).T
 
         mask_coords = list(np.ndindex(niimg.shape[:3]))
 
@@ -224,7 +210,7 @@ def _iter_signals_from_spheres(
     X, A = _apply_mask_and_get_affinity(
         seeds, niimg, radius, allow_overlap, mask_img=mask_img
     )
-    for row in A.rows:
+    for _, row in enumerate(A.rows):
         yield X[:, row]
 
 
@@ -4,14 +4,12 @@
 # License: AGPL
 
 import warnings
-from typing import List, Tuple
 
 import nibabel
 import numpy as np
 import pytest
 from nilearn._utils import data_gen
 from nilearn.image import get_data
-from nilearn.maskers import NiftiSpheresMasker
 from numpy.testing import assert_array_equal
 
 from junifer.external.nilearn import JuniferNiftiSpheresMasker
@@ -19,7 +17,7 @@ from junifer.external.nilearn import JuniferNiftiSpheresMasker
 
 # New BSD License
 
-# Copyright (c) The nilearn developers.
+# Copyright (c) 2007 - 2022 The nilearn developers.
 # All rights reserved.
 
 
@@ -333,76 +331,3 @@ def test_nifti_spheres_masker_io_shapes() -> None:
     )
     test_data = masker.transform(img_4d)
     assert test_data.shape == (n_volumes, n_regions)
-
-
-@pytest.mark.parametrize(
-    "shape",
-    [
-        (10, 11, 12),
-        (10, 11, 12, 5),
-    ],
-)
-@pytest.mark.parametrize(
-    "radius, allow_overlap",
-    [
-        (2.0, True),
-        (2.0, False),
-        (3.0, True),
-        (4.0, True),
-        (5.0, True),
-    ],
-)
-@pytest.mark.parametrize(
-    "coords",
-    [
-        [(1, 1, 1)],
-        [(1, 1, 1), (4, 4, 4)],
-        [(1, 1, 1), (4, 4, 4), (10, 10, 10)],
-    ],
-)
-def test_junifer_and_nilearn_mean_agg_are_equal(
-    shape: Tuple[int, ...],
-    radius: float,
-    allow_overlap: bool,
-    coords: List[Tuple[int, int, int]],
-) -> None:
-    """Test junifer's masker behaves same as nilearn's when agg is mean.
-
-    Parameters
-    ----------
-    shape : tuple of int
-        The parametrized shape of the input image.
-    radius : float
-        The parametrized radius of the spheres.
-    allow_overlap : bool
-        The parametrized option to overlap spheres or not.
-    coords : list of tuple of int, int and int
-        The parametrized seeds.
-
-    """
-    # Set affine
-    affine = np.eye(4)
-    # Generate random image
-    input_img, mask_img = data_gen.generate_random_img(
-        shape=shape,
-        affine=affine,
-    )
-    # Compute junifer's version
-    junifer_masker = JuniferNiftiSpheresMasker(
-        seeds=coords,
-        radius=radius,
-        allow_overlap=allow_overlap,
-        mask_img=mask_img,
-    )
-    junifer_output = junifer_masker.fit_transform(input_img)
-    # Compute nilearn's version
-    nilearn_masker = NiftiSpheresMasker(
-        seeds=coords,
-        radius=radius,
-        allow_overlap=allow_overlap,
-        mask_img=mask_img,
-    )
-    nilearn_output = nilearn_masker.fit_transform(input_img)
-    # Checks
-    assert junifer_output.shape == nilearn_output.shape
-    np.testing.assert_almost_equal(junifer_output, nilearn_output)