junifer 0.0.6.dev227__py3-none-any.whl → 0.0.6.dev252__py3-none-any.whl

This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as published.
Files changed (131)
  1. junifer/_version.py +2 -2
  2. junifer/api/decorators.py +1 -2
  3. junifer/api/functions.py +18 -18
  4. junifer/api/queue_context/gnu_parallel_local_adapter.py +4 -4
  5. junifer/api/queue_context/htcondor_adapter.py +4 -4
  6. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
  7. junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
  8. junifer/api/tests/test_functions.py +32 -32
  9. junifer/cli/cli.py +3 -3
  10. junifer/cli/parser.py +4 -4
  11. junifer/cli/tests/test_cli.py +5 -5
  12. junifer/cli/utils.py +5 -6
  13. junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
  14. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
  15. junifer/configs/juseless/datagrabbers/ucla.py +4 -4
  16. junifer/data/_dispatch.py +11 -14
  17. junifer/data/coordinates/_ants_coordinates_warper.py +6 -8
  18. junifer/data/coordinates/_coordinates.py +34 -21
  19. junifer/data/coordinates/_fsl_coordinates_warper.py +6 -8
  20. junifer/data/masks/_ants_mask_warper.py +18 -11
  21. junifer/data/masks/_fsl_mask_warper.py +6 -8
  22. junifer/data/masks/_masks.py +27 -34
  23. junifer/data/masks/tests/test_masks.py +4 -4
  24. junifer/data/parcellations/_ants_parcellation_warper.py +18 -11
  25. junifer/data/parcellations/_fsl_parcellation_warper.py +6 -8
  26. junifer/data/parcellations/_parcellations.py +39 -43
  27. junifer/data/parcellations/tests/test_parcellations.py +1 -2
  28. junifer/data/pipeline_data_registry_base.py +3 -2
  29. junifer/data/template_spaces.py +3 -3
  30. junifer/data/tests/test_data_utils.py +1 -2
  31. junifer/data/utils.py +69 -4
  32. junifer/datagrabber/aomic/id1000.py +24 -11
  33. junifer/datagrabber/aomic/piop1.py +27 -14
  34. junifer/datagrabber/aomic/piop2.py +27 -14
  35. junifer/datagrabber/aomic/tests/test_id1000.py +3 -3
  36. junifer/datagrabber/aomic/tests/test_piop1.py +4 -4
  37. junifer/datagrabber/aomic/tests/test_piop2.py +4 -4
  38. junifer/datagrabber/base.py +18 -12
  39. junifer/datagrabber/datalad_base.py +18 -11
  40. junifer/datagrabber/dmcc13_benchmark.py +31 -18
  41. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
  42. junifer/datagrabber/hcp1200/hcp1200.py +26 -15
  43. junifer/datagrabber/hcp1200/tests/test_hcp1200.py +2 -1
  44. junifer/datagrabber/multiple.py +7 -7
  45. junifer/datagrabber/pattern.py +75 -45
  46. junifer/datagrabber/pattern_validation_mixin.py +204 -94
  47. junifer/datagrabber/tests/test_datalad_base.py +7 -8
  48. junifer/datagrabber/tests/test_dmcc13_benchmark.py +28 -11
  49. junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
  50. junifer/datareader/default.py +6 -6
  51. junifer/external/nilearn/junifer_connectivity_measure.py +2 -2
  52. junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
  53. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +15 -15
  54. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
  55. junifer/markers/base.py +8 -8
  56. junifer/markers/brainprint.py +7 -9
  57. junifer/markers/complexity/complexity_base.py +6 -8
  58. junifer/markers/complexity/hurst_exponent.py +5 -5
  59. junifer/markers/complexity/multiscale_entropy_auc.py +5 -5
  60. junifer/markers/complexity/perm_entropy.py +5 -5
  61. junifer/markers/complexity/range_entropy.py +5 -5
  62. junifer/markers/complexity/range_entropy_auc.py +5 -5
  63. junifer/markers/complexity/sample_entropy.py +5 -5
  64. junifer/markers/complexity/weighted_perm_entropy.py +5 -5
  65. junifer/markers/ets_rss.py +7 -7
  66. junifer/markers/falff/_afni_falff.py +1 -2
  67. junifer/markers/falff/_junifer_falff.py +1 -2
  68. junifer/markers/falff/falff_base.py +2 -4
  69. junifer/markers/falff/falff_parcels.py +7 -7
  70. junifer/markers/falff/falff_spheres.py +6 -6
  71. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +6 -6
  72. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +7 -7
  73. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +6 -6
  74. junifer/markers/functional_connectivity/functional_connectivity_base.py +10 -10
  75. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +7 -7
  76. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +6 -6
  77. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
  78. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
  79. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +3 -3
  80. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +3 -3
  81. junifer/markers/parcel_aggregation.py +8 -8
  82. junifer/markers/reho/_afni_reho.py +1 -2
  83. junifer/markers/reho/_junifer_reho.py +1 -2
  84. junifer/markers/reho/reho_base.py +2 -4
  85. junifer/markers/reho/reho_parcels.py +8 -8
  86. junifer/markers/reho/reho_spheres.py +7 -7
  87. junifer/markers/sphere_aggregation.py +8 -8
  88. junifer/markers/temporal_snr/temporal_snr_base.py +8 -8
  89. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -6
  90. junifer/markers/temporal_snr/temporal_snr_spheres.py +5 -5
  91. junifer/markers/utils.py +3 -3
  92. junifer/onthefly/_brainprint.py +2 -2
  93. junifer/onthefly/read_transform.py +3 -3
  94. junifer/pipeline/marker_collection.py +4 -4
  95. junifer/pipeline/pipeline_component_registry.py +5 -4
  96. junifer/pipeline/pipeline_step_mixin.py +15 -11
  97. junifer/pipeline/tests/test_pipeline_component_registry.py +2 -3
  98. junifer/pipeline/tests/test_pipeline_step_mixin.py +19 -19
  99. junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
  100. junifer/pipeline/update_meta_mixin.py +21 -17
  101. junifer/pipeline/utils.py +5 -5
  102. junifer/preprocess/base.py +10 -10
  103. junifer/preprocess/confounds/fmriprep_confound_remover.py +11 -14
  104. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +1 -2
  105. junifer/preprocess/smoothing/smoothing.py +7 -7
  106. junifer/preprocess/warping/_ants_warper.py +26 -6
  107. junifer/preprocess/warping/_fsl_warper.py +22 -7
  108. junifer/preprocess/warping/space_warper.py +37 -10
  109. junifer/preprocess/warping/tests/test_space_warper.py +3 -4
  110. junifer/stats.py +4 -4
  111. junifer/storage/base.py +14 -13
  112. junifer/storage/hdf5.py +21 -20
  113. junifer/storage/pandas_base.py +12 -11
  114. junifer/storage/sqlite.py +11 -11
  115. junifer/storage/tests/test_hdf5.py +1 -2
  116. junifer/storage/tests/test_sqlite.py +2 -2
  117. junifer/storage/tests/test_utils.py +8 -7
  118. junifer/storage/utils.py +7 -7
  119. junifer/testing/datagrabbers.py +9 -10
  120. junifer/tests/test_stats.py +2 -2
  121. junifer/typing/_typing.py +6 -9
  122. junifer/utils/helpers.py +2 -3
  123. junifer/utils/logging.py +5 -5
  124. junifer/utils/singleton.py +3 -3
  125. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/METADATA +2 -2
  126. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/RECORD +131 -131
  127. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/WHEEL +1 -1
  128. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/AUTHORS.rst +0 -0
  129. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/LICENSE.md +0 -0
  130. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/entry_points.txt +0 -0
  131. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/top_level.txt +0 -0
junifer/datagrabber/aomic/tests/test_piop2.py
@@ -7,7 +7,7 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-from typing import List, Optional, Union
+from typing import Optional, Union
 
 import pytest
 
@@ -35,8 +35,8 @@ URI = "https://gin.g-node.org/juaml/datalad-example-aomicpiop2"
 )
 def test_DataladAOMICPIOP2(
     type_: str,
-    nested_types: Optional[List[str]],
-    tasks: Optional[List[str]],
+    nested_types: Optional[list[str]],
+    tasks: Optional[list[str]],
 ) -> None:
     """Test DataladAOMICPIOP2 DataGrabber.
 
@@ -97,7 +97,7 @@ def test_DataladAOMICPIOP2(
     ],
 )
 def test_DataladAOMICPIOP2_partial_data_access(
-    types: Union[str, List[str]],
+    types: Union[str, list[str]],
 ) -> None:
     """Test DataladAOMICPIOP2 DataGrabber partial data access.
 
junifer/datagrabber/base.py
@@ -6,8 +6,9 @@
 # License: AGPL
 
 from abc import ABC, abstractmethod
+from collections.abc import Iterator
 from pathlib import Path
-from typing import Dict, Iterator, List, Tuple, Union
+from typing import Union
 
 from ..pipeline import UpdateMetaMixin
 from ..utils import logger, raise_error
@@ -36,7 +37,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
 
     """
 
-    def __init__(self, types: List[str], datadir: Union[str, Path]) -> None:
+    def __init__(self, types: list[str], datadir: Union[str, Path]) -> None:
         # Validate types
         if not isinstance(types, list):
             raise_error(msg="`types` must be a list", klass=TypeError)
@@ -67,8 +68,8 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         yield from self.get_elements()
 
     def __getitem__(
-        self, element: Union[str, Tuple[str, ...]]
-    ) -> Dict[str, Dict]:
+        self, element: Union[str, tuple[str, ...]]
+    ) -> dict[str, dict]:
         """Enable indexing support.
 
         Parameters
@@ -89,14 +90,19 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
             element = (element,)
         # Zip through element keys and actual values to construct element
         # access dictionary
-        named_element: Dict = dict(zip(self.get_element_keys(), element))
+        named_element: dict = dict(zip(self.get_element_keys(), element))
         logger.debug(f"Named element: {named_element}")
         # Fetch element
         out = self.get_item(**named_element)
         # Update metadata
         for _, t_val in out.items():
             self.update_meta(t_val, "datagrabber")
-            t_val["meta"]["element"] = named_element
+            # Conditional for list dtype vals like Warp
+            if isinstance(t_val, list):
+                for entry in t_val:
+                    entry["meta"]["element"] = named_element
+            else:
+                t_val["meta"]["element"] = named_element
 
         return out
 
@@ -108,7 +114,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         """Context exit."""
         return None
 
-    def get_types(self) -> List[str]:
+    def get_types(self) -> list[str]:
         """Get types.
 
         Returns
@@ -131,7 +137,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         """
         return self._datadir
 
-    def filter(self, selection: List[Union[str, Tuple[str]]]) -> Iterator:
+    def filter(self, selection: list[Union[str, tuple[str]]]) -> Iterator:
         """Filter elements to be grabbed.
 
         Parameters
@@ -146,7 +152,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
 
         """
 
-        def filter_func(element: Union[str, Tuple[str]]) -> bool:
+        def filter_func(element: Union[str, tuple[str]]) -> bool:
             """Filter element based on selection.
 
             Parameters
@@ -177,7 +183,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         yield from filter(filter_func, self.get_elements())
 
     @abstractmethod
-    def get_element_keys(self) -> List[str]:
+    def get_element_keys(self) -> list[str]:
         """Get element keys.
 
         For each item in the ``element`` tuple passed to ``__getitem__()``,
@@ -195,7 +201,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         )  # pragma: no cover
 
     @abstractmethod
-    def get_elements(self) -> List[Union[str, Tuple[str]]]:
+    def get_elements(self) -> list[Union[str, tuple[str]]]:
         """Get elements.
 
         Returns
@@ -212,7 +218,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         )  # pragma: no cover
 
     @abstractmethod
-    def get_item(self, **element: Dict) -> Dict[str, Dict]:
+    def get_item(self, **element: dict) -> dict[str, dict]:
         """Get the specified item from the dataset.
 
         Parameters
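For orientation, a minimal sketch of the new shape-aware metadata stamping in ``BaseDataGrabber.__getitem__`` above; the paths and element values here are made up, only the conditional mirrors the diff:

    named_element = {"subject": "sub-01"}
    out = {
        "T1w": {"path": "sub-01_T1w.nii.gz", "meta": {}},
        # List-valued data type, like the reworked Warp
        "Warp": [
            {"path": "std2native.h5", "meta": {}},
            {"path": "native2std.h5", "meta": {}},
        ],
    }
    for t_val in out.values():
        if isinstance(t_val, list):
            # Every entry gets its own element stamp
            for entry in t_val:
                entry["meta"]["element"] = named_element
        else:
            t_val["meta"]["element"] = named_element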
junifer/datagrabber/datalad_base.py
@@ -9,7 +9,7 @@ import atexit
 import os
 import tempfile
 from pathlib import Path
-from typing import Dict, Optional, Tuple, Union
+from typing import Optional, Union
 
 import datalad
 import datalad.api as dl
@@ -165,7 +165,7 @@ class DataladDataGrabber(BaseDataGrabber):
             raise_error("Could not get dataset ID from remote")
         return remote_id
 
-    def _dataset_get(self, out: Dict) -> Dict:
+    def _dataset_get(self, out: dict) -> dict:
         """Get the dataset found from the path in ``out``.
 
         Parameters
@@ -181,14 +181,21 @@
         """
         to_get = []
         for type_val in out.values():
-            # Iterate to check for nested "types" like mask
-            for k, v in type_val.items():
-                # Add base data type path
-                if k == "path":
-                    to_get.append(v)
-                # Add nested data type path
-                if isinstance(v, dict) and "path" in v:
-                    to_get.append(v["path"])
+            # Conditional for list dtype vals like Warp
+            if isinstance(type_val, list):
+                for entry in type_val:
+                    for k, v in entry.items():
+                        if k == "path":
+                            to_get.append(v)
+            else:
+                # Iterate to check for nested "types" like mask
+                for k, v in type_val.items():
+                    # Add base data type path
+                    if k == "path":
+                        to_get.append(v)
+                    # Add nested data type path
+                    if isinstance(v, dict) and "path" in v:
+                        to_get.append(v["path"])
 
         if len(to_get) > 0:
             logger.debug(f"Getting {len(to_get)} files using datalad:")
@@ -276,7 +283,7 @@
             logger.debug(f"Dropping {f}")
             self._dataset.drop(f, result_renderer="disabled")
 
-    def __getitem__(self, element: Union[str, Tuple]) -> Dict:
+    def __getitem__(self, element: Union[str, tuple]) -> dict:
         """Implement single element indexing in the Datalad database.
 
         It will first obtain the paths from the parent class and then
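The same shape-awareness drives ``_dataset_get``: paths are now collected from list entries as well as from nested dictionaries before being fetched with datalad. A sketch with a hypothetical ``out`` dictionary (only the traversal mirrors the diff):

    out = {
        "BOLD": {
            "path": "sub-01_bold.nii.gz",
            "mask": {"path": "sub-01_mask.nii.gz"},  # nested data type
        },
        "Warp": [  # list-valued data type
            {"path": "from-MNI_to-T1w_xfm.h5", "warper": "ants"},
            {"path": "from-T1w_to-MNI_xfm.h5", "warper": "ants"},
        ],
    }
    to_get = []
    for type_val in out.values():
        if isinstance(type_val, list):
            to_get.extend(e["path"] for e in type_val if "path" in e)
        else:
            for k, v in type_val.items():
                if k == "path":
                    to_get.append(v)
                elif isinstance(v, dict) and "path" in v:
                    to_get.append(v["path"])
    # to_get now holds all four files for ``datalad get``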
junifer/datagrabber/dmcc13_benchmark.py
@@ -5,7 +5,7 @@
 
 from itertools import product
 from pathlib import Path
-from typing import Dict, List, Union
+from typing import Union
 
 from ..api.decorators import register_datagrabber
 from ..utils import raise_error
@@ -59,11 +59,11 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
     def __init__(
         self,
         datadir: Union[str, Path, None] = None,
-        types: Union[str, List[str], None] = None,
-        sessions: Union[str, List[str], None] = None,
-        tasks: Union[str, List[str], None] = None,
-        phase_encodings: Union[str, List[str], None] = None,
-        runs: Union[str, List[str], None] = None,
+        types: Union[str, list[str], None] = None,
+        sessions: Union[str, list[str], None] = None,
+        tasks: Union[str, list[str], None] = None,
+        phase_encodings: Union[str, list[str], None] = None,
+        runs: Union[str, list[str], None] = None,
         native_t1w: bool = False,
     ) -> None:
         # Declare all sessions
@@ -150,7 +150,7 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
                "mask": {
                    "pattern": (
                        "derivatives/fmriprep-1.3.2/{subject}/{session}/"
-                        "/func/{subject}_{session}_task-{task}_acq-mb4"
+                        "func/{subject}_{session}_task-{task}_acq-mb4"
                        "{phase_encoding}_run-{run}_"
                        "space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz"
                    ),
@@ -221,15 +221,28 @@
                        "space": "native",
                    },
                },
-                "Warp": {
-                    "pattern": (
-                        "derivatives/fmriprep-1.3.2/{subject}/anat/"
-                        "{subject}_from-MNI152NLin2009cAsym_to-T1w_"
-                        "mode-image_xfm.h5"
-                    ),
-                    "src": "MNI152NLin2009cAsym",
-                    "dst": "native",
-                },
+                "Warp": [
+                    {
+                        "pattern": (
+                            "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                            "{subject}_from-MNI152NLin2009cAsym_to-T1w_"
+                            "mode-image_xfm.h5"
+                        ),
+                        "src": "MNI152NLin2009cAsym",
+                        "dst": "native",
+                        "warper": "ants",
+                    },
+                    {
+                        "pattern": (
+                            "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                            "{subject}_from-T1w_to-MNI152NLin2009cAsym_"
+                            "mode-image_xfm.h5"
+                        ),
+                        "src": "native",
+                        "dst": "MNI152NLin2009cAsym",
+                        "warper": "ants",
+                    },
+                ],
            }
        )
        # Set default types
@@ -258,7 +271,7 @@
         task: str,
         phase_encoding: str,
         run: str,
-    ) -> Dict:
+    ) -> dict:
         """Index one element in the dataset.
 
         Parameters
@@ -296,7 +309,7 @@
         )
         return out
 
-    def get_elements(self) -> List:
+    def get_elements(self) -> list:
         """Implement fetching list of subjects in the dataset.
 
         Returns
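With ``Warp`` now a list of transform specifications, downstream code can select a direction by matching ``src`` and ``dst``. A sketch; ``find_warp`` is a hypothetical helper, not part of the junifer API:

    def find_warp(warps: list[dict], src: str, dst: str) -> dict:
        """Return the first transform spec going from ``src`` to ``dst``."""
        for spec in warps:
            if spec["src"] == src and spec["dst"] == dst:
                return spec
        raise ValueError(f"No warp from {src!r} to {dst!r}")

    # With the DMCC13Benchmark patterns above, both directions resolve:
    # find_warp(element["Warp"], "MNI152NLin2009cAsym", "native")   -> ANTs .h5
    # find_warp(element["Warp"], "native", "MNI152NLin2009cAsym")  -> inverse .h5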
junifer/datagrabber/hcp1200/datalad_hcp1200.py
@@ -6,7 +6,7 @@
 # License: AGPL
 
 from pathlib import Path
-from typing import List, Union
+from typing import Union
 
 from junifer.datagrabber.datalad_base import DataladDataGrabber
 
@@ -50,8 +50,8 @@ class DataladHCP1200(DataladDataGrabber, HCP1200):
     def __init__(
         self,
         datadir: Union[str, Path, None] = None,
-        tasks: Union[str, List[str], None] = None,
-        phase_encodings: Union[str, List[str], None] = None,
+        tasks: Union[str, list[str], None] = None,
+        phase_encodings: Union[str, list[str], None] = None,
         ica_fix: bool = False,
     ) -> None:
         uri = (
junifer/datagrabber/hcp1200/hcp1200.py
@@ -7,7 +7,7 @@
 
 from itertools import product
 from pathlib import Path
-from typing import Dict, List, Union
+from typing import Union
 
 from ...api.decorators import register_datagrabber
 from ...utils import raise_error
@@ -48,8 +48,8 @@ class HCP1200(PatternDataGrabber):
     def __init__(
         self,
         datadir: Union[str, Path],
-        tasks: Union[str, List[str], None] = None,
-        phase_encodings: Union[str, List[str], None] = None,
+        tasks: Union[str, list[str], None] = None,
+        phase_encodings: Union[str, list[str], None] = None,
         ica_fix: bool = False,
     ) -> None:
         # All tasks
@@ -66,10 +66,10 @@ class HCP1200(PatternDataGrabber):
         ]
         # Set default tasks
         if tasks is None:
-            self.tasks: List[str] = all_tasks
+            self.tasks: list[str] = all_tasks
         # Convert single task into list
         else:
-            if not isinstance(tasks, List):
+            if not isinstance(tasks, list):
                 tasks = [tasks]
             # Check for invalid task(s)
             for task in tasks:
@@ -78,7 +78,7 @@
                        f"'{task}' is not a valid HCP-YA fMRI task input. "
                        f"Valid task values can be any or all of {all_tasks}."
                    )
-            self.tasks: List[str] = tasks
+            self.tasks: list[str] = tasks
 
         # All phase encodings
         all_phase_encodings = ["LR", "RL"]
@@ -122,13 +122,24 @@
                "pattern": "{subject}/T1w/T1w_acpc_dc_restore.nii.gz",
                "space": "native",
            },
-            "Warp": {
-                "pattern": (
-                    "{subject}/MNINonLinear/xfms/standard2acpc_dc.nii.gz"
-                ),
-                "src": "MNI152NLin6Asym",
-                "dst": "native",
-            },
+            "Warp": [
+                {
+                    "pattern": (
+                        "{subject}/MNINonLinear/xfms/standard2acpc_dc.nii.gz"
+                    ),
+                    "src": "MNI152NLin6Asym",
+                    "dst": "native",
+                    "warper": "fsl",
+                },
+                {
+                    "pattern": (
+                        "{subject}/MNINonLinear/xfms/acpc_dc2standard.nii.gz"
+                    ),
+                    "src": "native",
+                    "dst": "MNI152NLin6Asym",
+                    "warper": "fsl",
+                },
+            ],
        }
        # The replacements
        replacements = ["subject", "task", "phase_encoding"]
@@ -139,7 +150,7 @@
             replacements=replacements,
         )
 
-    def get_item(self, subject: str, task: str, phase_encoding: str) -> Dict:
+    def get_item(self, subject: str, task: str, phase_encoding: str) -> dict:
         """Implement single element indexing in the database.
 
         Parameters
@@ -169,7 +180,7 @@
             subject=subject, task=new_task, phase_encoding=phase_encoding
         )
 
-    def get_elements(self) -> List:
+    def get_elements(self) -> list:
         """Implement fetching list of elements in the dataset.
 
         Returns
junifer/datagrabber/hcp1200/tests/test_hcp1200.py
@@ -5,8 +5,9 @@
 
 import shutil
 import tempfile
+from collections.abc import Iterable
 from pathlib import Path
-from typing import Iterable, Optional
+from typing import Optional
 
 import pytest
 
junifer/datagrabber/multiple.py
@@ -5,7 +5,7 @@
 # Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL
 
-from typing import Dict, List, Tuple, Union
+from typing import Union
 
 from ..api.decorators import register_datagrabber
 from ..typing import DataGrabberLike
@@ -38,7 +38,7 @@ class MultipleDataGrabber(BaseDataGrabber):
 
     """
 
-    def __init__(self, datagrabbers: List[DataGrabberLike], **kwargs) -> None:
+    def __init__(self, datagrabbers: list[DataGrabberLike], **kwargs) -> None:
         # Check datagrabbers consistency
         # Check for same element keys
         first_keys = datagrabbers[0].get_element_keys()
@@ -79,7 +79,7 @@
         )
         self._datagrabbers = datagrabbers
 
-    def __getitem__(self, element: Union[str, Tuple]) -> Dict:
+    def __getitem__(self, element: Union[str, tuple]) -> dict:
         """Implement indexing.
 
         Parameters
@@ -127,7 +127,7 @@
             dg.__exit__(exc_type, exc_value, exc_traceback)
 
     # TODO: return type should be List[List[str]], but base type is List[str]
-    def get_types(self) -> List[str]:
+    def get_types(self) -> list[str]:
         """Get types.
 
         Returns
@@ -139,7 +139,7 @@
         types = [x for dg in self._datagrabbers for x in dg.get_types()]
         return types
 
-    def get_element_keys(self) -> List[str]:
+    def get_element_keys(self) -> list[str]:
         """Get element keys.
 
         For each item in the ``element`` tuple passed to ``__getitem__()``,
@@ -153,7 +153,7 @@
         """
         return self._datagrabbers[0].get_element_keys()
 
-    def get_elements(self) -> List:
+    def get_elements(self) -> list:
         """Get elements.
 
         Returns
@@ -171,7 +171,7 @@
             elements.intersection_update(s)
         return list(elements)
 
-    def get_item(self, **_: Dict) -> Dict[str, Dict]:
+    def get_item(self, **_: dict) -> dict[str, dict]:
         """Get the specified item from the dataset.
 
         Parameters
junifer/datagrabber/pattern.py
@@ -8,7 +8,7 @@
 import re
 from copy import deepcopy
 from pathlib import Path
-from typing import Dict, List, Optional, Tuple, Union
+from typing import Optional, Union
 
 import numpy as np
 
@@ -89,7 +89,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):
     .. code-block:: none
 
         {
-            "mandatory": ["pattern", "src", "dst"],
+            "mandatory": ["pattern", "src", "dst", "warper"],
            "optional": []
        }
 
@@ -127,11 +127,22 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):
                "pattern": "...",
                "space": "...",
            },
-            "Warp": {
-                "pattern": "...",
-                "src": "...",
-                "dst": "...",
-            }
+        }
+
+    except ``Warp``, which needs to be a list of dictionaries as there can
+    be multiple spaces to warp (for example, with fMRIPrep):
+
+    .. code-block:: none
+
+        {
+            "Warp": [
+                {
+                    "pattern": "...",
+                    "src": "...",
+                    "dst": "...",
+                    "warper": "...",
+                },
+            ],
        }
 
    taken from :class:`.HCP1200`.
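Concretely, a ``patterns`` dictionary using the new list-valued ``Warp`` spec would look like the following sketch (values lifted from the HCP1200 hunk above; per the validation change, each entry must carry ``pattern``, ``src``, ``dst`` and ``warper``):

    patterns = {
        "T1w": {
            "pattern": "{subject}/T1w/T1w_acpc_dc_restore.nii.gz",
            "space": "native",
        },
        "Warp": [
            {
                "pattern": "{subject}/MNINonLinear/xfms/standard2acpc_dc.nii.gz",
                "src": "MNI152NLin6Asym",
                "dst": "native",
                "warper": "fsl",
            },
        ],
    }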
@@ -159,9 +170,9 @@
 
     def __init__(
         self,
-        types: List[str],
-        patterns: Dict[str, Dict[str, str]],
-        replacements: Union[List[str], str],
+        types: list[str],
+        patterns: dict[str, dict[str, str]],
+        replacements: Union[list[str], str],
         datadir: Union[str, Path],
         confounds_format: Optional[str] = None,
         partial_pattern_ok: bool = False,
@@ -204,7 +215,7 @@
 
     def _replace_patterns_regex(
         self, pattern: str
-    ) -> Tuple[str, str, List[str]]:
+    ) -> tuple[str, str, list[str]]:
         """Replace the patterns in ``pattern`` with the named groups.
 
         It allows elements to be obtained from the filesystem.
@@ -250,7 +261,7 @@
 
         return re_pattern, glob_pattern, t_replacements
 
-    def _replace_patterns_glob(self, element: Dict, pattern: str) -> str:
+    def _replace_patterns_glob(self, element: dict, pattern: str) -> str:
         """Replace ``pattern`` with the ``element`` so it can be globbed.
 
         Parameters
@@ -283,7 +294,7 @@
         return pattern.format(**element)
 
     def _get_path_from_patterns(
-        self, element: Dict, pattern: str, data_type: str
+        self, element: dict, pattern: str, data_type: str
     ) -> Path:
         """Get path from resolved patterns.
 
@@ -340,7 +351,7 @@
 
         return path
 
-    def get_element_keys(self) -> List[str]:
+    def get_element_keys(self) -> list[str]:
         """Get element keys.
 
         For each item in the "element" tuple, this functions returns the
@@ -355,7 +366,7 @@
         """
         return self.replacements
 
-    def get_item(self, **element: str) -> Dict[str, Dict]:
+    def get_item(self, **element: str) -> dict[str, dict]:
         """Implement single element indexing for the datagrabber.
 
         This method constructs a real path to the requested item's data, by
@@ -380,46 +391,65 @@
             t_pattern = self.patterns[t_type]
             # Copy data type dictionary in output
             out[t_type] = deepcopy(t_pattern)
-            # Iterate to check for nested "types" like mask
-            for k, v in t_pattern.items():
-                # Resolve pattern for base data type
-                if k == "pattern":
-                    logger.info(f"Resolving path from pattern for {t_type}")
+            # Conditional for list dtype vals like Warp
+            if isinstance(t_pattern, list):
+                for idx, entry in enumerate(t_pattern):
+                    logger.info(
+                        f"Resolving path from pattern for {t_type}.{idx}"
+                    )
                     # Resolve pattern
-                    base_data_type_pattern_path = self._get_path_from_patterns(
+                    dtype_pattern_path = self._get_path_from_patterns(
                         element=element,
-                        pattern=v,
-                        data_type=t_type,
+                        pattern=entry["pattern"],
+                        data_type=f"{t_type}.{idx}",
                     )
                     # Remove pattern key
-                    out[t_type].pop("pattern")
+                    out[t_type][idx].pop("pattern")
                     # Add path key
-                    out[t_type].update({"path": base_data_type_pattern_path})
-                # Resolve pattern for nested data type
-                if isinstance(v, dict) and "pattern" in v:
-                    # Set nested type key for easier access
-                    t_nested_type = f"{t_type}.{k}"
-                    logger.info(
-                        f"Resolving path from pattern for {t_nested_type}"
-                    )
-                    # Resolve pattern
-                    nested_data_type_pattern_path = (
-                        self._get_path_from_patterns(
+                    out[t_type][idx].update({"path": dtype_pattern_path})
+            else:
+                # Iterate to check for nested "types" like mask
+                for k, v in t_pattern.items():
+                    # Resolve pattern for base data type
+                    if k == "pattern":
+                        logger.info(
+                            f"Resolving path from pattern for {t_type}"
+                        )
+                        # Resolve pattern
+                        base_dtype_pattern_path = self._get_path_from_patterns(
                             element=element,
-                            pattern=v["pattern"],
-                            data_type=t_nested_type,
+                            pattern=v,
+                            data_type=t_type,
+                        )
+                        # Remove pattern key
+                        out[t_type].pop("pattern")
+                        # Add path key
+                        out[t_type].update({"path": base_dtype_pattern_path})
+                    # Resolve pattern for nested data type
+                    if isinstance(v, dict) and "pattern" in v:
+                        # Set nested type key for easier access
+                        t_nested_type = f"{t_type}.{k}"
+                        logger.info(
+                            f"Resolving path from pattern for {t_nested_type}"
+                        )
+                        # Resolve pattern
+                        nested_dtype_pattern_path = (
+                            self._get_path_from_patterns(
+                                element=element,
+                                pattern=v["pattern"],
+                                data_type=t_nested_type,
+                            )
+                        )
+                        # Remove pattern key
+                        out[t_type][k].pop("pattern")
+                        # Add path key
+                        out[t_type][k].update(
+                            {"path": nested_dtype_pattern_path}
                         )
-                    )
-                    # Remove pattern key
-                    out[t_type][k].pop("pattern")
-                    # Add path key
-                    out[t_type][k].update(
-                        {"path": nested_data_type_pattern_path}
-                    )
 
         return out
 
-    def get_elements(self) -> List:
+    def get_elements(self) -> list:
         """Implement fetching list of elements in the dataset.
 
         It will use regex to search for "replacements" in the "patterns" and