junifer 0.0.6.dev248__py3-none-any.whl → 0.0.6.dev258__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in the public registry.
Files changed (131)
  1. junifer/_version.py +2 -2
  2. junifer/api/decorators.py +1 -2
  3. junifer/api/functions.py +18 -18
  4. junifer/api/queue_context/gnu_parallel_local_adapter.py +4 -4
  5. junifer/api/queue_context/htcondor_adapter.py +4 -4
  6. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
  7. junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
  8. junifer/api/tests/test_functions.py +32 -32
  9. junifer/cli/cli.py +3 -3
  10. junifer/cli/parser.py +4 -4
  11. junifer/cli/tests/test_cli.py +5 -5
  12. junifer/cli/utils.py +5 -6
  13. junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
  14. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
  15. junifer/configs/juseless/datagrabbers/ucla.py +4 -4
  16. junifer/data/_dispatch.py +11 -14
  17. junifer/data/coordinates/_ants_coordinates_warper.py +16 -6
  18. junifer/data/coordinates/_coordinates.py +7 -7
  19. junifer/data/coordinates/_fsl_coordinates_warper.py +3 -3
  20. junifer/data/masks/_ants_mask_warper.py +3 -3
  21. junifer/data/masks/_fsl_mask_warper.py +3 -3
  22. junifer/data/masks/_masks.py +6 -9
  23. junifer/data/masks/tests/test_masks.py +4 -4
  24. junifer/data/parcellations/_ants_parcellation_warper.py +3 -3
  25. junifer/data/parcellations/_fsl_parcellation_warper.py +3 -3
  26. junifer/data/parcellations/_parcellations.py +19 -19
  27. junifer/data/parcellations/tests/test_parcellations.py +1 -2
  28. junifer/data/pipeline_data_registry_base.py +3 -2
  29. junifer/data/template_spaces.py +3 -3
  30. junifer/data/tests/test_data_utils.py +1 -2
  31. junifer/data/utils.py +4 -3
  32. junifer/datagrabber/aomic/id1000.py +2 -2
  33. junifer/datagrabber/aomic/piop1.py +5 -5
  34. junifer/datagrabber/aomic/piop2.py +5 -5
  35. junifer/datagrabber/aomic/tests/test_id1000.py +3 -3
  36. junifer/datagrabber/aomic/tests/test_piop1.py +4 -4
  37. junifer/datagrabber/aomic/tests/test_piop2.py +4 -4
  38. junifer/datagrabber/base.py +12 -11
  39. junifer/datagrabber/datalad_base.py +3 -3
  40. junifer/datagrabber/dmcc13_benchmark.py +8 -8
  41. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
  42. junifer/datagrabber/hcp1200/hcp1200.py +8 -8
  43. junifer/datagrabber/hcp1200/tests/test_hcp1200.py +2 -1
  44. junifer/datagrabber/multiple.py +7 -7
  45. junifer/datagrabber/pattern.py +10 -10
  46. junifer/datagrabber/pattern_validation_mixin.py +10 -10
  47. junifer/datagrabber/tests/test_datalad_base.py +7 -8
  48. junifer/datagrabber/tests/test_dmcc13_benchmark.py +2 -2
  49. junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
  50. junifer/datareader/default.py +6 -6
  51. junifer/external/nilearn/junifer_connectivity_measure.py +2 -2
  52. junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
  53. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +15 -15
  54. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
  55. junifer/markers/base.py +8 -8
  56. junifer/markers/brainprint.py +7 -9
  57. junifer/markers/complexity/complexity_base.py +6 -8
  58. junifer/markers/complexity/hurst_exponent.py +5 -5
  59. junifer/markers/complexity/multiscale_entropy_auc.py +5 -5
  60. junifer/markers/complexity/perm_entropy.py +5 -5
  61. junifer/markers/complexity/range_entropy.py +5 -5
  62. junifer/markers/complexity/range_entropy_auc.py +5 -5
  63. junifer/markers/complexity/sample_entropy.py +5 -5
  64. junifer/markers/complexity/weighted_perm_entropy.py +5 -5
  65. junifer/markers/ets_rss.py +7 -7
  66. junifer/markers/falff/_afni_falff.py +1 -2
  67. junifer/markers/falff/_junifer_falff.py +1 -2
  68. junifer/markers/falff/falff_base.py +2 -4
  69. junifer/markers/falff/falff_parcels.py +7 -7
  70. junifer/markers/falff/falff_spheres.py +6 -6
  71. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +6 -6
  72. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +7 -7
  73. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +6 -6
  74. junifer/markers/functional_connectivity/functional_connectivity_base.py +10 -10
  75. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +7 -7
  76. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +6 -6
  77. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
  78. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
  79. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +3 -3
  80. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +3 -3
  81. junifer/markers/parcel_aggregation.py +8 -8
  82. junifer/markers/reho/_afni_reho.py +1 -2
  83. junifer/markers/reho/_junifer_reho.py +1 -2
  84. junifer/markers/reho/reho_base.py +2 -4
  85. junifer/markers/reho/reho_parcels.py +8 -8
  86. junifer/markers/reho/reho_spheres.py +7 -7
  87. junifer/markers/sphere_aggregation.py +8 -8
  88. junifer/markers/temporal_snr/temporal_snr_base.py +8 -8
  89. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -6
  90. junifer/markers/temporal_snr/temporal_snr_spheres.py +5 -5
  91. junifer/markers/utils.py +3 -3
  92. junifer/onthefly/_brainprint.py +2 -2
  93. junifer/onthefly/read_transform.py +3 -3
  94. junifer/pipeline/marker_collection.py +4 -4
  95. junifer/pipeline/pipeline_component_registry.py +5 -4
  96. junifer/pipeline/pipeline_step_mixin.py +7 -7
  97. junifer/pipeline/tests/test_pipeline_component_registry.py +2 -3
  98. junifer/pipeline/tests/test_pipeline_step_mixin.py +19 -19
  99. junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
  100. junifer/pipeline/update_meta_mixin.py +2 -2
  101. junifer/pipeline/utils.py +5 -5
  102. junifer/preprocess/base.py +10 -10
  103. junifer/preprocess/confounds/fmriprep_confound_remover.py +11 -14
  104. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +1 -2
  105. junifer/preprocess/smoothing/smoothing.py +7 -7
  106. junifer/preprocess/warping/_ants_warper.py +3 -4
  107. junifer/preprocess/warping/_fsl_warper.py +3 -4
  108. junifer/preprocess/warping/space_warper.py +6 -6
  109. junifer/preprocess/warping/tests/test_space_warper.py +3 -4
  110. junifer/stats.py +4 -4
  111. junifer/storage/base.py +14 -13
  112. junifer/storage/hdf5.py +21 -20
  113. junifer/storage/pandas_base.py +12 -11
  114. junifer/storage/sqlite.py +11 -11
  115. junifer/storage/tests/test_hdf5.py +1 -2
  116. junifer/storage/tests/test_sqlite.py +2 -2
  117. junifer/storage/tests/test_utils.py +8 -7
  118. junifer/storage/utils.py +7 -7
  119. junifer/testing/datagrabbers.py +9 -10
  120. junifer/tests/test_stats.py +2 -2
  121. junifer/typing/_typing.py +6 -9
  122. junifer/utils/helpers.py +2 -3
  123. junifer/utils/logging.py +5 -5
  124. junifer/utils/singleton.py +3 -3
  125. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/METADATA +2 -2
  126. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/RECORD +131 -131
  127. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/AUTHORS.rst +0 -0
  128. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/LICENSE.md +0 -0
  129. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/WHEEL +0 -0
  130. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/entry_points.txt +0 -0
  131. {junifer-0.0.6.dev248.dist-info → junifer-0.0.6.dev258.dist-info}/top_level.txt +0 -0
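
The hunks reproduced below cover the junifer/datagrabber module. The dominant change across them is a modernization of type annotations: container generics imported from typing (List, Dict, Tuple, Type) are replaced by the built-in generics list, dict, tuple and type per PEP 585, and abstract types such as Iterator and Iterable are imported from collections.abc instead of typing. A minimal sketch of the before/after style, using hypothetical function names rather than junifer code:

    # Hypothetical example illustrating the annotation change; not junifer code.

    # Before: container generics imported from typing (required on Python < 3.9)
    from typing import Dict, List, Tuple

    def old_style(types: List[str]) -> Dict[str, Tuple[int, ...]]:
        return {t: (len(t),) for t in types}

    # After: built-in generics (PEP 585, Python >= 3.9); Union and Optional
    # still come from typing in the diffs below.
    def new_style(types: list[str]) -> dict[str, tuple[int, ...]]:
        return {t: (len(t),) for t in types}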

junifer/datagrabber/aomic/tests/test_id1000.py

@@ -7,7 +7,7 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import List, Optional, Union
+ from typing import Optional, Union

  import pytest

@@ -31,7 +31,7 @@ URI = "https://gin.g-node.org/juaml/datalad-example-aomic1000"
  )
  def test_DataladAOMICID1000(
  type_: str,
- nested_types: Optional[List[str]],
+ nested_types: Optional[list[str]],
  ) -> None:
  """Test DataladAOMICID1000 DataGrabber.

@@ -87,7 +87,7 @@ def test_DataladAOMICID1000(
  ],
  )
  def test_DataladAOMICID1000_partial_data_access(
- types: Union[str, List[str]],
+ types: Union[str, list[str]],
  ) -> None:
  """Test DataladAOMICID1000 DataGrabber partial data access.


junifer/datagrabber/aomic/tests/test_piop1.py

@@ -7,7 +7,7 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import List, Optional, Union
+ from typing import Optional, Union

  import pytest

@@ -40,8 +40,8 @@ URI = "https://gin.g-node.org/juaml/datalad-example-aomicpiop1"
  )
  def test_DataladAOMICPIOP1(
  type_: str,
- nested_types: Optional[List[str]],
- tasks: Optional[List[str]],
+ nested_types: Optional[list[str]],
+ tasks: Optional[list[str]],
  ) -> None:
  """Test DataladAOMICPIOP1 DataGrabber.

@@ -117,7 +117,7 @@ def test_DataladAOMICPIOP1(
  ],
  )
  def test_DataladAOMICPIOP1_partial_data_access(
- types: Union[str, List[str]],
+ types: Union[str, list[str]],
  ) -> None:
  """Test DataladAOMICPIOP1 DataGrabber partial data access.


junifer/datagrabber/aomic/tests/test_piop2.py

@@ -7,7 +7,7 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import List, Optional, Union
+ from typing import Optional, Union

  import pytest

@@ -35,8 +35,8 @@ URI = "https://gin.g-node.org/juaml/datalad-example-aomicpiop2"
  )
  def test_DataladAOMICPIOP2(
  type_: str,
- nested_types: Optional[List[str]],
- tasks: Optional[List[str]],
+ nested_types: Optional[list[str]],
+ tasks: Optional[list[str]],
  ) -> None:
  """Test DataladAOMICPIOP2 DataGrabber.

@@ -97,7 +97,7 @@ def test_DataladAOMICPIOP2(
  ],
  )
  def test_DataladAOMICPIOP2_partial_data_access(
- types: Union[str, List[str]],
+ types: Union[str, list[str]],
  ) -> None:
  """Test DataladAOMICPIOP2 DataGrabber partial data access.


junifer/datagrabber/base.py

@@ -6,8 +6,9 @@
  # License: AGPL

  from abc import ABC, abstractmethod
+ from collections.abc import Iterator
  from pathlib import Path
- from typing import Dict, Iterator, List, Tuple, Union
+ from typing import Union

  from ..pipeline import UpdateMetaMixin
  from ..utils import logger, raise_error
@@ -36,7 +37,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):

  """

- def __init__(self, types: List[str], datadir: Union[str, Path]) -> None:
+ def __init__(self, types: list[str], datadir: Union[str, Path]) -> None:
  # Validate types
  if not isinstance(types, list):
  raise_error(msg="`types` must be a list", klass=TypeError)
@@ -67,8 +68,8 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  yield from self.get_elements()

  def __getitem__(
- self, element: Union[str, Tuple[str, ...]]
- ) -> Dict[str, Dict]:
+ self, element: Union[str, tuple[str, ...]]
+ ) -> dict[str, dict]:
  """Enable indexing support.

  Parameters
@@ -89,7 +90,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  element = (element,)
  # Zip through element keys and actual values to construct element
  # access dictionary
- named_element: Dict = dict(zip(self.get_element_keys(), element))
+ named_element: dict = dict(zip(self.get_element_keys(), element))
  logger.debug(f"Named element: {named_element}")
  # Fetch element
  out = self.get_item(**named_element)
@@ -113,7 +114,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  """Context exit."""
  return None

- def get_types(self) -> List[str]:
+ def get_types(self) -> list[str]:
  """Get types.

  Returns
@@ -136,7 +137,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  """
  return self._datadir

- def filter(self, selection: List[Union[str, Tuple[str]]]) -> Iterator:
+ def filter(self, selection: list[Union[str, tuple[str]]]) -> Iterator:
  """Filter elements to be grabbed.

  Parameters
@@ -151,7 +152,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):

  """

- def filter_func(element: Union[str, Tuple[str]]) -> bool:
+ def filter_func(element: Union[str, tuple[str]]) -> bool:
  """Filter element based on selection.

  Parameters
@@ -182,7 +183,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  yield from filter(filter_func, self.get_elements())

  @abstractmethod
- def get_element_keys(self) -> List[str]:
+ def get_element_keys(self) -> list[str]:
  """Get element keys.

  For each item in the ``element`` tuple passed to ``__getitem__()``,
@@ -200,7 +201,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  ) # pragma: no cover

  @abstractmethod
- def get_elements(self) -> List[Union[str, Tuple[str]]]:
+ def get_elements(self) -> list[Union[str, tuple[str]]]:
  """Get elements.

  Returns
@@ -217,7 +218,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
  ) # pragma: no cover

  @abstractmethod
- def get_item(self, **element: Dict) -> Dict[str, Dict]:
+ def get_item(self, **element: dict) -> dict[str, dict]:
  """Get the specified item from the dataset.

  Parameters
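
Besides the built-in generics, the base.py hunks move the Iterator import from typing to collections.abc; the typing aliases for these abstract container types are deprecated since Python 3.9. A short sketch of the pattern, with a hypothetical generator that is not part of junifer:

    # Hypothetical example of annotating a generator with collections.abc.Iterator.
    from collections.abc import Iterator

    def count_up(limit: int) -> Iterator[int]:
        # Lazily yields 0, 1, ..., limit - 1.
        yield from range(limit)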

junifer/datagrabber/datalad_base.py

@@ -9,7 +9,7 @@ import atexit
  import os
  import tempfile
  from pathlib import Path
- from typing import Dict, Optional, Tuple, Union
+ from typing import Optional, Union

  import datalad
  import datalad.api as dl
@@ -165,7 +165,7 @@ class DataladDataGrabber(BaseDataGrabber):
  raise_error("Could not get dataset ID from remote")
  return remote_id

- def _dataset_get(self, out: Dict) -> Dict:
+ def _dataset_get(self, out: dict) -> dict:
  """Get the dataset found from the path in ``out``.

  Parameters
@@ -283,7 +283,7 @@ class DataladDataGrabber(BaseDataGrabber):
  logger.debug(f"Dropping {f}")
  self._dataset.drop(f, result_renderer="disabled")

- def __getitem__(self, element: Union[str, Tuple]) -> Dict:
+ def __getitem__(self, element: Union[str, tuple]) -> dict:
  """Implement single element indexing in the Datalad database.

  It will first obtain the paths from the parent class and then

junifer/datagrabber/dmcc13_benchmark.py

@@ -5,7 +5,7 @@

  from itertools import product
  from pathlib import Path
- from typing import Dict, List, Union
+ from typing import Union

  from ..api.decorators import register_datagrabber
  from ..utils import raise_error
@@ -59,11 +59,11 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
  def __init__(
  self,
  datadir: Union[str, Path, None] = None,
- types: Union[str, List[str], None] = None,
- sessions: Union[str, List[str], None] = None,
- tasks: Union[str, List[str], None] = None,
- phase_encodings: Union[str, List[str], None] = None,
- runs: Union[str, List[str], None] = None,
+ types: Union[str, list[str], None] = None,
+ sessions: Union[str, list[str], None] = None,
+ tasks: Union[str, list[str], None] = None,
+ phase_encodings: Union[str, list[str], None] = None,
+ runs: Union[str, list[str], None] = None,
  native_t1w: bool = False,
  ) -> None:
  # Declare all sessions
@@ -271,7 +271,7 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
  task: str,
  phase_encoding: str,
  run: str,
- ) -> Dict:
+ ) -> dict:
  """Index one element in the dataset.

  Parameters
@@ -309,7 +309,7 @@ class DMCC13Benchmark(PatternDataladDataGrabber):
  )
  return out

- def get_elements(self) -> List:
+ def get_elements(self) -> list:
  """Implement fetching list of subjects in the dataset.

  Returns

junifer/datagrabber/hcp1200/datalad_hcp1200.py

@@ -6,7 +6,7 @@
  # License: AGPL

  from pathlib import Path
- from typing import List, Union
+ from typing import Union

  from junifer.datagrabber.datalad_base import DataladDataGrabber

@@ -50,8 +50,8 @@ class DataladHCP1200(DataladDataGrabber, HCP1200):
  def __init__(
  self,
  datadir: Union[str, Path, None] = None,
- tasks: Union[str, List[str], None] = None,
- phase_encodings: Union[str, List[str], None] = None,
+ tasks: Union[str, list[str], None] = None,
+ phase_encodings: Union[str, list[str], None] = None,
  ica_fix: bool = False,
  ) -> None:
  uri = (

junifer/datagrabber/hcp1200/hcp1200.py

@@ -7,7 +7,7 @@

  from itertools import product
  from pathlib import Path
- from typing import Dict, List, Union
+ from typing import Union

  from ...api.decorators import register_datagrabber
  from ...utils import raise_error
@@ -48,8 +48,8 @@ class HCP1200(PatternDataGrabber):
  def __init__(
  self,
  datadir: Union[str, Path],
- tasks: Union[str, List[str], None] = None,
- phase_encodings: Union[str, List[str], None] = None,
+ tasks: Union[str, list[str], None] = None,
+ phase_encodings: Union[str, list[str], None] = None,
  ica_fix: bool = False,
  ) -> None:
  # All tasks
@@ -66,10 +66,10 @@ class HCP1200(PatternDataGrabber):
  ]
  # Set default tasks
  if tasks is None:
- self.tasks: List[str] = all_tasks
+ self.tasks: list[str] = all_tasks
  # Convert single task into list
  else:
- if not isinstance(tasks, List):
+ if not isinstance(tasks, list):
  tasks = [tasks]
  # Check for invalid task(s)
  for task in tasks:
@@ -78,7 +78,7 @@ class HCP1200(PatternDataGrabber):
  f"'{task}' is not a valid HCP-YA fMRI task input. "
  f"Valid task values can be any or all of {all_tasks}."
  )
- self.tasks: List[str] = tasks
+ self.tasks: list[str] = tasks

  # All phase encodings
  all_phase_encodings = ["LR", "RL"]
@@ -150,7 +150,7 @@ class HCP1200(PatternDataGrabber):
  replacements=replacements,
  )

- def get_item(self, subject: str, task: str, phase_encoding: str) -> Dict:
+ def get_item(self, subject: str, task: str, phase_encoding: str) -> dict:
  """Implement single element indexing in the database.

  Parameters
@@ -180,7 +180,7 @@ class HCP1200(PatternDataGrabber):
  subject=subject, task=new_task, phase_encoding=phase_encoding
  )

- def get_elements(self) -> List:
+ def get_elements(self) -> list:
  """Implement fetching list of elements in the dataset.

  Returns

junifer/datagrabber/hcp1200/tests/test_hcp1200.py

@@ -5,8 +5,9 @@

  import shutil
  import tempfile
+ from collections.abc import Iterable
  from pathlib import Path
- from typing import Iterable, Optional
+ from typing import Optional

  import pytest


junifer/datagrabber/multiple.py

@@ -5,7 +5,7 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import Dict, List, Tuple, Union
+ from typing import Union

  from ..api.decorators import register_datagrabber
  from ..typing import DataGrabberLike
@@ -38,7 +38,7 @@ class MultipleDataGrabber(BaseDataGrabber):

  """

- def __init__(self, datagrabbers: List[DataGrabberLike], **kwargs) -> None:
+ def __init__(self, datagrabbers: list[DataGrabberLike], **kwargs) -> None:
  # Check datagrabbers consistency
  # Check for same element keys
  first_keys = datagrabbers[0].get_element_keys()
@@ -79,7 +79,7 @@ class MultipleDataGrabber(BaseDataGrabber):
  )
  self._datagrabbers = datagrabbers

- def __getitem__(self, element: Union[str, Tuple]) -> Dict:
+ def __getitem__(self, element: Union[str, tuple]) -> dict:
  """Implement indexing.

  Parameters
@@ -127,7 +127,7 @@ class MultipleDataGrabber(BaseDataGrabber):
  dg.__exit__(exc_type, exc_value, exc_traceback)

  # TODO: return type should be List[List[str]], but base type is List[str]
- def get_types(self) -> List[str]:
+ def get_types(self) -> list[str]:
  """Get types.

  Returns
@@ -139,7 +139,7 @@ class MultipleDataGrabber(BaseDataGrabber):
  types = [x for dg in self._datagrabbers for x in dg.get_types()]
  return types

- def get_element_keys(self) -> List[str]:
+ def get_element_keys(self) -> list[str]:
  """Get element keys.

  For each item in the ``element`` tuple passed to ``__getitem__()``,
@@ -153,7 +153,7 @@ class MultipleDataGrabber(BaseDataGrabber):
  """
  return self._datagrabbers[0].get_element_keys()

- def get_elements(self) -> List:
+ def get_elements(self) -> list:
  """Get elements.

  Returns
@@ -171,7 +171,7 @@ class MultipleDataGrabber(BaseDataGrabber):
  elements.intersection_update(s)
  return list(elements)

- def get_item(self, **_: Dict) -> Dict[str, Dict]:
+ def get_item(self, **_: dict) -> dict[str, dict]:
  """Get the specified item from the dataset.

  Parameters

junifer/datagrabber/pattern.py

@@ -8,7 +8,7 @@
  import re
  from copy import deepcopy
  from pathlib import Path
- from typing import Dict, List, Optional, Tuple, Union
+ from typing import Optional, Union

  import numpy as np

@@ -170,9 +170,9 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):

  def __init__(
  self,
- types: List[str],
- patterns: Dict[str, Dict[str, str]],
- replacements: Union[List[str], str],
+ types: list[str],
+ patterns: dict[str, dict[str, str]],
+ replacements: Union[list[str], str],
  datadir: Union[str, Path],
  confounds_format: Optional[str] = None,
  partial_pattern_ok: bool = False,
@@ -215,7 +215,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):

  def _replace_patterns_regex(
  self, pattern: str
- ) -> Tuple[str, str, List[str]]:
+ ) -> tuple[str, str, list[str]]:
  """Replace the patterns in ``pattern`` with the named groups.

  It allows elements to be obtained from the filesystem.
@@ -261,7 +261,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):

  return re_pattern, glob_pattern, t_replacements

- def _replace_patterns_glob(self, element: Dict, pattern: str) -> str:
+ def _replace_patterns_glob(self, element: dict, pattern: str) -> str:
  """Replace ``pattern`` with the ``element`` so it can be globbed.

  Parameters
@@ -294,7 +294,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):
  return pattern.format(**element)

  def _get_path_from_patterns(
- self, element: Dict, pattern: str, data_type: str
+ self, element: dict, pattern: str, data_type: str
  ) -> Path:
  """Get path from resolved patterns.

@@ -351,7 +351,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):

  return path

- def get_element_keys(self) -> List[str]:
+ def get_element_keys(self) -> list[str]:
  """Get element keys.

  For each item in the "element" tuple, this functions returns the
@@ -366,7 +366,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):
  """
  return self.replacements

- def get_item(self, **element: str) -> Dict[str, Dict]:
+ def get_item(self, **element: str) -> dict[str, dict]:
  """Implement single element indexing for the datagrabber.

  This method constructs a real path to the requested item's data, by
@@ -449,7 +449,7 @@ class PatternDataGrabber(BaseDataGrabber, PatternValidationMixin):

  return out

- def get_elements(self) -> List:
+ def get_elements(self) -> list:
  """Implement fetching list of elements in the dataset.

  It will use regex to search for "replacements" in the "patterns" and

junifer/datagrabber/pattern_validation_mixin.py

@@ -3,7 +3,7 @@
  # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import Dict, List, Union
+ from typing import Union

  from ..utils import logger, raise_error, warn_with_log

@@ -72,7 +72,7 @@ PATTERNS_SCHEMA = {
  class PatternValidationMixin:
  """Mixin class for pattern validation."""

- def _validate_types(self, types: List[str]) -> None:
+ def _validate_types(self, types: list[str]) -> None:
  """Validate the types.

  Parameters
@@ -95,8 +95,8 @@ class PatternValidationMixin:

  def _validate_replacements(
  self,
- replacements: List[str],
- patterns: Dict[str, Union[Dict[str, str], List[Dict[str, str]]]],
+ replacements: list[str],
+ patterns: dict[str, Union[dict[str, str], list[dict[str, str]]]],
  partial_pattern_ok: bool,
  ) -> None:
  """Validate the replacements.
@@ -181,8 +181,8 @@ class PatternValidationMixin:

  def _validate_mandatory_keys(
  self,
- keys: List[str],
- schema: List[str],
+ keys: list[str],
+ schema: list[str],
  data_type: str,
  partial_pattern_ok: bool = False,
  ) -> None:
@@ -230,7 +230,7 @@ class PatternValidationMixin:
  logger.debug(f"Mandatory key: `{key}` found for {data_type}.")

  def _identify_stray_keys(
- self, keys: List[str], schema: List[str], data_type: str
+ self, keys: list[str], schema: list[str], data_type: str
  ) -> None:
  """Identify stray keys.

@@ -261,9 +261,9 @@ class PatternValidationMixin:

  def validate_patterns(
  self,
- types: List[str],
- replacements: List[str],
- patterns: Dict[str, Union[Dict[str, str], List[Dict[str, str]]]],
+ types: list[str],
+ replacements: list[str],
+ patterns: dict[str, Union[dict[str, str], list[dict[str, str]]]],
  partial_pattern_ok: bool = False,
  ) -> None:
  """Validate the patterns.

junifer/datagrabber/tests/test_datalad_base.py

@@ -4,7 +4,6 @@
  # License: AGPL

  from pathlib import Path
- from typing import Type

  import datalad.api as dl
  import pytest
@@ -27,7 +26,7 @@ _testing_dataset = {


  @pytest.fixture
- def concrete_datagrabber() -> Type[DataladDataGrabber]:
+ def concrete_datagrabber() -> type[DataladDataGrabber]:
  """Return a concrete datalad-based DataGrabber.

  Returns
@@ -69,7 +68,7 @@ def concrete_datagrabber() -> Type[DataladDataGrabber]:


  def test_DataladDataGrabber_install_errors(
- tmp_path: Path, concrete_datagrabber: Type
+ tmp_path: Path, concrete_datagrabber: type
  ) -> None:
  """Test DataladDataGrabber install errors / warnings.

@@ -108,7 +107,7 @@ def test_DataladDataGrabber_install_errors(


  def test_DataladDataGrabber_clone_cleanup(
- tmp_path: Path, concrete_datagrabber: Type
+ tmp_path: Path, concrete_datagrabber: type
  ) -> None:
  """Test DataladDataGrabber clone and remove.

@@ -157,7 +156,7 @@ def test_DataladDataGrabber_clone_cleanup(


  def test_DataladDataGrabber_clone_create_cleanup(
- concrete_datagrabber: Type,
+ concrete_datagrabber: type,
  ) -> None:
  """Test DataladDataGrabber tempdir clone and remove.

@@ -203,7 +202,7 @@ def test_DataladDataGrabber_clone_create_cleanup(


  def test_DataladDataGrabber_previously_cloned(
- tmp_path: Path, concrete_datagrabber: Type
+ tmp_path: Path, concrete_datagrabber: type
  ) -> None:
  """Test DataladDataGrabber on cloned dataset.

@@ -272,7 +271,7 @@ def test_DataladDataGrabber_previously_cloned(


  def test_DataladDataGrabber_previously_cloned_and_get(
- tmp_path: Path, concrete_datagrabber: Type
+ tmp_path: Path, concrete_datagrabber: type
  ) -> None:
  """Test DataladDataGrabber on cloned dataset with files present.

@@ -355,7 +354,7 @@ def test_DataladDataGrabber_previously_cloned_and_get(


  def test_DataladDataGrabber_previously_cloned_and_get_dirty(
- tmp_path: Path, concrete_datagrabber: Type
+ tmp_path: Path, concrete_datagrabber: type
  ) -> None:
  """Test DataladDataGrabber on a dirty cloned dataset.


junifer/datagrabber/tests/test_dmcc13_benchmark.py

@@ -3,7 +3,7 @@
  # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from typing import List, Optional, Union
+ from typing import Optional, Union

  import pytest

@@ -218,7 +218,7 @@ def test_DMCC13Benchmark(
  ],
  )
  def test_DMCC13Benchmark_partial_data_access(
- types: Union[str, List[str]],
+ types: Union[str, list[str]],
  native_t1w: bool,
  ) -> None:
  """Test DMCC13Benchmark DataGrabber partial data access.

junifer/datagrabber/tests/test_pattern_validation_mixin.py

@@ -4,8 +4,8 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL

- from contextlib import nullcontext
- from typing import ContextManager, Dict, List, Union
+ from contextlib import AbstractContextManager, nullcontext
+ from typing import Union

  import pytest

@@ -186,10 +186,10 @@ from junifer.datagrabber.pattern_validation_mixin import PatternValidationMixin
  ],
  )
  def test_PatternValidationMixin(
- types: Union[str, List[str], List[int]],
- replacements: Union[str, List[str], List[int]],
- patterns: Union[str, Dict[str, Dict[str, str]]],
- expect: ContextManager,
+ types: Union[str, list[str], list[int]],
+ replacements: Union[str, list[str], list[int]],
+ patterns: Union[str, dict[str, dict[str, str]]],
+ expect: AbstractContextManager,
  ) -> None:
  """Test validation.
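
This last file also swaps typing.ContextManager for contextlib.AbstractContextManager, the runtime class behind the deprecated typing alias. It is typically used, as in the parametrized test above, to annotate an expectation that is either pytest.raises(...) or nullcontext(). A hedged sketch with a hypothetical helper, not junifer code:

    # Hypothetical helper showing the AbstractContextManager annotation pattern.
    from contextlib import AbstractContextManager, nullcontext

    import pytest

    def expectation(should_raise: bool) -> AbstractContextManager:
        # Either expect a ValueError or expect nothing to be raised.
        return pytest.raises(ValueError) if should_raise else nullcontext()

    with expectation(should_raise=False):
        pass  # no error expected here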