junifer 0.0.6.dev227__py3-none-any.whl → 0.0.6.dev252__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (131)
  1. junifer/_version.py +2 -2
  2. junifer/api/decorators.py +1 -2
  3. junifer/api/functions.py +18 -18
  4. junifer/api/queue_context/gnu_parallel_local_adapter.py +4 -4
  5. junifer/api/queue_context/htcondor_adapter.py +4 -4
  6. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +3 -3
  7. junifer/api/queue_context/tests/test_htcondor_adapter.py +3 -3
  8. junifer/api/tests/test_functions.py +32 -32
  9. junifer/cli/cli.py +3 -3
  10. junifer/cli/parser.py +4 -4
  11. junifer/cli/tests/test_cli.py +5 -5
  12. junifer/cli/utils.py +5 -6
  13. junifer/configs/juseless/datagrabbers/ixi_vbm.py +2 -2
  14. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +2 -2
  15. junifer/configs/juseless/datagrabbers/ucla.py +4 -4
  16. junifer/data/_dispatch.py +11 -14
  17. junifer/data/coordinates/_ants_coordinates_warper.py +6 -8
  18. junifer/data/coordinates/_coordinates.py +34 -21
  19. junifer/data/coordinates/_fsl_coordinates_warper.py +6 -8
  20. junifer/data/masks/_ants_mask_warper.py +18 -11
  21. junifer/data/masks/_fsl_mask_warper.py +6 -8
  22. junifer/data/masks/_masks.py +27 -34
  23. junifer/data/masks/tests/test_masks.py +4 -4
  24. junifer/data/parcellations/_ants_parcellation_warper.py +18 -11
  25. junifer/data/parcellations/_fsl_parcellation_warper.py +6 -8
  26. junifer/data/parcellations/_parcellations.py +39 -43
  27. junifer/data/parcellations/tests/test_parcellations.py +1 -2
  28. junifer/data/pipeline_data_registry_base.py +3 -2
  29. junifer/data/template_spaces.py +3 -3
  30. junifer/data/tests/test_data_utils.py +1 -2
  31. junifer/data/utils.py +69 -4
  32. junifer/datagrabber/aomic/id1000.py +24 -11
  33. junifer/datagrabber/aomic/piop1.py +27 -14
  34. junifer/datagrabber/aomic/piop2.py +27 -14
  35. junifer/datagrabber/aomic/tests/test_id1000.py +3 -3
  36. junifer/datagrabber/aomic/tests/test_piop1.py +4 -4
  37. junifer/datagrabber/aomic/tests/test_piop2.py +4 -4
  38. junifer/datagrabber/base.py +18 -12
  39. junifer/datagrabber/datalad_base.py +18 -11
  40. junifer/datagrabber/dmcc13_benchmark.py +31 -18
  41. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -3
  42. junifer/datagrabber/hcp1200/hcp1200.py +26 -15
  43. junifer/datagrabber/hcp1200/tests/test_hcp1200.py +2 -1
  44. junifer/datagrabber/multiple.py +7 -7
  45. junifer/datagrabber/pattern.py +75 -45
  46. junifer/datagrabber/pattern_validation_mixin.py +204 -94
  47. junifer/datagrabber/tests/test_datalad_base.py +7 -8
  48. junifer/datagrabber/tests/test_dmcc13_benchmark.py +28 -11
  49. junifer/datagrabber/tests/test_pattern_validation_mixin.py +6 -6
  50. junifer/datareader/default.py +6 -6
  51. junifer/external/nilearn/junifer_connectivity_measure.py +2 -2
  52. junifer/external/nilearn/junifer_nifti_spheres_masker.py +4 -4
  53. junifer/external/nilearn/tests/test_junifer_connectivity_measure.py +15 -15
  54. junifer/external/nilearn/tests/test_junifer_nifti_spheres_masker.py +2 -3
  55. junifer/markers/base.py +8 -8
  56. junifer/markers/brainprint.py +7 -9
  57. junifer/markers/complexity/complexity_base.py +6 -8
  58. junifer/markers/complexity/hurst_exponent.py +5 -5
  59. junifer/markers/complexity/multiscale_entropy_auc.py +5 -5
  60. junifer/markers/complexity/perm_entropy.py +5 -5
  61. junifer/markers/complexity/range_entropy.py +5 -5
  62. junifer/markers/complexity/range_entropy_auc.py +5 -5
  63. junifer/markers/complexity/sample_entropy.py +5 -5
  64. junifer/markers/complexity/weighted_perm_entropy.py +5 -5
  65. junifer/markers/ets_rss.py +7 -7
  66. junifer/markers/falff/_afni_falff.py +1 -2
  67. junifer/markers/falff/_junifer_falff.py +1 -2
  68. junifer/markers/falff/falff_base.py +2 -4
  69. junifer/markers/falff/falff_parcels.py +7 -7
  70. junifer/markers/falff/falff_spheres.py +6 -6
  71. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +6 -6
  72. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +7 -7
  73. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +6 -6
  74. junifer/markers/functional_connectivity/functional_connectivity_base.py +10 -10
  75. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +7 -7
  76. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +6 -6
  77. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +1 -2
  78. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +1 -2
  79. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +3 -3
  80. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +3 -3
  81. junifer/markers/parcel_aggregation.py +8 -8
  82. junifer/markers/reho/_afni_reho.py +1 -2
  83. junifer/markers/reho/_junifer_reho.py +1 -2
  84. junifer/markers/reho/reho_base.py +2 -4
  85. junifer/markers/reho/reho_parcels.py +8 -8
  86. junifer/markers/reho/reho_spheres.py +7 -7
  87. junifer/markers/sphere_aggregation.py +8 -8
  88. junifer/markers/temporal_snr/temporal_snr_base.py +8 -8
  89. junifer/markers/temporal_snr/temporal_snr_parcels.py +6 -6
  90. junifer/markers/temporal_snr/temporal_snr_spheres.py +5 -5
  91. junifer/markers/utils.py +3 -3
  92. junifer/onthefly/_brainprint.py +2 -2
  93. junifer/onthefly/read_transform.py +3 -3
  94. junifer/pipeline/marker_collection.py +4 -4
  95. junifer/pipeline/pipeline_component_registry.py +5 -4
  96. junifer/pipeline/pipeline_step_mixin.py +15 -11
  97. junifer/pipeline/tests/test_pipeline_component_registry.py +2 -3
  98. junifer/pipeline/tests/test_pipeline_step_mixin.py +19 -19
  99. junifer/pipeline/tests/test_update_meta_mixin.py +4 -4
  100. junifer/pipeline/update_meta_mixin.py +21 -17
  101. junifer/pipeline/utils.py +5 -5
  102. junifer/preprocess/base.py +10 -10
  103. junifer/preprocess/confounds/fmriprep_confound_remover.py +11 -14
  104. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +1 -2
  105. junifer/preprocess/smoothing/smoothing.py +7 -7
  106. junifer/preprocess/warping/_ants_warper.py +26 -6
  107. junifer/preprocess/warping/_fsl_warper.py +22 -7
  108. junifer/preprocess/warping/space_warper.py +37 -10
  109. junifer/preprocess/warping/tests/test_space_warper.py +3 -4
  110. junifer/stats.py +4 -4
  111. junifer/storage/base.py +14 -13
  112. junifer/storage/hdf5.py +21 -20
  113. junifer/storage/pandas_base.py +12 -11
  114. junifer/storage/sqlite.py +11 -11
  115. junifer/storage/tests/test_hdf5.py +1 -2
  116. junifer/storage/tests/test_sqlite.py +2 -2
  117. junifer/storage/tests/test_utils.py +8 -7
  118. junifer/storage/utils.py +7 -7
  119. junifer/testing/datagrabbers.py +9 -10
  120. junifer/tests/test_stats.py +2 -2
  121. junifer/typing/_typing.py +6 -9
  122. junifer/utils/helpers.py +2 -3
  123. junifer/utils/logging.py +5 -5
  124. junifer/utils/singleton.py +3 -3
  125. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/METADATA +2 -2
  126. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/RECORD +131 -131
  127. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/WHEEL +1 -1
  128. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/AUTHORS.rst +0 -0
  129. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/LICENSE.md +0 -0
  130. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/entry_points.txt +0 -0
  131. {junifer-0.0.6.dev227.dist-info → junifer-0.0.6.dev252.dist-info}/top_level.txt +0 -0

junifer/datagrabber/pattern_validation_mixin.py

@@ -3,7 +3,7 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import Dict, List
+from typing import Union

 from ..utils import logger, raise_error, warn_with_log

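Note: this is the first of many hunks in this release that migrate annotations
from the deprecated typing aliases (List, Dict, Tuple, Type) to the PEP 585
built-in generics available since Python 3.9; the same mechanical change
recurs in the test, reader, and external modules below. A minimal sketch of
the idiom, using hypothetical functions that are not part of the codebase:

    from typing import Dict, List, Union

    def before(types: List[str]) -> Dict[str, str]:
        # Old style: requires importing aliases from typing.
        ...

    def after(types: list[str]) -> dict[str, Union[str, int]]:
        # New style (PEP 585): built-in types parametrize directly; only
        # Union still comes from typing (or use `str | int` on 3.10+).
        ...
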
@@ -36,7 +36,7 @@ PATTERNS_SCHEMA = {
         },
     },
     "Warp": {
-        "mandatory": ["pattern", "src", "dst"],
+        "mandatory": ["pattern", "src", "dst", "warper"],
        "optional": {},
     },
     "VBM_GM": {
@@ -72,7 +72,7 @@ PATTERNS_SCHEMA = {
 class PatternValidationMixin:
     """Mixin class for pattern validation."""

-    def _validate_types(self, types: List[str]) -> None:
+    def _validate_types(self, types: list[str]) -> None:
         """Validate the types.

         Parameters
@@ -95,8 +95,8 @@ class PatternValidationMixin:

     def _validate_replacements(
         self,
-        replacements: List[str],
-        patterns: Dict[str, Dict[str, str]],
+        replacements: list[str],
+        patterns: dict[str, Union[dict[str, str], list[dict[str, str]]]],
         partial_pattern_ok: bool,
     ) -> None:
         """Validate the replacements.
@@ -132,45 +132,57 @@ class PatternValidationMixin:

         if any(not isinstance(x, str) for x in replacements):
             raise_error(
-                msg="`replacements` must be a list of strings.",
+                msg="`replacements` must be a list of strings",
                 klass=TypeError,
             )

+        # Make a list of all patterns recursively
+        all_patterns = []
+        for dtype_val in patterns.values():
+            # Conditional for list dtype vals like Warp
+            if isinstance(dtype_val, list):
+                for entry in dtype_val:
+                    all_patterns.append(entry.get("pattern", ""))
+            else:
+                all_patterns.append(dtype_val.get("pattern", ""))
+        # Check for stray replacements
         for x in replacements:
-            if all(
-                x not in y
-                for y in [
-                    data_type_val.get("pattern", "")
-                    for data_type_val in patterns.values()
-                ]
-            ):
+            if all(x not in y for y in all_patterns):
                 if partial_pattern_ok:
                     warn_with_log(
                         f"Replacement: `{x}` is not part of any pattern, "
                         "things might not work as expected if you are unsure "
-                        "of what you are doing"
+                        "of what you are doing."
                     )
                 else:
                     raise_error(
-                        msg=f"Replacement: {x} is not part of any pattern."
+                        msg=f"Replacement: `{x}` is not part of any pattern"
                     )

         # Check that at least one pattern has all the replacements
         at_least_one = False
-        for data_type_val in patterns.values():
-            if all(
-                x in data_type_val.get("pattern", "") for x in replacements
-            ):
-                at_least_one = True
+        for dtype_val in patterns.values():
+            # Conditional for list dtype vals like Warp
+            if isinstance(dtype_val, list):
+                for entry in dtype_val:
+                    if all(
+                        x in entry.get("pattern", "") for x in replacements
+                    ):
+                        at_least_one = True
+            else:
+                if all(
+                    x in dtype_val.get("pattern", "") for x in replacements
+                ):
+                    at_least_one = True
         if not at_least_one and not partial_pattern_ok:
             raise_error(
-                msg="At least one pattern must contain all replacements."
+                msg="At least one pattern must contain all replacements"
             )

     def _validate_mandatory_keys(
         self,
-        keys: List[str],
-        schema: List[str],
+        keys: list[str],
+        schema: list[str],
         data_type: str,
         partial_pattern_ok: bool = False,
     ) -> None:
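
Note: the flattening step above means replacement checks now see every pattern
string, whether a data type maps to a single specification or, as with Warp, a
list of them. A standalone sketch of the same traversal over hypothetical data:

    # Hypothetical patterns dict mixing single and list-valued entries.
    patterns = {
        "T1w": {"pattern": "{subject}/anat/{subject}_T1w.nii.gz"},
        "Warp": [
            {"pattern": "{subject}/xfm/std2native_xfm.h5"},
            {"pattern": "{subject}/xfm/native2std_xfm.h5"},
        ],
    }

    all_patterns = []
    for dtype_val in patterns.values():
        if isinstance(dtype_val, list):
            all_patterns.extend(
                entry.get("pattern", "") for entry in dtype_val
            )
        else:
            all_patterns.append(dtype_val.get("pattern", ""))

    # "{subject}" appears in every pattern, so it is a valid replacement.
    assert all("{subject}" in pattern for pattern in all_patterns)
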
@@ -207,7 +219,7 @@ class PatternValidationMixin:
                 warn_with_log(
                     f"Mandatory key: `{key}` not found for {data_type}, "
                     "things might not work as expected if you are unsure "
-                    "of what you are doing"
+                    "of what you are doing."
                 )
             else:
                 raise_error(
@@ -215,10 +227,10 @@ class PatternValidationMixin:
                     klass=KeyError,
                 )
         else:
-            logger.debug(f"Mandatory key: `{key}` found for {data_type}")
+            logger.debug(f"Mandatory key: `{key}` found for {data_type}.")

     def _identify_stray_keys(
-        self, keys: List[str], schema: List[str], data_type: str
+        self, keys: list[str], schema: list[str], data_type: str
     ) -> None:
         """Identify stray keys.

@@ -249,9 +261,9 @@ class PatternValidationMixin:

     def validate_patterns(
         self,
-        types: List[str],
-        replacements: List[str],
-        patterns: Dict[str, Dict[str, str]],
+        types: list[str],
+        replacements: list[str],
+        patterns: dict[str, Union[dict[str, str], list[dict[str, str]]]],
         partial_pattern_ok: bool = False,
     ) -> None:
         """Validate the patterns.
@@ -298,87 +310,185 @@ class PatternValidationMixin:
                 msg="`patterns` must contain all `types`", klass=ValueError
             )
         # Check against schema
-        for data_type_key, data_type_val in patterns.items():
+        for dtype_key, dtype_val in patterns.items():
             # Check if valid data type is provided
-            if data_type_key not in PATTERNS_SCHEMA:
+            if dtype_key not in PATTERNS_SCHEMA:
                 raise_error(
-                    f"Unknown data type: {data_type_key}, "
+                    f"Unknown data type: {dtype_key}, "
                     f"should be one of: {list(PATTERNS_SCHEMA.keys())}"
                 )
-            # Check mandatory keys for data type
-            self._validate_mandatory_keys(
-                keys=list(data_type_val),
-                schema=PATTERNS_SCHEMA[data_type_key]["mandatory"],
-                data_type=data_type_key,
-                partial_pattern_ok=partial_pattern_ok,
-            )
-            # Check optional keys for data type
-            for optional_key, optional_val in PATTERNS_SCHEMA[data_type_key][
-                "optional"
-            ].items():
-                if optional_key not in data_type_val:
-                    logger.debug(
-                        f"Optional key: `{optional_key}` missing for "
-                        f"{data_type_key}"
-                    )
-                else:
-                    logger.debug(
-                        f"Optional key: `{optional_key}` found for "
-                        f"{data_type_key}"
-                    )
-                    # Set nested type name for easier access
-                    nested_data_type = f"{data_type_key}.{optional_key}"
-                    nested_mandatory_keys_schema = PATTERNS_SCHEMA[
-                        data_type_key
-                    ]["optional"][optional_key]["mandatory"]
-                    nested_optional_keys_schema = PATTERNS_SCHEMA[
-                        data_type_key
-                    ]["optional"][optional_key]["optional"]
-                    # Check mandatory keys for nested type
+            # Conditional for list dtype vals like Warp
+            if isinstance(dtype_val, list):
+                for idx, entry in enumerate(dtype_val):
+                    # Check mandatory keys for data type
                     self._validate_mandatory_keys(
-                        keys=list(optional_val["mandatory"]),
-                        schema=nested_mandatory_keys_schema,
-                        data_type=nested_data_type,
+                        keys=list(entry),
+                        schema=PATTERNS_SCHEMA[dtype_key]["mandatory"],
+                        data_type=f"{dtype_key}.{idx}",
                         partial_pattern_ok=partial_pattern_ok,
                     )
-                    # Check optional keys for nested type
-                    for nested_optional_key in nested_optional_keys_schema:
-                        if nested_optional_key not in optional_val["optional"]:
+                    # Check optional keys for data type
+                    for optional_key, optional_val in PATTERNS_SCHEMA[
+                        dtype_key
+                    ]["optional"].items():
+                        if optional_key not in entry:
                             logger.debug(
-                                f"Optional key: `{nested_optional_key}` "
-                                f"missing for {nested_data_type}"
+                                f"Optional key: `{optional_key}` missing for "
+                                f"{dtype_key}.{idx}"
                             )
                         else:
                             logger.debug(
-                                f"Optional key: `{nested_optional_key}` found "
-                                f"for {nested_data_type}"
+                                f"Optional key: `{optional_key}` found for "
+                                f"{dtype_key}.{idx}"
+                            )
+                            # Set nested type name for easier access
+                            nested_dtype = f"{dtype_key}.{idx}.{optional_key}"
+                            nested_mandatory_keys_schema = PATTERNS_SCHEMA[
+                                dtype_key
+                            ]["optional"][optional_key]["mandatory"]
+                            nested_optional_keys_schema = PATTERNS_SCHEMA[
+                                dtype_key
+                            ]["optional"][optional_key]["optional"]
+                            # Check mandatory keys for nested type
+                            self._validate_mandatory_keys(
+                                keys=list(optional_val["mandatory"]),
+                                schema=nested_mandatory_keys_schema,
+                                data_type=nested_dtype,
+                                partial_pattern_ok=partial_pattern_ok,
+                            )
+                            # Check optional keys for nested type
+                            for (
+                                nested_optional_key
+                            ) in nested_optional_keys_schema:
+                                if (
+                                    nested_optional_key
+                                    not in optional_val["optional"]
+                                ):
+                                    logger.debug(
+                                        f"Optional key: "
+                                        f"`{nested_optional_key}` missing for "
+                                        f"{nested_dtype}"
+                                    )
+                                else:
+                                    logger.debug(
+                                        f"Optional key: "
+                                        f"`{nested_optional_key}` found for "
+                                        f"{nested_dtype}"
+                                    )
+                            # Check stray key for nested data type
+                            self._identify_stray_keys(
+                                keys=(
+                                    optional_val["mandatory"]
+                                    + optional_val["optional"]
+                                ),
+                                schema=(
+                                    nested_mandatory_keys_schema
+                                    + nested_optional_keys_schema
+                                ),
+                                data_type=nested_dtype,
                             )
-                    # Check stray key for nested data type
+                    # Check stray key for data type
                     self._identify_stray_keys(
-                        keys=optional_val["mandatory"]
-                        + optional_val["optional"],
-                        schema=nested_mandatory_keys_schema
-                        + nested_optional_keys_schema,
-                        data_type=nested_data_type,
+                        keys=list(entry.keys()),
+                        schema=(
+                            PATTERNS_SCHEMA[dtype_key]["mandatory"]
+                            + list(
+                                PATTERNS_SCHEMA[dtype_key]["optional"].keys()
+                            )
+                        ),
+                        data_type=dtype_key,
                     )
-            # Check stray key for data type
-            self._identify_stray_keys(
-                keys=list(data_type_val.keys()),
-                schema=(
-                    PATTERNS_SCHEMA[data_type_key]["mandatory"]
-                    + list(PATTERNS_SCHEMA[data_type_key]["optional"].keys())
-                ),
-                data_type=data_type_key,
-            )
-            # Wildcard check in patterns
-            if "}*" in data_type_val.get("pattern", ""):
-                raise_error(
-                    msg=(
-                        f"`{data_type_key}.pattern` must not contain `*` "
-                        "following a replacement"
+                    # Wildcard check in patterns
+                    if "}*" in entry.get("pattern", ""):
+                        raise_error(
+                            msg=(
+                                f"`{dtype_key}.pattern` must not contain `*` "
+                                "following a replacement"
+                            ),
+                            klass=ValueError,
+                        )
+            else:
+                # Check mandatory keys for data type
+                self._validate_mandatory_keys(
+                    keys=list(dtype_val),
+                    schema=PATTERNS_SCHEMA[dtype_key]["mandatory"],
+                    data_type=dtype_key,
+                    partial_pattern_ok=partial_pattern_ok,
+                )
+                # Check optional keys for data type
+                for optional_key, optional_val in PATTERNS_SCHEMA[dtype_key][
+                    "optional"
+                ].items():
+                    if optional_key not in dtype_val:
+                        logger.debug(
+                            f"Optional key: `{optional_key}` missing for "
+                            f"{dtype_key}."
+                        )
+                    else:
+                        logger.debug(
+                            f"Optional key: `{optional_key}` found for "
+                            f"{dtype_key}."
+                        )
+                        # Set nested type name for easier access
+                        nested_dtype = f"{dtype_key}.{optional_key}"
+                        nested_mandatory_keys_schema = PATTERNS_SCHEMA[
+                            dtype_key
+                        ]["optional"][optional_key]["mandatory"]
+                        nested_optional_keys_schema = PATTERNS_SCHEMA[
+                            dtype_key
+                        ]["optional"][optional_key]["optional"]
+                        # Check mandatory keys for nested type
+                        self._validate_mandatory_keys(
+                            keys=list(optional_val["mandatory"]),
+                            schema=nested_mandatory_keys_schema,
+                            data_type=nested_dtype,
+                            partial_pattern_ok=partial_pattern_ok,
+                        )
+                        # Check optional keys for nested type
+                        for nested_optional_key in nested_optional_keys_schema:
+                            if (
+                                nested_optional_key
+                                not in optional_val["optional"]
+                            ):
+                                logger.debug(
+                                    f"Optional key: `{nested_optional_key}` "
+                                    f"missing for {nested_dtype}"
+                                )
+                            else:
+                                logger.debug(
+                                    f"Optional key: `{nested_optional_key}` "
+                                    f"found for {nested_dtype}"
+                                )
+                        # Check stray key for nested data type
+                        self._identify_stray_keys(
+                            keys=(
+                                optional_val["mandatory"]
+                                + optional_val["optional"]
+                            ),
+                            schema=(
+                                nested_mandatory_keys_schema
+                                + nested_optional_keys_schema
+                            ),
+                            data_type=nested_dtype,
+                        )
+                # Check stray key for data type
+                self._identify_stray_keys(
+                    keys=list(dtype_val.keys()),
+                    schema=(
+                        PATTERNS_SCHEMA[dtype_key]["mandatory"]
+                        + list(PATTERNS_SCHEMA[dtype_key]["optional"].keys())
                     ),
-                    klass=ValueError,
+                    data_type=dtype_key,
                 )
+                # Wildcard check in patterns
+                if "}*" in dtype_val.get("pattern", ""):
+                    raise_error(
+                        msg=(
+                            f"`{dtype_key}.pattern` must not contain `*` "
+                            "following a replacement"
+                        ),
+                        klass=ValueError,
+                    )

         # Validate replacements
         self._validate_replacements(
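
Note: taken together, validate_patterns now walks list-valued entries element
by element and reports nested types as Warp.0, Warp.1, and so on. A hedged
usage sketch; the mixin and method are real, but the pattern values are made
up and the T1w key requirements ("pattern" plus "space") are assumed from the
library's schema rather than shown in this diff:

    from junifer.datagrabber.pattern_validation_mixin import (
        PatternValidationMixin,
    )

    validator = PatternValidationMixin()
    validator.validate_patterns(
        types=["T1w", "Warp"],
        replacements=["subject"],
        patterns={
            # "space" is assumed to be the other mandatory T1w key.
            "T1w": {
                "pattern": "{subject}/anat/{subject}_T1w.nii.gz",
                "space": "native",
            },
            # "Warp" is a list, so each element is validated separately.
            "Warp": [
                {
                    "pattern": "{subject}/xfm/std2native_xfm.h5",
                    "src": "MNI152NLin2009cAsym",
                    "dst": "native",
                    "warper": "ants",
                },
                {
                    "pattern": "{subject}/xfm/native2std_xfm.h5",
                    "src": "native",
                    "dst": "MNI152NLin2009cAsym",
                    "warper": "ants",
                },
            ],
        },
    )
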

junifer/datagrabber/tests/test_datalad_base.py

@@ -4,7 +4,6 @@
 # License: AGPL

 from pathlib import Path
-from typing import Type

 import datalad.api as dl
 import pytest
@@ -27,7 +26,7 @@ _testing_dataset = {


 @pytest.fixture
-def concrete_datagrabber() -> Type[DataladDataGrabber]:
+def concrete_datagrabber() -> type[DataladDataGrabber]:
     """Return a concrete datalad-based DataGrabber.

     Returns
@@ -69,7 +68,7 @@ def concrete_datagrabber() -> Type[DataladDataGrabber]:


 def test_DataladDataGrabber_install_errors(
-    tmp_path: Path, concrete_datagrabber: Type
+    tmp_path: Path, concrete_datagrabber: type
 ) -> None:
     """Test DataladDataGrabber install errors / warnings.

@@ -108,7 +107,7 @@ def test_DataladDataGrabber_install_errors(


 def test_DataladDataGrabber_clone_cleanup(
-    tmp_path: Path, concrete_datagrabber: Type
+    tmp_path: Path, concrete_datagrabber: type
 ) -> None:
     """Test DataladDataGrabber clone and remove.

@@ -157,7 +156,7 @@ def test_DataladDataGrabber_clone_cleanup(


 def test_DataladDataGrabber_clone_create_cleanup(
-    concrete_datagrabber: Type,
+    concrete_datagrabber: type,
 ) -> None:
     """Test DataladDataGrabber tempdir clone and remove.

@@ -203,7 +202,7 @@ def test_DataladDataGrabber_clone_create_cleanup(


 def test_DataladDataGrabber_previously_cloned(
-    tmp_path: Path, concrete_datagrabber: Type
+    tmp_path: Path, concrete_datagrabber: type
 ) -> None:
     """Test DataladDataGrabber on cloned dataset.

@@ -272,7 +271,7 @@ def test_DataladDataGrabber_previously_cloned(


 def test_DataladDataGrabber_previously_cloned_and_get(
-    tmp_path: Path, concrete_datagrabber: Type
+    tmp_path: Path, concrete_datagrabber: type
 ) -> None:
     """Test DataladDataGrabber on cloned dataset with files present.

@@ -355,7 +354,7 @@ def test_DataladDataGrabber_previously_cloned_and_get(


 def test_DataladDataGrabber_previously_cloned_and_get_dirty(
-    tmp_path: Path, concrete_datagrabber: Type
+    tmp_path: Path, concrete_datagrabber: type
 ) -> None:
     """Test DataladDataGrabber on a dirty cloned dataset.


junifer/datagrabber/tests/test_dmcc13_benchmark.py

@@ -3,7 +3,7 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import List, Optional, Union
+from typing import Optional, Union

 import pytest

@@ -116,7 +116,12 @@ def test_DMCC13Benchmark(
         data_file_names.extend(
             [
                 "sub-01_desc-preproc_T1w.nii.gz",
-                "sub-01_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5",
+                [
+                    "sub-01_from-MNI152NLin2009cAsym_to-T1w"
+                    "_mode-image_xfm.h5",
+                    "sub-01_from-T1w_to-MNI152NLin2009cAsym"
+                    "_mode-image_xfm.h5",
+                ],
             ]
         )
     else:
@@ -127,14 +132,26 @@ def test_DMCC13Benchmark(
     for data_type, data_file_name in zip(data_types, data_file_names):
         # Assert data type
         assert data_type in out
-        # Assert data file path exists
-        assert out[data_type]["path"].exists()
-        # Assert data file path is a file
-        assert out[data_type]["path"].is_file()
-        # Assert data file name
-        assert out[data_type]["path"].name == data_file_name
-        # Assert metadata
-        assert "meta" in out[data_type]
+        # Conditional for Warp
+        if data_type == "Warp":
+            for idx, fname in enumerate(data_file_name):
+                # Assert data file path exists
+                assert out[data_type][idx]["path"].exists()
+                # Assert data file path is a file
+                assert out[data_type][idx]["path"].is_file()
+                # Assert data file name
+                assert out[data_type][idx]["path"].name == fname
+                # Assert metadata
+                assert "meta" in out[data_type][idx]
+        else:
+            # Assert data file path exists
+            assert out[data_type]["path"].exists()
+            # Assert data file path is a file
+            assert out[data_type]["path"].is_file()
+            # Assert data file name
+            assert out[data_type]["path"].name == data_file_name
+            # Assert metadata
+            assert "meta" in out[data_type]

     # Check BOLD nested data types
     for type_, file_name in zip(
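
Note: because Warp data is now a list of transforms (one per direction), a
consumer indexes into the list before reaching "path". A sketch over a
hypothetical element output shaped like the one this test asserts on:

    from pathlib import Path

    # Hypothetical element output: "Warp" holds one dict per transform
    # direction, each with its own "path" and "meta".
    out = {
        "T1w": {"path": Path("sub-01_desc-preproc_T1w.nii.gz"), "meta": {}},
        "Warp": [
            {
                "path": Path(
                    "sub-01_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5"
                ),
                "meta": {},
            },
            {
                "path": Path(
                    "sub-01_from-T1w_to-MNI152NLin2009cAsym_mode-image_xfm.h5"
                ),
                "meta": {},
            },
        ],
    }

    for warp in out["Warp"]:
        print(warp["path"].name)
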
@@ -201,7 +218,7 @@ def test_DMCC13Benchmark(
     ],
 )
 def test_DMCC13Benchmark_partial_data_access(
-    types: Union[str, List[str]],
+    types: Union[str, list[str]],
     native_t1w: bool,
 ) -> None:
     """Test DMCC13Benchmark DataGrabber partial data access.

junifer/datagrabber/tests/test_pattern_validation_mixin.py

@@ -4,8 +4,8 @@
 #          Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from contextlib import nullcontext
-from typing import ContextManager, Dict, List, Union
+from contextlib import AbstractContextManager, nullcontext
+from typing import Union

 import pytest

@@ -186,10 +186,10 @@ from junifer.datagrabber.pattern_validation_mixin import PatternValidationMixin
     ],
 )
 def test_PatternValidationMixin(
-    types: Union[str, List[str], List[int]],
-    replacements: Union[str, List[str], List[int]],
-    patterns: Union[str, Dict[str, Dict[str, str]]],
-    expect: ContextManager,
+    types: Union[str, list[str], list[int]],
+    replacements: Union[str, list[str], list[int]],
+    patterns: Union[str, dict[str, dict[str, str]]],
+    expect: AbstractContextManager,
 ) -> None:
     """Test validation.

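Note: typing.ContextManager is deprecated since Python 3.9 in favor of
contextlib.AbstractContextManager, which both nullcontext() and
pytest.raises(...) satisfy. A minimal sketch of the annotation pattern, with a
hypothetical test that is not part of this suite:

    from contextlib import AbstractContextManager, nullcontext

    import pytest

    @pytest.mark.parametrize(
        "value, expect",
        [
            (["BOLD"], nullcontext()),
            ("BOLD", pytest.raises(TypeError, match="must be a list")),
        ],
    )
    def test_value_is_list(value, expect: AbstractContextManager) -> None:
        # Either no exception (nullcontext) or the expected one is raised.
        with expect:
            if not isinstance(value, list):
                raise TypeError("must be a list")
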

junifer/datareader/default.py

@@ -5,7 +5,7 @@
 # License: AGPL

 from pathlib import Path
-from typing import Dict, List, Optional, Union
+from typing import Optional, Union

 import nibabel as nib
 import pandas as pd
@@ -37,7 +37,7 @@ _readers["TSV"] = {"func": pd.read_csv, "params": {"sep": "\t"}}
 class DefaultDataReader(PipelineStepMixin, UpdateMetaMixin):
     """Concrete implementation for common data reading."""

-    def validate_input(self, input: List[str]) -> List[str]:
+    def validate_input(self, input: list[str]) -> list[str]:
         """Validate input.

         Parameters
@@ -75,9 +75,9 @@ class DefaultDataReader(PipelineStepMixin, UpdateMetaMixin):

     def _fit_transform(
         self,
-        input: Dict[str, Dict],
-        params: Optional[Dict] = None,
-    ) -> Dict:
+        input: dict[str, dict],
+        params: Optional[dict] = None,
+    ) -> dict:
         """Fit and transform.

         Parameters
@@ -165,7 +165,7 @@ class DefaultDataReader(PipelineStepMixin, UpdateMetaMixin):


 def _read_data(
-    data_type: str, path: Path, read_params: Dict
+    data_type: str, path: Path, read_params: dict
 ) -> Union[nib.Nifti1Image, pd.DataFrame, None]:
     """Read data for data type.


junifer/external/nilearn/junifer_connectivity_measure.py

@@ -3,7 +3,7 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import Callable, List, Optional
+from typing import Callable, Optional

 import numpy as np
 from nilearn import signal
@@ -186,7 +186,7 @@ def _map_eigenvalues(


 def _geometric_mean(
-    matrices: List[np.ndarray],
+    matrices: list[np.ndarray],
     init: Optional[np.ndarray] = None,
     max_iter: int = 10,
     tol: Optional[float] = 1e-7,

junifer/external/nilearn/junifer_nifti_spheres_masker.py

@@ -3,7 +3,7 @@
 # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
 # License: AGPL

-from typing import TYPE_CHECKING, Callable, List, Optional, Tuple, Union
+from typing import TYPE_CHECKING, Callable, Optional, Union

 import numpy as np
 from nilearn import image, masking
@@ -271,7 +271,7 @@ class _JuniferExtractionFunctor:
     def __call__(
         self,
         imgs: Union["Nifti1Image", "Nifti2Image"],
-    ) -> Tuple["ArrayLike", None]:
+    ) -> tuple["ArrayLike", None]:
         """Implement function call overloading.

         Parameters
@@ -372,9 +372,9 @@ class JuniferNiftiSpheresMasker(NiftiSpheresMasker):
         self,
         imgs: Union["Nifti1Image", "Nifti2Image"],
         confounds: Union[
-            str, "Path", "ArrayLike", "DataFrame", List, None
+            str, "Path", "ArrayLike", "DataFrame", list, None
         ] = None,
-        sample_mask: Union["ArrayLike", List, Tuple, None] = None,
+        sample_mask: Union["ArrayLike", list, tuple, None] = None,
     ) -> "ArrayLike":
         """Extract signals from a single 4D niimg.
