junifer 0.0.6.dev219__py3-none-any.whl → 0.0.6.dev248__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (67)
  1. junifer/__init__.pyi +2 -0
  2. junifer/_version.py +2 -2
  3. junifer/api/decorators.py +5 -4
  4. junifer/api/functions.py +9 -8
  5. junifer/data/coordinates/_ants_coordinates_warper.py +4 -6
  6. junifer/data/coordinates/_coordinates.py +28 -15
  7. junifer/data/coordinates/_fsl_coordinates_warper.py +4 -6
  8. junifer/data/masks/_ants_mask_warper.py +16 -9
  9. junifer/data/masks/_fsl_mask_warper.py +4 -6
  10. junifer/data/masks/_masks.py +21 -25
  11. junifer/data/parcellations/_ants_parcellation_warper.py +16 -9
  12. junifer/data/parcellations/_fsl_parcellation_warper.py +4 -6
  13. junifer/data/parcellations/_parcellations.py +20 -24
  14. junifer/data/utils.py +67 -3
  15. junifer/datagrabber/aomic/id1000.py +22 -9
  16. junifer/datagrabber/aomic/piop1.py +22 -9
  17. junifer/datagrabber/aomic/piop2.py +22 -9
  18. junifer/datagrabber/base.py +6 -1
  19. junifer/datagrabber/datalad_base.py +15 -8
  20. junifer/datagrabber/dmcc13_benchmark.py +23 -10
  21. junifer/datagrabber/hcp1200/hcp1200.py +18 -7
  22. junifer/datagrabber/multiple.py +2 -1
  23. junifer/datagrabber/pattern.py +65 -35
  24. junifer/datagrabber/pattern_validation_mixin.py +197 -87
  25. junifer/datagrabber/tests/test_dmcc13_benchmark.py +26 -9
  26. junifer/markers/base.py +4 -7
  27. junifer/markers/brainprint.py +4 -4
  28. junifer/markers/complexity/complexity_base.py +3 -3
  29. junifer/markers/ets_rss.py +4 -3
  30. junifer/markers/falff/_afni_falff.py +3 -5
  31. junifer/markers/falff/_junifer_falff.py +3 -3
  32. junifer/markers/falff/falff_base.py +4 -6
  33. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +4 -3
  34. junifer/markers/functional_connectivity/functional_connectivity_base.py +4 -3
  35. junifer/markers/parcel_aggregation.py +4 -3
  36. junifer/markers/reho/_afni_reho.py +3 -5
  37. junifer/markers/reho/_junifer_reho.py +3 -3
  38. junifer/markers/reho/reho_base.py +4 -6
  39. junifer/markers/sphere_aggregation.py +4 -3
  40. junifer/markers/temporal_snr/temporal_snr_base.py +4 -3
  41. junifer/onthefly/_brainprint.py +4 -7
  42. junifer/onthefly/read_transform.py +3 -6
  43. junifer/pipeline/marker_collection.py +6 -12
  44. junifer/pipeline/pipeline_component_registry.py +3 -8
  45. junifer/pipeline/pipeline_step_mixin.py +8 -4
  46. junifer/pipeline/tests/test_pipeline_step_mixin.py +18 -19
  47. junifer/pipeline/tests/test_workdir_manager.py +1 -0
  48. junifer/pipeline/update_meta_mixin.py +21 -17
  49. junifer/preprocess/confounds/fmriprep_confound_remover.py +2 -2
  50. junifer/preprocess/smoothing/_afni_smoothing.py +4 -6
  51. junifer/preprocess/smoothing/_fsl_smoothing.py +4 -7
  52. junifer/preprocess/smoothing/_nilearn_smoothing.py +3 -3
  53. junifer/preprocess/smoothing/smoothing.py +3 -2
  54. junifer/preprocess/warping/_ants_warper.py +26 -7
  55. junifer/preprocess/warping/_fsl_warper.py +22 -8
  56. junifer/preprocess/warping/space_warper.py +34 -6
  57. junifer/preprocess/warping/tests/test_space_warper.py +4 -7
  58. junifer/typing/__init__.py +9 -0
  59. junifer/typing/__init__.pyi +23 -0
  60. junifer/typing/_typing.py +61 -0
  61. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/METADATA +2 -1
  62. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/RECORD +67 -64
  63. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/WHEEL +1 -1
  64. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/AUTHORS.rst +0 -0
  65. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/LICENSE.md +0 -0
  66. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/entry_points.txt +0 -0
  67. {junifer-0.0.6.dev219.dist-info → junifer-0.0.6.dev248.dist-info}/top_level.txt +0 -0
junifer/datagrabber/pattern_validation_mixin.py CHANGED
@@ -3,7 +3,7 @@
  # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL
 
- from typing import Dict, List
+ from typing import Dict, List, Union
 
  from ..utils import logger, raise_error, warn_with_log
 
@@ -36,7 +36,7 @@ PATTERNS_SCHEMA = {
  },
  },
  "Warp": {
- "mandatory": ["pattern", "src", "dst"],
+ "mandatory": ["pattern", "src", "dst", "warper"],
  "optional": {},
  },
  "VBM_GM": {
@@ -96,7 +96,7 @@ class PatternValidationMixin:
  def _validate_replacements(
  self,
  replacements: List[str],
- patterns: Dict[str, Dict[str, str]],
+ patterns: Dict[str, Union[Dict[str, str], List[Dict[str, str]]]],
  partial_pattern_ok: bool,
  ) -> None:
  """Validate the replacements.
@@ -132,39 +132,51 @@ class PatternValidationMixin:
 
  if any(not isinstance(x, str) for x in replacements):
  raise_error(
- msg="`replacements` must be a list of strings.",
+ msg="`replacements` must be a list of strings",
  klass=TypeError,
  )
 
+ # Make a list of all patterns recursively
+ all_patterns = []
+ for dtype_val in patterns.values():
+ # Conditional for list dtype vals like Warp
+ if isinstance(dtype_val, list):
+ for entry in dtype_val:
+ all_patterns.append(entry.get("pattern", ""))
+ else:
+ all_patterns.append(dtype_val.get("pattern", ""))
+ # Check for stray replacements
  for x in replacements:
- if all(
- x not in y
- for y in [
- data_type_val.get("pattern", "")
- for data_type_val in patterns.values()
- ]
- ):
+ if all(x not in y for y in all_patterns):
  if partial_pattern_ok:
  warn_with_log(
  f"Replacement: `{x}` is not part of any pattern, "
  "things might not work as expected if you are unsure "
- "of what you are doing"
+ "of what you are doing."
  )
  else:
  raise_error(
- msg=f"Replacement: {x} is not part of any pattern."
+ msg=f"Replacement: `{x}` is not part of any pattern"
  )
 
  # Check that at least one pattern has all the replacements
  at_least_one = False
- for data_type_val in patterns.values():
- if all(
- x in data_type_val.get("pattern", "") for x in replacements
- ):
- at_least_one = True
+ for dtype_val in patterns.values():
+ # Conditional for list dtype vals like Warp
+ if isinstance(dtype_val, list):
+ for entry in dtype_val:
+ if all(
+ x in entry.get("pattern", "") for x in replacements
+ ):
+ at_least_one = True
+ else:
+ if all(
+ x in dtype_val.get("pattern", "") for x in replacements
+ ):
+ at_least_one = True
  if not at_least_one and not partial_pattern_ok:
  raise_error(
- msg="At least one pattern must contain all replacements."
+ msg="At least one pattern must contain all replacements"
  )
 
  def _validate_mandatory_keys(
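
The refactor above first flattens every pattern, whether its data type maps to a single dict or to a list of dicts like `Warp`, before checking for stray replacements. A standalone sketch of the same flattening idea, assuming nothing beyond plain dicts and lists (names are illustrative, not junifer API):

def collect_patterns(patterns):
    """Flatten dict- and list-valued data types into one list of patterns."""
    collected = []
    for val in patterns.values():
        entries = val if isinstance(val, list) else [val]
        collected.extend(entry.get("pattern", "") for entry in entries)
    return collected

# A replacement such as "{subject}" is stray if no collected pattern contains it
pats = collect_patterns({"T1w": {"pattern": "{subject}/anat/T1w.nii.gz"}})
assert any("{subject}" in p for p in pats)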
@@ -207,7 +219,7 @@ class PatternValidationMixin:
  warn_with_log(
  f"Mandatory key: `{key}` not found for {data_type}, "
  "things might not work as expected if you are unsure "
- "of what you are doing"
+ "of what you are doing."
  )
  else:
  raise_error(
@@ -215,7 +227,7 @@ class PatternValidationMixin:
  klass=KeyError,
  )
  else:
- logger.debug(f"Mandatory key: `{key}` found for {data_type}")
+ logger.debug(f"Mandatory key: `{key}` found for {data_type}.")
 
  def _identify_stray_keys(
  self, keys: List[str], schema: List[str], data_type: str
@@ -251,7 +263,7 @@ class PatternValidationMixin:
  self,
  types: List[str],
  replacements: List[str],
- patterns: Dict[str, Dict[str, str]],
+ patterns: Dict[str, Union[Dict[str, str], List[Dict[str, str]]]],
  partial_pattern_ok: bool = False,
  ) -> None:
  """Validate the patterns.
@@ -298,87 +310,185 @@ class PatternValidationMixin:
  msg="`patterns` must contain all `types`", klass=ValueError
  )
  # Check against schema
- for data_type_key, data_type_val in patterns.items():
+ for dtype_key, dtype_val in patterns.items():
  # Check if valid data type is provided
- if data_type_key not in PATTERNS_SCHEMA:
+ if dtype_key not in PATTERNS_SCHEMA:
  raise_error(
- f"Unknown data type: {data_type_key}, "
+ f"Unknown data type: {dtype_key}, "
  f"should be one of: {list(PATTERNS_SCHEMA.keys())}"
  )
- # Check mandatory keys for data type
- self._validate_mandatory_keys(
- keys=list(data_type_val),
- schema=PATTERNS_SCHEMA[data_type_key]["mandatory"],
- data_type=data_type_key,
- partial_pattern_ok=partial_pattern_ok,
- )
- # Check optional keys for data type
- for optional_key, optional_val in PATTERNS_SCHEMA[data_type_key][
- "optional"
- ].items():
- if optional_key not in data_type_val:
- logger.debug(
- f"Optional key: `{optional_key}` missing for "
- f"{data_type_key}"
- )
- else:
- logger.debug(
- f"Optional key: `{optional_key}` found for "
- f"{data_type_key}"
- )
- # Set nested type name for easier access
- nested_data_type = f"{data_type_key}.{optional_key}"
- nested_mandatory_keys_schema = PATTERNS_SCHEMA[
- data_type_key
- ]["optional"][optional_key]["mandatory"]
- nested_optional_keys_schema = PATTERNS_SCHEMA[
- data_type_key
- ]["optional"][optional_key]["optional"]
- # Check mandatory keys for nested type
+ # Conditional for list dtype vals like Warp
+ if isinstance(dtype_val, list):
+ for idx, entry in enumerate(dtype_val):
+ # Check mandatory keys for data type
  self._validate_mandatory_keys(
- keys=list(optional_val["mandatory"]),
- schema=nested_mandatory_keys_schema,
- data_type=nested_data_type,
+ keys=list(entry),
+ schema=PATTERNS_SCHEMA[dtype_key]["mandatory"],
+ data_type=f"{dtype_key}.{idx}",
  partial_pattern_ok=partial_pattern_ok,
  )
- # Check optional keys for nested type
- for nested_optional_key in nested_optional_keys_schema:
- if nested_optional_key not in optional_val["optional"]:
+ # Check optional keys for data type
+ for optional_key, optional_val in PATTERNS_SCHEMA[
+ dtype_key
+ ]["optional"].items():
+ if optional_key not in entry:
  logger.debug(
- f"Optional key: `{nested_optional_key}` "
- f"missing for {nested_data_type}"
+ f"Optional key: `{optional_key}` missing for "
+ f"{dtype_key}.{idx}"
  )
  else:
  logger.debug(
- f"Optional key: `{nested_optional_key}` found "
- f"for {nested_data_type}"
+ f"Optional key: `{optional_key}` found for "
+ f"{dtype_key}.{idx}"
+ )
+ # Set nested type name for easier access
+ nested_dtype = f"{dtype_key}.{idx}.{optional_key}"
+ nested_mandatory_keys_schema = PATTERNS_SCHEMA[
+ dtype_key
+ ]["optional"][optional_key]["mandatory"]
+ nested_optional_keys_schema = PATTERNS_SCHEMA[
+ dtype_key
+ ]["optional"][optional_key]["optional"]
+ # Check mandatory keys for nested type
+ self._validate_mandatory_keys(
+ keys=list(optional_val["mandatory"]),
+ schema=nested_mandatory_keys_schema,
+ data_type=nested_dtype,
+ partial_pattern_ok=partial_pattern_ok,
+ )
+ # Check optional keys for nested type
+ for (
+ nested_optional_key
+ ) in nested_optional_keys_schema:
+ if (
+ nested_optional_key
+ not in optional_val["optional"]
+ ):
+ logger.debug(
+ f"Optional key: "
+ f"`{nested_optional_key}` missing for "
+ f"{nested_dtype}"
+ )
+ else:
+ logger.debug(
+ f"Optional key: "
+ f"`{nested_optional_key}` found for "
+ f"{nested_dtype}"
+ )
+ # Check stray key for nested data type
+ self._identify_stray_keys(
+ keys=(
+ optional_val["mandatory"]
+ + optional_val["optional"]
+ ),
+ schema=(
+ nested_mandatory_keys_schema
+ + nested_optional_keys_schema
+ ),
+ data_type=nested_dtype,
  )
- # Check stray key for nested data type
+ # Check stray key for data type
  self._identify_stray_keys(
- keys=optional_val["mandatory"]
- + optional_val["optional"],
- schema=nested_mandatory_keys_schema
- + nested_optional_keys_schema,
- data_type=nested_data_type,
+ keys=list(entry.keys()),
+ schema=(
+ PATTERNS_SCHEMA[dtype_key]["mandatory"]
+ + list(
+ PATTERNS_SCHEMA[dtype_key]["optional"].keys()
+ )
+ ),
+ data_type=dtype_key,
  )
- # Check stray key for data type
- self._identify_stray_keys(
- keys=list(data_type_val.keys()),
- schema=(
- PATTERNS_SCHEMA[data_type_key]["mandatory"]
- + list(PATTERNS_SCHEMA[data_type_key]["optional"].keys())
- ),
- data_type=data_type_key,
- )
- # Wildcard check in patterns
- if "}*" in data_type_val.get("pattern", ""):
- raise_error(
- msg=(
- f"`{data_type_key}.pattern` must not contain `*` "
- "following a replacement"
+ # Wildcard check in patterns
+ if "}*" in entry.get("pattern", ""):
+ raise_error(
+ msg=(
+ f"`{dtype_key}.pattern` must not contain `*` "
+ "following a replacement"
+ ),
+ klass=ValueError,
+ )
+ else:
+ # Check mandatory keys for data type
+ self._validate_mandatory_keys(
+ keys=list(dtype_val),
+ schema=PATTERNS_SCHEMA[dtype_key]["mandatory"],
+ data_type=dtype_key,
+ partial_pattern_ok=partial_pattern_ok,
+ )
+ # Check optional keys for data type
+ for optional_key, optional_val in PATTERNS_SCHEMA[dtype_key][
+ "optional"
+ ].items():
+ if optional_key not in dtype_val:
+ logger.debug(
+ f"Optional key: `{optional_key}` missing for "
+ f"{dtype_key}."
+ )
+ else:
+ logger.debug(
+ f"Optional key: `{optional_key}` found for "
+ f"{dtype_key}."
+ )
+ # Set nested type name for easier access
+ nested_dtype = f"{dtype_key}.{optional_key}"
+ nested_mandatory_keys_schema = PATTERNS_SCHEMA[
+ dtype_key
+ ]["optional"][optional_key]["mandatory"]
+ nested_optional_keys_schema = PATTERNS_SCHEMA[
+ dtype_key
+ ]["optional"][optional_key]["optional"]
+ # Check mandatory keys for nested type
+ self._validate_mandatory_keys(
+ keys=list(optional_val["mandatory"]),
+ schema=nested_mandatory_keys_schema,
+ data_type=nested_dtype,
+ partial_pattern_ok=partial_pattern_ok,
+ )
+ # Check optional keys for nested type
+ for nested_optional_key in nested_optional_keys_schema:
+ if (
+ nested_optional_key
+ not in optional_val["optional"]
+ ):
+ logger.debug(
+ f"Optional key: `{nested_optional_key}` "
+ f"missing for {nested_dtype}"
+ )
+ else:
+ logger.debug(
+ f"Optional key: `{nested_optional_key}` "
+ f"found for {nested_dtype}"
+ )
+ # Check stray key for nested data type
+ self._identify_stray_keys(
+ keys=(
+ optional_val["mandatory"]
+ + optional_val["optional"]
+ ),
+ schema=(
+ nested_mandatory_keys_schema
+ + nested_optional_keys_schema
+ ),
+ data_type=nested_dtype,
+ )
+ # Check stray key for data type
+ self._identify_stray_keys(
+ keys=list(dtype_val.keys()),
+ schema=(
+ PATTERNS_SCHEMA[dtype_key]["mandatory"]
+ + list(PATTERNS_SCHEMA[dtype_key]["optional"].keys())
  ),
- klass=ValueError,
+ data_type=dtype_key,
  )
+ # Wildcard check in patterns
+ if "}*" in dtype_val.get("pattern", ""):
+ raise_error(
+ msg=(
+ f"`{dtype_key}.pattern` must not contain `*` "
+ "following a replacement"
+ ),
+ klass=ValueError,
+ )
 
  # Validate replacements
  self._validate_replacements(
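
Note how each list entry is validated under an indexed name, so errors and debug logs point at the offending element. An illustrative reduction of that naming scheme, mirroring the f-strings above:

dtype_key = "Warp"
entries = [{"pattern": "a_xfm.h5"}, {"pattern": "b_xfm.h5"}]
names = [f"{dtype_key}.{idx}" for idx, _ in enumerate(entries)]
assert names == ["Warp.0", "Warp.1"]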
junifer/datagrabber/tests/test_dmcc13_benchmark.py CHANGED
@@ -116,7 +116,12 @@ def test_DMCC13Benchmark(
  data_file_names.extend(
  [
  "sub-01_desc-preproc_T1w.nii.gz",
- "sub-01_from-MNI152NLin2009cAsym_to-T1w_mode-image_xfm.h5",
+ [
+ "sub-01_from-MNI152NLin2009cAsym_to-T1w"
+ "_mode-image_xfm.h5",
+ "sub-01_from-T1w_to-MNI152NLin2009cAsym"
+ "_mode-image_xfm.h5",
+ ],
  ]
  )
  else:
@@ -127,14 +132,26 @@ def test_DMCC13Benchmark(
  for data_type, data_file_name in zip(data_types, data_file_names):
  # Assert data type
  assert data_type in out
- # Assert data file path exists
- assert out[data_type]["path"].exists()
- # Assert data file path is a file
- assert out[data_type]["path"].is_file()
- # Assert data file name
- assert out[data_type]["path"].name == data_file_name
- # Assert metadata
- assert "meta" in out[data_type]
+ # Conditional for Warp
+ if data_type == "Warp":
+ for idx, fname in enumerate(data_file_name):
+ # Assert data file path exists
+ assert out[data_type][idx]["path"].exists()
+ # Assert data file path is a file
+ assert out[data_type][idx]["path"].is_file()
+ # Assert data file name
+ assert out[data_type][idx]["path"].name == fname
+ # Assert metadata
+ assert "meta" in out[data_type][idx]
+ else:
+ # Assert data file path exists
+ assert out[data_type]["path"].exists()
+ # Assert data file path is a file
+ assert out[data_type]["path"].is_file()
+ # Assert data file name
+ assert out[data_type]["path"].name == data_file_name
+ # Assert metadata
+ assert "meta" in out[data_type]
 
  # Check BOLD nested data types
  for type_, file_name in zip(
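
With this change, `out["Warp"]` holds a list of dicts rather than a single dict, so downstream code indexes into it. A minimal sketch of the access pattern, assuming only the layout the assertions above establish:

# Each Warp entry carries its own file path and metadata
for entry in out["Warp"]:
    assert entry["path"].is_file()
    assert "meta" in entry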
junifer/markers/base.py CHANGED
@@ -6,16 +6,13 @@
 
  from abc import ABC, abstractmethod
  from copy import deepcopy
- from typing import TYPE_CHECKING, Any, Dict, List, Optional, Union
+ from typing import Any, Dict, List, Optional, Union
 
  from ..pipeline import PipelineStepMixin, UpdateMetaMixin
+ from ..typing import StorageLike
  from ..utils import logger, raise_error
 
 
- if TYPE_CHECKING:
- from junifer.storage import BaseFeatureStorage
-
-
  __all__ = ["BaseMarker"]
 
 
@@ -159,7 +156,7 @@ class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
  type_: str,
  feature: str,
  out: Dict[str, Any],
- storage: "BaseFeatureStorage",
+ storage: StorageLike,
  ) -> None:
  """Store.
 
@@ -182,7 +179,7 @@ class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
  def _fit_transform(
  self,
  input: Dict[str, Dict],
- storage: Optional["BaseFeatureStorage"] = None,
+ storage: Optional[StorageLike] = None,
  ) -> Dict:
  """Fit and transform.
 
junifer/markers/brainprint.py CHANGED
@@ -11,7 +11,6 @@ from typing import (
  Dict,
  List,
  Optional,
- Set,
  Union,
  )
 
@@ -25,6 +24,7 @@ from ..external.BrainPrint.brainprint.brainprint import (
  )
  from ..external.BrainPrint.brainprint.surfaces import surf_to_vtk
  from ..pipeline import WorkDirManager
+ from ..typing import Dependencies, ExternalDependencies, MarkerInOutMappings
  from ..utils import logger, run_ext_cmd
  from .base import BaseMarker
 
@@ -68,7 +68,7 @@ class BrainPrint(BaseMarker):
 
  """
 
- _EXT_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, List[str]]]]] = [
+ _EXT_DEPENDENCIES: ClassVar[ExternalDependencies] = [
  {
  "name": "freesurfer",
  "commands": [
@@ -80,9 +80,9 @@ class BrainPrint(BaseMarker):
  },
  ]
 
- _DEPENDENCIES: ClassVar[Set[str]] = {"lapy", "numpy"}
+ _DEPENDENCIES: ClassVar[Dependencies] = {"lapy", "numpy"}
 
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+ _MARKER_INOUT_MAPPINGS: ClassVar[MarkerInOutMappings] = {
  "FreeSurfer": {
  "eigenvalues": "scalar_table",
  "areas": "vector",
junifer/markers/complexity/complexity_base.py CHANGED
@@ -11,10 +11,10 @@ from typing import (
  Dict,
  List,
  Optional,
- Set,
  Union,
  )
 
+ from ...typing import Dependencies, MarkerInOutMappings
  from ...utils import raise_error
  from ..base import BaseMarker
  from ..parcel_aggregation import ParcelAggregation
@@ -51,9 +51,9 @@ class ComplexityBase(BaseMarker):
 
  """
 
- _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn", "neurokit2"}
+ _DEPENDENCIES: ClassVar[Dependencies] = {"nilearn", "neurokit2"}
 
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+ _MARKER_INOUT_MAPPINGS: ClassVar[MarkerInOutMappings] = {
  "BOLD": {
  "complexity": "vector",
  },
junifer/markers/ets_rss.py CHANGED
@@ -6,11 +6,12 @@
  # Synchon Mandal <s.mandal@fz-juelich.de>
  # License: AGPL
 
- from typing import Any, ClassVar, Dict, List, Optional, Set, Union
+ from typing import Any, ClassVar, Dict, List, Optional, Union
 
  import numpy as np
 
  from ..api.decorators import register_marker
+ from ..typing import Dependencies, MarkerInOutMappings
  from ..utils import logger
  from .base import BaseMarker
  from .parcel_aggregation import ParcelAggregation
@@ -45,9 +46,9 @@ class RSSETSMarker(BaseMarker):
 
  """
 
- _DEPENDENCIES: ClassVar[Set[str]] = {"nilearn"}
+ _DEPENDENCIES: ClassVar[Dependencies] = {"nilearn"}
 
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+ _MARKER_INOUT_MAPPINGS: ClassVar[MarkerInOutMappings] = {
  "BOLD": {
  "rss_ets": "timeseries",
  },
junifer/markers/falff/_afni_falff.py CHANGED
@@ -8,22 +8,20 @@ from pathlib import Path
  from typing import (
  TYPE_CHECKING,
  ClassVar,
- Dict,
- List,
  Optional,
  Tuple,
- Union,
  )
 
  import nibabel as nib
 
  from ...pipeline import WorkDirManager
+ from ...typing import ExternalDependencies
  from ...utils import logger, run_ext_cmd
  from ...utils.singleton import Singleton
 
 
  if TYPE_CHECKING:
- from nibabel import Nifti1Image
+ from nibabel.nifti1 import Nifti1Image
 
 
  __all__ = ["AFNIALFF"]
@@ -37,7 +35,7 @@ class AFNIALFF(metaclass=Singleton):
 
  """
 
- _EXT_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, List[str]]]]] = [
+ _EXT_DEPENDENCIES: ClassVar[ExternalDependencies] = [
  {
  "name": "afni",
  "commands": ["3dRSFC", "3dAFNItoNIFTI"],
junifer/markers/falff/_junifer_falff.py CHANGED
@@ -9,7 +9,6 @@ from typing import (
  TYPE_CHECKING,
  ClassVar,
  Optional,
- Set,
  Tuple,
  )
 
@@ -19,12 +18,13 @@ import scipy as sp
  from nilearn import image as nimg
 
  from ...pipeline import WorkDirManager
+ from ...typing import Dependencies
  from ...utils import logger
  from ...utils.singleton import Singleton
 
 
  if TYPE_CHECKING:
- from nibabel import Nifti1Image
+ from nibabel.nifti1 import Nifti1Image
 
 
  __all__ = ["JuniferALFF"]
@@ -37,7 +37,7 @@ class JuniferALFF(metaclass=Singleton):
 
  """
 
- _DEPENDENCIES: ClassVar[Set[str]] = {"numpy", "nilearn", "scipy"}
+ _DEPENDENCIES: ClassVar[Dependencies] = {"numpy", "nilearn", "scipy"}
 
  def __del__(self) -> None:
  """Terminate the class."""
junifer/markers/falff/falff_base.py CHANGED
@@ -12,13 +12,11 @@ from typing import (
  Any,
  ClassVar,
  Dict,
- List,
  Optional,
  Tuple,
- Type,
- Union,
  )
 
+ from ...typing import ConditionalDependencies, MarkerInOutMappings
  from ...utils.logging import logger, raise_error
  from ..base import BaseMarker
  from ._afni_falff import AFNIALFF
@@ -26,7 +24,7 @@ from ._junifer_falff import JuniferALFF
 
 
  if TYPE_CHECKING:
- from nibabel import Nifti1Image
+ from nibabel.nifti1 import Nifti1Image
 
 
  __all__ = ["ALFFBase"]
@@ -72,7 +70,7 @@ class ALFFBase(BaseMarker):
 
  """
 
- _CONDITIONAL_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, Type]]]] = [
+ _CONDITIONAL_DEPENDENCIES: ClassVar[ConditionalDependencies] = [
  {
  "using": "afni",
  "depends_on": AFNIALFF,
@@ -83,7 +81,7 @@ class ALFFBase(BaseMarker):
  },
  ]
 
- _MARKER_INOUT_MAPPINGS: ClassVar[Dict[str, Dict[str, str]]] = {
+ _MARKER_INOUT_MAPPINGS: ClassVar[MarkerInOutMappings] = {
  "BOLD": {
  "alff": "vector",
  "falff": "vector",