junifer 0.0.5.dev131__py3-none-any.whl → 0.0.5.dev152__py3-none-any.whl

This diff shows the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
Files changed (124)
  1. junifer/__init__.py +17 -0
  2. junifer/_version.py +2 -2
  3. junifer/api/__init__.py +3 -0
  4. junifer/api/decorators.py +9 -0
  5. junifer/api/functions.py +7 -4
  6. junifer/api/parser.py +3 -0
  7. junifer/api/queue_context/__init__.py +3 -0
  8. junifer/api/res/afni/run_afni_docker.sh +1 -1
  9. junifer/api/res/ants/run_ants_docker.sh +1 -1
  10. junifer/api/res/fsl/run_fsl_docker.sh +1 -1
  11. junifer/configs/juseless/__init__.py +3 -0
  12. junifer/configs/juseless/datagrabbers/__init__.py +9 -0
  13. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +3 -0
  14. junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
  15. junifer/configs/juseless/datagrabbers/ixi_vbm.py +3 -0
  16. junifer/configs/juseless/datagrabbers/ucla.py +3 -0
  17. junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
  18. junifer/data/__init__.py +20 -0
  19. junifer/data/coordinates.py +8 -0
  20. junifer/data/masks.py +10 -0
  21. junifer/data/parcellations.py +9 -0
  22. junifer/data/template_spaces.py +3 -0
  23. junifer/data/utils.py +3 -0
  24. junifer/datagrabber/__init__.py +15 -0
  25. junifer/datagrabber/aomic/__init__.py +3 -0
  26. junifer/datagrabber/aomic/id1000.py +3 -0
  27. junifer/datagrabber/aomic/piop1.py +3 -0
  28. junifer/datagrabber/aomic/piop2.py +3 -0
  29. junifer/datagrabber/base.py +3 -0
  30. junifer/datagrabber/datalad_base.py +3 -0
  31. junifer/datagrabber/hcp1200/__init__.py +3 -0
  32. junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
  33. junifer/datagrabber/hcp1200/hcp1200.py +3 -0
  34. junifer/datagrabber/multiple.py +3 -0
  35. junifer/datagrabber/pattern.py +3 -0
  36. junifer/datagrabber/pattern_datalad.py +3 -0
  37. junifer/datagrabber/utils.py +3 -0
  38. junifer/datareader/__init__.py +3 -0
  39. junifer/datareader/default.py +3 -0
  40. junifer/external/nilearn/junifer_nifti_spheres_masker.py +7 -7
  41. junifer/markers/__init__.py +21 -0
  42. junifer/markers/base.py +21 -7
  43. junifer/markers/brainprint.py +3 -0
  44. junifer/markers/collection.py +3 -0
  45. junifer/markers/complexity/__init__.py +10 -0
  46. junifer/markers/complexity/complexity_base.py +3 -0
  47. junifer/markers/complexity/hurst_exponent.py +3 -0
  48. junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
  49. junifer/markers/complexity/perm_entropy.py +3 -0
  50. junifer/markers/complexity/range_entropy.py +3 -0
  51. junifer/markers/complexity/range_entropy_auc.py +3 -0
  52. junifer/markers/complexity/sample_entropy.py +3 -0
  53. junifer/markers/complexity/tests/test_hurst_exponent.py +7 -0
  54. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +7 -0
  55. junifer/markers/complexity/tests/test_perm_entropy.py +7 -0
  56. junifer/markers/complexity/tests/test_range_entropy.py +7 -0
  57. junifer/markers/complexity/tests/test_range_entropy_auc.py +7 -0
  58. junifer/markers/complexity/tests/test_sample_entropy.py +7 -0
  59. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +7 -0
  60. junifer/markers/complexity/weighted_perm_entropy.py +3 -0
  61. junifer/markers/ets_rss.py +3 -0
  62. junifer/markers/falff/__init__.py +3 -0
  63. junifer/markers/falff/_afni_falff.py +3 -0
  64. junifer/markers/falff/_junifer_falff.py +3 -0
  65. junifer/markers/falff/falff_base.py +3 -0
  66. junifer/markers/falff/falff_parcels.py +3 -0
  67. junifer/markers/falff/falff_spheres.py +3 -0
  68. junifer/markers/functional_connectivity/__init__.py +9 -0
  69. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +3 -0
  70. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +3 -0
  71. junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +3 -0
  72. junifer/markers/functional_connectivity/functional_connectivity_base.py +3 -0
  73. junifer/markers/functional_connectivity/functional_connectivity_parcels.py +3 -0
  74. junifer/markers/functional_connectivity/functional_connectivity_spheres.py +3 -0
  75. junifer/markers/parcel_aggregation.py +3 -0
  76. junifer/markers/reho/__init__.py +3 -0
  77. junifer/markers/reho/_afni_reho.py +3 -0
  78. junifer/markers/reho/_junifer_reho.py +3 -0
  79. junifer/markers/reho/reho_base.py +2 -0
  80. junifer/markers/reho/reho_parcels.py +3 -0
  81. junifer/markers/reho/reho_spheres.py +3 -0
  82. junifer/markers/sphere_aggregation.py +3 -0
  83. junifer/markers/temporal_snr/__init__.py +3 -0
  84. junifer/markers/temporal_snr/temporal_snr_base.py +3 -0
  85. junifer/markers/temporal_snr/temporal_snr_parcels.py +3 -0
  86. junifer/markers/temporal_snr/temporal_snr_spheres.py +3 -0
  87. junifer/onthefly/__init__.py +3 -0
  88. junifer/onthefly/read_transform.py +3 -0
  89. junifer/pipeline/__init__.py +8 -0
  90. junifer/pipeline/pipeline_step_mixin.py +3 -0
  91. junifer/pipeline/registry.py +3 -0
  92. junifer/pipeline/singleton.py +3 -0
  93. junifer/pipeline/update_meta_mixin.py +3 -0
  94. junifer/pipeline/utils.py +3 -0
  95. junifer/pipeline/workdir_manager.py +3 -0
  96. junifer/preprocess/__init__.py +8 -0
  97. junifer/preprocess/base.py +4 -1
  98. junifer/preprocess/confounds/__init__.py +3 -0
  99. junifer/preprocess/confounds/fmriprep_confound_remover.py +3 -0
  100. junifer/preprocess/smoothing/__init__.py +3 -0
  101. junifer/preprocess/warping/__init__.py +3 -0
  102. junifer/preprocess/warping/_ants_warper.py +3 -0
  103. junifer/preprocess/warping/_fsl_warper.py +3 -0
  104. junifer/stats.py +3 -0
  105. junifer/storage/__init__.py +8 -0
  106. junifer/storage/base.py +3 -0
  107. junifer/storage/hdf5.py +3 -0
  108. junifer/storage/pandas_base.py +3 -0
  109. junifer/storage/sqlite.py +3 -0
  110. junifer/storage/utils.py +9 -0
  111. junifer/testing/__init__.py +3 -0
  112. junifer/testing/datagrabbers.py +7 -0
  113. junifer/testing/utils.py +3 -0
  114. junifer/utils/__init__.py +10 -0
  115. junifer/utils/fs.py +3 -0
  116. junifer/utils/helpers.py +3 -0
  117. junifer/utils/logging.py +10 -0
  118. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/METADATA +15 -15
  119. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/RECORD +124 -124
  120. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/AUTHORS.rst +0 -0
  121. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/LICENSE.md +0 -0
  122. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/WHEEL +0 -0
  123. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/entry_points.txt +0 -0
  124. {junifer-0.0.5.dev131.dist-info → junifer-0.0.5.dev152.dist-info}/top_level.txt +0 -0
junifer/__init__.py CHANGED
@@ -20,3 +20,20 @@ from . import (
     onthefly,
 )
 from ._version import __version__
+
+
+__all__ = [
+    "api",
+    "configs",
+    "data",
+    "datagrabber",
+    "datareader",
+    "markers",
+    "pipeline",
+    "preprocess",
+    "stats",
+    "storage",
+    "utils",
+    "external",
+    "onthefly",
+]
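Most of the changes in this diff are mechanical: nearly every module gains an explicit __all__ declaration like the one above. As a quick illustrative sketch (not part of the package), __all__ documents the intended public surface and pins down what a wildcard import exposes:

    # Hypothetical check against the names added to junifer/__init__.py above.
    import junifer

    # Every name declared public should resolve on the package...
    assert all(hasattr(junifer, name) for name in junifer.__all__)

    # ...and `from junifer import *` now binds exactly these names.
    print(sorted(junifer.__all__))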
junifer/_version.py CHANGED
@@ -12,5 +12,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE
 
-__version__ = version = '0.0.5.dev131'
-__version_tuple__ = version_tuple = (0, 0, 5, 'dev131')
+__version__ = version = '0.0.5.dev152'
+__version_tuple__ = version_tuple = (0, 0, 5, 'dev152')
junifer/api/__init__.py CHANGED
@@ -7,3 +7,6 @@
 from . import decorators
 from .cli import cli
 from .functions import collect, queue, run
+
+
+__all__ = ["decorators", "cli", "collect", "queue", "run"]
junifer/api/decorators.py CHANGED
@@ -10,6 +10,15 @@ from typing import Type
 from ..pipeline.registry import register
 
 
+__all__ = [
+    "register_datagrabber",
+    "register_datareader",
+    "register_preprocessor",
+    "register_marker",
+    "register_storage",
+]
+
+
 def register_datagrabber(klass: Type) -> Type:
     """Register DataGrabber.
 
junifer/api/functions.py CHANGED
@@ -23,6 +23,9 @@ from .queue_context import GnuParallelLocalAdapter, HTCondorAdapter
 from .utils import yaml
 
 
+__all__ = ["run", "collect", "queue", "reset", "list_elements"]
+
+
 def _get_datagrabber(datagrabber_config: Dict) -> BaseDataGrabber:
     """Get DataGrabber.
 
@@ -92,7 +95,7 @@ def run(
     datagrabber : dict
         DataGrabber to use. Must have a key ``kind`` with the kind of
         DataGrabber to use. All other keys are passed to the DataGrabber
-        init function.
+        constructor.
     markers : list of dict
         List of markers to extract. Each marker is a dict with at least two
         keys: ``name`` and ``kind``. The ``name`` key is used to name the
@@ -102,11 +105,11 @@
     storage : dict
         Storage to use. Must have a key ``kind`` with the kind of
         storage to use. All other keys are passed to the storage
-        init function.
+        constructor.
     preprocessors : list of dict, optional
        List of preprocessors to use. Each preprocessor is a dict with at
        least a key ``kind`` specifying the preprocessor to use. All other keys
-        are passed to the preprocessor init function (default None).
+        are passed to the preprocessor constructor (default None).
     elements : str or tuple or list of str or tuple, optional
        Element(s) to process. Will be used to index the DataGrabber
        (default None).
@@ -188,7 +191,7 @@ def collect(storage: Dict) -> None:
     storage : dict
        Storage to use. Must have a key ``kind`` with the kind of
        storage to use. All other keys are passed to the storage
-        init function.
+        constructor.
 
     """
    storage_params = storage.copy()
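The docstring fixes above ("init function" → "constructor") spell out run()'s dispatch convention: each dict names a registered class via its ``kind`` key, and every other key is handed to that class's constructor. A hedged sketch of a call is shown below; only the parameters documented above come from the package, while the working-directory argument, the concrete ``kind`` values, and all dataset-, marker-, and path-specific settings are illustrative assumptions:

    from junifer.api import run

    run(
        workdir="/tmp/junifer_work",  # assumed working-directory argument
        datagrabber={"kind": "DataladAOMICID1000"},  # ``kind`` selects the class
        markers=[
            {
                "name": "example_parcel_mean",   # hypothetical marker name
                "kind": "ParcelAggregation",     # remaining keys go to the constructor
                "parcellation": "Schaefer100x17",
                "method": "mean",
            }
        ],
        storage={"kind": "SQLiteFeatureStorage", "uri": "/tmp/out.sqlite"},
        elements=["sub-0001"],  # optional element selection
    )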
junifer/api/parser.py CHANGED
@@ -14,6 +14,9 @@ from ..utils.logging import logger, raise_error
 from .utils import yaml
 
 
+__all__ = ["parse_yaml"]
+
+
 def parse_yaml(filepath: Union[str, Path]) -> Dict:
     """Parse YAML.
 
junifer/api/queue_context/__init__.py CHANGED
@@ -6,3 +6,6 @@
 from .queue_context_adapter import QueueContextAdapter
 from .htcondor_adapter import HTCondorAdapter
 from .gnu_parallel_local_adapter import GnuParallelLocalAdapter
+
+
+__all__ = ["QueueContextAdapter", "HTCondorAdapter", "GnuParallelLocalAdapter"]
junifer/api/res/afni/run_afni_docker.sh CHANGED
@@ -31,7 +31,7 @@ if [ -d "${var}" ]; then
 done
 
 echo "Docker args: ${docker_args[*]}" >&2
-echo "Corrected args for afni: ${corrected_args[*]}" >&2
+echo "Corrected args for AFNI: ${corrected_args[*]}" >&2
 
 cwd=$(pwd)
 cmd="docker run --rm ${docker_args[*]} -v ${cwd}:${cwd} -w ${cwd} afni/afni_make_build ${corrected_args[*]}"
junifer/api/res/ants/run_ants_docker.sh CHANGED
@@ -31,7 +31,7 @@ if [ -d "${var}" ]; then
 done
 
 echo "Docker args: ${docker_args[*]}" >&2
-echo "Corrected args for fsl: ${corrected_args[*]}" >&2
+echo "Corrected args for ANTs: ${corrected_args[*]}" >&2
 
 cwd=$(pwd)
 cmd="docker run --rm ${docker_args[*]} -v ${cwd}:${cwd} -w ${cwd} antsx/ants ${corrected_args[*]}"
junifer/api/res/fsl/run_fsl_docker.sh CHANGED
@@ -31,7 +31,7 @@ if [ -d "${var}" ]; then
 done
 
 echo "Docker args: ${docker_args[*]}" >&2
-echo "Corrected args for fsl: ${corrected_args[*]}" >&2
+echo "Corrected args for FSL: ${corrected_args[*]}" >&2
 
 cwd=$(pwd)
 cmd="docker run --rm ${docker_args[*]} -v ${cwd}:${cwd} -w ${cwd} brainlife/fsl ${corrected_args[*]}"
junifer/configs/juseless/__init__.py CHANGED
@@ -4,3 +4,6 @@
 # License: AGPL
 
 from . import datagrabbers
+
+
+__all__ = ["datagrabbers"]
junifer/configs/juseless/datagrabbers/__init__.py CHANGED
@@ -10,3 +10,12 @@ from .camcan_vbm import JuselessDataladCamCANVBM
 from .ixi_vbm import JuselessDataladIXIVBM
 from .ucla import JuselessUCLA
 from .ukb_vbm import JuselessDataladUKBVBM
+
+
+__all__ = [
+    "JuselessDataladAOMICID1000VBM",
+    "JuselessDataladCamCANVBM",
+    "JuselessDataladIXIVBM",
+    "JuselessUCLA",
+    "JuselessDataladUKBVBM",
+]
junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py CHANGED
@@ -11,6 +11,9 @@ from ....api.decorators import register_datagrabber
 from ....datagrabber import PatternDataladDataGrabber
 
 
+__all__ = ["JuselessDataladAOMICID1000VBM"]
+
+
 @register_datagrabber
 class JuselessDataladAOMICID1000VBM(PatternDataladDataGrabber):
     """Concrete implementation for Juseless AOMIC ID1000 VBM data fetching.
junifer/configs/juseless/datagrabbers/camcan_vbm.py CHANGED
@@ -12,6 +12,9 @@ from ....api.decorators import register_datagrabber
 from ....datagrabber import PatternDataladDataGrabber
 
 
+__all__ = ["JuselessDataladCamCANVBM"]
+
+
 @register_datagrabber
 class JuselessDataladCamCANVBM(PatternDataladDataGrabber):
     """Concrete implementation for Juseless CamCAN VBM data fetching.
junifer/configs/juseless/datagrabbers/ixi_vbm.py CHANGED
@@ -13,6 +13,9 @@ from ....datagrabber import PatternDataladDataGrabber
 from ....utils import raise_error
 
 
+__all__ = ["JuselessDataladIXIVBM"]
+
+
 @register_datagrabber
 class JuselessDataladIXIVBM(PatternDataladDataGrabber):
     """Concrete implementation for Juseless IXI VBM data fetching.
junifer/configs/juseless/datagrabbers/ucla.py CHANGED
@@ -12,6 +12,9 @@ from ....datagrabber import PatternDataGrabber
 from ....utils import raise_error
 
 
+__all__ = ["JuselessUCLA"]
+
+
 @register_datagrabber
 class JuselessUCLA(PatternDataGrabber):
     """Concrete implementation for Juseless UCLA data fetching.
junifer/configs/juseless/datagrabbers/ukb_vbm.py CHANGED
@@ -12,6 +12,9 @@ from ....api.decorators import register_datagrabber
 from ....datagrabber import PatternDataladDataGrabber
 
 
+__all__ = ["JuselessDataladUKBVBM"]
+
+
 @register_datagrabber
 class JuselessDataladUKBVBM(PatternDataladDataGrabber):
     """Concrete implementation for Juseless UKB VBM data fetching.
junifer/data/__init__.py CHANGED
@@ -28,3 +28,23 @@ from .masks import (
 from .template_spaces import get_template, get_xfm
 
 from . import utils
+
+
+__all__ = [
+    "list_coordinates",
+    "load_coordinates",
+    "register_coordinates",
+    "get_coordinates",
+    "list_parcellations",
+    "load_parcellation",
+    "register_parcellation",
+    "merge_parcellations",
+    "get_parcellation",
+    "list_masks",
+    "load_mask",
+    "register_mask",
+    "get_mask",
+    "get_template",
+    "get_xfm",
+    "utils",
+]
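The data subpackage now re-exports its registry helpers explicitly. A small, hedged usage sketch follows; only the argument-free list_* helpers are called, and the comments about where the returned names are used are assumptions:

    from junifer.data import list_coordinates, list_masks, list_parcellations

    print(list_parcellations())  # names usable as ``parcellation`` values in markers (assumed)
    print(list_coordinates())    # names usable by sphere-based markers (assumed)
    print(list_masks())          # names usable as ``masks`` values (assumed)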
junifer/data/coordinates.py CHANGED
@@ -16,6 +16,14 @@ from ..pipeline import WorkDirManager
 from ..utils import logger, raise_error, run_ext_cmd
 
 
+__all__ = [
+    "register_coordinates",
+    "list_coordinates",
+    "get_coordinates",
+    "load_coordinates",
+]
+
+
 # Path to the VOIs
 _vois_path = Path(__file__).parent / "VOIs"
 
junifer/data/masks.py CHANGED
@@ -35,6 +35,16 @@ from .utils import closest_resolution
 if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
+
+__all__ = [
+    "compute_brain_mask",
+    "register_mask",
+    "list_masks",
+    "get_mask",
+    "load_mask",
+]
+
+
 # Path to the masks
 _masks_path = Path(__file__).parent / "masks"
 
junifer/data/parcellations.py CHANGED
@@ -30,6 +30,15 @@ if TYPE_CHECKING:
     from nibabel import Nifti1Image
 
 
+__all__ = [
+    "register_parcellation",
+    "list_parcellations",
+    "get_parcellation",
+    "load_parcellation",
+    "merge_parcellations",
+]
+
+
 # A dictionary containing all supported parcellations and their respective
 # valid parameters.
 
junifer/data/template_spaces.py CHANGED
@@ -15,6 +15,9 @@ from ..utils import logger, raise_error
 from .utils import closest_resolution
 
 
+__all__ = ["get_xfm", "get_template"]
+
+
 def get_xfm(
     src: str, dst: str, xfms_dir: Union[str, Path, None] = None
 ) -> Path:  # pragma: no cover
junifer/data/utils.py CHANGED
@@ -7,6 +7,9 @@ import numpy as np
 from ..utils.logging import logger
 
 
+__all__ = ["closest_resolution"]
+
+
 def closest_resolution(
     resolution: Optional[Union[float, int]],
     valid_resolution: Union[List[float], List[int], np.ndarray],
junifer/datagrabber/__init__.py CHANGED
@@ -16,3 +16,18 @@ from .aomic import DataladAOMICID1000, DataladAOMICPIOP1, DataladAOMICPIOP2
 from .hcp1200 import HCP1200, DataladHCP1200
 from .multiple import MultipleDataGrabber
 from .dmcc13_benchmark import DMCC13Benchmark
+
+
+__all__ = [
+    "BaseDataGrabber",
+    "DataladDataGrabber",
+    "PatternDataGrabber",
+    "PatternDataladDataGrabber",
+    "DataladAOMICID1000",
+    "DataladAOMICPIOP1",
+    "DataladAOMICPIOP2",
+    "HCP1200",
+    "DataladHCP1200",
+    "MultipleDataGrabber",
+    "DMCC13Benchmark",
+]
junifer/datagrabber/aomic/__init__.py CHANGED
@@ -7,3 +7,6 @@
 from .id1000 import DataladAOMICID1000
 from .piop1 import DataladAOMICPIOP1
 from .piop2 import DataladAOMICPIOP2
+
+
+__all__ = ["DataladAOMICID1000", "DataladAOMICPIOP1", "DataladAOMICPIOP2"]
junifer/datagrabber/aomic/id1000.py CHANGED
@@ -14,6 +14,9 @@ from ...api.decorators import register_datagrabber
 from ..pattern_datalad import PatternDataladDataGrabber
 
 
+__all__ = ["DataladAOMICID1000"]
+
+
 @register_datagrabber
 class DataladAOMICID1000(PatternDataladDataGrabber):
     """Concrete implementation for datalad-based data fetching of AOMIC ID1000.
junifer/datagrabber/aomic/piop1.py CHANGED
@@ -16,6 +16,9 @@ from ...utils import raise_error
 from ..pattern_datalad import PatternDataladDataGrabber
 
 
+__all__ = ["DataladAOMICPIOP1"]
+
+
 @register_datagrabber
 class DataladAOMICPIOP1(PatternDataladDataGrabber):
     """Concrete implementation for pattern-based data fetching of AOMIC PIOP1.
junifer/datagrabber/aomic/piop2.py CHANGED
@@ -16,6 +16,9 @@ from ...utils import raise_error
 from ..pattern_datalad import PatternDataladDataGrabber
 
 
+__all__ = ["DataladAOMICPIOP2"]
+
+
 @register_datagrabber
 class DataladAOMICPIOP2(PatternDataladDataGrabber):
     """Concrete implementation for pattern-based data fetching of AOMIC PIOP2.
junifer/datagrabber/base.py CHANGED
@@ -14,6 +14,9 @@ from ..utils import logger, raise_error
 from .utils import validate_types
 
 
+__all__ = ["BaseDataGrabber"]
+
+
 class BaseDataGrabber(ABC, UpdateMetaMixin):
     """Abstract base class for DataGrabber.
 
junifer/datagrabber/datalad_base.py CHANGED
@@ -21,6 +21,9 @@ from ..utils import logger, raise_error, warn_with_log
 from .base import BaseDataGrabber
 
 
+__all__ = ["DataladDataGrabber"]
+
+
 class DataladDataGrabber(BaseDataGrabber):
     """Abstract base class for datalad-based data fetching.
 
junifer/datagrabber/hcp1200/__init__.py CHANGED
@@ -5,3 +5,6 @@
 
 from .hcp1200 import HCP1200
 from .datalad_hcp1200 import DataladHCP1200
+
+
+__all__ = ["HCP1200", "DataladHCP1200"]
junifer/datagrabber/hcp1200/datalad_hcp1200.py CHANGED
@@ -14,6 +14,9 @@ from ...api.decorators import register_datagrabber
 from .hcp1200 import HCP1200
 
 
+__all__ = ["DataladHCP1200"]
+
+
 @register_datagrabber
 class DataladHCP1200(DataladDataGrabber, HCP1200):
     """Concrete implementation for datalad-based data fetching of HCP1200.
junifer/datagrabber/hcp1200/hcp1200.py CHANGED
@@ -14,6 +14,9 @@ from ..pattern import PatternDataGrabber
 from ..utils import raise_error
 
 
+__all__ = ["HCP1200"]
+
+
 @register_datagrabber
 class HCP1200(PatternDataGrabber):
     """Concrete implementation for pattern-based data fetching of HCP1200.
junifer/datagrabber/multiple.py CHANGED
@@ -11,6 +11,9 @@ from ..utils import raise_error
 from .base import BaseDataGrabber
 
 
+__all__ = ["MultipleDataGrabber"]
+
+
 class MultipleDataGrabber(BaseDataGrabber):
     """Concrete implementation for multi sourced data fetching.
 
junifer/datagrabber/pattern.py CHANGED
@@ -18,6 +18,9 @@ from .base import BaseDataGrabber
 from .utils import validate_patterns, validate_replacements
 
 
+__all__ = ["PatternDataGrabber"]
+
+
 # Accepted formats for confounds specification
 _CONFOUNDS_FORMATS = ("fmriprep", "adhoc")
 
junifer/datagrabber/pattern_datalad.py CHANGED
@@ -12,6 +12,9 @@ from .datalad_base import DataladDataGrabber
 from .pattern import PatternDataGrabber
 
 
+__all__ = ["PatternDataladDataGrabber"]
+
+
 @register_datagrabber
 class PatternDataladDataGrabber(DataladDataGrabber, PatternDataGrabber):
     """Concrete implementation for pattern and datalad based data fetching.
junifer/datagrabber/utils.py CHANGED
@@ -9,6 +9,9 @@ from typing import Dict, List
 from ..utils import logger, raise_error
 
 
+__all__ = ["validate_types", "validate_replacements", "validate_patterns"]
+
+
 # Define schema for pattern-based datagrabber's patterns
 PATTERNS_SCHEMA = {
     "T1w": {
junifer/datareader/__init__.py CHANGED
@@ -6,3 +6,6 @@
 # License: AGPL
 
 from .default import DefaultDataReader
+
+
+__all__ = ["DefaultDataReader"]
junifer/datareader/default.py CHANGED
@@ -15,6 +15,9 @@ from ..pipeline import PipelineStepMixin, UpdateMetaMixin
 from ..utils.logging import logger, warn_with_log
 
 
+__all__ = ["DefaultDataReader"]
+
+
 # Map each file extension to a type
 _extensions = {
     ".nii": "NIFTI",
junifer/external/nilearn/junifer_nifti_spheres_masker.py CHANGED
@@ -10,9 +10,9 @@ from nilearn import image, masking
 from nilearn._utils.class_inspect import get_params
 from nilearn._utils.niimg import img_data_dtype
 from nilearn._utils.niimg_conversions import (
-    _safe_get_data,
     check_niimg_3d,
     check_niimg_4d,
+    safe_get_data,
 )
 from nilearn.maskers import NiftiSpheresMasker
 from nilearn.maskers.base_masker import _filter_and_extract
@@ -104,7 +104,7 @@ def _apply_mask_and_get_affinity(
 
     # Compute world coordinates of all in-mask voxels.
     if niimg is None:
-        mask, affine = masking._load_mask_img(mask_img)
+        mask, affine = masking.load_mask_img(mask_img)
         # Get coordinate for all voxels inside of mask
         mask_coords = np.asarray(np.nonzero(mask)).T.tolist()
         X = None
@@ -118,21 +118,21 @@
             target_shape=niimg.shape[:3],
             interpolation="nearest",
         )
-        mask, _ = masking._load_mask_img(mask_img)
+        mask, _ = masking.load_mask_img(mask_img)
         mask_coords = list(zip(*np.where(mask != 0)))
 
-        X = masking._apply_mask_fmri(niimg, mask_img)
+        X = masking.apply_mask_fmri(niimg, mask_img)
 
     elif niimg is not None:
         affine = niimg.affine
-        if np.isnan(np.sum(_safe_get_data(niimg))):
+        if np.isnan(np.sum(safe_get_data(niimg))):
             warn_with_log(
                 "The imgs you have fed into fit_transform() contains NaN "
                 "values which will be converted to zeroes."
             )
-            X = _safe_get_data(niimg, True).reshape([-1, niimg.shape[3]]).T
+            X = safe_get_data(niimg, True).reshape([-1, niimg.shape[3]]).T
         else:
-            X = _safe_get_data(niimg).reshape([-1, niimg.shape[3]]).T
+            X = safe_get_data(niimg).reshape([-1, niimg.shape[3]]).T
 
         mask_coords = list(np.ndindex(niimg.shape[:3]))
 
junifer/markers/__init__.py CHANGED
@@ -24,3 +24,24 @@ from .temporal_snr import (
     TemporalSNRSpheres,
 )
 from .brainprint import BrainPrint
+
+
+__all__ = [
+    "BaseMarker",
+    "MarkerCollection",
+    "RSSETSMarker",
+    "ParcelAggregation",
+    "SphereAggregation",
+    "FunctionalConnectivityParcels",
+    "FunctionalConnectivitySpheres",
+    "CrossParcellationFC",
+    "EdgeCentricFCParcels",
+    "EdgeCentricFCSpheres",
+    "ReHoParcels",
+    "ReHoSpheres",
+    "ALFFParcels",
+    "ALFFSpheres",
+    "TemporalSNRParcels",
+    "TemporalSNRSpheres",
+    "BrainPrint",
+]
junifer/markers/base.py CHANGED
@@ -15,16 +15,22 @@ if TYPE_CHECKING:
     from junifer.storage import BaseFeatureStorage
 
 
+__all__ = ["BaseMarker"]
+
+
 class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
     """Abstract base class for all markers.
 
+    For every interface that is required, one needs to provide a concrete
+    implementation of this abstract class.
+
     Parameters
     ----------
-    on : str or list of str
-        The kind of data to apply the marker to. By default, will work on all
-        available data.
+    on : str or list of str or None, optional
+        The data type to apply the marker on. If None,
+        will work on all available data types (default None).
     name : str, optional
-        The name of the marker. By default, it will use the class name as the
+        The name of the marker. If None, will use the class name as the
         name of the marker (default None).
 
     Raises
@@ -192,17 +198,25 @@ class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
         for type_ in self._on:
             if type_ in input.keys():
                 logger.info(f"Computing {type_}")
+                # Get data dict for data type
                 t_input = input[type_]
+                # Pass the other data types as extra input, removing
+                # the current type
                 extra_input = input.copy()
                 extra_input.pop(type_)
+                logger.debug(
+                    f"Extra data type for feature extraction: "
+                    f"{extra_input.keys()}"
+                )
+                # Copy metadata
                 t_meta = t_input["meta"].copy()
                 t_meta["type"] = type_
-
+                # Compute marker
                 t_out = self.compute(input=t_input, extra_input=extra_input)
                 t_out["meta"] = t_meta
-
+                # Update metadata for step
                 self.update_meta(t_out, "marker")
-
+                # Check storage
                 if storage is not None:
                     logger.info(f"Storing in {storage}")
                     self.store(type_=type_, out=t_out, storage=storage)
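Besides the __all__ entry, the comments added to fit_transform() above document the compute() contract: ``input`` is the dictionary for the current data type (carrying at least a ``meta`` entry), ``extra_input`` holds the remaining data types with the current one removed, and the returned dictionary gets its ``meta`` attached and is optionally stored afterwards. A rough, hypothetical sketch of a marker written against that contract; the ``data`` field and the returned keys are assumptions, and the real abstract base requires further methods that are omitted here:

    from junifer.api.decorators import register_marker
    from junifer.markers import BaseMarker


    @register_marker
    class ExampleMeanMarker(BaseMarker):
        """Hypothetical marker returning the mean of the extracted signal."""

        def compute(self, input, extra_input=None):
            # `input` holds the current data type; `extra_input` the other types.
            data = input["data"]  # assumed field name
            return {"mean": float(data.mean())}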
junifer/markers/brainprint.py CHANGED
@@ -43,6 +43,9 @@ if TYPE_CHECKING:
     from junifer.storage import BaseFeatureStorage
 
 
+__all__ = ["BrainPrint"]
+
+
 @register_marker
 class BrainPrint(BaseMarker):
     """Class for BrainPrint.
junifer/markers/collection.py CHANGED
@@ -19,6 +19,9 @@ if TYPE_CHECKING:
     from junifer.datagrabber import BaseDataGrabber
 
 
+__all__ = ["MarkerCollection"]
+
+
 class MarkerCollection:
     """Class for marker collection.