junifer 0.0.5.dev98__py3-none-any.whl → 0.0.5.dev131__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34)
  1. junifer/_version.py +2 -2
  2. junifer/api/tests/test_api_utils.py +2 -0
  3. junifer/data/tests/test_masks.py +1 -1
  4. junifer/datagrabber/aomic/id1000.py +35 -2
  5. junifer/datagrabber/aomic/piop1.py +35 -2
  6. junifer/datagrabber/aomic/piop2.py +35 -2
  7. junifer/datagrabber/aomic/tests/test_id1000.py +43 -97
  8. junifer/datagrabber/aomic/tests/test_piop1.py +64 -106
  9. junifer/datagrabber/aomic/tests/test_piop2.py +44 -100
  10. junifer/datagrabber/pattern.py +2 -1
  11. junifer/datagrabber/utils.py +11 -0
  12. junifer/datareader/default.py +2 -2
  13. junifer/external/BrainPrint/brainprint/__init__.py +4 -0
  14. junifer/external/BrainPrint/brainprint/_version.py +3 -0
  15. junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
  16. junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
  17. junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
  18. junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
  19. junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
  20. junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
  21. junifer/markers/__init__.py +1 -0
  22. junifer/markers/brainprint.py +662 -0
  23. junifer/markers/reho/_afni_reho.py +1 -1
  24. junifer/markers/reho/_junifer_reho.py +1 -1
  25. junifer/markers/reho/reho_parcels.py +2 -2
  26. junifer/markers/reho/reho_spheres.py +2 -2
  27. junifer/markers/tests/test_brainprint.py +47 -0
  28. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/METADATA +2 -1
  29. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/RECORD +34 -24
  30. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/AUTHORS.rst +0 -0
  31. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/LICENSE.md +0 -0
  32. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/WHEEL +0 -0
  33. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/entry_points.txt +0 -0
  34. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/top_level.txt +0 -0
@@ -18,123 +18,67 @@ URI = "https://gin.g-node.org/juaml/datalad-example-aomicpiop2"
18
18
 
19
19
 
20
20
  @pytest.mark.parametrize(
21
- "tasks",
22
- [None, "restingstate"],
21
+ "type_, nested_types, tasks",
22
+ [
23
+ ("BOLD", ["confounds", "mask"], None),
24
+ ("BOLD", ["confounds", "mask"], ["restingstate"]),
25
+ ("BOLD", ["confounds", "mask"], ["restingstate", "stopsignal"]),
26
+ ("BOLD", ["confounds", "mask"], ["workingmemory", "stopsignal"]),
27
+ ("BOLD", ["confounds", "mask"], ["workingmemory"]),
28
+ ("T1w", ["mask"], None),
29
+ ("VBM_CSF", None, None),
30
+ ("VBM_GM", None, None),
31
+ ("VBM_WM", None, None),
32
+ ("DWI", None, None),
33
+ ("FreeSurfer", None, None),
34
+ ],
23
35
  )
24
- def test_DataladAOMICPIOP2(tasks: Optional[str]) -> None:
36
+ def test_DataladAOMICPIOP2(
37
+ type_: str,
38
+ nested_types: Optional[List[str]],
39
+ tasks: Optional[List[str]],
40
+ ) -> None:
25
41
  """Test DataladAOMICPIOP2 DataGrabber.
26
42
 
27
43
  Parameters
28
44
  ----------
29
- tasks : str or None
45
+ type_ : str
46
+ The parametrized type.
47
+ nested_types : list of str or None
48
+ The parametrized nested types.
49
+ tasks : list of str or None
30
50
  The parametrized task values.
31
51
 
32
52
  """
33
- dg = DataladAOMICPIOP2(tasks=tasks)
53
+ dg = DataladAOMICPIOP2(types=type_, tasks=tasks)
34
54
  # Set URI to Gin
35
55
  dg.uri = URI
36
56
 
37
57
  with dg:
58
+ # Get all elements
38
59
  all_elements = dg.get_elements()
39
-
40
- if tasks == "restingstate":
41
- for el in all_elements:
42
- assert el[1] == "restingstate"
43
-
60
+ # Get test element
44
61
  test_element = all_elements[0]
45
- sub, task = test_element
62
+ # Get test element data
46
63
  out = dg[test_element]
47
-
48
- # asserts type "BOLD"
49
- assert "BOLD" in out
50
-
51
- new_task = f"{task}_acq-seq"
52
- assert (
53
- out["BOLD"]["path"].name == f"{sub}_task-{new_task}_"
54
- "space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
55
- )
56
-
57
- assert out["BOLD"]["path"].exists()
58
- assert out["BOLD"]["path"].is_file()
59
-
60
- # asserts type BOLD.confounds
61
- assert "confounds" in out["BOLD"]
62
-
63
- assert (
64
- out["BOLD"]["confounds"]["path"].name == f"{sub}_task-{new_task}_"
65
- "desc-confounds_regressors.tsv"
66
- )
67
-
68
- assert out["BOLD"]["confounds"]["path"].exists()
69
- assert out["BOLD"]["confounds"]["path"].is_file()
70
-
71
- # assert BOLD.mask
72
- assert out["BOLD"]["mask"]["path"].exists()
73
-
74
- # asserts type "T1w"
75
- assert "T1w" in out
76
-
77
- assert (
78
- out["T1w"]["path"].name == f"{sub}_space-MNI152NLin2009cAsym_"
79
- "desc-preproc_T1w.nii.gz"
80
- )
81
-
82
- assert out["T1w"]["path"].exists()
83
- assert out["T1w"]["path"].is_file()
84
-
85
- # asserts T1w.mask
86
- assert out["T1w"]["mask"]["path"].exists()
87
-
88
- # asserts type "VBM_CSF"
89
- assert "VBM_CSF" in out
90
-
91
- assert (
92
- out["VBM_CSF"]["path"].name
93
- == f"{sub}_space-MNI152NLin2009cAsym_label-"
94
- "CSF_probseg.nii.gz"
95
- )
96
-
97
- assert out["VBM_CSF"]["path"].exists()
98
- assert out["VBM_CSF"]["path"].is_file()
99
-
100
- # asserts type "VBM_GM"
101
- assert "VBM_GM" in out
102
-
103
- assert (
104
- out["VBM_GM"]["path"].name
105
- == f"{sub}_space-MNI152NLin2009cAsym_label-"
106
- "GM_probseg.nii.gz"
107
- )
108
-
109
- assert out["VBM_GM"]["path"].exists()
110
- assert out["VBM_GM"]["path"].is_file()
111
-
112
- # asserts type "VBM_WM"
113
- assert "VBM_WM" in out
114
-
115
- assert (
116
- out["VBM_WM"]["path"].name
117
- == f"{sub}_space-MNI152NLin2009cAsym_label-"
118
- "WM_probseg.nii.gz"
119
- )
120
-
121
- assert out["VBM_WM"]["path"].exists()
122
- assert out["VBM_WM"]["path"].is_file()
123
-
124
- # asserts type "DWI"
125
- assert "DWI" in out
126
-
127
- assert out["DWI"]["path"].name == f"{sub}_desc-preproc_dwi.nii.gz"
128
-
129
- assert out["DWI"]["path"].exists()
130
- assert out["DWI"]["path"].is_file()
131
-
132
- # asserts meta
133
- assert "meta" in out["BOLD"]
134
- meta = out["BOLD"]["meta"]
64
+ # Assert data type
65
+ assert type_ in out
66
+ # Check task name if BOLD
67
+ if type_ == "BOLD" and tasks is not None:
68
+ assert test_element[1] in out[type_]["path"].name
69
+ assert out[type_]["path"].exists()
70
+ assert out[type_]["path"].is_file()
71
+ # Asserts data type metadata
72
+ assert "meta" in out[type_]
73
+ meta = out[type_]["meta"]
135
74
  assert "element" in meta
136
75
  assert "subject" in meta["element"]
137
- assert sub == meta["element"]["subject"]
76
+ assert test_element[0] == meta["element"]["subject"]
77
+ # Assert nested data type if not None
78
+ if nested_types is not None:
79
+ for nested_type in nested_types:
80
+ assert out[type_][nested_type]["path"].exists()
81
+ assert out[type_][nested_type]["path"].is_file()
138
82
 
139
83
 
140
84
  @pytest.mark.parametrize(
@@ -6,6 +6,7 @@
6
6
  # License: AGPL
7
7
 
8
8
  import re
9
+ from copy import deepcopy
9
10
  from pathlib import Path
10
11
  from typing import Dict, List, Optional, Tuple, Union
11
12
 
@@ -364,7 +365,7 @@ class PatternDataGrabber(BaseDataGrabber):
364
365
  # Data type dictionary
365
366
  t_pattern = self.patterns[t_type]
366
367
  # Copy data type dictionary in output
367
- out[t_type] = t_pattern.copy()
368
+ out[t_type] = deepcopy(t_pattern)
368
369
  # Iterate to check for nested "types" like mask
369
370
  for k, v in t_pattern.items():
370
371
  # Resolve pattern for base data type
@@ -53,6 +53,17 @@ PATTERNS_SCHEMA = {
53
53
  "mandatory": ["pattern"],
54
54
  "optional": {},
55
55
  },
56
+ "FreeSurfer": {
57
+ "mandatory": ["pattern"],
58
+ "optional": {
59
+ "aseg": {"mandatory": ["pattern"], "optional": []},
60
+ "norm": {"mandatory": ["pattern"], "optional": []},
61
+ "lh_white": {"mandatory": ["pattern"], "optional": []},
62
+ "rh_white": {"mandatory": ["pattern"], "optional": []},
63
+ "lh_pial": {"mandatory": ["pattern"], "optional": []},
64
+ "rh_pial": {"mandatory": ["pattern"], "optional": []},
65
+ },
66
+ },
56
67
  }
57
68
 
58
69
 
@@ -103,8 +103,8 @@ class DefaultDataReader(PipelineStepMixin, UpdateMetaMixin):
103
103
  params = {}
104
104
  # For each type of data, try to read it
105
105
  for type_key, type_val in input.items():
106
- # Skip Warp data type
107
- if type_key == "Warp":
106
+ # Skip Warp and FreeSurfer data type
107
+ if type_key in ["Warp", "FreeSurfer"]:
108
108
  continue
109
109
 
110
110
  # Check for malformed datagrabber specification
@@ -0,0 +1,4 @@
1
+ from ._version import __version__ # noqa: F401
2
+ from .brainprint import compute_asymmetry, compute_brainprint # noqa: F401
3
+ from .surfaces import surf_to_vtk # noqa: F401
4
+ from .utils._config import sys_info # noqa: F401
@@ -0,0 +1,3 @@
1
+ """Version number."""
2
+
3
+ __version__ = "0.4.0"
@@ -0,0 +1,91 @@
1
+ """
2
+ Contains asymmetry estimation functionality.
3
+ """
4
+ from typing import Dict
5
+
6
+ import numpy as np
7
+ from lapy import shapedna
8
+
9
+
10
+ def compute_asymmetry(
11
+ eigenvalues, distance: str = "euc", skip_cortex: bool = False
12
+ ) -> Dict[str, float]:
13
+ """
14
+ Compute lateral shape distances from BrainPrint analysis results.
15
+
16
+ Parameters
17
+ ----------
18
+ eigenvalues : _type_
19
+ BrainPrint analysis results.
20
+ distance : str, optional
21
+ ShapeDNA distance, by default "euc".
22
+ skip_cortex : bool, optional
23
+ Whether to skip white matter and pial surfaces, by default False.
24
+
25
+ Returns
26
+ -------
27
+ Dict[str, float]
28
+ {left_label}_{right_label}, distance.
29
+ """
30
+ # Define structures
31
+
32
+ # combined and individual aseg labels:
33
+ # - Left Striatum: left Caudate + Putamen + Accumbens
34
+ # - Right Striatum: right Caudate + Putamen + Accumbens
35
+ # - CorpusCallosum: 5 subregions combined
36
+ # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
37
+ # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus + 3rdVent + CSF
38
+ # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
39
+ # - 3rd-Ventricle: 3rd-Ventricle + CSF
40
+
41
+ structures_left_right = [
42
+ ("Left-Striatum", "Right-Striatum"),
43
+ ("Left-Lateral-Ventricle", "Right-Lateral-Ventricle"),
44
+ (
45
+ "Left-Cerebellum-White-Matter",
46
+ "Right-Cerebellum-White-Matter",
47
+ ),
48
+ ("Left-Cerebellum-Cortex", "Right-Cerebellum-Cortex"),
49
+ ("Left-Thalamus-Proper", "Right-Thalamus-Proper"),
50
+ ("Left-Caudate", "Right-Caudate"),
51
+ ("Left-Putamen", "Right-Putamen"),
52
+ ("Left-Pallidum", "Right-Pallidum"),
53
+ ("Left-Hippocampus", "Right-Hippocampus"),
54
+ ("Left-Amygdala", "Right-Amygdala"),
55
+ ("Left-Accumbens-area", "Right-Accumbens-area"),
56
+ ("Left-VentralDC", "Right-VentralDC"),
57
+ ]
58
+
59
+ cortex_2d_left_right = [
60
+ ("lh-white-2d", "rh-white-2d"),
61
+ ("lh-pial-2d", "rh-pial-2d"),
62
+ ]
63
+
64
+ structures = structures_left_right
65
+ if not skip_cortex:
66
+ structures += cortex_2d_left_right
67
+
68
+ distances = dict()
69
+ for left_label, right_label in structures:
70
+ left_eigenvalues, right_eigenvalues = (
71
+ eigenvalues[left_label][2:],
72
+ eigenvalues[right_label][2:],
73
+ )
74
+ has_nan = np.isnan(left_eigenvalues).any() or np.isnan(right_eigenvalues).any()
75
+ key = f"{left_label}_{right_label}"
76
+ if has_nan:
77
+ message = (
78
+ "NaNs found for {left_label} or {right_label}, "
79
+ "skipping asymmetry computation...".format(
80
+ left_label=left_label, right_label=right_label
81
+ )
82
+ )
83
+ print(message)
84
+ distances[key] = np.nan
85
+ else:
86
+ distances[key] = shapedna.compute_distance(
87
+ left_eigenvalues,
88
+ right_eigenvalues,
89
+ dist=distance,
90
+ )
91
+ return distances