junifer 0.0.3.dev186__py3-none-any.whl → 0.0.4__py3-none-any.whl

This diff shows the changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (178)
  1. junifer/_version.py +14 -2
  2. junifer/api/cli.py +162 -17
  3. junifer/api/functions.py +87 -419
  4. junifer/api/parser.py +24 -0
  5. junifer/api/queue_context/__init__.py +8 -0
  6. junifer/api/queue_context/gnu_parallel_local_adapter.py +258 -0
  7. junifer/api/queue_context/htcondor_adapter.py +365 -0
  8. junifer/api/queue_context/queue_context_adapter.py +60 -0
  9. junifer/api/queue_context/tests/test_gnu_parallel_local_adapter.py +192 -0
  10. junifer/api/queue_context/tests/test_htcondor_adapter.py +257 -0
  11. junifer/api/res/afni/run_afni_docker.sh +6 -6
  12. junifer/api/res/ants/ResampleImage +3 -0
  13. junifer/api/res/ants/antsApplyTransforms +3 -0
  14. junifer/api/res/ants/antsApplyTransformsToPoints +3 -0
  15. junifer/api/res/ants/run_ants_docker.sh +39 -0
  16. junifer/api/res/fsl/applywarp +3 -0
  17. junifer/api/res/fsl/flirt +3 -0
  18. junifer/api/res/fsl/img2imgcoord +3 -0
  19. junifer/api/res/fsl/run_fsl_docker.sh +39 -0
  20. junifer/api/res/fsl/std2imgcoord +3 -0
  21. junifer/api/res/run_conda.sh +4 -4
  22. junifer/api/res/run_venv.sh +22 -0
  23. junifer/api/tests/data/partly_cloudy_agg_mean_tian.yml +16 -0
  24. junifer/api/tests/test_api_utils.py +21 -3
  25. junifer/api/tests/test_cli.py +232 -9
  26. junifer/api/tests/test_functions.py +211 -439
  27. junifer/api/tests/test_parser.py +1 -1
  28. junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +6 -1
  29. junifer/configs/juseless/datagrabbers/camcan_vbm.py +6 -1
  30. junifer/configs/juseless/datagrabbers/ixi_vbm.py +6 -1
  31. junifer/configs/juseless/datagrabbers/tests/test_ucla.py +8 -8
  32. junifer/configs/juseless/datagrabbers/ucla.py +44 -26
  33. junifer/configs/juseless/datagrabbers/ukb_vbm.py +6 -1
  34. junifer/data/VOIs/meta/AutobiographicalMemory_VOIs.txt +23 -0
  35. junifer/data/VOIs/meta/Power2013_MNI_VOIs.tsv +264 -0
  36. junifer/data/__init__.py +4 -0
  37. junifer/data/coordinates.py +298 -31
  38. junifer/data/masks.py +360 -28
  39. junifer/data/parcellations.py +621 -188
  40. junifer/data/template_spaces.py +190 -0
  41. junifer/data/tests/test_coordinates.py +34 -3
  42. junifer/data/tests/test_data_utils.py +1 -0
  43. junifer/data/tests/test_masks.py +202 -86
  44. junifer/data/tests/test_parcellations.py +266 -55
  45. junifer/data/tests/test_template_spaces.py +104 -0
  46. junifer/data/utils.py +4 -2
  47. junifer/datagrabber/__init__.py +1 -0
  48. junifer/datagrabber/aomic/id1000.py +111 -70
  49. junifer/datagrabber/aomic/piop1.py +116 -53
  50. junifer/datagrabber/aomic/piop2.py +116 -53
  51. junifer/datagrabber/aomic/tests/test_id1000.py +27 -27
  52. junifer/datagrabber/aomic/tests/test_piop1.py +27 -27
  53. junifer/datagrabber/aomic/tests/test_piop2.py +27 -27
  54. junifer/datagrabber/base.py +62 -10
  55. junifer/datagrabber/datalad_base.py +0 -2
  56. junifer/datagrabber/dmcc13_benchmark.py +372 -0
  57. junifer/datagrabber/hcp1200/datalad_hcp1200.py +5 -0
  58. junifer/datagrabber/hcp1200/hcp1200.py +30 -13
  59. junifer/datagrabber/pattern.py +133 -27
  60. junifer/datagrabber/pattern_datalad.py +111 -13
  61. junifer/datagrabber/tests/test_base.py +57 -6
  62. junifer/datagrabber/tests/test_datagrabber_utils.py +204 -76
  63. junifer/datagrabber/tests/test_datalad_base.py +0 -6
  64. junifer/datagrabber/tests/test_dmcc13_benchmark.py +256 -0
  65. junifer/datagrabber/tests/test_multiple.py +43 -10
  66. junifer/datagrabber/tests/test_pattern.py +125 -178
  67. junifer/datagrabber/tests/test_pattern_datalad.py +44 -25
  68. junifer/datagrabber/utils.py +151 -16
  69. junifer/datareader/default.py +36 -10
  70. junifer/external/nilearn/junifer_nifti_spheres_masker.py +6 -0
  71. junifer/markers/base.py +25 -16
  72. junifer/markers/collection.py +35 -16
  73. junifer/markers/complexity/__init__.py +27 -0
  74. junifer/markers/complexity/complexity_base.py +149 -0
  75. junifer/markers/complexity/hurst_exponent.py +136 -0
  76. junifer/markers/complexity/multiscale_entropy_auc.py +140 -0
  77. junifer/markers/complexity/perm_entropy.py +132 -0
  78. junifer/markers/complexity/range_entropy.py +136 -0
  79. junifer/markers/complexity/range_entropy_auc.py +145 -0
  80. junifer/markers/complexity/sample_entropy.py +134 -0
  81. junifer/markers/complexity/tests/test_complexity_base.py +19 -0
  82. junifer/markers/complexity/tests/test_hurst_exponent.py +69 -0
  83. junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +68 -0
  84. junifer/markers/complexity/tests/test_perm_entropy.py +68 -0
  85. junifer/markers/complexity/tests/test_range_entropy.py +69 -0
  86. junifer/markers/complexity/tests/test_range_entropy_auc.py +69 -0
  87. junifer/markers/complexity/tests/test_sample_entropy.py +68 -0
  88. junifer/markers/complexity/tests/test_weighted_perm_entropy.py +68 -0
  89. junifer/markers/complexity/weighted_perm_entropy.py +133 -0
  90. junifer/markers/falff/_afni_falff.py +153 -0
  91. junifer/markers/falff/_junifer_falff.py +142 -0
  92. junifer/markers/falff/falff_base.py +91 -84
  93. junifer/markers/falff/falff_parcels.py +61 -45
  94. junifer/markers/falff/falff_spheres.py +64 -48
  95. junifer/markers/falff/tests/test_falff_parcels.py +89 -121
  96. junifer/markers/falff/tests/test_falff_spheres.py +92 -127
  97. junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +1 -0
  98. junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +1 -0
  99. junifer/markers/functional_connectivity/functional_connectivity_base.py +1 -0
  100. junifer/markers/functional_connectivity/tests/test_crossparcellation_functional_connectivity.py +46 -44
  101. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_parcels.py +34 -39
  102. junifer/markers/functional_connectivity/tests/test_edge_functional_connectivity_spheres.py +40 -52
  103. junifer/markers/functional_connectivity/tests/test_functional_connectivity_parcels.py +62 -70
  104. junifer/markers/functional_connectivity/tests/test_functional_connectivity_spheres.py +99 -85
  105. junifer/markers/parcel_aggregation.py +60 -38
  106. junifer/markers/reho/_afni_reho.py +192 -0
  107. junifer/markers/reho/_junifer_reho.py +281 -0
  108. junifer/markers/reho/reho_base.py +69 -34
  109. junifer/markers/reho/reho_parcels.py +26 -16
  110. junifer/markers/reho/reho_spheres.py +23 -9
  111. junifer/markers/reho/tests/test_reho_parcels.py +93 -92
  112. junifer/markers/reho/tests/test_reho_spheres.py +88 -86
  113. junifer/markers/sphere_aggregation.py +54 -9
  114. junifer/markers/temporal_snr/temporal_snr_base.py +1 -0
  115. junifer/markers/temporal_snr/tests/test_temporal_snr_parcels.py +38 -37
  116. junifer/markers/temporal_snr/tests/test_temporal_snr_spheres.py +34 -38
  117. junifer/markers/tests/test_collection.py +43 -42
  118. junifer/markers/tests/test_ets_rss.py +29 -37
  119. junifer/markers/tests/test_parcel_aggregation.py +587 -468
  120. junifer/markers/tests/test_sphere_aggregation.py +209 -157
  121. junifer/markers/utils.py +2 -40
  122. junifer/onthefly/read_transform.py +13 -6
  123. junifer/pipeline/__init__.py +1 -0
  124. junifer/pipeline/pipeline_step_mixin.py +105 -41
  125. junifer/pipeline/registry.py +17 -0
  126. junifer/pipeline/singleton.py +45 -0
  127. junifer/pipeline/tests/test_pipeline_step_mixin.py +139 -51
  128. junifer/pipeline/tests/test_update_meta_mixin.py +1 -0
  129. junifer/pipeline/tests/test_workdir_manager.py +104 -0
  130. junifer/pipeline/update_meta_mixin.py +8 -2
  131. junifer/pipeline/utils.py +154 -15
  132. junifer/pipeline/workdir_manager.py +246 -0
  133. junifer/preprocess/__init__.py +3 -0
  134. junifer/preprocess/ants/__init__.py +4 -0
  135. junifer/preprocess/ants/ants_apply_transforms_warper.py +185 -0
  136. junifer/preprocess/ants/tests/test_ants_apply_transforms_warper.py +56 -0
  137. junifer/preprocess/base.py +96 -69
  138. junifer/preprocess/bold_warper.py +265 -0
  139. junifer/preprocess/confounds/fmriprep_confound_remover.py +91 -134
  140. junifer/preprocess/confounds/tests/test_fmriprep_confound_remover.py +106 -111
  141. junifer/preprocess/fsl/__init__.py +4 -0
  142. junifer/preprocess/fsl/apply_warper.py +179 -0
  143. junifer/preprocess/fsl/tests/test_apply_warper.py +45 -0
  144. junifer/preprocess/tests/test_bold_warper.py +159 -0
  145. junifer/preprocess/tests/test_preprocess_base.py +6 -6
  146. junifer/preprocess/warping/__init__.py +6 -0
  147. junifer/preprocess/warping/_ants_warper.py +167 -0
  148. junifer/preprocess/warping/_fsl_warper.py +109 -0
  149. junifer/preprocess/warping/space_warper.py +213 -0
  150. junifer/preprocess/warping/tests/test_space_warper.py +198 -0
  151. junifer/stats.py +18 -4
  152. junifer/storage/base.py +9 -1
  153. junifer/storage/hdf5.py +8 -3
  154. junifer/storage/pandas_base.py +2 -1
  155. junifer/storage/sqlite.py +1 -0
  156. junifer/storage/tests/test_hdf5.py +2 -1
  157. junifer/storage/tests/test_sqlite.py +8 -8
  158. junifer/storage/tests/test_utils.py +6 -6
  159. junifer/storage/utils.py +1 -0
  160. junifer/testing/datagrabbers.py +11 -7
  161. junifer/testing/utils.py +1 -0
  162. junifer/tests/test_stats.py +2 -0
  163. junifer/utils/__init__.py +1 -0
  164. junifer/utils/helpers.py +53 -0
  165. junifer/utils/logging.py +14 -3
  166. junifer/utils/tests/test_helpers.py +35 -0
  167. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/METADATA +59 -28
  168. junifer-0.0.4.dist-info/RECORD +257 -0
  169. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/WHEEL +1 -1
  170. junifer/markers/falff/falff_estimator.py +0 -334
  171. junifer/markers/falff/tests/test_falff_estimator.py +0 -238
  172. junifer/markers/reho/reho_estimator.py +0 -515
  173. junifer/markers/reho/tests/test_reho_estimator.py +0 -260
  174. junifer-0.0.3.dev186.dist-info/RECORD +0 -199
  175. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/AUTHORS.rst +0 -0
  176. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/LICENSE.md +0 -0
  177. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/entry_points.txt +0 -0
  178. {junifer-0.0.3.dev186.dist-info → junifer-0.0.4.dist-info}/top_level.txt +0 -0

junifer/datagrabber/base.py
@@ -37,15 +37,16 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
     def __init__(self, types: List[str], datadir: Union[str, Path]) -> None:
         # Validate types
         validate_types(types)
+        self.types = types
+
         # Convert str to Path
         if not isinstance(datadir, Path):
             datadir = Path(datadir)
+        self._datadir = datadir

         logger.debug("Initializing BaseDataGrabber")
         logger.debug(f"\t_datadir = {datadir}")
         logger.debug(f"\ttypes = {types}")
-        self._datadir = datadir
-        self.types = types

     def __iter__(self) -> Iterator:
         """Enable iterable support.
@@ -58,12 +59,14 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         """
         yield from self.get_elements()

-    def __getitem__(self, element: Union[str, Tuple]) -> Dict[str, Dict]:
+    def __getitem__(
+        self, element: Union[str, Tuple[str, ...]]
+    ) -> Dict[str, Dict]:
         """Enable indexing support.

         Parameters
         ----------
-        element : str or tuple
+        element : str or tuple of str
             The element to be indexed.

         Returns
@@ -74,12 +77,16 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):

         """
         logger.info(f"Getting element {element}")
+        # Convert element to tuple if not already
         if not isinstance(element, tuple):
             element = (element,)
-        named_element = dict(zip(self.get_element_keys(), element))
+        # Zip through element keys and actual values to construct element
+        # access dictionary
+        named_element: Dict = dict(zip(self.get_element_keys(), element))
         logger.debug(f"Named element: {named_element}")
+        # Fetch element
         out = self.get_item(**named_element)
-
+        # Update metadata
         for _, t_val in out.items():
             self.update_meta(t_val, "datagrabber")
             t_val["meta"]["element"] = named_element
@@ -117,6 +124,51 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         """
         return self._datadir

+    def filter(self, selection: List[Union[str, Tuple[str]]]) -> Iterator:
+        """Filter elements to be grabbed.
+
+        Parameters
+        ----------
+        selection : list of str or tuple
+            The list of partial element key values to filter using.
+
+        Yields
+        ------
+        object
+            An element that can be indexed by the DataGrabber.
+
+        """
+
+        def filter_func(element: Union[str, Tuple[str]]) -> bool:
+            """Filter element based on selection.
+
+            Parameters
+            ----------
+            element : str or tuple of str
+                The element to be filtered.
+
+            Returns
+            -------
+            bool
+                If the element passes the filter or not.
+
+            """
+            # Convert element to tuple
+            if not isinstance(element, tuple):
+                element = (element,)
+            # Filter based on selection kind
+            if isinstance(selection[0], str):
+                for opt in selection:
+                    if opt in element:
+                        return True
+            elif isinstance(selection[0], tuple):
+                for opt in selection:
+                    if set(opt).issubset(element):
+                        return True
+            return False
+
+        yield from filter(filter_func, self.get_elements())
+
     @abstractmethod
     def get_element_keys(self) -> List[str]:
         """Get element keys.
@@ -133,10 +185,10 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         raise_error(
             msg="Concrete classes need to implement get_element_keys().",
             klass=NotImplementedError,
-        )
+        )  # pragma: no cover

     @abstractmethod
-    def get_elements(self) -> List:
+    def get_elements(self) -> List[Union[str, Tuple[str]]]:
         """Get elements.

         Returns
@@ -150,7 +202,7 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         raise_error(
             msg="Concrete classes need to implement get_elements().",
             klass=NotImplementedError,
-        )
+        )  # pragma: no cover

     @abstractmethod
     def get_item(self, **element: Dict) -> Dict[str, Dict]:
@@ -171,4 +223,4 @@ class BaseDataGrabber(ABC, UpdateMetaMixin):
         raise_error(
             msg="Concrete classes need to implement get_item().",
             klass=NotImplementedError,
-        )
+        )  # pragma: no cover
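
The new `filter()` added above yields only the elements that match a selection of full or partial element key values: a string matches if it appears anywhere in the element tuple, a tuple matches if all of its values do. A minimal usage sketch (not from the diff; `dg` is a hypothetical DataGrabber whose elements look like (subject, session, task, phase_encoding, run)):

    # Keep elements that contain a given key value
    for element in dg.filter(["sub-01"]):
        print(element)

    # Keep elements whose values include every entry of a partial tuple
    for element in dg.filter([("sub-01", "Rest")]):
        print(element)
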

junifer/datagrabber/datalad_base.py
@@ -17,12 +17,10 @@ import datalad.api as dl
 from datalad.support.exceptions import IncompleteResultsError
 from datalad.support.gitrepo import GitRepo

-from ..api.decorators import register_datagrabber
 from ..utils import logger, raise_error, warn_with_log
 from .base import BaseDataGrabber


-@register_datagrabber
 class DataladDataGrabber(BaseDataGrabber):
     """Abstract base class for datalad-based data fetching.


junifer/datagrabber/dmcc13_benchmark.py
@@ -0,0 +1,372 @@
+"""Provide concrete implementation for DMCC13Benchmark DataGrabber."""
+
+# Authors: Synchon Mandal <s.mandal@fz-juelich.de>
+# License: AGPL
+
+from itertools import product
+from pathlib import Path
+from typing import Dict, List, Union
+
+from ..api.decorators import register_datagrabber
+from ..utils import raise_error
+from .pattern_datalad import PatternDataladDataGrabber
+
+
+__all__ = ["DMCC13Benchmark"]
+
+
+@register_datagrabber
+class DMCC13Benchmark(PatternDataladDataGrabber):
+    """Concrete implementation for datalad-based data fetching of DMCC13.
+
+    Parameters
+    ----------
+    datadir : str or Path or None, optional
+        The directory where the datalad dataset will be cloned. If None,
+        the datalad dataset will be cloned into a temporary directory
+        (default None).
+    types: {"BOLD", "BOLD_confounds", "T1w", "VBM_CSF", "VBM_GM", \
+            "VBM_WM"} or a list of the options, optional
+        DMCC data types. If None, all available data types are selected.
+        (default None).
+    sessions: {"ses-wave1bas", "ses-wave1pro", "ses-wave1rea"} or list of \
+            the options, optional
+        DMCC sessions. If None, all available sessions are selected
+        (default None).
+    tasks: {"Rest", "Axcpt", "Cuedts", "Stern", "Stroop"} or \
+            list of the options, optional
+        DMCC task sessions. If None, all available task sessions are selected
+        (default None).
+    phase_encodings : {"AP", "PA"} or list of the options, optional
+        DMCC phase encoding directions. If None, all available phase encodings
+        are selected (default None).
+    runs : {"1", "2"} or list of the options, optional
+        DMCC runs. If None, all available runs are selected (default None).
+    native_t1w : bool, optional
+        Whether to use T1w in native space (default False).
+
+    Raises
+    ------
+    ValueError
+        If invalid value is passed for:
+            * ``sessions``
+            * ``tasks``
+            * ``phase_encodings``
+            * ``runs``
+
+    """
+
+    def __init__(
+        self,
+        datadir: Union[str, Path, None] = None,
+        types: Union[str, List[str], None] = None,
+        sessions: Union[str, List[str], None] = None,
+        tasks: Union[str, List[str], None] = None,
+        phase_encodings: Union[str, List[str], None] = None,
+        runs: Union[str, List[str], None] = None,
+        native_t1w: bool = False,
+    ) -> None:
+        # Declare all sessions
+        all_sessions = [
+            "ses-wave1bas",
+            "ses-wave1pro",
+            "ses-wave1rea",
+        ]
+        # Set default sessions
+        if sessions is None:
+            sessions = all_sessions
+        else:
+            # Convert single session into list
+            if isinstance(sessions, str):
+                sessions = [sessions]
+            # Verify valid sessions
+            for s in sessions:
+                if s not in all_sessions:
+                    raise_error(
+                        f"{s} is not a valid session in the DMCC dataset"
+                    )
+        self.sessions = sessions
+        # Declare all tasks
+        all_tasks = [
+            "Rest",
+            "Axcpt",
+            "Cuedts",
+            "Stern",
+            "Stroop",
+        ]
+        # Set default tasks
+        if tasks is None:
+            tasks = all_tasks
+        else:
+            # Convert single task into list
+            if isinstance(tasks, str):
+                tasks = [tasks]
+            # Verify valid tasks
+            for t in tasks:
+                if t not in all_tasks:
+                    raise_error(f"{t} is not a valid task in the DMCC dataset")
+        self.tasks = tasks
+        # Declare all phase encodings
+        all_phase_encodings = ["AP", "PA"]
+        # Set default phase encodings
+        if phase_encodings is None:
+            phase_encodings = all_phase_encodings
+        else:
+            # Convert single phase encoding into list
+            if isinstance(phase_encodings, str):
+                phase_encodings = [phase_encodings]
+            # Verify valid phase encodings
+            for p in phase_encodings:
+                if p not in all_phase_encodings:
+                    raise_error(
+                        f"{p} is not a valid phase encoding in the DMCC "
+                        "dataset"
+                    )
+        self.phase_encodings = phase_encodings
+        # Declare all runs
+        all_runs = ["1", "2"]
+        # Set default runs
+        if runs is None:
+            runs = all_runs
+        else:
+            # Convert single run into list
+            if isinstance(runs, str):
+                runs = [runs]
+            # Verify valid runs
+            for r in runs:
+                if r not in all_runs:
+                    raise_error(f"{r} is not a valid run in the DMCC dataset")
+        self.runs = runs
+        # The patterns
+        patterns = {
+            "BOLD": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/{session}/"
+                    "func/{subject}_{session}_task-{task}_acq-mb4"
+                    "{phase_encoding}_run-{run}_"
+                    "space-MNI152NLin2009cAsym_desc-preproc_bold.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+                "mask_item": "BOLD_mask",
+            },
+            "BOLD_confounds": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/{session}/"
+                    "func/{subject}_{session}_task-{task}_acq-mb4"
+                    "{phase_encoding}_run-{run}_desc-confounds_regressors.tsv"
+                ),
+                "format": "fmriprep",
+            },
+            "BOLD_mask": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/{session}/"
+                    "/func/{subject}_{session}_task-{task}_acq-mb4"
+                    "{phase_encoding}_run-{run}_"
+                    "space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+            },
+            "T1w": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                    "{subject}_space-MNI152NLin2009cAsym_desc-preproc_T1w.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+                "mask_item": "T1w_mask",
+            },
+            "T1w_mask": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                    "{subject}_space-MNI152NLin2009cAsym_desc-brain_mask.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+            },
+            "VBM_CSF": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                    "{subject}_space-MNI152NLin2009cAsym_label-CSF_probseg.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+            },
+            "VBM_GM": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                    "{subject}_space-MNI152NLin2009cAsym_label-GM_probseg.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+            },
+            "VBM_WM": {
+                "pattern": (
+                    "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                    "{subject}_space-MNI152NLin2009cAsym_label-WM_probseg.nii.gz"
+                ),
+                "space": "MNI152NLin2009cAsym",
+            },
+        }
+        # Use native T1w assets
+        self.native_t1w = False
+        if native_t1w:
+            self.native_t1w = True
+            patterns.update(
+                {
+                    "T1w": {
+                        "pattern": (
+                            "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                            "{subject}_desc-preproc_T1w.nii.gz"
+                        ),
+                        "space": "native",
+                        "mask_item": "T1w_mask",
+                    },
+                    "T1w_mask": {
+                        "pattern": (
+                            "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                            "{subject}_desc-brain_mask.nii.gz"
+                        ),
+                        "space": "native",
+                    },
+                    "Warp": {
+                        "pattern": (
+                            "derivatives/fmriprep-1.3.2/{subject}/anat/"
+                            "{subject}_from-MNI152NLin2009cAsym_to-T1w_"
+                            "mode-image_xfm.h5"
+                        ),
+                        "src": "MNI152NLin2009cAsym",
+                        "dst": "native",
+                    },
+                }
+            )
+        # Set default types
+        if types is None:
+            types = list(patterns.keys())
+        # Convert single type into list
+        else:
+            if not isinstance(types, list):
+                types = [types]
+        # The replacements
+        replacements = ["subject", "session", "task", "phase_encoding", "run"]
+        uri = "https://github.com/OpenNeuroDatasets/ds003452.git"
+        super().__init__(
+            types=types,
+            datadir=datadir,
+            uri=uri,
+            patterns=patterns,
+            replacements=replacements,
+            confounds_format="fmriprep",
+        )
+
+    def get_item(
+        self,
+        subject: str,
+        session: str,
+        task: str,
+        phase_encoding: str,
+        run: str,
+    ) -> Dict:
+        """Index one element in the dataset.
+
+        Parameters
+        ----------
+        subject : str
+            The subject ID.
+        session : {"ses-wave1bas", "ses-wave1pro", "ses-wave1rea"}
+            The session to get.
+        task : {"Rest", "Axcpt", "Cuedts", "Stern", "Stroop"}
+            The task to get.
+        phase_encoding : {"AP", "PA"}
+            The phase encoding to get.
+        run : {"1", "2"}
+            The run to get.
+
+        Returns
+        -------
+        out : dict
+            Dictionary of paths for each type of data required for the
+            specified element.
+
+        """
+        # Format run
+        if phase_encoding == "AP":
+            run = "1"
+        else:
+            run = "2"
+        # Fetch item
+        out = super().get_item(
+            subject=subject,
+            session=session,
+            task=task,
+            phase_encoding=phase_encoding,
+            run=run,
+        )
+        if out.get("BOLD"):
+            out["BOLD"]["mask_item"] = "BOLD_mask"
+            # Add space information
+            out["BOLD"].update({"space": "MNI152NLin2009cAsym"})
+        if out.get("T1w"):
+            out["T1w"]["mask_item"] = "T1w_mask"
+            # Add space information
+            if self.native_t1w:
+                out["T1w"].update({"space": "native"})
+            else:
+                out["T1w"].update({"space": "MNI152NLin2009cAsym"})
+        if out.get("Warp"):
+            # Add source space information
+            out["Warp"].update({"src": "MNI152NLin2009cAsym"})
+        return out
+
+    def get_elements(self) -> List:
+        """Implement fetching list of subjects in the dataset.
+
+        Returns
+        -------
+        list of str
+            The list of subjects in the dataset.
+
+        """
+        subjects = [
+            "sub-f1031ax",
+            "sub-f1552xo",
+            "sub-f1659oa",
+            "sub-f1670rz",
+            "sub-f1951tt",
+            "sub-f3300jh",
+            "sub-f3720ca",
+            "sub-f5004cr",
+            "sub-f5407sl",
+            "sub-f5416zj",
+            "sub-f8113do",
+            "sub-f8570ui",
+            "sub-f9057kp",
+        ]
+        elems = []
+        # For wave1bas session
+        for subject, session, task, phase_encoding in product(
+            subjects,
+            ["ses-wave1bas"],
+            self.tasks,
+            self.phase_encodings,
+        ):
+            if phase_encoding == "AP":
+                run = "1"
+            else:
+                run = "2"
+            # Bypass for f1951tt not having run 2 for Rest
+            if subject == "sub-f1951tt" and task == "Rest" and run == "2":
+                continue
+            elems.append((subject, session, task, phase_encoding, run))
+        # For other sessions
+        for subject, session, task, phase_encoding in product(
+            subjects,
+            ["ses-wave1pro", "ses-wave1rea"],
+            ["Rest"],
+            self.phase_encodings,
+        ):
+            if phase_encoding == "AP":
+                run = "1"
+            else:
+                run = "2"
+            # Bypass for f5416zj for not having wave1rea session
+            if subject == "sub-f5416zj" and session == "ses-wave1rea":
+                continue
+            elems.append((subject, session, task, phase_encoding, run))
+
+        return elems
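
The newly added DMCC13Benchmark can be driven like any other junifer DataGrabber. A hedged usage sketch, assuming the class is re-exported from junifer.datagrabber (consistent with the one-line change to junifer/datagrabber/__init__.py listed above):

    from junifer.datagrabber import DMCC13Benchmark

    # Restrict to resting-state AP runs of the baseline session
    dg = DMCC13Benchmark(
        types=["BOLD", "BOLD_confounds"],
        sessions="ses-wave1bas",
        tasks="Rest",
        phase_encodings="AP",
        runs="1",
    )
    with dg:
        for element in dg:
            data = dg[element]  # one dict entry per requested data type
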

junifer/datagrabber/hcp1200/datalad_hcp1200.py
@@ -37,6 +37,11 @@ class DataladHCP1200(DataladDataGrabber, HCP1200):
         Only "REST1" and "REST2" tasks are available with ICA+FIX (default
         False).

+    Raises
+    ------
+    ValueError
+        If invalid value is passed for ``tasks`` or ``phase_encodings``.
+
     """

     def __init__(

junifer/datagrabber/hcp1200/hcp1200.py
@@ -34,8 +34,11 @@ class HCP1200(PatternDataGrabber):
         Whether to retrieve data that was processed with ICA+FIX.
         Only "REST1" and "REST2" tasks are available with ICA+FIX (default
         False).
-    **kwargs
-        Keyword arguments passed to superclass.
+
+    Raises
+    ------
+    ValueError
+        If invalid value is passed for ``tasks`` or ``phase_encodings``.

     """

@@ -45,7 +48,6 @@ class HCP1200(PatternDataGrabber):
         tasks: Union[str, List[str], None] = None,
         phase_encodings: Union[str, List[str], None] = None,
         ica_fix: bool = False,
-        **kwargs,
     ) -> None:
         # All tasks
         all_tasks = [
@@ -74,6 +76,7 @@ class HCP1200(PatternDataGrabber):
                         f"Valid task values can be any or all of {all_tasks}."
                     )
         self.tasks: List[str] = tasks
+
         # All phase encodings
         all_phase_encodings = ["LR", "RL"]
         # Set phase encodings
@@ -90,6 +93,7 @@ class HCP1200(PatternDataGrabber):
                         "Valid phase encoding can be any or all of "
                         f"{all_phase_encodings}."
                     )
+        self.phase_encodings = phase_encodings

         if ica_fix:
             if not all(task in ["REST1", "REST2"] for task in self.tasks):
@@ -97,16 +101,31 @@ class HCP1200(PatternDataGrabber):
                     "ICA+FIX is only available for 'REST1' and 'REST2' tasks."
                 )
         suffix = "_hp2000_clean" if ica_fix else ""
+
         # The types of data
-        types = ["BOLD"]
+        types = ["BOLD", "T1w", "Warp"]
         # The patterns
         patterns = {
-            "BOLD": (
-                "{subject}/MNINonLinear/Results/"
-                "{task}_{phase_encoding}/"
-                "{task}_{phase_encoding}"
-                f"{suffix}.nii.gz"
-            )
+            "BOLD": {
+                "pattern": (
+                    "{subject}/MNINonLinear/Results/"
+                    "{task}_{phase_encoding}/"
+                    "{task}_{phase_encoding}"
+                    f"{suffix}.nii.gz"
+                ),
+                "space": "MNI152NLin6Asym",
+            },
+            "T1w": {
+                "pattern": "{subject}/T1w/T1w_acpc_dc_restore.nii.gz",
+                "space": "native",
+            },
+            "Warp": {
+                "pattern": (
+                    "{subject}/MNINonLinear/xfms/standard2acpc_dc.nii.gz"
+                ),
+                "src": "MNI152NLin6Asym",
+                "dst": "native",
+            },
         }
         # The replacements
         replacements = ["subject", "task", "phase_encoding"]
@@ -116,7 +135,6 @@ class HCP1200(PatternDataGrabber):
             patterns=patterns,
             replacements=replacements,
         )
-        self.phase_encodings = phase_encodings

     def get_item(self, subject: str, task: str, phase_encoding: str) -> Dict:
         """Implement single element indexing in the database.
@@ -144,10 +162,9 @@ class HCP1200(PatternDataGrabber):
         else:
             new_task = f"tfMRI_{task}"

-        out = super().get_item(
+        return super().get_item(
             subject=subject, task=new_task, phase_encoding=phase_encoding
         )
-        return out

     def get_elements(self) -> List:
         """Implement fetching list of elements in the dataset.