junifer 0.0.5.dev98__py3-none-any.whl → 0.0.5.dev131__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (34) hide show
  1. junifer/_version.py +2 -2
  2. junifer/api/tests/test_api_utils.py +2 -0
  3. junifer/data/tests/test_masks.py +1 -1
  4. junifer/datagrabber/aomic/id1000.py +35 -2
  5. junifer/datagrabber/aomic/piop1.py +35 -2
  6. junifer/datagrabber/aomic/piop2.py +35 -2
  7. junifer/datagrabber/aomic/tests/test_id1000.py +43 -97
  8. junifer/datagrabber/aomic/tests/test_piop1.py +64 -106
  9. junifer/datagrabber/aomic/tests/test_piop2.py +44 -100
  10. junifer/datagrabber/pattern.py +2 -1
  11. junifer/datagrabber/utils.py +11 -0
  12. junifer/datareader/default.py +2 -2
  13. junifer/external/BrainPrint/brainprint/__init__.py +4 -0
  14. junifer/external/BrainPrint/brainprint/_version.py +3 -0
  15. junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
  16. junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
  17. junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
  18. junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
  19. junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
  20. junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
  21. junifer/markers/__init__.py +1 -0
  22. junifer/markers/brainprint.py +662 -0
  23. junifer/markers/reho/_afni_reho.py +1 -1
  24. junifer/markers/reho/_junifer_reho.py +1 -1
  25. junifer/markers/reho/reho_parcels.py +2 -2
  26. junifer/markers/reho/reho_spheres.py +2 -2
  27. junifer/markers/tests/test_brainprint.py +47 -0
  28. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/METADATA +2 -1
  29. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/RECORD +34 -24
  30. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/AUTHORS.rst +0 -0
  31. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/LICENSE.md +0 -0
  32. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/WHEEL +0 -0
  33. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/entry_points.txt +0 -0
  34. {junifer-0.0.5.dev98.dist-info → junifer-0.0.5.dev131.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,662 @@
1
+ """Provide class for BrainPrint."""
2
+
3
+ # Authors: Synchon Mandal <s.mandal@fz-juelich.de>
4
+ # License: AGPL
5
+
6
+ try:
7
+ from importlib.metadata import packages_distributions
8
+ except ImportError: # pragma: no cover
9
+ from importlib_metadata import packages_distributions
10
+
11
+ import uuid
12
+ from copy import deepcopy
13
+ from importlib.util import find_spec
14
+ from itertools import chain
15
+ from pathlib import Path
16
+ from typing import (
17
+ TYPE_CHECKING,
18
+ Any,
19
+ ClassVar,
20
+ Dict,
21
+ List,
22
+ Optional,
23
+ Set,
24
+ Union,
25
+ )
26
+
27
+ import numpy as np
28
+ import numpy.typing as npt
29
+
30
+ from ..api.decorators import register_marker
31
+ from ..external.BrainPrint.brainprint.brainprint import (
32
+ compute_asymmetry,
33
+ compute_brainprint,
34
+ )
35
+ from ..external.BrainPrint.brainprint.surfaces import surf_to_vtk
36
+ from ..pipeline import WorkDirManager
37
+ from ..pipeline.utils import check_ext_dependencies
38
+ from ..utils import logger, raise_error, run_ext_cmd
39
+ from .base import BaseMarker
40
+
41
+
42
+ if TYPE_CHECKING:
43
+ from junifer.storage import BaseFeatureStorage
44
+
45
+
46
@register_marker
class BrainPrint(BaseMarker):
    """Class for BrainPrint.

    Computes the BrainPrint shape descriptor (Laplace-Beltrami spectra of
    subcortical and, optionally, cortical surfaces) from FreeSurfer output.

    Parameters
    ----------
    num : positive int, optional
        Number of eigenvalues to compute (default 50).
    skip_cortex : bool, optional
        Whether to skip cortical surface or not (default False).
    keep_eigenvectors : bool, optional
        Whether to also return eigenvectors or not (default False).
    norm : str, optional
        Eigenvalues normalization method (default "none").
    reweight : bool, optional
        Whether to reweight eigenvalues or not (default False).
    asymmetry : bool, optional
        Whether to calculate asymmetry between lateral structures
        (default False).
    asymmetry_distance : {"euc"}, optional
        Distance measurement to use if ``asymmetry=True``:

        * ``"euc"`` : Euclidean

        (default "euc").
    use_cholmod : bool, optional
        If True, attempts to use the Cholesky decomposition for improved
        execution speed. Requires the ``scikit-sparse`` library. If it cannot
        be found, an error will be thrown. If False, will use slower LU
        decomposition (default False).
    name : str, optional
        The name of the marker. If None, will use the class name (default
        None).

    """

    # External (command-line) tools required at compute time
    _EXT_DEPENDENCIES: ClassVar[List[Dict[str, Union[str, List[str]]]]] = [
        {
            "name": "freesurfer",
            "commands": [
                "mri_binarize",
                "mri_pretess",
                "mri_mc",
                "mris_convert",
            ],
        },
    ]

    # Python package dependencies required at compute time
    _DEPENDENCIES: ClassVar[Set[str]] = {"lapy", "numpy"}
95
+
96
+ def __init__(
97
+ self,
98
+ num: int = 50,
99
+ skip_cortex=False,
100
+ keep_eigenvectors: bool = False,
101
+ norm: str = "none",
102
+ reweight: bool = False,
103
+ asymmetry: bool = False,
104
+ asymmetry_distance: str = "euc",
105
+ use_cholmod: bool = False,
106
+ name: Optional[str] = None,
107
+ ) -> None:
108
+ self.num = num
109
+ self.skip_cortex = skip_cortex
110
+ self.keep_eigenvectors = keep_eigenvectors
111
+ self.norm = norm
112
+ self.reweight = reweight
113
+ self.asymmetry = asymmetry
114
+ self.asymmetry_distance = asymmetry_distance
115
+ self.use_cholmod = use_cholmod
116
+ super().__init__(name=name, on="FreeSurfer")
117
+
118
+ def get_valid_inputs(self) -> List[str]:
119
+ """Get valid data types for input.
120
+
121
+ Returns
122
+ -------
123
+ list of str
124
+ The list of data types that can be used as input for this marker.
125
+
126
+ """
127
+ return ["FreeSurfer"]
128
+
129
+ # TODO: kept for making this class concrete; should be removed later
130
+ def get_output_type(self, input_type: str) -> str:
131
+ """Get output type.
132
+
133
+ Parameters
134
+ ----------
135
+ input_type : str
136
+ The data type input to the marker.
137
+
138
+ Returns
139
+ -------
140
+ str
141
+ The storage type output by the marker.
142
+
143
+ """
144
+ return "vector"
145
+
146
+ # TODO: overridden to allow multiple outputs from single data type; should
147
+ # be removed later
148
+ def validate(self, input: List[str]) -> List[str]:
149
+ """Validate the the pipeline step.
150
+
151
+ Parameters
152
+ ----------
153
+ input : list of str
154
+ The input to the pipeline step.
155
+
156
+ Returns
157
+ -------
158
+ list of str
159
+ The output of the pipeline step.
160
+
161
+ """
162
+
163
+ def _check_dependencies(obj) -> None:
164
+ """Check obj._DEPENDENCIES.
165
+
166
+ Parameters
167
+ ----------
168
+ obj : object
169
+ Object to check _DEPENDENCIES of.
170
+
171
+ Raises
172
+ ------
173
+ ImportError
174
+ If the pipeline step object is missing dependencies required
175
+ for its working.
176
+
177
+ """
178
+ # Check if _DEPENDENCIES attribute is found;
179
+ # (markers and preprocessors will have them but not datareaders
180
+ # as of now)
181
+ dependencies_not_found = []
182
+ if hasattr(obj, "_DEPENDENCIES"):
183
+ # Check if dependencies are importable
184
+ for dependency in obj._DEPENDENCIES:
185
+ # First perform an easy check
186
+ if find_spec(dependency) is None:
187
+ # Then check mapped names
188
+ if dependency not in list(
189
+ chain.from_iterable(
190
+ packages_distributions().values()
191
+ )
192
+ ):
193
+ dependencies_not_found.append(dependency)
194
+ # Raise error if any dependency is not found
195
+ if dependencies_not_found:
196
+ raise_error(
197
+ msg=(
198
+ f"{dependencies_not_found} are not installed but are "
199
+ f"required for using {obj.__class__.__name__}."
200
+ ),
201
+ klass=ImportError,
202
+ )
203
+
204
+ def _check_ext_dependencies(obj) -> None:
205
+ """Check obj._EXT_DEPENDENCIES.
206
+
207
+ Parameters
208
+ ----------
209
+ obj : object
210
+ Object to check _EXT_DEPENDENCIES of.
211
+
212
+ """
213
+ # Check if _EXT_DEPENDENCIES attribute is found;
214
+ # (some markers and preprocessors might have them)
215
+ if hasattr(obj, "_EXT_DEPENDENCIES"):
216
+ for dependency in obj._EXT_DEPENDENCIES:
217
+ check_ext_dependencies(**dependency)
218
+
219
+ # Check dependencies
220
+ _check_dependencies(self)
221
+ # Check external dependencies
222
+ # _check_ext_dependencies(self)
223
+ # Validate input
224
+ _ = self.validate_input(input=input)
225
+ # Validate output type
226
+ outputs = ["scalar_table", "vector"]
227
+ return outputs
228
+
229
+ def _create_aseg_surface(
230
+ self,
231
+ aseg_path: Path,
232
+ norm_path: Path,
233
+ indices: List,
234
+ ) -> Path:
235
+ """Generate a surface from the aseg and label files.
236
+
237
+ Parameters
238
+ ----------
239
+ aseg_path : pathlib.Path
240
+ The FreeSurfer aseg path.
241
+ norm_path : pathlib.Path
242
+ The FreeSurfer norm path.
243
+ indices : list
244
+ List of label indices to include in the surface generation.
245
+
246
+ Returns
247
+ -------
248
+ pathlib.Path
249
+ Path to the generated surface in VTK format.
250
+
251
+ """
252
+ tempfile_prefix = f"aseg.{uuid.uuid4()}"
253
+
254
+ # Set mri_binarize command
255
+ mri_binarize_output_path = self._tempdir / f"{tempfile_prefix}.mgz"
256
+ mri_binarize_cmd = [
257
+ "mri_binarize",
258
+ f"--i {aseg_path.resolve()}",
259
+ f"--match {''.join(indices)}",
260
+ f"--o {mri_binarize_output_path.resolve()}",
261
+ ]
262
+ # Call mri_binarize command
263
+ run_ext_cmd(name="mri_binarize", cmd=mri_binarize_cmd)
264
+
265
+ label_value = "1"
266
+ # Fix label (pretess)
267
+ # Set mri_pretess command
268
+ mri_pretess_cmd = [
269
+ "mri_pretess",
270
+ f"{mri_binarize_output_path.resolve()}",
271
+ f"{label_value}",
272
+ f"{norm_path.resolve()}",
273
+ f"{mri_binarize_output_path.resolve()}",
274
+ ]
275
+ # Call mri_pretess command
276
+ run_ext_cmd(name="mri_pretess", cmd=mri_pretess_cmd)
277
+
278
+ # Run marching cube to extract surface
279
+ # Set mri_mc command
280
+ mri_mc_output_path = self._tempdir / f"{tempfile_prefix}.surf"
281
+ mri_mc_cmd = [
282
+ "mri_mc",
283
+ f"{mri_binarize_output_path.resolve()}",
284
+ f"{label_value}",
285
+ f"{mri_mc_output_path.resolve()}",
286
+ ]
287
+ # Run mri_mc command
288
+ run_ext_cmd(name="mri_mc", cmd=mri_mc_cmd)
289
+
290
+ # Convert to vtk
291
+ # Set mris_convert command
292
+ surface_path = (
293
+ self._element_tempdir / f"aseg.final.{'_'.join(indices)}.vtk"
294
+ )
295
+ mris_convert_cmd = [
296
+ "mris_convert",
297
+ f"{mri_mc_output_path.resolve()}",
298
+ f"{surface_path.resolve()}",
299
+ ]
300
+ # Run mris_convert command
301
+ run_ext_cmd(name="mris_convert", cmd=mris_convert_cmd)
302
+
303
+ return surface_path
304
+
305
+ def _create_aseg_surfaces(
306
+ self,
307
+ aseg_path: Path,
308
+ norm_path: Path,
309
+ ) -> Dict[str, Path]:
310
+ """Create surfaces from FreeSurfer aseg labels.
311
+
312
+ Parameters
313
+ ----------
314
+ aseg_path : pathlib.Path
315
+ The FreeSurfer aseg path.
316
+ norm_path : pathlib.Path
317
+ The FreeSurfer norm path.
318
+
319
+ Returns
320
+ -------
321
+ dict
322
+ Dictionary of label names mapped to corresponding surface paths.
323
+
324
+ """
325
+ # Define aseg labels
326
+
327
+ # combined and individual aseg labels:
328
+ # - Left Striatum: left Caudate + Putamen + Accumbens
329
+ # - Right Striatum: right Caudate + Putamen + Accumbens
330
+ # - CorpusCallosum: 5 subregions combined
331
+ # - Cerebellum: brainstem + (left+right) cerebellum WM and GM
332
+ # - Ventricles: (left+right) lat.vent + inf.lat.vent + choroidplexus +
333
+ # 3rdVent + CSF
334
+ # - Lateral-Ventricle: lat.vent + inf.lat.vent + choroidplexus
335
+ # - 3rd-Ventricle: 3rd-Ventricle + CSF
336
+
337
+ aseg_labels = {
338
+ "CorpusCallosum": ["251", "252", "253", "254", "255"],
339
+ "Cerebellum": ["7", "8", "16", "46", "47"],
340
+ "Ventricles": ["4", "5", "14", "24", "31", "43", "44", "63"],
341
+ "3rd-Ventricle": ["14", "24"],
342
+ "4th-Ventricle": ["15"],
343
+ "Brain-Stem": ["16"],
344
+ "Left-Striatum": ["11", "12", "26"],
345
+ "Left-Lateral-Ventricle": ["4", "5", "31"],
346
+ "Left-Cerebellum-White-Matter": ["7"],
347
+ "Left-Cerebellum-Cortex": ["8"],
348
+ "Left-Thalamus-Proper": ["10"],
349
+ "Left-Caudate": ["11"],
350
+ "Left-Putamen": ["12"],
351
+ "Left-Pallidum": ["13"],
352
+ "Left-Hippocampus": ["17"],
353
+ "Left-Amygdala": ["18"],
354
+ "Left-Accumbens-area": ["26"],
355
+ "Left-VentralDC": ["28"],
356
+ "Right-Striatum": ["50", "51", "58"],
357
+ "Right-Lateral-Ventricle": ["43", "44", "63"],
358
+ "Right-Cerebellum-White-Matter": ["46"],
359
+ "Right-Cerebellum-Cortex": ["47"],
360
+ "Right-Thalamus-Proper": ["49"],
361
+ "Right-Caudate": ["50"],
362
+ "Right-Putamen": ["51"],
363
+ "Right-Pallidum": ["52"],
364
+ "Right-Hippocampus": ["53"],
365
+ "Right-Amygdala": ["54"],
366
+ "Right-Accumbens-area": ["58"],
367
+ "Right-VentralDC": ["60"],
368
+ }
369
+ return {
370
+ label: self._create_aseg_surface(
371
+ aseg_path=aseg_path,
372
+ norm_path=norm_path,
373
+ indices=indices,
374
+ )
375
+ for label, indices in aseg_labels.items()
376
+ }
377
+
378
+ def _create_cortical_surfaces(
379
+ self,
380
+ lh_white_path: Path,
381
+ rh_white_path: Path,
382
+ lh_pial_path: Path,
383
+ rh_pial_path: Path,
384
+ ) -> Dict[str, Path]:
385
+ """Create cortical surfaces from FreeSurfer labels.
386
+
387
+ Parameters
388
+ ----------
389
+ lh_white_path : pathlib.Path
390
+ The FreeSurfer lh.white path.
391
+ rh_white_path : pathlib.Path
392
+ The FreeSurfer rh.white path.
393
+ lh_pial_path : pathlib.Path
394
+ The FreeSurfer lh.pial path.
395
+ rh_pial_path : pathlib.Path
396
+ The FreeSurfer rh.pial path.
397
+
398
+ Returns
399
+ -------
400
+ dict
401
+ Cortical surface label names with their paths as dictionary.
402
+
403
+ """
404
+ return {
405
+ "lh-white-2d": surf_to_vtk(
406
+ lh_white_path.resolve(),
407
+ (self._element_tempdir / "lh.white.vtk").resolve(),
408
+ ),
409
+ "rh-white-2d": surf_to_vtk(
410
+ rh_white_path.resolve(),
411
+ (self._element_tempdir / "rh.white.vtk").resolve(),
412
+ ),
413
+ "lh-pial-2d": surf_to_vtk(
414
+ lh_pial_path.resolve(),
415
+ (self._element_tempdir / "lh.pial.vtk").resolve(),
416
+ ),
417
+ "rh-pial-2d": surf_to_vtk(
418
+ rh_pial_path.resolve(),
419
+ (self._element_tempdir / "rh.pial.vtk").resolve(),
420
+ ),
421
+ }
422
+
423
+ def compute(
424
+ self,
425
+ input: Dict[str, Any],
426
+ extra_input: Optional[Dict] = None,
427
+ ) -> Dict:
428
+ """Compute.
429
+
430
+ Parameters
431
+ ----------
432
+ input : dict
433
+ The FreeSurfer data as dictionary.
434
+ extra_input : dict, optional
435
+ The other fields in the pipeline data object (default None).
436
+
437
+ Returns
438
+ -------
439
+ dict
440
+ The computed result as dictionary. The dictionary has the following
441
+ keys:
442
+
443
+ * ``eigenvalues`` : dict of surface labels (str) and eigenvalues
444
+ (``np.ndarray``)
445
+ * ``eigenvectors`` : dict of surface labels (str) and eigenvectors
446
+ (``np.ndarray``) if ``keep_eigenvectors=True``
447
+ else None
448
+ * ``distances`` : dict of ``{left_label}_{right_label}`` (str) and
449
+ distance (float) if ``asymmetry=True`` else None
450
+
451
+ References
452
+ ----------
453
+ .. [1] Wachinger, C., Golland, P., Kremen, W. et al. (2015)
454
+ BrainPrint: A discriminative characterization of brain
455
+ morphology.
456
+ NeuroImage, Volume 109, Pages 232-248.
457
+ https://doi.org/10.1016/j.neuroimage.2015.01.032.
458
+ .. [2] Reuter, M., Wolter, F.E., Peinecke, N. (2006)
459
+ Laplace-Beltrami spectra as 'Shape-DNA' of surfaces and solids.
460
+ Computer-Aided Design, Volume 38, Issue 4, Pages 342-366.
461
+ https://doi.org/10.1016/j.cad.2005.10.011.
462
+
463
+ """
464
+ logger.debug("Computing BrainPrint")
465
+
466
+ # Create component-scoped tempdir
467
+ self._tempdir = WorkDirManager().get_tempdir(prefix="brainprint")
468
+ # Create element-scoped tempdir so that the files are
469
+ # available later as nibabel stores file path reference for
470
+ # loading on computation
471
+ self._element_tempdir = WorkDirManager().get_element_tempdir(
472
+ prefix="brainprint"
473
+ )
474
+ # Generate surfaces
475
+ surfaces = self._create_aseg_surfaces(
476
+ aseg_path=input["aseg"]["path"],
477
+ norm_path=input["norm"]["path"],
478
+ )
479
+ if not self.skip_cortex:
480
+ cortical_surfaces = self._create_cortical_surfaces(
481
+ lh_white_path=input["lh_white"]["path"],
482
+ rh_white_path=input["rh_white"]["path"],
483
+ lh_pial_path=input["lh_pial"]["path"],
484
+ rh_pial_path=input["rh_pial"]["path"],
485
+ )
486
+ surfaces.update(cortical_surfaces)
487
+ # Compute brainprint
488
+ eigenvalues, _ = compute_brainprint(
489
+ surfaces=surfaces,
490
+ keep_eigenvectors=self.keep_eigenvectors,
491
+ num=self.num,
492
+ norm=self.norm,
493
+ reweight=self.reweight,
494
+ use_cholmod=self.use_cholmod,
495
+ )
496
+ # Calculate distances (if required)
497
+ distances = None
498
+ if self.asymmetry:
499
+ distances = compute_asymmetry(
500
+ eigenvalues=eigenvalues,
501
+ distance=self.asymmetry_distance,
502
+ skip_cortex=self.skip_cortex,
503
+ )
504
+
505
+ # Delete tempdir
506
+ WorkDirManager().delete_tempdir(self._tempdir)
507
+
508
+ output = {
509
+ "eigenvalues": {
510
+ "data": self._fix_nan(
511
+ [val[2:] for val in eigenvalues.values()]
512
+ ).T,
513
+ "col_names": list(eigenvalues.keys()),
514
+ "row_names": [f"ev{i}" for i in range(self.num)],
515
+ "row_header_col_name": "eigenvalue",
516
+ },
517
+ "areas": {
518
+ "data": self._fix_nan(
519
+ [val[0] for val in eigenvalues.values()]
520
+ ),
521
+ "col_names": list(eigenvalues.keys()),
522
+ },
523
+ "volumes": {
524
+ "data": self._fix_nan(
525
+ [val[1] for val in eigenvalues.values()]
526
+ ),
527
+ "col_names": list(eigenvalues.keys()),
528
+ },
529
+ }
530
+ if self.asymmetry:
531
+ output["distances"] = {
532
+ "data": self._fix_nan(list(distances.values())),
533
+ "col_names": list(distances.keys()),
534
+ }
535
+ return output
536
+
537
+ def _fix_nan(
538
+ self,
539
+ input_data: List[Union[float, str, npt.ArrayLike]],
540
+ ) -> np.ndarray:
541
+ """Convert BrainPrint output with string NaN to ``numpy.nan``.
542
+
543
+ Parameters
544
+ ----------
545
+ input_data : list of str, float or numpy.ndarray-like
546
+ The data to convert.
547
+
548
+ Returns
549
+ -------
550
+ np.ndarray
551
+ The converted data as ``numpy.ndarray``.
552
+
553
+ """
554
+ arr = np.asarray(input_data)
555
+ arr[arr == "NaN"] = np.nan
556
+ return arr.astype(np.float64)
557
+
558
+ # TODO: overridden to allow storing multiple outputs from single input;
559
+ # should be removed later
560
+ def store(
561
+ self,
562
+ type_: str,
563
+ feature: str,
564
+ out: Dict[str, Any],
565
+ storage: "BaseFeatureStorage",
566
+ ) -> None:
567
+ """Store.
568
+
569
+ Parameters
570
+ ----------
571
+ type_ : str
572
+ The data type to store.
573
+ feature : {"eigenvalues", "distances", "areas", "volumes"}
574
+ The feature name to store.
575
+ out : dict
576
+ The computed result as a dictionary to store.
577
+ storage : storage-like
578
+ The storage class, for example, SQLiteFeatureStorage.
579
+
580
+ Raises
581
+ ------
582
+ ValueError
583
+ If ``feature`` is invalid.
584
+
585
+ """
586
+ if feature == "eigenvalues":
587
+ output_type = "scalar_table"
588
+ elif feature in ["distances", "areas", "volumes"]:
589
+ output_type = "vector"
590
+ else:
591
+ raise_error(f"Unknown feature: {feature}")
592
+
593
+ logger.debug(f"Storing {output_type} in {storage}")
594
+ storage.store(kind=output_type, **out)
595
+
596
+ # TODO: overridden to allow storing multiple outputs from single input;
597
+ # should be removed later
598
+ def _fit_transform(
599
+ self,
600
+ input: Dict[str, Dict],
601
+ storage: Optional["BaseFeatureStorage"] = None,
602
+ ) -> Dict:
603
+ """Fit and transform.
604
+
605
+ Parameters
606
+ ----------
607
+ input : dict
608
+ The Junifer Data object.
609
+ storage : storage-like, optional
610
+ The storage class, for example, SQLiteFeatureStorage.
611
+
612
+ Returns
613
+ -------
614
+ dict
615
+ The processed output as a dictionary. If `storage` is provided,
616
+ empty dictionary is returned.
617
+
618
+ """
619
+ out = {}
620
+ for type_ in self._on:
621
+ if type_ in input.keys():
622
+ logger.info(f"Computing {type_}")
623
+ t_input = input[type_]
624
+ extra_input = input.copy()
625
+ extra_input.pop(type_)
626
+ t_meta = t_input["meta"].copy()
627
+ t_meta["type"] = type_
628
+
629
+ # Returns multiple features
630
+ t_out = self.compute(input=t_input, extra_input=extra_input)
631
+
632
+ if storage is None:
633
+ out[type_] = {}
634
+
635
+ for feature_name, feature_data in t_out.items():
636
+ # Make deep copy of the feature data for manipulation
637
+ feature_data_copy = deepcopy(feature_data)
638
+ # Make deep copy of metadata and add to feature data
639
+ feature_data_copy["meta"] = deepcopy(t_meta)
640
+ # Update metadata for the feature,
641
+ # feature data is not manipulated, only meta
642
+ self.update_meta(feature_data_copy, "marker")
643
+ # Update marker feature's metadata name
644
+ feature_data_copy["meta"]["marker"][
645
+ "name"
646
+ ] += f"_{feature_name}"
647
+
648
+ if storage is not None:
649
+ logger.info(f"Storing in {storage}")
650
+ self.store(
651
+ type_=type_,
652
+ feature=feature_name,
653
+ out=feature_data_copy,
654
+ storage=storage,
655
+ )
656
+ else:
657
+ logger.info(
658
+ "No storage specified, returning dictionary"
659
+ )
660
+ out[type_][feature_name] = feature_data_copy
661
+
662
+ return out
@@ -72,7 +72,7 @@ class AFNIReHo:
72
72
  Number of voxels in the neighbourhood, inclusive. Can be:
73
73
 
74
74
  * 7 : for facewise neighbours only
75
- * 19 : for face- and edge-wise nieghbours
75
+ * 19 : for face- and edge-wise neighbours
76
76
  * 27 : for face-, edge-, and node-wise neighbors
77
77
 
78
78
  (default 27).
@@ -60,7 +60,7 @@ class JuniferReHo:
60
60
  Number of voxels in the neighbourhood, inclusive. Can be:
61
61
 
62
62
  * 7 : for facewise neighbours only
63
- * 19 : for face- and edge-wise nieghbours
63
+ * 19 : for face- and edge-wise neighbours
64
64
  * 27 : for face-, edge-, and node-wise neighbors
65
65
  * 125 : for 5x5 cuboidal volume
66
66
 
@@ -37,7 +37,7 @@ class ReHoParcels(ReHoBase):
37
37
  Number of voxels in the neighbourhood, inclusive. Can be:
38
38
 
39
39
  - 7 : for facewise neighbours only
40
- - 19 : for face- and edge-wise nieghbours
40
+ - 19 : for face- and edge-wise neighbours
41
41
  - 27 : for face-, edge-, and node-wise neighbors
42
42
 
43
43
  * ``neigh_rad`` : positive float, optional
@@ -67,7 +67,7 @@ class ReHoParcels(ReHoBase):
67
67
  Number of voxels in the neighbourhood, inclusive. Can be:
68
68
 
69
69
  * 7 : for facewise neighbours only
70
- * 19 : for face- and edge-wise nieghbours
70
+ * 19 : for face- and edge-wise neighbours
71
71
  * 27 : for face-, edge-, and node-wise neighbors
72
72
  * 125 : for 5x5 cuboidal volume
73
73