junifer 0.0.5.dev110__py3-none-any.whl → 0.0.5.dev145__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- junifer/__init__.py +17 -0
- junifer/_version.py +2 -2
- junifer/api/__init__.py +3 -0
- junifer/api/decorators.py +9 -0
- junifer/api/functions.py +7 -4
- junifer/api/parser.py +3 -0
- junifer/api/queue_context/__init__.py +3 -0
- junifer/api/res/afni/run_afni_docker.sh +1 -1
- junifer/api/res/ants/run_ants_docker.sh +1 -1
- junifer/api/res/fsl/run_fsl_docker.sh +1 -1
- junifer/api/tests/test_api_utils.py +2 -0
- junifer/configs/juseless/__init__.py +3 -0
- junifer/configs/juseless/datagrabbers/__init__.py +9 -0
- junifer/configs/juseless/datagrabbers/aomic_id1000_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/camcan_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ixi_vbm.py +3 -0
- junifer/configs/juseless/datagrabbers/ucla.py +3 -0
- junifer/configs/juseless/datagrabbers/ukb_vbm.py +3 -0
- junifer/data/__init__.py +20 -0
- junifer/data/coordinates.py +8 -0
- junifer/data/masks.py +10 -0
- junifer/data/parcellations.py +9 -0
- junifer/data/template_spaces.py +3 -0
- junifer/data/tests/test_masks.py +1 -1
- junifer/data/utils.py +3 -0
- junifer/datagrabber/__init__.py +15 -0
- junifer/datagrabber/aomic/__init__.py +3 -0
- junifer/datagrabber/aomic/id1000.py +3 -0
- junifer/datagrabber/aomic/piop1.py +3 -0
- junifer/datagrabber/aomic/piop2.py +3 -0
- junifer/datagrabber/base.py +3 -0
- junifer/datagrabber/datalad_base.py +3 -0
- junifer/datagrabber/hcp1200/__init__.py +3 -0
- junifer/datagrabber/hcp1200/datalad_hcp1200.py +3 -0
- junifer/datagrabber/hcp1200/hcp1200.py +3 -0
- junifer/datagrabber/multiple.py +3 -0
- junifer/datagrabber/pattern.py +5 -1
- junifer/datagrabber/pattern_datalad.py +3 -0
- junifer/datagrabber/utils.py +3 -0
- junifer/datareader/__init__.py +3 -0
- junifer/datareader/default.py +3 -0
- junifer/external/BrainPrint/brainprint/__init__.py +4 -0
- junifer/external/BrainPrint/brainprint/_version.py +3 -0
- junifer/external/BrainPrint/brainprint/asymmetry.py +91 -0
- junifer/external/BrainPrint/brainprint/brainprint.py +441 -0
- junifer/external/BrainPrint/brainprint/surfaces.py +258 -0
- junifer/external/BrainPrint/brainprint/utils/__init__.py +1 -0
- junifer/external/BrainPrint/brainprint/utils/_config.py +112 -0
- junifer/external/BrainPrint/brainprint/utils/utils.py +188 -0
- junifer/markers/__init__.py +22 -0
- junifer/markers/base.py +21 -7
- junifer/markers/brainprint.py +665 -0
- junifer/markers/collection.py +3 -0
- junifer/markers/complexity/__init__.py +10 -0
- junifer/markers/complexity/complexity_base.py +3 -0
- junifer/markers/complexity/hurst_exponent.py +3 -0
- junifer/markers/complexity/multiscale_entropy_auc.py +3 -0
- junifer/markers/complexity/perm_entropy.py +3 -0
- junifer/markers/complexity/range_entropy.py +3 -0
- junifer/markers/complexity/range_entropy_auc.py +3 -0
- junifer/markers/complexity/sample_entropy.py +3 -0
- junifer/markers/complexity/tests/test_hurst_exponent.py +7 -0
- junifer/markers/complexity/tests/test_multiscale_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_perm_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy.py +7 -0
- junifer/markers/complexity/tests/test_range_entropy_auc.py +7 -0
- junifer/markers/complexity/tests/test_sample_entropy.py +7 -0
- junifer/markers/complexity/tests/test_weighted_perm_entropy.py +7 -0
- junifer/markers/complexity/weighted_perm_entropy.py +3 -0
- junifer/markers/ets_rss.py +3 -0
- junifer/markers/falff/__init__.py +3 -0
- junifer/markers/falff/_afni_falff.py +3 -0
- junifer/markers/falff/_junifer_falff.py +3 -0
- junifer/markers/falff/falff_base.py +3 -0
- junifer/markers/falff/falff_parcels.py +3 -0
- junifer/markers/falff/falff_spheres.py +3 -0
- junifer/markers/functional_connectivity/__init__.py +9 -0
- junifer/markers/functional_connectivity/crossparcellation_functional_connectivity.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/edge_functional_connectivity_spheres.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_base.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_parcels.py +3 -0
- junifer/markers/functional_connectivity/functional_connectivity_spheres.py +3 -0
- junifer/markers/parcel_aggregation.py +3 -0
- junifer/markers/reho/__init__.py +3 -0
- junifer/markers/reho/_afni_reho.py +4 -1
- junifer/markers/reho/_junifer_reho.py +4 -1
- junifer/markers/reho/reho_base.py +2 -0
- junifer/markers/reho/reho_parcels.py +5 -2
- junifer/markers/reho/reho_spheres.py +5 -2
- junifer/markers/sphere_aggregation.py +3 -0
- junifer/markers/temporal_snr/__init__.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_base.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_parcels.py +3 -0
- junifer/markers/temporal_snr/temporal_snr_spheres.py +3 -0
- junifer/markers/tests/test_brainprint.py +47 -0
- junifer/onthefly/__init__.py +3 -0
- junifer/onthefly/read_transform.py +3 -0
- junifer/pipeline/__init__.py +8 -0
- junifer/pipeline/pipeline_step_mixin.py +3 -0
- junifer/pipeline/registry.py +3 -0
- junifer/pipeline/singleton.py +3 -0
- junifer/pipeline/update_meta_mixin.py +3 -0
- junifer/pipeline/utils.py +3 -0
- junifer/pipeline/workdir_manager.py +3 -0
- junifer/preprocess/__init__.py +8 -0
- junifer/preprocess/base.py +4 -1
- junifer/preprocess/confounds/__init__.py +3 -0
- junifer/preprocess/confounds/fmriprep_confound_remover.py +3 -0
- junifer/preprocess/smoothing/__init__.py +3 -0
- junifer/preprocess/warping/__init__.py +3 -0
- junifer/preprocess/warping/_ants_warper.py +3 -0
- junifer/preprocess/warping/_fsl_warper.py +3 -0
- junifer/stats.py +3 -0
- junifer/storage/__init__.py +8 -0
- junifer/storage/base.py +3 -0
- junifer/storage/hdf5.py +3 -0
- junifer/storage/pandas_base.py +3 -0
- junifer/storage/sqlite.py +3 -0
- junifer/storage/utils.py +9 -0
- junifer/testing/__init__.py +3 -0
- junifer/testing/datagrabbers.py +7 -0
- junifer/testing/utils.py +3 -0
- junifer/utils/__init__.py +10 -0
- junifer/utils/fs.py +3 -0
- junifer/utils/helpers.py +3 -0
- junifer/utils/logging.py +10 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/METADATA +2 -1
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/RECORD +134 -124
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/AUTHORS.rst +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/LICENSE.md +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/WHEEL +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/entry_points.txt +0 -0
- {junifer-0.0.5.dev110.dist-info → junifer-0.0.5.dev145.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,112 @@
|
|
1
|
+
import platform
|
2
|
+
import re
|
3
|
+
import sys
|
4
|
+
from functools import partial
|
5
|
+
from importlib.metadata import requires, version
|
6
|
+
from typing import IO, Callable, List, Optional
|
7
|
+
|
8
|
+
import psutil
|
9
|
+
|
10
|
+
|
11
|
+
def sys_info(fid: Optional[IO] = None, developer: bool = False):
    """Print the system information for debugging.

    Parameters
    ----------
    fid : file-like, default=None
        The file to write to, passed to :func:`print`.
        Can be None to use :data:`sys.stdout`.
    developer : bool, default=False
        If True, display information about optional dependencies.

    """
    pad = 26
    write = partial(print, end="", file=fid)
    package = __package__.split(".")[0]

    # Platform / interpreter details (requires python 3.8 or above)
    write("Platform:".ljust(pad) + platform.platform() + "\n")
    write("Python:".ljust(pad) + sys.version.replace("\n", " ") + "\n")
    write("Executable:".ljust(pad) + sys.executable + "\n")
    # CPU details: physical cores first, then logical
    write("CPU:".ljust(pad) + platform.processor() + "\n")
    write("Physical cores:".ljust(pad) + str(psutil.cpu_count(False)) + "\n")
    write("Logical cores:".ljust(pad) + str(psutil.cpu_count(True)) + "\n")
    # Memory totals, reported in GiB
    write("RAM:".ljust(pad))
    write(f"{psutil.virtual_memory().total / float(2 ** 30):0.1f} GB\n")
    write("SWAP:".ljust(pad))
    write(f"{psutil.swap_memory().total / float(2 ** 30):0.1f} GB\n")

    # Mandatory dependencies and their installed versions
    write("\nDependencies info\n")
    write(f"{package}:".ljust(pad) + version(package) + "\n")
    core = [
        spec.split(";")[0].rstrip()
        for spec in requires(package)
        if "extra" not in spec
    ]
    _list_dependencies_info(write, pad, core)

    # Optional dependency groups, only shown on request
    if not developer:
        return
    for key in ("build", "doc", "test", "style"):
        extras = [
            spec.split(";")[0].rstrip()
            for spec in requires(package)
            if f"extra == '{key}'" in spec or f'extra == "{key}"' in spec
        ]
        if not extras:
            continue
        write(f"\nOptional '{key}' info\n")
        _list_dependencies_info(write, pad, extras)
70
|
+
def _list_dependencies_info(out: Callable, ljust: int, dependencies: List[str]):
|
71
|
+
"""List dependencies names and versions.
|
72
|
+
|
73
|
+
Parameters
|
74
|
+
----------
|
75
|
+
out : Callable
|
76
|
+
output function
|
77
|
+
ljust : int
|
78
|
+
length of returned string
|
79
|
+
dependencies : List[str]
|
80
|
+
list of dependencies
|
81
|
+
|
82
|
+
"""
|
83
|
+
|
84
|
+
for dep in dependencies:
|
85
|
+
# handle dependencies with version specifiers
|
86
|
+
specifiers_pattern = r"(~=|==|!=|<=|>=|<|>|===)"
|
87
|
+
specifiers = re.findall(specifiers_pattern, dep)
|
88
|
+
if len(specifiers) != 0:
|
89
|
+
dep, _ = dep.split(specifiers[0])
|
90
|
+
while not dep[-1].isalpha():
|
91
|
+
dep = dep[:-1]
|
92
|
+
# handle dependencies provided with a [key], e.g. pydocstyle[toml]
|
93
|
+
if "[" in dep:
|
94
|
+
dep = dep.split("[")[0]
|
95
|
+
try:
|
96
|
+
version_ = version(dep)
|
97
|
+
except Exception:
|
98
|
+
version_ = "Not found."
|
99
|
+
|
100
|
+
# handle special dependencies with backends, C dep, ..
|
101
|
+
if dep in ("matplotlib", "seaborn") and version_ != "Not found.":
|
102
|
+
try:
|
103
|
+
from matplotlib import pyplot as plt
|
104
|
+
|
105
|
+
backend = plt.get_backend()
|
106
|
+
except Exception:
|
107
|
+
backend = "Not found"
|
108
|
+
|
109
|
+
out(f"{dep}:".ljust(ljust) + version_ + f" (backend: {backend})\n")
|
110
|
+
|
111
|
+
else:
|
112
|
+
out(f"{dep}:".ljust(ljust) + version_ + "\n")
|
@@ -0,0 +1,188 @@
|
|
1
|
+
"""
|
2
|
+
Utilities for the :mod:`brainprint` module.
|
3
|
+
"""
|
4
|
+
import os
|
5
|
+
import shlex
|
6
|
+
import subprocess
|
7
|
+
from pathlib import Path
|
8
|
+
from typing import Dict
|
9
|
+
|
10
|
+
import numpy as np
|
11
|
+
import pandas as pd
|
12
|
+
|
13
|
+
|
14
|
+
def validate_environment() -> None:
    """
    Checks whether required environment variables are set.
    """
    # FreeSurfer tooling cannot be located without $FREESURFER_HOME.
    freesurfer_home = os.getenv("FREESURFER_HOME")
    if freesurfer_home:
        return
    raise RuntimeError(
        "FreeSurfer root directory must be set as the $FREESURFER_HOME "
        "environment variable!"
    )
|
24
|
+
|
25
|
+
def test_freesurfer() -> None:
    """
    Tests that the FreeSurfer binaries are accessible and executable.

    Raises
    ------
    RuntimeError
        Failed to execute test FreeSurfer command
    """
    command = "mri_binarize -version"
    try:
        run_shell_command(command)
    except FileNotFoundError as error:
        # Chain the original exception so the missing-binary cause is
        # preserved in the traceback (PEP 3134).
        raise RuntimeError(
            "Failed to run FreeSurfer command, please check the required binaries "
            "are included in your $PATH."
        ) from error
|
43
|
+
|
44
|
+
def run_shell_command(command: str, verbose: bool = False):
    """
    Execute shell command.

    Parameters
    ----------
    command : str
        Shell command to be executed
    verbose : bool, optional
        If True, print the command before executing it (default False)

    Raises
    ------
    RuntimeError
        Shell command execution failure
    """
    if verbose:
        print(f"Executing command:\t{command}", end="\n")
    # shlex.split keeps quoting semantics without invoking a shell.
    args = shlex.split(command)
    try:
        return_code = subprocess.call(args)
    except Exception as e:
        message = (
            "Failed to execute the following command:\n{command}\n"
            "The following exception was raised:\n{exception}".format(
                command=command, exception=e
            )
        )
        print(message)
        # Re-raise the original exception for the caller to handle.
        raise
    if return_code != 0:
        message = (
            "Execution of the following command:\n{command}\n"
            "Returned non-zero exit code!".format(command=command)
        )
        raise RuntimeError(message)
+
|
79
|
+
|
80
|
+
def validate_subject_dir(subjects_dir: Path, subject_id: str) -> Path:
    """
    Checks the input FreeSurfer preprocessing results directory exists.

    Parameters
    ----------
    subjects_dir : Path
        FreeSurfer's subjects directory
    subject_id : str
        The subject identifier, as defined within the FreeSurfer's subjects
        directory

    Returns
    -------
    Path
        The subject's results directory

    Raises
    ------
    FileNotFoundError
        Subject results directory does not exist
    """
    candidate = Path(subjects_dir) / subject_id
    if candidate.is_dir():
        return candidate
    raise FileNotFoundError(
        "FreeSurfer results directory at {path} does not exist!".format(
            path=candidate
        )
    )
+
|
105
|
+
|
106
|
+
def resolve_destination(subject_dir: Path, destination: Path = None) -> Path:
    """
    Resolve the results root directory.

    Falls back to a ``brainprint`` directory nested under *subject_dir*
    when no explicit *destination* is given.
    """
    if destination is not None:
        return destination
    return Path(subject_dir) / "brainprint"
+
|
111
|
+
|
112
|
+
def create_output_paths(subject_dir: Path = None, destination: Path = None) -> Path:
    """
    Creates the output directories in which the BrainPrint analysis derivatives
    will be created. One of *subject_dir* or *destination* must be
    provided.

    Parameters
    ----------
    subject_dir : Path, optional
        If provided, will simply nest results in the provided directory, by
        default None
    destination : Path, optional
        If provided, will use this path as the results root directory, by
        default None

    Returns
    -------
    Path
        The resolved results root directory

    Raises
    ------
    ValueError
        No *subject_dir* or *destination* provided
    """
    # Honor the documented contract: previously this fell through to
    # ``Path(None)`` and crashed with an unrelated TypeError.
    if subject_dir is None and destination is None:
        raise ValueError(
            "Either *subject_dir* or *destination* must be provided!"
        )
    destination = resolve_destination(subject_dir, destination)
    # exist_ok makes repeated invocations idempotent.
    destination.mkdir(parents=True, exist_ok=True)
    (destination / "surfaces").mkdir(parents=True, exist_ok=True)
    (destination / "temp").mkdir(parents=True, exist_ok=True)
    return destination
|
138
|
+
|
139
|
+
def export_brainprint_results(
    destination: Path,
    eigenvalues: np.ndarray,
    eigenvectors: Dict[str, np.ndarray] = None,
    distances: np.ndarray = None,
) -> Dict[str, Path]:
    """
    Writes the BrainPrint analysis results to CSV files.

    Parameters
    ----------
    destination : Path
        Eigenvalues CSV file destination
    eigenvalues : np.ndarray
        Eigenvalues
    eigenvectors : Dict[str, np.ndarray], optional
        Eigenvectors keyed by label (presumably hemisphere/structure —
        verify against caller), by default None
    distances : np.ndarray, optional
        Distances, by default None

    Returns
    -------
    Dict[str, Path]
        Mapping of result kind ("eigenvalues", "eigenvectors", "distances")
        to the path it was written to
    """
    files = {}
    # First two rows hold surface area and volume; the rest are eigenvalues.
    df = pd.DataFrame(eigenvalues).sort_index(axis=1)
    ev_indices = [f"ev{i}" for i in range(len(df) - 2)]
    df.index = ["area", "volume"] + ev_indices
    df.to_csv(destination, index=True, na_rep="NaN")
    files["eigenvalues"] = destination

    if eigenvectors is not None:
        # One CSV per key, grouped under an "eigenvectors" sibling directory.
        eigenvectors_dir = destination.parent / "eigenvectors"
        eigenvectors_dir.mkdir(parents=True, exist_ok=True)
        for key, value in eigenvectors.items():
            suffix = ".evecs-{key}.csv".format(key=key)
            name = destination.with_suffix(suffix).name
            vectors_destination = eigenvectors_dir / name
            pd.DataFrame(value).to_csv(
                vectors_destination,
                index=True,
                na_rep="NaN",
            )
        files["eigenvectors"] = eigenvectors_dir

    if distances is not None:
        # Asymmetry distances are written next to the eigenvalues CSV.
        distances_destination = destination.with_suffix(".asymmetry.csv")
        pd.DataFrame([distances]).to_csv(
            distances_destination,
            index=False,
            na_rep="NaN",
        )
        files["distances"] = distances_destination
    return files
junifer/markers/__init__.py
CHANGED
@@ -23,3 +23,25 @@ from .temporal_snr import (
|
|
23
23
|
TemporalSNRParcels,
|
24
24
|
TemporalSNRSpheres,
|
25
25
|
)
|
26
|
+
from .brainprint import BrainPrint
|
27
|
+
|
28
|
+
|
29
|
+
__all__ = [
|
30
|
+
"BaseMarker",
|
31
|
+
"MarkerCollection",
|
32
|
+
"RSSETSMarker",
|
33
|
+
"ParcelAggregation",
|
34
|
+
"SphereAggregation",
|
35
|
+
"FunctionalConnectivityParcels",
|
36
|
+
"FunctionalConnectivitySpheres",
|
37
|
+
"CrossParcellationFC",
|
38
|
+
"EdgeCentricFCParcels",
|
39
|
+
"EdgeCentricFCSpheres",
|
40
|
+
"ReHoParcels",
|
41
|
+
"ReHoSpheres",
|
42
|
+
"ALFFParcels",
|
43
|
+
"ALFFSpheres",
|
44
|
+
"TemporalSNRParcels",
|
45
|
+
"TemporalSNRSpheres",
|
46
|
+
"BrainPrint",
|
47
|
+
]
|
junifer/markers/base.py
CHANGED
@@ -15,16 +15,22 @@ if TYPE_CHECKING:
|
|
15
15
|
from junifer.storage import BaseFeatureStorage
|
16
16
|
|
17
17
|
|
18
|
+
__all__ = ["BaseMarker"]
|
19
|
+
|
20
|
+
|
18
21
|
class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
|
19
22
|
"""Abstract base class for all markers.
|
20
23
|
|
24
|
+
For every interface that is required, one needs to provide a concrete
|
25
|
+
implementation of this abstract class.
|
26
|
+
|
21
27
|
Parameters
|
22
28
|
----------
|
23
|
-
on : str or list of str
|
24
|
-
The
|
25
|
-
available data.
|
29
|
+
on : str or list of str or None, optional
|
30
|
+
The data type to apply the marker on. If None,
|
31
|
+
will work on all available data types (default None).
|
26
32
|
name : str, optional
|
27
|
-
The name of the marker.
|
33
|
+
The name of the marker. If None, will use the class name as the
|
28
34
|
name of the marker (default None).
|
29
35
|
|
30
36
|
Raises
|
@@ -192,17 +198,25 @@ class BaseMarker(ABC, PipelineStepMixin, UpdateMetaMixin):
|
|
192
198
|
for type_ in self._on:
|
193
199
|
if type_ in input.keys():
|
194
200
|
logger.info(f"Computing {type_}")
|
201
|
+
# Get data dict for data type
|
195
202
|
t_input = input[type_]
|
203
|
+
# Pass the other data types as extra input, removing
|
204
|
+
# the current type
|
196
205
|
extra_input = input.copy()
|
197
206
|
extra_input.pop(type_)
|
207
|
+
logger.debug(
|
208
|
+
f"Extra data type for feature extraction: "
|
209
|
+
f"{extra_input.keys()}"
|
210
|
+
)
|
211
|
+
# Copy metadata
|
198
212
|
t_meta = t_input["meta"].copy()
|
199
213
|
t_meta["type"] = type_
|
200
|
-
|
214
|
+
# Compute marker
|
201
215
|
t_out = self.compute(input=t_input, extra_input=extra_input)
|
202
216
|
t_out["meta"] = t_meta
|
203
|
-
|
217
|
+
# Update metadata for step
|
204
218
|
self.update_meta(t_out, "marker")
|
205
|
-
|
219
|
+
# Check storage
|
206
220
|
if storage is not None:
|
207
221
|
logger.info(f"Storing in {storage}")
|
208
222
|
self.store(type_=type_, out=t_out, storage=storage)
|