zea 0.0.8-py3-none-any.whl → 0.0.9-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- zea/__init__.py +3 -3
- zea/agent/masks.py +2 -2
- zea/agent/selection.py +3 -3
- zea/backend/__init__.py +1 -1
- zea/backend/tensorflow/dataloader.py +1 -1
- zea/beamform/beamformer.py +4 -2
- zea/beamform/pfield.py +2 -2
- zea/data/augmentations.py +1 -1
- zea/data/convert/__main__.py +93 -52
- zea/data/convert/camus.py +8 -2
- zea/data/convert/echonet.py +1 -1
- zea/data/convert/echonetlvh/__init__.py +1 -1
- zea/data/convert/verasonics.py +810 -772
- zea/data/data_format.py +0 -2
- zea/data/file.py +28 -0
- zea/data/preset_utils.py +1 -1
- zea/display.py +1 -1
- zea/doppler.py +5 -5
- zea/func/__init__.py +109 -0
- zea/{tensor_ops.py → func/tensor.py} +32 -8
- zea/func/ultrasound.py +500 -0
- zea/internal/_generate_keras_ops.py +5 -5
- zea/metrics.py +6 -5
- zea/models/diffusion.py +1 -1
- zea/models/echonetlvh.py +1 -1
- zea/models/gmm.py +1 -1
- zea/ops/__init__.py +188 -0
- zea/ops/base.py +442 -0
- zea/{keras_ops.py → ops/keras_ops.py} +2 -2
- zea/ops/pipeline.py +1472 -0
- zea/ops/tensor.py +356 -0
- zea/ops/ultrasound.py +890 -0
- zea/probes.py +2 -10
- zea/scan.py +17 -20
- zea/tools/fit_scan_cone.py +1 -1
- zea/tools/selection_tool.py +1 -1
- zea/tracking/lucas_kanade.py +1 -1
- zea/tracking/segmentation.py +1 -1
- {zea-0.0.8.dist-info → zea-0.0.9.dist-info}/METADATA +3 -1
- {zea-0.0.8.dist-info → zea-0.0.9.dist-info}/RECORD +43 -37
- zea/ops.py +0 -3534
- {zea-0.0.8.dist-info → zea-0.0.9.dist-info}/WHEEL +0 -0
- {zea-0.0.8.dist-info → zea-0.0.9.dist-info}/entry_points.txt +0 -0
- {zea-0.0.8.dist-info → zea-0.0.9.dist-info}/licenses/LICENSE +0 -0
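
The bulk of this release is a reorganization of the public modules: zea/ops.py is split into the zea/ops/ package (base, pipeline, tensor, ultrasound, keras_ops), zea/tensor_ops.py moves to zea/func/tensor.py, and a new zea/func/ultrasound.py is added. A minimal migration sketch for downstream imports, based only on the renames listed above (the full set of re-exported symbols is not visible in this diff):

    # zea 0.0.8
    # from zea.tensor_ops import translate
    # from zea import keras_ops, tensor_ops

    # zea 0.0.9
    from zea.func.tensor import translate  # tensor_ops.py -> func/tensor.py
    from zea.ops import keras_ops          # keras_ops.py -> ops/keras_ops.py
    from zea import func, ops              # new subpackages exposed at the top level
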
zea/__init__.py
CHANGED
@@ -7,7 +7,7 @@ from . import log
 
 # dynamically add __version__ attribute (see pyproject.toml)
 # __version__ = __import__("importlib.metadata").metadata.version(__package__)
-__version__ = "0.0.8"
+__version__ = "0.0.9"
 
 
 def _bootstrap_backend():

@@ -89,12 +89,12 @@ from . import (
     beamform,
     data,
     display,
+    func,
     io_lib,
-    keras_ops,
     metrics,
     models,
+    ops,
     simulator,
-    tensor_ops,
     utils,
     visualize,
 )
zea/agent/masks.py
CHANGED
@@ -9,8 +9,8 @@ from typing import List
 import keras
 from keras import ops
 
-from zea import tensor_ops
 from zea.agent.gumbel import hard_straight_through
+from zea.func.tensor import nonzero
 
 _DEFAULT_DTYPE = "bool"
 

@@ -56,7 +56,7 @@ def k_hot_to_indices(selected_lines, n_actions: int, fill_value=-1):
 
     # Find nonzero indices for each frame
     def get_nonzero(row):
-        return tensor_ops.nonzero(row > 0, size=n_actions, fill_value=fill_value)[0]
+        return nonzero(row > 0, size=n_actions, fill_value=fill_value)[0]
 
     indices = ops.vectorized_map(get_nonzero, selected_lines)
     return indices
zea/agent/selection.py
CHANGED
@@ -16,9 +16,9 @@ from typing import Callable
 import keras
 from keras import ops
 
-from zea import tensor_ops
 from zea.agent import masks
 from zea.backend.autograd import AutoGrad
+from zea.func import tensor
 from zea.internal.registry import action_selection_registry
 
 

@@ -462,7 +462,7 @@ class CovarianceSamplingLines(LinesActionModel):
         particles = ops.reshape(particles, shape)
 
         # [batch_size, rows * stack_n_cols, n_possible_actions, n_possible_actions]
-        cov_matrix = tensor_ops.batch_cov(particles)
+        cov_matrix = tensor.batch_cov(particles)
 
         # Sum over the row dimension [batch_size, n_possible_actions, n_possible_actions]
         cov_matrix = ops.sum(cov_matrix, axis=1)

@@ -477,7 +477,7 @@ class CovarianceSamplingLines(LinesActionModel):
         # Subsample the covariance matrix with random lines
         def subsample_with_mask(mask):
             """Subsample the covariance matrix with a single mask."""
-            subsampled_cov_matrix = tensor_ops.boolean_mask(
+            subsampled_cov_matrix = tensor.boolean_mask(
                 cov_matrix, mask, size=batch_size * self.n_actions**2
             )
             return ops.reshape(subsampled_cov_matrix, [batch_size, self.n_actions, self.n_actions])
zea/backend/__init__.py
CHANGED
@@ -131,7 +131,7 @@ class on_device:
     .. code-block:: python
 
         with zea.backend.on_device("gpu:3"):
-            pipeline = zea.Pipeline([zea.
+            pipeline = zea.Pipeline([zea.ops.Abs()])
             output = pipeline(data=keras.random.normal((10, 10)))  # output is on "cuda:3"
     """
 

zea/backend/tensorflow/dataloader.py
CHANGED

@@ -12,8 +12,8 @@ from keras.src.trainers.data_adapters import TFDatasetAdapter
 
 from zea.data.dataloader import H5Generator
 from zea.data.layers import Resizer
+from zea.func.tensor import translate
 from zea.internal.utils import find_methods_with_return_type
-from zea.tensor_ops import translate
 
 METHODS_THAT_RETURN_DATASET = find_methods_with_return_type(tf.data.Dataset, "DatasetV2")
 
zea/beamform/beamformer.py
CHANGED
@@ -5,7 +5,7 @@ import numpy as np
 from keras import ops
 
 from zea.beamform.lens_correction import calculate_lens_corrected_delays
-from zea.tensor_ops import vmap
+from zea.func.tensor import vmap
 
 
 def fnum_window_fn_rect(normalized_angle):

@@ -379,7 +379,7 @@ def complex_rotate(iq, theta):
 
     .. math::
 
-        x(t + \\Delta t) &= I'(t) \\cos(\\omega_c (t + \\Delta t))
+        x(t + \\Delta t) &= I'(t) \\cos(\\omega_c (t + \\Delta t))
             - Q'(t) \\sin(\\omega_c (t + \\Delta t))\\\\
         &= \\overbrace{(I'(t)\\cos(\\theta)
             - Q'(t)\\sin(\\theta) )}^{I_\\Delta(t)} \\cos(\\omega_c t)\\\\

@@ -452,6 +452,8 @@ def distance_Tx_generic(
             `(n_el,)`.
         probe_geometry (ops.Tensor): The positions of the transducer elements of shape
             `(n_el, 3)`.
+        focus_distance (float): The focus distance in meters.
+        polar_angle (float): The polar angle in radians.
         sound_speed (float): The speed of sound in m/s. Defaults to 1540.
 
     Returns:
zea/beamform/pfield.py
CHANGED
@@ -24,8 +24,8 @@ import numpy as np
 from keras import ops
 
 from zea import log
+from zea.func.tensor import sinc
 from zea.internal.cache import cache_output
-from zea.tensor_ops import sinc
 
 
 def _abs_sinc(x):

@@ -101,7 +101,7 @@ def compute_pfield(
     # array params
     probe_geometry = ops.convert_to_tensor(probe_geometry, dtype="float32")
 
-    pitch = probe_geometry[1, 0] - probe_geometry[0, 0]  # element pitch
+    pitch = ops.abs(probe_geometry[1, 0] - probe_geometry[0, 0])  # element pitch
 
     kerf = 0.1 * pitch  # for now this is hardcoded
     element_width = pitch - kerf
zea/data/augmentations.py
CHANGED
zea/data/convert/__main__.py
CHANGED
@@ -1,86 +1,127 @@
 import argparse
 
 
-def get_parser():
-    """
-
-
-
-
-        dataset (str): One of "echonet", "echonetlvh", "camus", "picmus", "verasonics".
-        src (str): Source folder path.
-        dst (str): Destination folder path.
-        split_path (str|None): Optional path to a split.yaml to copy dataset splits.
-        no_hyperthreading (bool): Disable hyperthreading for multiprocessing.
-        frames (list[str]): MATLAB frames spec (e.g., ["all"], integers, or ranges like "4-8").
-        no_rejection (bool): EchonetLVH flag to skip manual_rejections.txt filtering.
-        batch (str|None): EchonetLVH Batch directory to process (e.g., "Batch2").
-        convert_measurements (bool): EchonetLVH flag to convert only measurements CSV.
-        convert_images (bool): EchonetLVH flag to convert only image files.
-        max_files (int|None): EchonetLVH maximum number of files to process.
-        force (bool): EchonetLVH flag to force recomputation even if parameters exist.
-    """
-    parser = argparse.ArgumentParser(description="Convert raw data to a zea dataset.")
-    parser.add_argument(
-        "dataset",
-        choices=["echonet", "echonetlvh", "camus", "picmus", "verasonics"],
-        help="Raw dataset to convert",
-    )
-    parser.add_argument("src", type=str, help="Source folder path")
-    parser.add_argument("dst", type=str, help="Destination folder path")
-    parser.add_argument(
+def _add_parser_args_echonet(subparsers):
+    """Add Echonet specific arguments to the parser."""
+    echonet_parser = subparsers.add_parser("echonet", help="Convert Echonet dataset")
+    echonet_parser.add_argument("src", type=str, help="Source folder path")
+    echonet_parser.add_argument("dst", type=str, help="Destination folder path")
+    echonet_parser.add_argument(
         "--split_path",
         type=str,
         help="Path to the split.yaml file containing the dataset split if a split should be copied",
     )
-
+    echonet_parser.add_argument(
         "--no_hyperthreading",
         action="store_true",
         help="Disable hyperthreading for multiprocessing",
     )
-    # Dataset specific arguments:
 
-
-
-
-
-
-
-        "
+
+def _add_parser_args_camus(subparsers):
+    """Add CAMUS specific arguments to the parser."""
+    camus_parser = subparsers.add_parser("camus", help="Convert CAMUS dataset")
+    camus_parser.add_argument("src", type=str, help="Source folder path")
+    camus_parser.add_argument("dst", type=str, help="Destination folder path")
+    camus_parser.add_argument(
+        "--no_hyperthreading",
+        action="store_true",
+        help="Disable hyperthreading for multiprocessing",
     )
-
-
+
+
+def _add_parser_args_echonetlvh(subparsers):
+    """Add EchonetLVH specific arguments to the parser."""
+    echonetlvh_parser = subparsers.add_parser("echonetlvh", help="Convert EchonetLVH dataset")
+    echonetlvh_parser.add_argument("src", type=str, help="Source folder path")
+    echonetlvh_parser.add_argument("dst", type=str, help="Destination folder path")
+    echonetlvh_parser.add_argument(
         "--no_rejection",
         action="store_true",
-        help="
+        help="Do not reject sequences in manual_rejections.txt",
     )
-
-    parser.add_argument(
+    echonetlvh_parser.add_argument(
         "--batch",
         type=str,
        default=None,
-        help="
+        help="Specify which BatchX directory to process, e.g. --batch=Batch2",
     )
-
+    echonetlvh_parser.add_argument(
         "--convert_measurements",
         action="store_true",
-        help="
+        help="Only convert measurements CSV file",
     )
-
-        "--convert_images",
+    echonetlvh_parser.add_argument(
+        "--convert_images",
+        action="store_true",
+        help="Only convert image files",
     )
-
+    echonetlvh_parser.add_argument(
         "--max_files",
         type=int,
         default=None,
-        help="
+        help="Maximum number of files to process (for testing)",
     )
-
+    echonetlvh_parser.add_argument(
         "--force",
         action="store_true",
-        help="
+        help="Force recomputation even if parameters already exist",
     )
+    echonetlvh_parser.add_argument(
+        "--no_hyperthreading",
+        action="store_true",
+        help="Disable hyperthreading for multiprocessing",
+    )
+
+
+def _add_parser_args_picmus(subparsers):
+    """Add PICMUS specific arguments to the parser."""
+    picmus_parser = subparsers.add_parser("picmus", help="Convert PICMUS dataset")
+    picmus_parser.add_argument("src", type=str, help="Source folder path")
+    picmus_parser.add_argument("dst", type=str, help="Destination folder path")
+
+
+def _add_parser_args_verasonics(subparsers):
+    verasonics_parser = subparsers.add_parser(
+        "verasonics", help="Convert Verasonics data to zea dataset"
+    )
+    verasonics_parser.add_argument("src", type=str, help="Source folder path")
+    verasonics_parser.add_argument("dst", type=str, help="Destination folder path")
+    verasonics_parser.add_argument(
+        "--frames",
+        default=["all"],
+        type=str,
+        nargs="+",
+        help="The frames to add to the file. This can be a list of integers, a range "
+        "of integers (e.g. 4-8), or 'all'.",
+    )
+    verasonics_parser.add_argument(
+        "--allow_accumulate",
+        action="store_true",
+        help=(
+            "Sometimes, some transmits are already accumulated on the Verasonics system "
+            "(e.g. harmonic imaging through pulse inversion). In this case, the mode in the "
+            "Receive structure is set to 1 (accumulate). If this flag is set, such files "
+            "will be processed. Otherwise, an error is raised when such a mode is detected."
+        ),
+    )
+    verasonics_parser.add_argument(
+        "--device",
+        type=str,
+        default="cpu",
+        help="Device to use for conversion (e.g., 'cpu' or 'gpu:0').",
+    )
+
+
+def get_parser():
+    """Build and parse command-line arguments for converting raw datasets to a zea dataset."""
+    parser = argparse.ArgumentParser(description="Convert raw data to a zea dataset.")
+    subparsers = parser.add_subparsers(dest="dataset", required=True)
+    _add_parser_args_echonet(subparsers)
+    _add_parser_args_echonetlvh(subparsers)
+    _add_parser_args_camus(subparsers)
+    _add_parser_args_picmus(subparsers)
+    _add_parser_args_verasonics(subparsers)
     return parser
 
 
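
The converter CLI therefore moves from one flat argument list to per-dataset subcommands (invoked as, e.g., `python -m zea.data.convert verasonics SRC DST`), with dataset-specific flags living under their subcommand. A small sketch of the new interface, using placeholder paths and only the flags visible in the hunk above:

    from zea.data.convert.__main__ import get_parser

    parser = get_parser()
    args = parser.parse_args(
        ["verasonics", "/path/to/raw", "/path/to/zea", "--frames", "4-8", "--device", "gpu:0"]
    )
    print(args.dataset, args.frames, args.device)  # verasonics ['4-8'] gpu:0
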
zea/data/convert/camus.py
CHANGED
@@ -25,8 +25,8 @@ from tqdm import tqdm
 from zea import log
 from zea.data.convert.utils import unzip
 from zea.data.data_format import generate_zea_dataset
+from zea.func.tensor import translate
 from zea.internal.utils import find_first_nonzero_index
-from zea.tensor_ops import translate
 
 
 def transform_sc_image_to_polar(image_sc, output_size=None, fit_outline=True):

@@ -133,7 +133,13 @@ def sitk_load(filepath: str | Path) -> Tuple[np.ndarray, Dict[str, Any]]:
         - Collection of metadata.
     """
     # Load image and save info
-
+    try:
+        import SimpleITK as sitk
+    except ImportError as exc:
+        raise ImportError(
+            "SimpleITK is not installed. "
+            "Please install it with `pip install SimpleITK` to convert CAMUS dataset."
+        ) from exc
 
     image = sitk.ReadImage(str(filepath))
 
zea/data/convert/echonet.py
CHANGED
@@ -24,7 +24,7 @@ from tqdm import tqdm
 from zea import log
 from zea.data import generate_zea_dataset
 from zea.data.convert.utils import load_avi, unzip
-from zea.tensor_ops import translate
+from zea.func.tensor import translate
 
 
 def segment(tensor, number_erasing=0, min_clip=0):

zea/data/convert/echonetlvh/__init__.py
CHANGED

@@ -30,7 +30,7 @@ from zea.data.convert.echonet import H5Processor
 from zea.data.convert.echonetlvh.precompute_crop import precompute_cone_parameters
 from zea.data.convert.utils import load_avi, unzip
 from zea.display import cartesian_to_polar_matrix
-from zea.tensor_ops import translate
+from zea.func.tensor import translate
 
 
 def overwrite_splits(source_dir):
|