ophyd-async 0.5.2__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +10 -1
- ophyd_async/__main__.py +12 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +15 -7
- ophyd_async/core/_detector.py +133 -87
- ophyd_async/core/_device.py +19 -16
- ophyd_async/core/_device_save_loader.py +30 -19
- ophyd_async/core/_flyer.py +6 -19
- ophyd_async/core/_hdf_dataset.py +8 -9
- ophyd_async/core/_log.py +3 -1
- ophyd_async/core/_mock_signal_backend.py +11 -9
- ophyd_async/core/_mock_signal_utils.py +8 -5
- ophyd_async/core/_protocol.py +7 -7
- ophyd_async/core/_providers.py +11 -11
- ophyd_async/core/_readable.py +30 -22
- ophyd_async/core/_signal.py +52 -51
- ophyd_async/core/_signal_backend.py +20 -7
- ophyd_async/core/_soft_signal_backend.py +62 -32
- ophyd_async/core/_status.py +7 -9
- ophyd_async/core/_table.py +146 -0
- ophyd_async/core/_utils.py +24 -28
- ophyd_async/epics/adaravis/_aravis_controller.py +20 -19
- ophyd_async/epics/adaravis/_aravis_io.py +2 -1
- ophyd_async/epics/adcore/_core_io.py +2 -0
- ophyd_async/epics/adcore/_core_logic.py +4 -5
- ophyd_async/epics/adcore/_hdf_writer.py +19 -8
- ophyd_async/epics/adcore/_single_trigger.py +1 -1
- ophyd_async/epics/adcore/_utils.py +5 -6
- ophyd_async/epics/adkinetix/_kinetix_controller.py +20 -15
- ophyd_async/epics/adpilatus/_pilatus_controller.py +22 -18
- ophyd_async/epics/adsimdetector/_sim.py +7 -6
- ophyd_async/epics/adsimdetector/_sim_controller.py +22 -17
- ophyd_async/epics/advimba/_vimba_controller.py +22 -17
- ophyd_async/epics/demo/_mover.py +4 -5
- ophyd_async/epics/demo/sensor.db +0 -1
- ophyd_async/epics/eiger/_eiger.py +1 -1
- ophyd_async/epics/eiger/_eiger_controller.py +18 -18
- ophyd_async/epics/eiger/_odin_io.py +6 -5
- ophyd_async/epics/motor.py +8 -10
- ophyd_async/epics/pvi/_pvi.py +30 -33
- ophyd_async/epics/signal/_aioca.py +55 -25
- ophyd_async/epics/signal/_common.py +3 -10
- ophyd_async/epics/signal/_epics_transport.py +11 -8
- ophyd_async/epics/signal/_p4p.py +79 -30
- ophyd_async/epics/signal/_signal.py +6 -8
- ophyd_async/fastcs/panda/__init__.py +0 -6
- ophyd_async/fastcs/panda/_block.py +7 -0
- ophyd_async/fastcs/panda/_control.py +16 -17
- ophyd_async/fastcs/panda/_hdf_panda.py +11 -4
- ophyd_async/fastcs/panda/_table.py +77 -138
- ophyd_async/fastcs/panda/_trigger.py +4 -5
- ophyd_async/fastcs/panda/_utils.py +3 -2
- ophyd_async/fastcs/panda/_writer.py +30 -15
- ophyd_async/plan_stubs/_fly.py +15 -17
- ophyd_async/plan_stubs/_nd_attributes.py +12 -6
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector.py +3 -3
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +27 -21
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_writer.py +9 -6
- ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +21 -23
- ophyd_async/sim/demo/_sim_motor.py +2 -1
- ophyd_async/tango/__init__.py +45 -0
- ophyd_async/tango/base_devices/__init__.py +4 -0
- ophyd_async/tango/base_devices/_base_device.py +225 -0
- ophyd_async/tango/base_devices/_tango_readable.py +33 -0
- ophyd_async/tango/demo/__init__.py +12 -0
- ophyd_async/tango/demo/_counter.py +37 -0
- ophyd_async/tango/demo/_detector.py +42 -0
- ophyd_async/tango/demo/_mover.py +77 -0
- ophyd_async/tango/demo/_tango/__init__.py +3 -0
- ophyd_async/tango/demo/_tango/_servers.py +108 -0
- ophyd_async/tango/signal/__init__.py +39 -0
- ophyd_async/tango/signal/_signal.py +223 -0
- ophyd_async/tango/signal/_tango_transport.py +764 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/METADATA +50 -45
- ophyd_async-0.7.0.dist-info/RECORD +108 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/WHEEL +1 -1
- ophyd_async-0.5.2.dist-info/RECORD +0 -95
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/top_level.txt +0 -0

ophyd_async/core/_device_save_loader.py
CHANGED

@@ -1,5 +1,7 @@
+from collections.abc import Callable, Generator, Sequence
 from enum import Enum
-from …
+from pathlib import Path
+from typing import Any

 import numpy as np
 import numpy.typing as npt
@@ -7,6 +9,7 @@ import yaml
 from bluesky.plan_stubs import abs_set, wait
 from bluesky.protocols import Location
 from bluesky.utils import Msg
+from pydantic import BaseModel

 from ._device import Device
 from ._signal import SignalRW
@@ -18,16 +21,22 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
     )


+def pydantic_model_abstraction_representer(
+    dumper: yaml.Dumper, model: BaseModel
+) -> yaml.Node:
+    return dumper.represent_data(model.model_dump(mode="python"))
+
+
 class OphydDumper(yaml.Dumper):
     def represent_data(self, data: Any) -> Any:
         if isinstance(data, Enum):
             return self.represent_data(data.value)
-        return super(…
+        return super().represent_data(data)


 def get_signal_values(
-    signals: …
-) -> Generator[Msg, Sequence[Location[Any]], …
+    signals: dict[str, SignalRW[Any]], ignore: list[str] | None = None
+) -> Generator[Msg, Sequence[Location[Any]], dict[str, Any]]:
     """Get signal values in bulk.

     Used as part of saving the signals of a device to a yaml file.
@@ -59,13 +68,10 @@ def get_signal_values(
     }
     selected_values = yield Msg("locate", *selected_signals.values())

-    # TODO: investigate wrong type hints
-    if isinstance(selected_values, dict):
-        selected_values = [selected_values]  # type: ignore
-
     assert selected_values is not None, "No signalRW's were able to be located"
     named_values = {
-        key: value["setpoint"] …
+        key: value["setpoint"]
+        for key, value in zip(selected_signals, selected_values, strict=False)
     }
     # Ignored values place in with value None so we know which ones were ignored
     named_values.update({key: None for key in ignore})
@@ -73,8 +79,8 @@


 def walk_rw_signals(
-    device: Device, path_prefix: …
-) -> …
+    device: Device, path_prefix: str | None = ""
+) -> dict[str, SignalRW[Any]]:
     """Retrieve all SignalRWs from a device.

     Stores retrieved signals with their dotted attribute paths in a dictionary. Used as
@@ -104,7 +110,7 @@ def walk_rw_signals(
     if not path_prefix:
         path_prefix = ""

-    signals: …
+    signals: dict[str, SignalRW[Any]] = {}
     for attr_name, attr in device.children():
         dot_path = f"{path_prefix}{attr_name}"
         if type(attr) is SignalRW:
@@ -114,7 +120,7 @@ def walk_rw_signals(
     return signals


-def save_to_yaml(phases: Sequence[Dict[str, Any]], save_path: str) -> None:
+def save_to_yaml(phases: Sequence[dict[str, Any]], save_path: str | Path) -> None:
     """Plan which serialises a phase or set of phases of SignalRWs to a yaml file.

     Parameters
@@ -134,12 +140,17 @@ def save_to_yaml(phases: Sequence[Dict[str, Any]], save_path: str) -> None:
     """

     yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)
+    yaml.add_multi_representer(
+        BaseModel,
+        pydantic_model_abstraction_representer,
+        Dumper=yaml.Dumper,
+    )

     with open(save_path, "w") as file:
         yaml.dump(phases, file, Dumper=OphydDumper, default_flow_style=False)


-def load_from_yaml(save_path: str) -> Sequence[Dict[str, Any]]:
+def load_from_yaml(save_path: str) -> Sequence[dict[str, Any]]:
     """Plan that returns a list of dicts with saved signal values from a yaml file.

     Parameters
@@ -152,12 +163,12 @@ def load_from_yaml(save_path: str) -> Sequence[Dict[str, Any]]:
     :func:`ophyd_async.core.save_to_yaml`
     :func:`ophyd_async.core.set_signal_values`
     """
-    with open(save_path…
+    with open(save_path) as file:
         return yaml.full_load(file)


 def set_signal_values(
-    signals: …
+    signals: dict[str, SignalRW[Any]], values: Sequence[dict[str, Any]]
 ) -> Generator[Msg, None, None]:
     """Maps signals from a yaml file into device signals.

@@ -217,7 +228,7 @@ def load_device(device: Device, path: str):
     yield from set_signal_values(signals_to_set, values)


-def all_at_once(values: Dict[str, Any]) -> Sequence[Dict[str, Any]]:
+def all_at_once(values: dict[str, Any]) -> Sequence[dict[str, Any]]:
     """Sort all the values into a single phase so they are set all at once"""
     return [values]

@@ -225,8 +236,8 @@ def all_at_once(values: Dict[str, Any]) -> Sequence[Dict[str, Any]]:
 def save_device(
     device: Device,
     path: str,
-    sorter: Callable[[…
-    ignore: …
+    sorter: Callable[[dict[str, Any]], Sequence[dict[str, Any]]] = all_at_once,
+    ignore: list[str] | None = None,
 ):
     """Plan that saves the state of all PV's on a device using a sorter.

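
The new pydantic multi-representer means device snapshots whose values are pydantic BaseModel instances can now round-trip through YAML. A minimal usage sketch, assuming save_device and load_device are importable from ophyd_async.core and `my_device` stands in for any connected Device:

# Sketch only: `my_device` is a placeholder, not part of the package.
from bluesky.run_engine import RunEngine

from ophyd_async.core import load_device, save_device

RE = RunEngine()

# Save every SignalRW setpoint; pydantic BaseModel values are now dumped
# via model_dump(mode="python") by the new representer
RE(save_device(my_device, "my_device.yaml"))

# Later, restore the saved phases onto the same device
RE(load_device(my_device, "my_device.yaml"))
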
ophyd_async/core/_flyer.py
CHANGED

@@ -1,15 +1,14 @@
 from abc import ABC, abstractmethod
-from typing import …
+from typing import Generic

-from bluesky.protocols import …
+from bluesky.protocols import Flyable, Preparable, Stageable

 from ._device import Device
-from ._signal import SignalR
 from ._status import AsyncStatus
-from ._utils import T …
+from ._utils import T


-class …
+class FlyerController(ABC, Generic[T]):
     @abstractmethod
     async def prepare(self, value: T):
         """Move to the start of the flyscan"""
@@ -36,16 +35,14 @@ class StandardFlyer(
 ):
     def __init__(
         self,
-        trigger_logic: …
-        configuration_signals: Sequence[SignalR] = (),
+        trigger_logic: FlyerController[T],
         name: str = "",
     ):
         self._trigger_logic = trigger_logic
-        self._configuration_signals = tuple(configuration_signals)
         super().__init__(name=name)

     @property
-    def trigger_logic(self) -> …
+    def trigger_logic(self) -> FlyerController[T]:
         return self._trigger_logic

     @AsyncStatus.wrap
@@ -71,13 +68,3 @@ class StandardFlyer(
     @AsyncStatus.wrap
     async def complete(self) -> None:
         await self._trigger_logic.complete()
-
-    async def describe_configuration(self) -> Dict[str, DataKey]:
-        return await merge_gathered_dicts(
-            [sig.describe() for sig in self._configuration_signals]
-        )
-
-    async def read_configuration(self) -> Dict[str, Reading]:
-        return await merge_gathered_dicts(
-            [sig.read() for sig in self._configuration_signals]
-        )
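
StandardFlyer drops configuration_signals and its logic class is now FlyerController. A sketch of the 0.7.0 shape; only prepare and complete are visible in this diff, so the full abstract method set below is an assumption:

# Sketch: a do-nothing controller. The method set (prepare, kickoff,
# complete, stop) is assumed for the renamed FlyerController base class.
from ophyd_async.core import FlyerController, StandardFlyer


class NullController(FlyerController[int]):
    async def prepare(self, value: int):
        pass

    async def kickoff(self):
        pass

    async def complete(self):
        pass

    async def stop(self):
        pass


# configuration_signals is gone; only the controller and an optional name remain
flyer = StandardFlyer(NullController(), name="flyer")
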
ophyd_async/core/_hdf_dataset.py
CHANGED

@@ -1,12 +1,13 @@
+from collections.abc import Iterator, Sequence
 from dataclasses import dataclass, field
 from pathlib import Path
-from typing import Iterator, List, Sequence
 from urllib.parse import urlunparse

 from event_model import (
     ComposeStreamResource,
     ComposeStreamResourceBundle,
     StreamDatum,
+    StreamRange,
     StreamResource,
 )

@@ -19,6 +20,8 @@ class HDFDataset:
     dtype_numpy: str = ""
     multiplier: int = 1
     swmr: bool = False
+    # Represents explicit chunk size written to disk.
+    chunk_shape: tuple[int, ...] = ()


 SLICE_NAME = "AD_HDF5_SWMR_SLICE"
@@ -33,7 +36,7 @@ class HDFFile:
     def __init__(
         self,
         full_file_name: Path,
-        datasets: …
+        datasets: list[HDFDataset],
         hostname: str = "localhost",
     ) -> None:
         self._last_emitted = 0
@@ -56,7 +59,7 @@ class HDFFile:
             )
         )

-        self._bundles: …
+        self._bundles: list[ComposeStreamResourceBundle] = [
             bundler_composer(
                 mimetype="application/x-hdf5",
                 uri=uri,
@@ -65,6 +68,7 @@ class HDFFile:
                     "dataset": ds.dataset,
                     "swmr": ds.swmr,
                     "multiplier": ds.multiplier,
+                    "chunk_shape": ds.chunk_shape,
                 },
                 uid=None,
                 validate=True,
@@ -79,15 +83,10 @@ class HDFFile:
     def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
         # Indices are relative to resource
         if indices_written > self._last_emitted:
-            indices = {
+            indices: StreamRange = {
                 "start": self._last_emitted,
                 "stop": indices_written,
             }
             self._last_emitted = indices_written
             for bundle in self._bundles:
                 yield bundle.compose_stream_datum(indices)
-        return None
-
-    def close(self) -> None:
-        for bundle in self._bundles:
-            bundle.close()
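
HDFDataset now records an explicit chunk_shape, which HDFFile forwards in the stream-resource parameters so consumers know the on-disk chunking. A sketch of constructing a dataset description; data_key and shape are assumed field names, while dataset, dtype_numpy and chunk_shape appear in the diff above:

from ophyd_async.core import HDFDataset

ds = HDFDataset(
    data_key="det",                    # assumed field name
    dataset="/entry/data/data",
    shape=(10, 1024, 1024),            # assumed field name
    dtype_numpy="<u2",
    chunk_shape=(1, 1024, 1024),       # explicit chunk size written to disk
)
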
ophyd_async/core/_log.py
CHANGED

@@ -29,7 +29,7 @@ class ColoredFormatterWithDeviceName(colorlog.ColoredFormatter):
     def format(self, record):
         message = super().format(record)
         if hasattr(record, "ophyd_async_device_name"):
-            message = f"[{record.ophyd_async_device_name}]{message}"
+            message = f"[{record.ophyd_async_device_name}]{message}"  # type: ignore
         return message


@@ -39,6 +39,8 @@ def _validate_level(level) -> int:
         levelno = level
     elif isinstance(level, str):
         levelno = logging.getLevelName(level)
+    else:
+        raise TypeError(f"Level {level!r} is not an int or str")

     if isinstance(levelno, int):
         return levelno

ophyd_async/core/_mock_signal_backend.py
CHANGED

@@ -1,7 +1,7 @@
 import asyncio
+from collections.abc import Callable
 from functools import cached_property
-from …
-from unittest.mock import Mock
+from unittest.mock import AsyncMock

 from bluesky.protocols import Descriptor, Reading

@@ -11,10 +11,12 @@ from ._utils import DEFAULT_TIMEOUT, ReadingValueCallback, T


 class MockSignalBackend(SignalBackend[T]):
+    """Signal backend for testing, created by ``Device.connect(mock=True)``."""
+
     def __init__(
         self,
-        datatype: …
-        initial_backend: …
+        datatype: type[T] | None = None,
+        initial_backend: SignalBackend[T] | None = None,
     ) -> None:
         if isinstance(initial_backend, MockSignalBackend):
             raise ValueError("Cannot make a MockSignalBackend for a MockSignalBackends")
@@ -46,8 +48,8 @@ class MockSignalBackend(SignalBackend[T]):
         pass

     @cached_property
-    def put_mock(self) -> …
-        return …
+    def put_mock(self) -> AsyncMock:
+        return AsyncMock(name="put", spec=Callable)

     @cached_property
     def put_proceeds(self) -> asyncio.Event:
@@ -55,8 +57,8 @@ class MockSignalBackend(SignalBackend[T]):
         put_proceeds.set()
         return put_proceeds

-    async def put(self, value: …
-        self.put_mock(value, wait=wait, timeout=timeout)
+    async def put(self, value: T | None, wait=True, timeout=None):
+        await self.put_mock(value, wait=wait, timeout=timeout)
         await self.soft_backend.put(value, wait=wait, timeout=timeout)

         if wait:
@@ -78,5 +80,5 @@ class MockSignalBackend(SignalBackend[T]):
     async def get_datakey(self, source: str) -> Descriptor:
         return await self.soft_backend.get_datakey(source)

-    def set_callback(self, callback: …
+    def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
         self.soft_backend.set_callback(callback)
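
Because put_mock is now an AsyncMock that is awaited, tests can use the awaited-call assertion helpers. A sketch, assuming soft_signal_rw and get_mock_put are importable from ophyd_async.core:

import asyncio

from ophyd_async.core import get_mock_put, soft_signal_rw


async def main():
    sig = soft_signal_rw(int, name="sig")
    await sig.connect(mock=True)

    await sig.set(5)

    # put_mock is an AsyncMock in 0.7.0, so awaited-call assertions now apply
    get_mock_put(sig).assert_awaited_once()
    print(get_mock_put(sig).call_args)  # e.g. call(5, wait=True, timeout=None)


asyncio.run(main())
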
ophyd_async/core/_mock_signal_utils.py
CHANGED

@@ -1,6 +1,7 @@
+from collections.abc import Awaitable, Callable, Iterable
 from contextlib import asynccontextmanager, contextmanager
-from typing import Any …
-from unittest.mock import …
+from typing import Any
+from unittest.mock import AsyncMock

 from ._mock_signal_backend import MockSignalBackend
 from ._signal import Signal
@@ -41,7 +42,7 @@ async def mock_puts_blocked(*signals: Signal):
         set_mock_put_proceeds(signal, True)


-def get_mock_put(signal: Signal) -> …
+def get_mock_put(signal: Signal) -> AsyncMock:
     """Get the mock associated with the put call on the signal."""
     return _get_mock_signal_backend(signal).put_mock

@@ -136,12 +137,14 @@ def set_mock_values(


 @contextmanager
-def _unset_side_effect_cm(put_mock: …
+def _unset_side_effect_cm(put_mock: AsyncMock):
     yield
     put_mock.side_effect = None


-def callback_on_mock_put(…
+def callback_on_mock_put(
+    signal: Signal[T], callback: Callable[[T], None] | Callable[[T], Awaitable[None]]
+):
     """For setting a callback when a backend is put to.

     Can either be used in a context, with the callback being
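
callback_on_mock_put now accepts an async callable as well as a plain one. A sketch of the context-manager usage with an async callback, assuming these names are importable from ophyd_async.core:

import asyncio

from ophyd_async.core import callback_on_mock_put, soft_signal_rw


async def main():
    sig = soft_signal_rw(float, name="temp")
    await sig.connect(mock=True)

    async def on_put(value, *args, **kwargs):
        print(f"mock put received {value}")

    # An async callable is now accepted as well as a plain function; used as a
    # context manager, the side effect is removed again on exit
    with callback_on_mock_put(sig, on_put):
        await sig.set(20.5)


asyncio.run(main())
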
ophyd_async/core/_protocol.py
CHANGED

@@ -4,14 +4,14 @@ from abc import abstractmethod
 from typing import (
     TYPE_CHECKING,
     Any,
-    Dict,
     Generic,
     Protocol,
     TypeVar,
     runtime_checkable,
 )

-from bluesky.protocols import …
+from bluesky.protocols import HasName, Reading
+from event_model import DataKey

 if TYPE_CHECKING:
     from ._status import AsyncStatus
@@ -20,7 +20,7 @@ if TYPE_CHECKING:
 @runtime_checkable
 class AsyncReadable(HasName, Protocol):
     @abstractmethod
-    async def read(self) -> …
+    async def read(self) -> dict[str, Reading]:
         """Return an OrderedDict mapping string field name(s) to dictionaries
         of values and timestamps and optional per-point metadata.

@@ -36,7 +36,7 @@ class AsyncReadable(HasName, Protocol):
         ...

     @abstractmethod
-    async def describe(self) -> …
+    async def describe(self) -> dict[str, DataKey]:
         """Return an OrderedDict with exactly the same keys as the ``read``
         method, here mapped to per-scan metadata about each field.

@@ -57,16 +57,16 @@ class AsyncReadable(HasName, Protocol):


 @runtime_checkable
-class AsyncConfigurable(Protocol):
+class AsyncConfigurable(HasName, Protocol):
     @abstractmethod
-    async def read_configuration(self) -> …
+    async def read_configuration(self) -> dict[str, Reading]:
         """Same API as ``read`` but for slow-changing fields related to configuration.
         e.g., exposure time. These will typically be read only once per run.
         """
         ...

     @abstractmethod
-    async def describe_configuration(self) -> …
+    async def describe_configuration(self) -> dict[str, DataKey]:
         """Same API as ``describe``, but corresponding to the keys in
         ``read_configuration``.
         """
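
AsyncConfigurable now also extends HasName, so a structural implementation must expose a name alongside the configuration methods. A hypothetical minimal class that satisfies the runtime-checkable protocol, assuming AsyncConfigurable is importable from ophyd_async.core:

from bluesky.protocols import Reading
from event_model import DataKey

from ophyd_async.core import AsyncConfigurable


class BareConfigurable:
    """Hypothetical object satisfying the 0.7.0 AsyncConfigurable protocol."""

    def __init__(self, name: str) -> None:
        self._name = name

    @property
    def name(self) -> str:
        return self._name

    async def read_configuration(self) -> dict[str, Reading]:
        return {}

    async def describe_configuration(self) -> dict[str, DataKey]:
        return {}


assert isinstance(BareConfigurable("cfg"), AsyncConfigurable)
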
ophyd_async/core/_providers.py
CHANGED

@@ -5,7 +5,7 @@ from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import date
 from pathlib import Path
-from typing import …
+from typing import Protocol


 @dataclass
@@ -26,13 +26,13 @@ class PathInfo:

 class FilenameProvider(Protocol):
     @abstractmethod
-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> str:
         """Get a filename to use for output data, w/o extension"""


 class PathProvider(Protocol):
     @abstractmethod
-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> PathInfo:
         """Get the current directory to write files into"""


@@ -40,7 +40,7 @@ class StaticFilenameProvider(FilenameProvider):
     def __init__(self, filename: str):
         self._static_filename = filename

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> str:
         return self._static_filename


@@ -48,12 +48,12 @@ class UUIDFilenameProvider(FilenameProvider):
     def __init__(
         self,
         uuid_call_func: Callable = uuid.uuid4,
-        uuid_call_args: …
+        uuid_call_args: list | None = None,
     ):
         self._uuid_call_func = uuid_call_func
         self._uuid_call_args = uuid_call_args or []

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> str:
         if (
             self._uuid_call_func in [uuid.uuid3, uuid.uuid5]
             and len(self._uuid_call_args) < 2
@@ -82,7 +82,7 @@ class AutoIncrementFilenameProvider(FilenameProvider):
         self._increment = increment
         self._inc_delimeter = inc_delimeter

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> str:
         if len(str(self._current_value)) > self._max_digits:
             raise ValueError(
                 f"Auto incrementing filename counter \
@@ -108,7 +108,7 @@ class StaticPathProvider(PathProvider):
         self._directory_path = directory_path
         self._create_dir_depth = create_dir_depth

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> PathInfo:
         filename = self._filename_provider(device_name)

         return PathInfo(
@@ -129,7 +129,7 @@ class AutoIncrementingPathProvider(PathProvider):
         num_calls_per_inc: int = 1,
         increment: int = 1,
         inc_delimeter: str = "_",
-        base_name: str = None,
+        base_name: str | None = None,
     ) -> None:
         self._filename_provider = filename_provider
         self._base_directory_path = base_directory_path
@@ -143,7 +143,7 @@ class AutoIncrementingPathProvider(PathProvider):
         self._increment = increment
         self._inc_delimeter = inc_delimeter

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> PathInfo:
         filename = self._filename_provider(device_name)

         padded_counter = f"{self._current_value:0{self._max_digits}}"
@@ -181,7 +181,7 @@ class YMDPathProvider(PathProvider):
         self._create_dir_depth = create_dir_depth
         self._device_name_as_base_dir = device_name_as_base_dir

-    def __call__(self, device_name: …
+    def __call__(self, device_name: str | None = None) -> PathInfo:
         sep = os.path.sep
         current_date = date.today().strftime(f"%Y{sep}%m{sep}%d")
         if device_name is None:
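
Every provider __call__ now takes an optional device_name. A sketch with the static providers; the constructor arguments and PathInfo attribute names are assumed from the 0.5.2 API, while the optional device_name comes from the diff above:

from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider

fp = StaticFilenameProvider("scan_0001")
pp = StaticPathProvider(fp, Path("/tmp/data"))

info = pp()                    # device_name can now be omitted entirely
info = pp(device_name="det1")  # ...or passed explicitly
print(info.directory_path, info.filename)
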
ophyd_async/core/_readable.py
CHANGED

@@ -1,8 +1,9 @@
 import warnings
+from collections.abc import Callable, Generator, Sequence
 from contextlib import contextmanager
-from typing import Callable, Dict, Generator, Optional, Sequence, Tuple, Type, Union

-from bluesky.protocols import …
+from bluesky.protocols import HasHints, Hints, Reading
+from event_model import DataKey

 from ._device import Device, DeviceVector
 from ._protocol import AsyncConfigurable, AsyncReadable, AsyncStageable
@@ -10,10 +11,12 @@ from ._signal import SignalR
 from ._status import AsyncStatus
 from ._utils import merge_gathered_dicts

-ReadableChild = …
-ReadableChildWrapper = …
-    Callable[[ReadableChild], ReadableChild] …
-]
+ReadableChild = AsyncReadable | AsyncConfigurable | AsyncStageable | HasHints
+ReadableChildWrapper = (
+    Callable[[ReadableChild], ReadableChild]
+    | type["ConfigSignal"]
+    | type["HintedSignal"]
+)


 class StandardReadable(
@@ -28,10 +31,10 @@ class StandardReadable(

     # These must be immutable types to avoid accidental sharing between
     # different instances of the class
-    _readables: …
-    _configurables: …
-    _stageables: …
-    _has_hints: …
+    _readables: tuple[AsyncReadable, ...] = ()
+    _configurables: tuple[AsyncConfigurable, ...] = ()
+    _stageables: tuple[AsyncStageable, ...] = ()
+    _has_hints: tuple[HasHints, ...] = ()

     def set_readable_signals(
         self,
@@ -53,7 +56,8 @@ class StandardReadable(
             DeprecationWarning(
                 "Migrate to `add_children_as_readables` context manager or "
                 "`add_readables` method"
-            )
+            ),
+            stacklevel=2,
         )
         self.add_readables(read, wrapper=HintedSignal)
         self.add_readables(config, wrapper=ConfigSignal)
@@ -69,20 +73,20 @@ class StandardReadable(
         for sig in self._stageables:
             await sig.unstage().task

-    async def describe_configuration(self) -> …
+    async def describe_configuration(self) -> dict[str, DataKey]:
         return await merge_gathered_dicts(
             [sig.describe_configuration() for sig in self._configurables]
         )

-    async def read_configuration(self) -> …
+    async def read_configuration(self) -> dict[str, Reading]:
         return await merge_gathered_dicts(
             [sig.read_configuration() for sig in self._configurables]
         )

-    async def describe(self) -> …
+    async def describe(self) -> dict[str, DataKey]:
         return await merge_gathered_dicts([sig.describe() for sig in self._readables])

-    async def read(self) -> …
+    async def read(self) -> dict[str, Reading]:
         return await merge_gathered_dicts([sig.read() for sig in self._readables])

     @property
@@ -123,7 +127,7 @@ class StandardReadable(
     @contextmanager
     def add_children_as_readables(
         self,
-        wrapper: …
+        wrapper: ReadableChildWrapper | None = None,
     ) -> Generator[None, None, None]:
         """Context manager to wrap adding Devices

@@ -167,8 +171,8 @@ class StandardReadable(

     def add_readables(
         self,
-        devices: Sequence[…
-        wrapper: …
+        devices: Sequence[ReadableChild],
+        wrapper: ReadableChildWrapper | None = None,
     ) -> None:
         """Add the given devices to the lists of known Devices

@@ -216,12 +220,16 @@ class ConfigSignal(AsyncConfigurable):
         assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
         self.signal = signal

-    async def read_configuration(self) -> …
+    async def read_configuration(self) -> dict[str, Reading]:
         return await self.signal.read()

-    async def describe_configuration(self) -> …
+    async def describe_configuration(self) -> dict[str, DataKey]:
         return await self.signal.describe()

+    @property
+    def name(self) -> str:
+        return self.signal.name
+

 class HintedSignal(HasHints, AsyncReadable):
     def __init__(self, signal: ReadableChild, allow_cache: bool = True) -> None:
@@ -232,10 +240,10 @@ class HintedSignal(HasHints, AsyncReadable):
         self.stage = signal.stage
         self.unstage = signal.unstage

-    async def read(self) -> …
+    async def read(self) -> dict[str, Reading]:
         return await self.signal.read(cached=self.cached)

-    async def describe(self) -> …
+    async def describe(self) -> dict[str, DataKey]:
         return await self.signal.describe()

     @property