ophyd-async 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff compares the contents of publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only and reflects the changes between those two published versions.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +52 -19
- ophyd_async/core/_providers.py +38 -5
- ophyd_async/core/async_status.py +86 -40
- ophyd_async/core/detector.py +214 -72
- ophyd_async/core/device.py +91 -50
- ophyd_async/core/device_save_loader.py +96 -23
- ophyd_async/core/flyer.py +32 -246
- ophyd_async/core/mock_signal_backend.py +82 -0
- ophyd_async/core/mock_signal_utils.py +145 -0
- ophyd_async/core/signal.py +225 -58
- ophyd_async/core/signal_backend.py +8 -5
- ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py} +51 -49
- ophyd_async/core/standard_readable.py +212 -23
- ophyd_async/core/utils.py +123 -30
- ophyd_async/epics/_backend/_aioca.py +42 -44
- ophyd_async/epics/_backend/_p4p.py +96 -52
- ophyd_async/epics/_backend/common.py +25 -0
- ophyd_async/epics/areadetector/__init__.py +8 -4
- ophyd_async/epics/areadetector/aravis.py +63 -0
- ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
- ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
- ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +37 -25
- ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +8 -12
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
- ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +8 -5
- ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
- ophyd_async/epics/areadetector/kinetix.py +46 -0
- ophyd_async/epics/areadetector/pilatus.py +45 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
- ophyd_async/epics/areadetector/utils.py +2 -12
- ophyd_async/epics/areadetector/vimba.py +43 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +21 -7
- ophyd_async/epics/areadetector/writers/hdf_writer.py +32 -17
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +19 -18
- ophyd_async/epics/areadetector/writers/nd_plugin.py +15 -7
- ophyd_async/epics/demo/__init__.py +75 -49
- ophyd_async/epics/motion/motor.py +67 -53
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/__init__.py +8 -3
- ophyd_async/epics/signal/signal.py +26 -9
- ophyd_async/log.py +130 -0
- ophyd_async/panda/__init__.py +21 -5
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/_panda_controller.py +37 -0
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/_utils.py +15 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/plan_stubs/__init__.py +13 -0
- ophyd_async/plan_stubs/ensure_connected.py +22 -0
- ophyd_async/plan_stubs/fly.py +149 -0
- ophyd_async/protocols.py +126 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +103 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +31 -70
- ophyd_async-0.3.0.dist-info/RECORD +86 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
- ophyd_async/epics/signal/pvi_get.py +0 -22
- ophyd_async/panda/panda.py +0 -294
- ophyd_async-0.2.0.dist-info/RECORD +0 -53
- /ophyd_async/panda/{table.py → _table.py} +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py}
RENAMED
@@ -1,18 +1,17 @@
 from __future__ import annotations
 
-import asyncio
 import inspect
-import re
 import time
 from collections import abc
 from dataclasses import dataclass
 from enum import Enum
-from typing import
+from typing import Dict, Generic, Optional, Type, Union, cast, get_origin
 
-
+import numpy as np
+from bluesky.protocols import DataKey, Dtype, Reading
 
 from .signal_backend import SignalBackend
-from .utils import ReadingValueCallback, T, get_dtype
+from .utils import DEFAULT_TIMEOUT, ReadingValueCallback, T, get_dtype
 
 primitive_dtypes: Dict[type, Dtype] = {
     str: "string",
@@ -22,7 +21,7 @@ primitive_dtypes: Dict[type, Dtype] = {
 }
 
 
-class
+class SoftConverter(Generic[T]):
     def value(self, value: T) -> T:
         return value
 
@@ -36,12 +35,17 @@ class SimConverter(Generic[T]):
             alarm_severity=-1 if severity > 2 else severity,
         )
 
-    def
+    def get_datakey(self, source: str, value) -> DataKey:
+        dtype = type(value)
+        if np.issubdtype(dtype, np.integer):
+            dtype = int
+        elif np.issubdtype(dtype, np.floating):
+            dtype = float
         assert (
-
+            dtype in primitive_dtypes
         ), f"invalid converter for value of type {type(value)}"
-
-        return
+        dtype_name = primitive_dtypes[dtype]
+        return {"source": source, "dtype": dtype_name, "shape": []}
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -50,9 +54,9 @@ class SimConverter(Generic[T]):
         return datatype()
 
 
-class
-    def
-        return
+class SoftArrayConverter(SoftConverter):
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value)]}
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -65,7 +69,7 @@ class SimArrayConverter(SimConverter):
 
 
 @dataclass
-class
+class SoftEnumConverter(SoftConverter):
     enum_class: Type[Enum]
 
     def write_value(self, value: Union[Enum, str]) -> Enum:
@@ -74,11 +78,9 @@ class SimEnumConverter(SimConverter):
         else:
             return self.enum_class(value)
 
-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         choices = [e.value for e in self.enum_class]
-        return
-            source=source, dtype="string", shape=[], choices=choices
-        ) # type: ignore
+        return {"source": source, "dtype": "string", "shape": [], "choices": choices} # type: ignore
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -87,48 +89,50 @@ class SimEnumConverter(SimConverter):
         return cast(T, list(datatype.__members__.values())[0]) # type: ignore
 
 
-class DisconnectedSimConverter(SimConverter):
-    def __getattribute__(self, __name: str) -> Any:
-        raise NotImplementedError("No PV has been set as connect() has not been called")
-
-
 def make_converter(datatype):
     is_array = get_dtype(datatype) is not None
     is_sequence = get_origin(datatype) == abc.Sequence
     is_enum = issubclass(datatype, Enum) if inspect.isclass(datatype) else False
 
     if is_array or is_sequence:
-        return
+        return SoftArrayConverter()
     if is_enum:
-        return
+        return SoftEnumConverter(datatype)
 
-    return
+    return SoftConverter()
 
 
-class
-    """An
+class SoftSignalBackend(SignalBackend[T]):
+    """An backend to a soft Signal, for test signals see ``MockSignalBackend``."""
 
     _value: T
-    _initial_value: T
+    _initial_value: Optional[T]
    _timestamp: float
    _severity: int
 
-    def __init__(
-
-
+    def __init__(
+        self,
+        datatype: Optional[Type[T]],
+        initial_value: Optional[T] = None,
+    ) -> None:
         self.datatype = datatype
-        self.
-        self.converter:
-        self.
-
-
+        self._initial_value = initial_value
+        self.converter: SoftConverter = make_converter(datatype)
+        if self._initial_value is None:
+            self._initial_value = self.converter.make_initial_value(self.datatype)
+        else:
+            self._initial_value = self.converter.write_value(self._initial_value)
 
-
-        self.converter = make_converter(self.datatype)
-        self._initial_value = self.converter.make_initial_value(self.datatype)
+        self.callback: Optional[ReadingValueCallback[T]] = None
         self._severity = 0
+        self.set_value(self._initial_value)
+
+    def source(self, name: str) -> str:
+        return f"soft://{name}"
 
-
+    async def connect(self, timeout: float = DEFAULT_TIMEOUT) -> None:
+        """Connection isn't required for soft signals."""
+        pass
 
     async def put(self, value: Optional[T], wait=True, timeout=None):
         write_value = (
@@ -136,13 +140,11 @@ class SimSignalBackend(SignalBackend[T]):
             if value is not None
             else self._initial_value
         )
-        self._set_value(write_value)
 
-
-        await asyncio.wait_for(self.put_proceeds.wait(), timeout)
+        self.set_value(write_value)
 
-    def
-        """Method to bypass asynchronous logic
+    def set_value(self, value: T):
+        """Method to bypass asynchronous logic."""
         self._value = value
         self._timestamp = time.monotonic()
         reading: Reading = self.converter.reading(
@@ -152,8 +154,8 @@ class SimSignalBackend(SignalBackend[T]):
         if self.callback:
             self.callback(reading, self._value)
 
-    async def
-        return self.converter.
+    async def get_datakey(self, source: str) -> DataKey:
+        return self.converter.get_datakey(source, self._value)
 
     async def get_reading(self) -> Reading:
         return self.converter.reading(self._value, self._timestamp, self._severity)
@@ -162,7 +164,7 @@ class SimSignalBackend(SignalBackend[T]):
         return self.converter.value(self._value)
 
     async def get_setpoint(self) -> T:
-        """For a
+        """For a soft signal, the setpoint and readback values are the same."""
        return await self.get_value()
 
     def set_callback(self, callback: Optional[ReadingValueCallback[T]]) -> None:
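For orientation, a minimal sketch of exercising the renamed backend directly, based only on the methods visible in this hunk; the import path follows the new module location, and the commented return values are indicative:

```python
import asyncio

# Module path per the rename in this release; any re-export from
# ophyd_async.core is not shown in this hunk.
from ophyd_async.core.soft_signal_backend import SoftSignalBackend


async def main() -> None:
    backend = SoftSignalBackend(int, initial_value=3)
    await backend.connect()            # no-op: soft signals need no connection
    print(backend.source("demo"))      # soft://demo
    print(await backend.get_value())   # 3
    await backend.put(7)
    print(await backend.get_reading())
    # e.g. {"value": 7, "timestamp": ..., "alarm_severity": 0}
    print(await backend.get_datakey("soft://demo"))
    # e.g. {"source": "soft://demo", "dtype": "integer", "shape": []}


asyncio.run(main())
```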
ophyd_async/core/standard_readable.py
CHANGED
@@ -1,14 +1,34 @@
-
+import warnings
+from contextlib import contextmanager
+from typing import (
+    Callable,
+    Dict,
+    Generator,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)
 
-from bluesky.protocols import
+from bluesky.protocols import DataKey, HasHints, Hints, Reading
+
+from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageable
 
 from .async_status import AsyncStatus
-from .device import Device
+from .device import Device, DeviceVector
 from .signal import SignalR
 from .utils import merge_gathered_dicts
 
+ReadableChild = Union[AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints]
+ReadableChildWrapper = Union[
+    Callable[[ReadableChild], ReadableChild], Type["ConfigSignal"], Type["HintedSignal"]
+]
+
 
-class StandardReadable(
+class StandardReadable(
+    Device, AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints
+):
     """Device that owns its children and provides useful default behavior.
 
     - When its name is set it renames child Devices
@@ -16,9 +36,12 @@ class StandardReadable(Device, Readable, Configurable, Stageable):
     - These signals will be subscribed for read() between stage() and unstage()
     """
 
-
-
-
+    # These must be immutable types to avoid accidental sharing between
+    # different instances of the class
+    _readables: Tuple[AsyncReadable, ...] = ()
+    _configurables: Tuple[AsyncConfigurable, ...] = ()
+    _stageables: Tuple[AsyncStageable, ...] = ()
+    _has_hints: Tuple[HasHints, ...] = ()
 
     def set_readable_signals(
         self,
@@ -36,37 +59,203 @@ class StandardReadable(Device, Readable, Configurable, Stageable):
         read_uncached:
             Signals to make up :meth:`~StandardReadable.read` that won't be cached
         """
-
-
-
+        warnings.warn(
+            DeprecationWarning(
+                "Migrate to `add_children_as_readables` context manager or "
+                "`add_readables` method"
+            )
+        )
+        self.add_readables(read, wrapper=HintedSignal)
+        self.add_readables(config, wrapper=ConfigSignal)
+        self.add_readables(read_uncached, wrapper=HintedSignal.uncached)
 
     @AsyncStatus.wrap
     async def stage(self) -> None:
-        for sig in self.
+        for sig in self._stageables:
             await sig.stage().task
 
     @AsyncStatus.wrap
     async def unstage(self) -> None:
-        for sig in self.
+        for sig in self._stageables:
             await sig.unstage().task
 
-    async def describe_configuration(self) -> Dict[str,
+    async def describe_configuration(self) -> Dict[str, DataKey]:
         return await merge_gathered_dicts(
-            [sig.
+            [sig.describe_configuration() for sig in self._configurables]
         )
 
     async def read_configuration(self) -> Dict[str, Reading]:
         return await merge_gathered_dicts(
-            [sig.
+            [sig.read_configuration() for sig in self._configurables]
        )
 
-    async def describe(self) -> Dict[str,
-        return await merge_gathered_dicts(
-            [sig.describe() for sig in self._read_signals + self._read_uncached_signals]
-        )
+    async def describe(self) -> Dict[str, DataKey]:
+        return await merge_gathered_dicts([sig.describe() for sig in self._readables])
 
     async def read(self) -> Dict[str, Reading]:
-        return await merge_gathered_dicts(
-
-
-
+        return await merge_gathered_dicts([sig.read() for sig in self._readables])
+
+    @property
+    def hints(self) -> Hints:
+        hints: Hints = {}
+        for new_hint in self._has_hints:
+            # Merge the existing and new hints, based on the type of the value.
+            # This avoids default dict merge behaviour that overrides the values;
+            # we want to combine them when they are Sequences, and ensure they are
+            # identical when string values.
+            for key, value in new_hint.hints.items():
+                if isinstance(value, str):
+                    if key in hints:
+                        assert (
+                            hints[key] == value  # type: ignore[literal-required]
+                        ), f"Hints key {key} value may not be overridden"
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                elif isinstance(value, Sequence):
+                    if key in hints:
+                        for new_val in value:
+                            assert (
+                                new_val not in hints[key]  # type: ignore[literal-required]
+                            ), f"Hint {key} {new_val} overrides existing hint"
+                        hints[key] = (  # type: ignore[literal-required]
+                            hints[key] + value  # type: ignore[literal-required]
+                        )
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                else:
+                    raise TypeError(
+                        f"{new_hint.name}: Unknown type for value '{value}' "
+                        f" for key '{key}'"
+                    )
+
+        return hints
+
+    @contextmanager
+    def add_children_as_readables(
+        self,
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> Generator[None, None, None]:
+        """Context manager to wrap adding Devices
+
+        Add Devices to this class instance inside the Context Manager to automatically
+        add them to the correct fields, based on the Device's interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        dict_copy = self.__dict__.copy()
+
+        yield
+
+        # Set symmetric difference operator gives all newly added keys
+        new_keys = dict_copy.keys() ^ self.__dict__.keys()
+        new_values = [self.__dict__[key] for key in new_keys]
+
+        flattened_values = []
+        for value in new_values:
+            if isinstance(value, DeviceVector):
+                children = value.children()
+                flattened_values.extend([x[1] for x in children])
+            else:
+                flattened_values.append(value)
+
+        new_devices = list(filter(lambda x: isinstance(x, Device), flattened_values))
+        self.add_readables(new_devices, wrapper)
+
+    def add_readables(
+        self,
+        devices: Sequence[Device],
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> None:
+        """Add the given devices to the lists of known Devices
+
+        Add the provided Devices to the relevant fields, based on the Signal's
+        interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        devices:
+            The devices to be added
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_children_as_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        for readable in devices:
+            obj = readable
+            if wrapper:
+                obj = wrapper(readable)
+
+            if isinstance(obj, AsyncReadable):
+                self._readables += (obj,)
+
+            if isinstance(obj, AsyncConfigurable):
+                self._configurables += (obj,)
+
+            if isinstance(obj, AsyncStageable):
+                self._stageables += (obj,)
+
+            if isinstance(obj, HasHints):
+                self._has_hints += (obj,)
+
+
+class ConfigSignal(AsyncConfigurable):
+    def __init__(self, signal: ReadableChild) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+
+    async def read_configuration(self) -> Dict[str, Reading]:
+        return await self.signal.read()
+
+    async def describe_configuration(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+
+class HintedSignal(HasHints, AsyncReadable):
+    def __init__(self, signal: ReadableChild, allow_cache: bool = True) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+        self.cached = None if allow_cache else allow_cache
+        if allow_cache:
+            self.stage = signal.stage
+            self.unstage = signal.unstage
+
+    async def read(self) -> Dict[str, Reading]:
+        return await self.signal.read(cached=self.cached)
+
+    async def describe(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+    @property
+    def name(self) -> str:
+        return self.signal.name
+
+    @property
+    def hints(self) -> Hints:
+        return {"fields": [self.signal.name]}
+
+    @classmethod
+    def uncached(cls, signal: ReadableChild) -> "HintedSignal":
+        return cls(signal, allow_cache=False)
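A rough sketch of the new readable bookkeeping, assuming `StandardReadable` can be instantiated directly; `FakeCounter` is a hypothetical stand-in for anything satisfying the `AsyncReadable` protocol (in practice children would be `SignalR` instances, optionally wrapped in `HintedSignal` or `ConfigSignal`):

```python
import asyncio
from typing import Dict

from bluesky.protocols import DataKey, Reading

from ophyd_async.core.standard_readable import StandardReadable


class FakeCounter:
    """Hypothetical stand-in satisfying the AsyncReadable protocol."""

    name = "counter"

    async def read(self) -> Dict[str, Reading]:
        return {self.name: {"value": 42, "timestamp": 0.0}}

    async def describe(self) -> Dict[str, DataKey]:
        return {self.name: {"source": "soft://counter", "dtype": "integer", "shape": []}}


async def main() -> None:
    group = StandardReadable(name="group")
    # With no wrapper, each child lands in whichever of _readables,
    # _configurables, _stageables or _has_hints its interfaces match.
    group.add_readables([FakeCounter()])
    print(await group.read())      # {'counter': {'value': 42, 'timestamp': 0.0}}
    print(await group.describe())
    print(group.hints)             # {} - FakeCounter provides no hints


asyncio.run(main())
```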
ophyd_async/core/utils.py
CHANGED
@@ -1,56 +1,132 @@
+from __future__ import annotations
+
 import asyncio
-
+import logging
+from dataclasses import dataclass
+from typing import (
+    Awaitable,
+    Callable,
+    Dict,
+    Generic,
+    Iterable,
+    List,
+    Optional,
+    ParamSpec,
+    Type,
+    TypeVar,
+    Union,
+)
 
 import numpy as np
 from bluesky.protocols import Reading
 
 T = TypeVar("T")
+P = ParamSpec("P")
 Callback = Callable[[T], None]
 
 #: A function that will be called with the Reading and value when the
 #: monitor updates
 ReadingValueCallback = Callable[[Reading, T], None]
 DEFAULT_TIMEOUT = 10.0
+ErrorText = Union[str, Dict[str, Exception]]
+
+
+class CalculateTimeout:
+    """Sentinel class used to implement ``myfunc(timeout=CalculateTimeout)``
+
+    This signifies that the function should calculate a suitable non-zero
+    timeout itself
+    """
+
+
+CalculatableTimeout = float | None | Type[CalculateTimeout]
 
 
 class NotConnected(Exception):
     """Exception to be raised if a `Device.connect` is cancelled"""
 
-
-
+    _indent_width = " "
+
+    def __init__(self, errors: ErrorText):
+        """
+        NotConnected holds a mapping of device/signal names to
+        errors.
+
+        Parameters
+        ----------
+        errors: ErrorText
+            Mapping of device name to Exception or another NotConnected.
+            Alternatively a string with the signal error text.
+        """
+
+        self._errors = errors
+
+    def _format_sub_errors(self, name: str, error: Exception, indent="") -> str:
+        if isinstance(error, NotConnected):
+            error_txt = ":" + error.format_error_string(indent + self._indent_width)
+        elif isinstance(error, Exception):
+            error_txt = ": " + err_str + "\n" if (err_str := str(error)) else "\n"
+        else:
+            raise RuntimeError(
+                f"Unexpected type `{type(error)}`, expected an Exception"
+            )
+
+        string = f"{indent}{name}: {type(error).__name__}" + error_txt
+        return string
+
+    def format_error_string(self, indent="") -> str:
+        if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
+            raise RuntimeError(
+                f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
+            )
+
+        if isinstance(self._errors, str):
+            return " " + self._errors + "\n"
+
+        string = "\n"
+        for name, error in self._errors.items():
+            string += self._format_sub_errors(name, error, indent=indent)
+        return string
 
     def __str__(self) -> str:
-        return "
+        return self.format_error_string(indent="")
+
+
+@dataclass(frozen=True)
+class WatcherUpdate(Generic[T]):
+    """A dataclass such that, when expanded, it provides the kwargs for a watcher"""
+
+    current: T
+    initial: T
+    target: T
+    name: str | None = None
+    unit: str | None = None
+    precision: float | None = None
+    fraction: float | None = None
+    time_elapsed: float | None = None
+    time_remaining: float | None = None
 
 
 async def wait_for_connection(**coros: Awaitable[None]):
-    """Call many underlying signals, accumulating
+    """Call many underlying signals, accumulating exceptions and returning them
 
-
-    ------
-    `NotConnected` if cancelled
+    Expected kwargs should be a mapping of names to coroutine tasks to execute.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        lines += [f" {line}" for line in e.lines]
-        raise NotConnected(*lines)
-    else:
-        # Wait for everything to foreground the exceptions
-        for f in list(done) + list(pending):
-            await f
+    results = await asyncio.gather(*coros.values(), return_exceptions=True)
+    exceptions = {}
+
+    for name, result in zip(coros, results):
+        if isinstance(result, Exception):
+            exceptions[name] = result
+            if not isinstance(result, NotConnected):
+                logging.exception(
+                    f"device `{name}` raised unexpected exception "
+                    f"{type(result).__name__}",
+                    exc_info=result,
+                )
+
+    if exceptions:
+        raise NotConnected(exceptions)
 
 
 def get_dtype(typ: Type) -> Optional[np.dtype]:
@@ -86,7 +162,7 @@ def get_unique(values: Dict[str, T], types: str) -> T:
 
 
 async def merge_gathered_dicts(
-    coros: Iterable[Awaitable[Dict[str, T]]]
+    coros: Iterable[Awaitable[Dict[str, T]]],
 ) -> Dict[str, T]:
     """Merge dictionaries produced by a sequence of coroutines.
 
@@ -102,3 +178,20 @@ async def merge_gathered_dicts(
 
 async def gather_list(coros: Iterable[Awaitable[T]]) -> List[T]:
     return await asyncio.gather(*coros)
+
+
+def in_micros(t: float) -> int:
+    """
+    Converts between a positive number of seconds and an equivalent
+    number of microseconds.
+
+    Args:
+        t (float): A time in seconds
+    Raises:
+        ValueError: if t < 0
+    Returns:
+        t (int): A time in microseconds, rounded up to the nearest whole microsecond,
+    """
+    if t < 0:
+        raise ValueError(f"Expected a positive time in seconds, got {t!r}")
+    return int(np.ceil(t * 1e6))