ophyd-async 0.3a1__py3-none-any.whl → 0.3a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +1 -1
- ophyd_async/core/__init__.py +23 -3
- ophyd_async/core/_providers.py +3 -1
- ophyd_async/core/detector.py +72 -46
- ophyd_async/core/device.py +8 -0
- ophyd_async/core/flyer.py +12 -21
- ophyd_async/core/signal.py +134 -20
- ophyd_async/core/signal_backend.py +6 -3
- ophyd_async/core/sim_signal_backend.py +32 -20
- ophyd_async/core/standard_readable.py +212 -23
- ophyd_async/core/utils.py +18 -1
- ophyd_async/epics/_backend/_aioca.py +17 -15
- ophyd_async/epics/_backend/_p4p.py +34 -25
- ophyd_async/epics/_backend/common.py +16 -11
- ophyd_async/epics/areadetector/__init__.py +8 -0
- ophyd_async/epics/areadetector/aravis.py +67 -0
- ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +73 -0
- ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +36 -24
- ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +154 -0
- ophyd_async/epics/areadetector/drivers/kinetix_driver.py +24 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +4 -4
- ophyd_async/epics/areadetector/drivers/vimba_driver.py +58 -0
- ophyd_async/epics/areadetector/kinetix.py +46 -0
- ophyd_async/epics/areadetector/pilatus.py +45 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
- ophyd_async/epics/areadetector/vimba.py +43 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +4 -4
- ophyd_async/epics/areadetector/writers/hdf_writer.py +12 -4
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +1 -0
- ophyd_async/epics/demo/__init__.py +45 -18
- ophyd_async/epics/motion/motor.py +24 -19
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/signal.py +26 -9
- ophyd_async/log.py +130 -0
- ophyd_async/panda/__init__.py +17 -6
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/{panda_controller.py → _panda_controller.py} +3 -7
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/planstubs/__init__.py +5 -0
- ophyd_async/planstubs/prepare_trigger_and_dets.py +57 -0
- ophyd_async/protocols.py +96 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +118 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/METADATA +30 -69
- ophyd_async-0.3a3.dist-info/RECORD +83 -0
- ophyd_async/epics/pvi.py +0 -70
- ophyd_async/panda/panda.py +0 -241
- ophyd_async-0.3a1.dist-info/RECORD +0 -56
- /ophyd_async/panda/{table.py → _table.py} +0 -0
- /ophyd_async/panda/{utils.py → _utils.py} +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/LICENSE +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/WHEEL +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/top_level.txt +0 -0
ophyd_async/core/signal_backend.py
CHANGED

@@ -1,7 +1,7 @@
 from abc import abstractmethod
 from typing import Generic, Optional, Type

-from bluesky.protocols import
+from bluesky.protocols import DataKey, Reading

 from .utils import DEFAULT_TIMEOUT, ReadingValueCallback, T

@@ -13,7 +13,10 @@ class SignalBackend(Generic[T]):
     datatype: Optional[Type[T]] = None

     #: Like ca://PV_PREFIX:SIGNAL
-
+    @abstractmethod
+    def source(name: str) -> str:
+        """Return source of signal. Signals may pass a name to the backend, which can be
+        used or discarded."""

     @abstractmethod
     async def connect(self, timeout: float = DEFAULT_TIMEOUT):
@@ -24,7 +27,7 @@ class SignalBackend(Generic[T]):
         """Put a value to the PV, if wait then wait for completion for up to timeout"""

     @abstractmethod
-    async def
+    async def get_datakey(self, source: str) -> DataKey:
         """Metadata like source, dtype, shape, precision, units"""

     @abstractmethod
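
Usage sketch (not part of the package diff): the SignalBackend changes above turn `source` into a method that receives the signal name and rename the describe call to `get_datakey(source)`. A minimal illustration of calling code against the new interface, assuming `SimSignalBackend` is still re-exported from `ophyd_async.core` (it can otherwise be imported from `ophyd_async.core.sim_signal_backend`); `describe_backend` and `sim_counter` are made-up names:

    import asyncio

    from ophyd_async.core import SimSignalBackend


    async def describe_backend(name: str, backend) -> dict:
        # source() is now a method taking the signal name, not an attribute set in __init__
        source = backend.source(name)
        # get_datakey() replaces the old descriptor-style call and returns a
        # DataKey-shaped dict: {"source": ..., "dtype": ..., "shape": ...}
        return {name: await backend.get_datakey(source)}


    async def main():
        backend = SimSignalBackend(int)  # initial_value is optional and defaults to None
        await backend.connect()
        print(await describe_backend("sim_counter", backend))


    asyncio.run(main())
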
ophyd_async/core/sim_signal_backend.py
CHANGED

@@ -2,14 +2,14 @@ from __future__ import annotations

 import asyncio
 import inspect
-import re
 import time
 from collections import abc
 from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, Generic, Optional, Type, Union, cast, get_origin

-
+import numpy as np
+from bluesky.protocols import DataKey, Dtype, Reading

 from .signal_backend import SignalBackend
 from .utils import DEFAULT_TIMEOUT, ReadingValueCallback, T, get_dtype
@@ -36,12 +36,17 @@ class SimConverter(Generic[T]):
             alarm_severity=-1 if severity > 2 else severity,
         )

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
+        dtype = type(value)
+        if np.issubdtype(dtype, np.integer):
+            dtype = int
+        elif np.issubdtype(dtype, np.floating):
+            dtype = float
         assert (
-
+            dtype in primitive_dtypes
         ), f"invalid converter for value of type {type(value)}"
-
-        return
+        dtype_name = primitive_dtypes[dtype]
+        return {"source": source, "dtype": dtype_name, "shape": []}

     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -51,8 +56,8 @@ class SimConverter(Generic[T]):


 class SimArrayConverter(SimConverter):
-    def
-        return
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value)]}

     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -74,11 +79,9 @@ class SimEnumConverter(SimConverter):
         else:
             return self.enum_class(value)

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         choices = [e.value for e in self.enum_class]
-        return
-            source=source, dtype="string", shape=[], choices=choices
-        )  # type: ignore
+        return {"source": source, "dtype": "string", "shape": [], "choices": choices}  # type: ignore

     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -109,23 +112,32 @@ class SimSignalBackend(SignalBackend[T]):
     """An simulated backend to a Signal, created with ``Signal.connect(sim=True)``"""

     _value: T
-    _initial_value: T
+    _initial_value: Optional[T]
     _timestamp: float
     _severity: int

-    def __init__(
-
-
+    def __init__(
+        self,
+        datatype: Optional[Type[T]],
+        initial_value: Optional[T] = None,
+    ) -> None:
         self.datatype = datatype
-        self.pv = source
         self.converter: SimConverter = DisconnectedSimConverter()
+        self._initial_value = initial_value
         self.put_proceeds = asyncio.Event()
         self.put_proceeds.set()
         self.callback: Optional[ReadingValueCallback[T]] = None

+    def source(self, name: str) -> str:
+        return f"soft://{name}"
+
     async def connect(self, timeout: float = DEFAULT_TIMEOUT) -> None:
         self.converter = make_converter(self.datatype)
-        self._initial_value
+        if self._initial_value is None:
+            self._initial_value = self.converter.make_initial_value(self.datatype)
+        else:
+            # convert potentially unconverted initial value passed to init method
+            self._initial_value = self.converter.write_value(self._initial_value)
         self._severity = 0

         await self.put(None)
@@ -152,8 +164,8 @@ class SimSignalBackend(SignalBackend[T]):
         if self.callback:
             self.callback(reading, self._value)

-    async def
-        return self.converter.
+    async def get_datakey(self, source: str) -> DataKey:
+        return self.converter.get_datakey(source, self._value)

     async def get_reading(self) -> Reading:
         return self.converter.reading(self._value, self._timestamp, self._severity)
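
Usage sketch (not part of the package diff): `SimSignalBackend` now takes an optional `initial_value` instead of a source string, and converts it on `connect()`. Assuming it is importable from `ophyd_async.core`, and with `sim_voltage` as a made-up signal name:

    import asyncio

    from ophyd_async.core import SimSignalBackend


    async def main():
        # initial_value is new in this release: it is converted on connect(),
        # so passing a plain Python value for the declared datatype is enough.
        backend = SimSignalBackend(float, initial_value=3.5)
        await backend.connect()

        print(backend.source("sim_voltage"))  # soft://sim_voltage
        reading = await backend.get_reading()
        print(reading["value"], reading["alarm_severity"])  # 3.5 0


    asyncio.run(main())
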
ophyd_async/core/standard_readable.py
CHANGED

@@ -1,14 +1,34 @@
-
+import warnings
+from contextlib import contextmanager
+from typing import (
+    Callable,
+    Dict,
+    Generator,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)

-from bluesky.protocols import
+from bluesky.protocols import DataKey, HasHints, Hints, Reading
+
+from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageable

 from .async_status import AsyncStatus
-from .device import Device
+from .device import Device, DeviceVector
 from .signal import SignalR
 from .utils import merge_gathered_dicts

+ReadableChild = Union[AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints]
+ReadableChildWrapper = Union[
+    Callable[[ReadableChild], ReadableChild], Type["ConfigSignal"], Type["HintedSignal"]
+]
+

-class StandardReadable(
+class StandardReadable(
+    Device, AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints
+):
     """Device that owns its children and provides useful default behavior.

     - When its name is set it renames child Devices
@@ -16,9 +36,12 @@ class StandardReadable(Device, Readable, Configurable, Stageable):
     - These signals will be subscribed for read() between stage() and unstage()
     """

-
-
-
+    # These must be immutable types to avoid accidental sharing between
+    # different instances of the class
+    _readables: Tuple[AsyncReadable, ...] = ()
+    _configurables: Tuple[AsyncConfigurable, ...] = ()
+    _stageables: Tuple[AsyncStageable, ...] = ()
+    _has_hints: Tuple[HasHints, ...] = ()

     def set_readable_signals(
         self,
@@ -36,37 +59,203 @@ class StandardReadable(Device, Readable, Configurable, Stageable):
         read_uncached:
             Signals to make up :meth:`~StandardReadable.read` that won't be cached
         """
-
-
-
+        warnings.warn(
+            DeprecationWarning(
+                "Migrate to `add_children_as_readables` context manager or "
+                "`add_readables` method"
+            )
+        )
+        self.add_readables(read, wrapper=HintedSignal)
+        self.add_readables(config, wrapper=ConfigSignal)
+        self.add_readables(read_uncached, wrapper=HintedSignal.uncached)

     @AsyncStatus.wrap
     async def stage(self) -> None:
-        for sig in self.
+        for sig in self._stageables:
             await sig.stage().task

     @AsyncStatus.wrap
     async def unstage(self) -> None:
-        for sig in self.
+        for sig in self._stageables:
             await sig.unstage().task

-    async def describe_configuration(self) -> Dict[str,
+    async def describe_configuration(self) -> Dict[str, DataKey]:
         return await merge_gathered_dicts(
-            [sig.
+            [sig.describe_configuration() for sig in self._configurables]
         )

     async def read_configuration(self) -> Dict[str, Reading]:
         return await merge_gathered_dicts(
-            [sig.
+            [sig.read_configuration() for sig in self._configurables]
         )

-    async def describe(self) -> Dict[str,
-        return await merge_gathered_dicts(
-            [sig.describe() for sig in self._read_signals + self._read_uncached_signals]
-        )
+    async def describe(self) -> Dict[str, DataKey]:
+        return await merge_gathered_dicts([sig.describe() for sig in self._readables])

     async def read(self) -> Dict[str, Reading]:
-        return await merge_gathered_dicts(
-
-
-
+        return await merge_gathered_dicts([sig.read() for sig in self._readables])
+
+    @property
+    def hints(self) -> Hints:
+        hints: Hints = {}
+        for new_hint in self._has_hints:
+            # Merge the existing and new hints, based on the type of the value.
+            # This avoids default dict merge behaviour that overrides the values;
+            # we want to combine them when they are Sequences, and ensure they are
+            # identical when string values.
+            for key, value in new_hint.hints.items():
+                if isinstance(value, str):
+                    if key in hints:
+                        assert (
+                            hints[key] == value  # type: ignore[literal-required]
+                        ), f"Hints key {key} value may not be overridden"
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                elif isinstance(value, Sequence):
+                    if key in hints:
+                        for new_val in value:
+                            assert (
+                                new_val not in hints[key]  # type: ignore[literal-required]
+                            ), f"Hint {key} {new_val} overrides existing hint"
+                        hints[key] = (  # type: ignore[literal-required]
+                            hints[key] + value  # type: ignore[literal-required]
+                        )
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                else:
+                    raise TypeError(
+                        f"{new_hint.name}: Unknown type for value '{value}' "
+                        f" for key '{key}'"
+                    )
+
+        return hints
+
+    @contextmanager
+    def add_children_as_readables(
+        self,
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> Generator[None, None, None]:
+        """Context manager to wrap adding Devices
+
+        Add Devices to this class instance inside the Context Manager to automatically
+        add them to the correct fields, based on the Device's interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        dict_copy = self.__dict__.copy()
+
+        yield
+
+        # Set symmetric difference operator gives all newly added keys
+        new_keys = dict_copy.keys() ^ self.__dict__.keys()
+        new_values = [self.__dict__[key] for key in new_keys]
+
+        flattened_values = []
+        for value in new_values:
+            if isinstance(value, DeviceVector):
+                children = value.children()
+                flattened_values.extend([x[1] for x in children])
+            else:
+                flattened_values.append(value)
+
+        new_devices = list(filter(lambda x: isinstance(x, Device), flattened_values))
+        self.add_readables(new_devices, wrapper)
+
+    def add_readables(
+        self,
+        devices: Sequence[Device],
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> None:
+        """Add the given devices to the lists of known Devices
+
+        Add the provided Devices to the relevant fields, based on the Signal's
+        interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        devices:
+            The devices to be added
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_children_as_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        for readable in devices:
+            obj = readable
+            if wrapper:
+                obj = wrapper(readable)
+
+            if isinstance(obj, AsyncReadable):
+                self._readables += (obj,)
+
+            if isinstance(obj, AsyncConfigurable):
+                self._configurables += (obj,)
+
+            if isinstance(obj, AsyncStageable):
+                self._stageables += (obj,)
+
+            if isinstance(obj, HasHints):
+                self._has_hints += (obj,)
+
+
+class ConfigSignal(AsyncConfigurable):
+    def __init__(self, signal: ReadableChild) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+
+    async def read_configuration(self) -> Dict[str, Reading]:
+        return await self.signal.read()
+
+    async def describe_configuration(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+
+class HintedSignal(HasHints, AsyncReadable):
+    def __init__(self, signal: ReadableChild, allow_cache: bool = True) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+        self.cached = None if allow_cache else allow_cache
+        if allow_cache:
+            self.stage = signal.stage
+            self.unstage = signal.unstage
+
+    async def read(self) -> Dict[str, Reading]:
+        return await self.signal.read(cached=self.cached)
+
+    async def describe(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+    @property
+    def name(self) -> str:
+        return self.signal.name
+
+    @property
+    def hints(self) -> Hints:
+        return {"fields": [self.signal.name]}
+
+    @classmethod
+    def uncached(cls, signal: ReadableChild) -> "HintedSignal":
+        return cls(signal, allow_cache=False)
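
Usage sketch (not part of the package diff): a device opting in to the new `add_children_as_readables` context manager. This assumes `ConfigSignal` and `HintedSignal` are re-exported from `ophyd_async.core` (they are defined in `standard_readable.py` above; otherwise import them from `ophyd_async.core.standard_readable`) and that `epics_signal_r`/`epics_signal_rw` keep their existing signatures; the `Sensor` class and PV suffixes are made up:

    from ophyd_async.core import ConfigSignal, HintedSignal, StandardReadable
    from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw


    class Sensor(StandardReadable):
        """Hypothetical device showing the new child-registration API."""

        def __init__(self, prefix: str, name: str = "") -> None:
            # Signals created in this block end up in read() and are hinted
            with self.add_children_as_readables(HintedSignal):
                self.value = epics_signal_r(float, prefix + "Value")
            # Signals created in this block end up in read_configuration()
            with self.add_children_as_readables(ConfigSignal):
                self.mode = epics_signal_rw(str, prefix + "Mode")
            super().__init__(name=name)
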
ophyd_async/core/utils.py
CHANGED

@@ -132,7 +132,7 @@ def get_unique(values: Dict[str, T], types: str) -> T:


 async def merge_gathered_dicts(
-    coros: Iterable[Awaitable[Dict[str, T]]]
+    coros: Iterable[Awaitable[Dict[str, T]]],
 ) -> Dict[str, T]:
     """Merge dictionaries produced by a sequence of coroutines.

@@ -148,3 +148,20 @@ async def merge_gathered_dicts(

 async def gather_list(coros: Iterable[Awaitable[T]]) -> List[T]:
     return await asyncio.gather(*coros)
+
+
+def in_micros(t: float) -> int:
+    """
+    Converts between a positive number of seconds and an equivalent
+    number of microseconds.
+
+    Args:
+        t (float): A time in seconds
+    Raises:
+        ValueError: if t < 0
+    Returns:
+        t (int): A time in microseconds, rounded up to the nearest whole microsecond,
+    """
+    if t < 0:
+        raise ValueError(f"Expected a positive time in seconds, got {t!r}")
+    return int(np.ceil(t * 1e6))
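
Usage sketch (not part of the package diff): the new `in_micros` helper rounds a non-negative time in seconds up to whole microseconds, imported here from the module it is added to:

    from ophyd_async.core.utils import in_micros

    print(in_micros(0.0015))  # 1500: 1.5 ms expressed as whole microseconds
    print(in_micros(1e-7))    # 1: always rounded up, never truncated to 0
    try:
        in_micros(-1.0)
    except ValueError as err:
        print(err)            # negative times are rejected
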
ophyd_async/epics/_backend/_aioca.py
CHANGED

@@ -15,7 +15,7 @@ from aioca (
     caput,
 )
 from aioca.types import AugmentedValue, Dbr, Format
-from bluesky.protocols import
+from bluesky.protocols import DataKey, Dtype, Reading
 from epicscorelibs.ca import dbr

 from ophyd_async.core import (
@@ -52,14 +52,14 @@ class CaConverter:
         return value

     def reading(self, value: AugmentedValue):
-        return
-            value
-            timestamp
-            alarm_severity
-
+        return {
+            "value": self.value(value),
+            "timestamp": value.timestamp,
+            "alarm_severity": -1 if value.severity > 2 else value.severity,
+        }

-    def
-        return
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+        return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}


 class CaLongStrConverter(CaConverter):
@@ -73,8 +73,8 @@ class CaLongStrConverter(CaConverter):


 class CaArrayConverter(CaConverter):
-    def
-        return
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value)]}


 @dataclass
@@ -90,9 +90,9 @@ class CaEnumConverter(CaConverter):
     def value(self, value: AugmentedValue):
         return self.enum_class(value)

-    def
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
         choices = [e.value for e in self.enum_class]
-        return
+        return {"source": source, "dtype": "string", "shape": [], "choices": choices}


 class DisconnectedCaConverter(CaConverter):
@@ -170,9 +170,11 @@ class CaSignalBackend(SignalBackend[T]):
         self.write_pv = write_pv
         self.initial_values: Dict[str, AugmentedValue] = {}
         self.converter: CaConverter = DisconnectedCaConverter(None, None)
-        self.source = f"ca://{self.read_pv}"
         self.subscription: Optional[Subscription] = None

+    def source(self, name: str):
+        return f"ca://{self.read_pv}"
+
     async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
         try:
             self.initial_values[pv] = await caget(
@@ -216,9 +218,9 @@ class CaSignalBackend(SignalBackend[T]):
             timeout=None,
         )

-    async def
+    async def get_datakey(self, source: str) -> DataKey:
         value = await self._caget(FORMAT_CTRL)
-        return self.converter.
+        return self.converter.get_datakey(source, value)

     async def get_reading(self) -> Reading:
         value = await self._caget(FORMAT_TIME)

ophyd_async/epics/_backend/_p4p.py
CHANGED

@@ -6,7 +6,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, List, Optional, Sequence, Type, Union

-from bluesky.protocols import
+from bluesky.protocols import DataKey, Dtype, Reading
 from p4p import Value
 from p4p.client.asyncio import Context, Subscription

@@ -49,15 +49,15 @@ class PvaConverter:
     def reading(self, value):
         ts = value["timeStamp"]
         sv = value["alarm"]["severity"]
-        return
-            value
-            timestamp
-            alarm_severity
-
+        return {
+            "value": self.value(value),
+            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
+            "alarm_severity": -1 if sv > 2 else sv,
+        }

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         dtype = specifier_to_dtype[value.type().aspy("value")]
-        return
+        return {"source": source, "dtype": dtype, "shape": []}

     def metadata_fields(self) -> List[str]:
         """
@@ -73,8 +73,8 @@ class PvaConverter:


 class PvaArrayConverter(PvaConverter):
-    def
-        return
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value["value"])]}


 class PvaNDArrayConverter(PvaConverter):
@@ -96,9 +96,9 @@ class PvaNDArrayConverter(PvaConverter):
         dims = self._get_dimensions(value)
         return value["value"].reshape(dims)

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         dims = self._get_dimensions(value)
-        return
+        return {"source": source, "dtype": "array", "shape": dims}

     def write_value(self, value):
         # No clear use-case for writing directly to an NDArray, and some
@@ -120,26 +120,26 @@ class PvaEnumConverter(PvaConverter):
     def value(self, value):
         return list(self.enum_class)[value["value"]["index"]]

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         choices = [e.value for e in self.enum_class]
-        return
+        return {"source": source, "dtype": "string", "shape": [], "choices": choices}


 class PvaEnumBoolConverter(PvaConverter):
     def value(self, value):
         return value["value"]["index"]

-    def
-        return
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "integer", "shape": []}


 class PvaTableConverter(PvaConverter):
     def value(self, value):
         return value["value"].todict()

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         # This is wrong, but defer until we know how to actually describe a table
-        return
+        return {"source": source, "dtype": "object", "shape": []}  # type: ignore


 class PvaDictConverter(PvaConverter):
@@ -147,12 +147,12 @@ class PvaDictConverter(PvaConverter):
         ts = time.time()
         value = value.todict()
         # Alarm severity is vacuously 0 for a table
-        return
+        return {"value": value, "timestamp": ts, "alarm_severity": 0}

     def value(self, value: Value):
         return value.todict()

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         raise NotImplementedError("Describing Dict signals not currently supported")

     def metadata_fields(self) -> List[str]:
@@ -216,7 +216,13 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
         )
         return PvaEnumConverter(get_supported_enum_class(pv, datatype, pv_choices))
     elif "NTScalar" in typeid:
-        if
+        if (
+            datatype
+            and not issubclass(typ, datatype)
+            and not (
+                typ is float and datatype is int
+            )  # Allow float -> int since prec can be 0
+        ):
             raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
         return PvaConverter()
     elif "NTTable" in typeid:
@@ -236,9 +242,12 @@ class PvaSignalBackend(SignalBackend[T]):
         self.write_pv = write_pv
         self.initial_values: Dict[str, Any] = {}
         self.converter: PvaConverter = DisconnectedPvaConverter()
-        self.source = f"pva://{self.read_pv}"
         self.subscription: Optional[Subscription] = None

+    @property
+    def source(self, name: str):
+        return f"pva://{self.read_pv}"
+
     @property
     def ctxt(self) -> Context:
         if PvaSignalBackend._ctxt is None:
@@ -279,7 +288,7 @@ class PvaSignalBackend(SignalBackend[T]):
             write_value = self.initial_values[self.write_pv]
         else:
             write_value = self.converter.write_value(value)
-        coro = self.ctxt.put(self.write_pv,
+        coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
         try:
             await asyncio.wait_for(coro, timeout)
         except asyncio.TimeoutError as exc:
@@ -290,9 +299,9 @@ class PvaSignalBackend(SignalBackend[T]):
             )
             raise NotConnected(f"pva://{self.write_pv}") from exc

-    async def
+    async def get_datakey(self, source: str) -> DataKey:
         value = await self.ctxt.get(self.read_pv)
-        return self.converter.
+        return self.converter.get_datakey(source, value)

     def _pva_request_string(self, fields: List[str]) -> str:
         """
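
Usage sketch (not part of the package diff): both EPICS backends now build their `Reading` and `DataKey` results as plain TypedDict-shaped dicts rather than keyword-constructed objects. The shapes involved, with a made-up PV name and values (`DataKey` and `Reading` come from `bluesky.protocols`, as imported in the hunks above):

    from bluesky.protocols import DataKey, Reading

    # Shape of what CaConverter.get_datakey() / PvaConverter.get_datakey() now return
    datakey: DataKey = {"source": "ca://EXAMPLE:VALUE", "dtype": "number", "shape": []}

    # Shape of what the reading() methods now return
    reading: Reading = {
        "value": 3.5,
        "timestamp": 1700000000.0,
        "alarm_severity": 0,
    }

    print(datakey, reading)
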