ophyd-async 0.7.0a1__py3-none-any.whl → 0.8.0a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +30 -9
- ophyd_async/core/_detector.py +5 -10
- ophyd_async/core/_device.py +146 -67
- ophyd_async/core/_device_filler.py +269 -0
- ophyd_async/core/_device_save_loader.py +6 -7
- ophyd_async/core/_mock_signal_backend.py +32 -40
- ophyd_async/core/_mock_signal_utils.py +22 -16
- ophyd_async/core/_protocol.py +28 -8
- ophyd_async/core/_readable.py +133 -134
- ophyd_async/core/_signal.py +140 -152
- ophyd_async/core/_signal_backend.py +131 -64
- ophyd_async/core/_soft_signal_backend.py +125 -194
- ophyd_async/core/_status.py +22 -6
- ophyd_async/core/_table.py +97 -100
- ophyd_async/core/_utils.py +79 -18
- ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
- ophyd_async/epics/adaravis/_aravis_io.py +8 -6
- ophyd_async/epics/adcore/_core_io.py +5 -7
- ophyd_async/epics/adcore/_hdf_writer.py +2 -2
- ophyd_async/epics/adcore/_single_trigger.py +4 -9
- ophyd_async/epics/adcore/_utils.py +15 -10
- ophyd_async/epics/adkinetix/__init__.py +2 -1
- ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
- ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
- ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
- ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
- ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
- ophyd_async/epics/advimba/__init__.py +4 -1
- ophyd_async/epics/advimba/_vimba_controller.py +6 -3
- ophyd_async/epics/advimba/_vimba_io.py +8 -9
- ophyd_async/epics/core/__init__.py +26 -0
- ophyd_async/epics/core/_aioca.py +323 -0
- ophyd_async/epics/core/_epics_connector.py +53 -0
- ophyd_async/epics/core/_epics_device.py +13 -0
- ophyd_async/epics/core/_p4p.py +382 -0
- ophyd_async/epics/core/_pvi_connector.py +92 -0
- ophyd_async/epics/core/_signal.py +171 -0
- ophyd_async/epics/core/_util.py +61 -0
- ophyd_async/epics/demo/_mover.py +4 -5
- ophyd_async/epics/demo/_sensor.py +14 -13
- ophyd_async/epics/eiger/_eiger.py +1 -2
- ophyd_async/epics/eiger/_eiger_controller.py +1 -1
- ophyd_async/epics/eiger/_eiger_io.py +3 -5
- ophyd_async/epics/eiger/_odin_io.py +5 -5
- ophyd_async/epics/motor.py +4 -5
- ophyd_async/epics/signal.py +11 -0
- ophyd_async/fastcs/core.py +9 -0
- ophyd_async/fastcs/panda/__init__.py +4 -4
- ophyd_async/fastcs/panda/_block.py +23 -11
- ophyd_async/fastcs/panda/_control.py +3 -5
- ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
- ophyd_async/fastcs/panda/_table.py +29 -51
- ophyd_async/fastcs/panda/_trigger.py +8 -8
- ophyd_async/fastcs/panda/_writer.py +4 -7
- ophyd_async/plan_stubs/_ensure_connected.py +3 -1
- ophyd_async/plan_stubs/_fly.py +2 -2
- ophyd_async/plan_stubs/_nd_attributes.py +5 -4
- ophyd_async/py.typed +0 -0
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
- ophyd_async/sim/demo/_sim_motor.py +3 -4
- ophyd_async/tango/__init__.py +2 -4
- ophyd_async/tango/base_devices/_base_device.py +76 -144
- ophyd_async/tango/demo/_counter.py +8 -18
- ophyd_async/tango/demo/_mover.py +5 -6
- ophyd_async/tango/signal/__init__.py +2 -4
- ophyd_async/tango/signal/_signal.py +29 -50
- ophyd_async/tango/signal/_tango_transport.py +38 -40
- {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/METADATA +8 -12
- ophyd_async-0.8.0a3.dist-info/RECORD +112 -0
- {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/WHEEL +1 -1
- ophyd_async/epics/pvi/__init__.py +0 -3
- ophyd_async/epics/pvi/_pvi.py +0 -338
- ophyd_async/epics/signal/__init__.py +0 -21
- ophyd_async/epics/signal/_aioca.py +0 -378
- ophyd_async/epics/signal/_common.py +0 -57
- ophyd_async/epics/signal/_epics_transport.py +0 -34
- ophyd_async/epics/signal/_p4p.py +0 -518
- ophyd_async/epics/signal/_signal.py +0 -114
- ophyd_async-0.7.0a1.dist-info/RECORD +0 -108
- {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/LICENSE +0 -0
- {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,382 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
import asyncio
|
|
4
|
+
import atexit
|
|
5
|
+
import logging
|
|
6
|
+
from collections.abc import Mapping, Sequence
|
|
7
|
+
from math import isnan, nan
|
|
8
|
+
from typing import Any, Generic
|
|
9
|
+
|
|
10
|
+
import numpy as np
|
|
11
|
+
from bluesky.protocols import Reading
|
|
12
|
+
from event_model import DataKey, Limits, LimitsRange
|
|
13
|
+
from p4p import Value
|
|
14
|
+
from p4p.client.asyncio import Context, Subscription
|
|
15
|
+
from pydantic import BaseModel
|
|
16
|
+
|
|
17
|
+
from ophyd_async.core import (
|
|
18
|
+
Array1D,
|
|
19
|
+
Callback,
|
|
20
|
+
NotConnected,
|
|
21
|
+
SignalDatatype,
|
|
22
|
+
SignalDatatypeT,
|
|
23
|
+
SignalMetadata,
|
|
24
|
+
StrictEnum,
|
|
25
|
+
Table,
|
|
26
|
+
get_enum_cls,
|
|
27
|
+
get_unique,
|
|
28
|
+
make_datakey,
|
|
29
|
+
wait_for_connection,
|
|
30
|
+
)
|
|
31
|
+
|
|
32
|
+
from ._util import EpicsSignalBackend, format_datatype, get_supported_values
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _limits_from_value(value: Any) -> Limits:
    """Build a `Limits` dict from the limit substructures of a p4p Value."""

    def _range_of(
        substructure_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
    ) -> LimitsRange | None:
        # Missing substructures/fields read back as nan, meaning "no limit set"
        sub = getattr(value, substructure_name, None)
        low = getattr(sub, low_name, nan)
        high = getattr(sub, high_name, nan)
        if isnan(low) and isnan(high):
            return None
        return LimitsRange(
            low=None if isnan(low) else low,
            high=None if isnan(high) else high,
        )

    limits = Limits()
    # (limits key) -> (substructure, low field, high field) to probe, in the
    # same order the original checks were made
    specs = {
        "alarm": ("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
        "control": ("control", "limitLow", "limitHigh"),
        "display": ("display", "limitLow", "limitHigh"),
        "warning": ("valueAlarm", "lowWarningLimit", "highWarningLimit"),
    }
    for key, args in specs.items():
        if limits_range := _range_of(*args):
            limits[key] = limits_range
    return limits
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def _metadata_from_value(datatype: type[SignalDatatype], value: Any) -> SignalMetadata:
    """Translate the display/alarm substructures of a p4p Value into metadata."""
    missing = object()  # sentinel: distinguishes "attr absent" from "attr is None"
    metadata = SignalMetadata()
    display_data: Any = getattr(value, "display", None)
    units = getattr(display_data, "units", missing)
    if units is not missing:
        metadata["units"] = units
    precision = getattr(display_data, "precision", missing)
    if precision is not missing and not isnan(precision):
        metadata["precision"] = precision
    if limits := _limits_from_value(value):
        metadata["limits"] = limits
    # Choices only make sense for string-like datatypes; prefer the display
    # substructure, falling back to the value substructure
    if datatype is str or issubclass(datatype, StrictEnum):
        value_data: Any = getattr(value, "value", None)
        choices = getattr(
            display_data, "choices", getattr(value_data, "choices", missing)
        )
        if choices is not missing:
            metadata["choices"] = choices
    return metadata
|
|
77
|
+
|
|
78
|
+
|
|
79
|
+
class PvaConverter(Generic[SignalDatatypeT]):
    """Base converter between p4p Values and a Python datatype.

    Subclasses override `value`/`write_value` for non-scalar normative types.
    """

    # Fields requested from the PV when only the value is needed
    value_fields = ("value",)
    # Additional fields needed to assemble a full Reading
    reading_fields = ("alarm", "timeStamp")

    def __init__(self, datatype: type[SignalDatatypeT]):
        self.datatype = datatype

    def value(self, value: Any) -> SignalDatatypeT:
        """Extract the Python value from the "value" field of a p4p Value."""
        return value["value"]

    def write_value(self, value: Any) -> Any:
        """Convert a Python value for putting; p4p coerces scalars itself."""
        return value
|
|
95
|
+
|
|
96
|
+
|
|
97
|
+
class DisconnectedPvaConverter(PvaConverter):
    """Placeholder converter installed before `connect()` has been called.

    Any attribute access means the backend was used while disconnected, so
    every access raises.
    """

    def __getattribute__(self, __name: str) -> Any:
        raise NotImplementedError("No PV has been set as connect() has not been called")
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
class PvaNDArrayConverter(PvaConverter[SignalDatatypeT]):
    """Converter for NTNDArray PVs, reshaping the flat data to its dimensions."""

    # Need the dimension field as well as the value to reshape correctly
    value_fields = ("value", "dimension")

    def _get_dimensions(self, value) -> list[int]:
        # NTNDArray dimensions are in fortran-like order (first index changing
        # fastest), so reverse them to get the C-like order numpy expects
        # (last index changing fastest).
        dimensions: list[Value] = value["dimension"]
        return [dim.size for dim in reversed(dimensions)]

    def value(self, value: Any) -> SignalDatatypeT:
        return value["value"].reshape(self._get_dimensions(value))

    def write_value(self, value: Any) -> Any:
        # No clear use-case for writing directly to an NDArray, and some
        # complexities around flattening to 1-D - e.g. dimension-order.
        # Don't support this for now.
        raise TypeError("Writing to NDArray not supported")
|
|
125
|
+
|
|
126
|
+
|
|
127
|
+
class PvaEnumConverter(PvaConverter[str]):
    """Converter for NTEnum PVs, mapping the enum index to its choice string.

    Parameters
    ----------
    datatype:
        The str (or str subclass) datatype to report.
    supported_values:
        Optional mapping from the PV's choice strings to the strings to
        report instead (e.g. to rename PV choices to enum member values).
        None (the default) reports choices verbatim.
    """

    def __init__(
        self,
        datatype: type[str] = str,
        supported_values: Mapping[str, str] | None = None,
    ):
        # Use a None sentinel rather than a mutable {} default argument;
        # behavior is unchanged for all existing callers.
        self.supported_values = supported_values or {}
        super().__init__(datatype)

    def value(self, value: Any) -> str:
        # NTEnum stores an index into a list of choice strings
        str_value = value["value"]["choices"][value["value"]["index"]]
        if self.supported_values:
            return self.supported_values[str_value]
        else:
            return str_value
|
|
140
|
+
|
|
141
|
+
|
|
142
|
+
class PvaEnumBoolConverter(PvaConverter[bool]):
    """Converter exposing a 2-choice NTEnum PV as a bool."""

    def __init__(self):
        super().__init__(bool)

    def value(self, value: Any) -> bool:
        # Choice index 0 -> False, index 1 -> True
        return bool(value["value"]["index"])
|
|
148
|
+
|
|
149
|
+
|
|
150
|
+
class PvaTableConverter(PvaConverter[Table]):
    """Converter for NTTable PVs, validated through a `Table` pydantic model."""

    def value(self, value) -> Table:
        # Construct (and validate) the Table model from the structure's columns
        return self.datatype(**value["value"].todict())

    def write_value(self, value: BaseModel | dict[str, Any]) -> Any:
        # A model instance is dumped to a plain dict; plain dicts pass through
        if isinstance(value, self.datatype):
            return value.model_dump(mode="python")
        return value
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
# https://mdavidsaver.github.io/p4p/values.html
# Lookup from (normative type id, "value" field specifier) to the inferred
# Python datatype and the converter class that handles it. Scalar specifiers
# are single p4p type codes; array specifiers are prefixed with "a".
_datatype_converter_from_typeid: dict[
    tuple[str, str], tuple[type[SignalDatatype], type[PvaConverter]]
] = {
    ("epics:nt/NTScalar:1.0", "?"): (bool, PvaConverter),
    ("epics:nt/NTScalar:1.0", "b"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "B"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "h"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "H"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "i"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "I"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "l"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "L"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "f"): (float, PvaConverter),
    ("epics:nt/NTScalar:1.0", "d"): (float, PvaConverter),
    ("epics:nt/NTScalar:1.0", "s"): (str, PvaConverter),
    ("epics:nt/NTEnum:1.0", "S"): (str, PvaEnumConverter),
    ("epics:nt/NTScalarArray:1.0", "a?"): (Array1D[np.bool_], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ab"): (Array1D[np.int8], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aB"): (Array1D[np.uint8], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ah"): (Array1D[np.int16], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aH"): (Array1D[np.uint16], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ai"): (Array1D[np.int32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aI"): (Array1D[np.uint32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "al"): (Array1D[np.int64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aL"): (Array1D[np.uint64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "af"): (Array1D[np.float32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ad"): (Array1D[np.float64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "as"): (Sequence[str], PvaConverter),
    ("epics:nt/NTTable:1.0", "S"): (Table, PvaTableConverter),
    ("epics:nt/NTNDArray:1.0", "v"): (np.ndarray, PvaNDArrayConverter),
}
|
|
192
|
+
|
|
193
|
+
|
|
194
|
+
def _get_specifier(value: Value):
    """Return the p4p type specifier code of the "value" field of a Value."""
    typ = value.type("value").aspy()
    # Structured fields come back as a tuple whose first element is the code
    return typ[0] if isinstance(typ, tuple) else str(typ)
|
|
200
|
+
|
|
201
|
+
|
|
202
|
+
def make_converter(datatype: type | None, values: dict[str, Any]) -> PvaConverter:
    """Choose a `PvaConverter` for the requested datatype and connected PVs.

    Parameters
    ----------
    datatype:
        The datatype the user asked for, or None to use the inferred one.
    values:
        Initial p4p Values keyed by PV name (1 or 2 entries); all entries
        must agree on typeid and value specifier.

    Raises
    ------
    TypeError
        If the PVs disagree, or the requested datatype cannot be coerced
        from the inferred one.
    """
    # Any PV name will do for error messages; all entries must agree below
    pv = list(values)[0]
    typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
    specifier = get_unique(
        {k: _get_specifier(v) for k, v in values.items()},
        "value type specifiers",
    )
    # Infer a datatype and converter from the typeid and specifier
    inferred_datatype, converter_cls = _datatype_converter_from_typeid[
        (typeid, specifier)
    ]
    # Some override cases
    if datatype is bool and typeid == "epics:nt/NTEnum:1.0":
        # Database can't do bools, so are often representated as enums of len 2
        pv_num_choices = get_unique(
            {k: len(v["value"]["choices"]) for k, v in values.items()},
            "number of choices",
        )
        if pv_num_choices != 2:
            raise TypeError(f"{pv} has {pv_num_choices} choices, can't map to bool")
        return PvaEnumBoolConverter()
    elif typeid == "epics:nt/NTEnum:1.0":
        pv_choices = get_unique(
            {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
        )
        if enum_cls := get_enum_cls(datatype):
            # We were given an enum class, so make class from that
            return PvaEnumConverter(
                supported_values=get_supported_values(pv, enum_cls, pv_choices)
            )
        elif datatype in (None, str):
            # Still use the Enum converter, but make choices from what it has
            return PvaEnumConverter()
    elif (
        inferred_datatype is float
        and datatype is int
        and get_unique(
            {k: v["display"]["precision"] for k, v in values.items()}, "precision"
        )
        == 0
    ):
        # Allow int signals to represent float records when prec is 0
        return PvaConverter(int)
    elif inferred_datatype is str and (enum_cls := get_enum_cls(datatype)):
        # Allow strings to be used as enums until QSRV supports this
        return PvaConverter(str)
    elif inferred_datatype is Table and datatype and issubclass(datatype, Table):
        # Use a custom table class
        return PvaTableConverter(datatype)
    elif datatype in (None, inferred_datatype):
        # If datatype matches what we are given then allow it and use inferred converter
        return converter_cls(inferred_datatype)
    raise TypeError(
        f"{pv} with inferred datatype {format_datatype(inferred_datatype)}"
        f" from {typeid=} {specifier=}"
        f" cannot be coerced to {format_datatype(datatype)}"
    )
|
|
259
|
+
|
|
260
|
+
|
|
261
|
+
# Process-wide p4p client Context, created lazily by context()
_context: Context | None = None


def context() -> Context:
    """Return the shared PVA client `Context`, creating it on first use."""
    global _context
    if _context is None:
        _context = Context("pva", nt=False)

        @atexit.register
        def _del_ctxt():
            # If we don't do this we get messages like this on close:
            #   Error in sys.excepthook:
            #   Original exception was:
            global _context
            del _context

    return _context
|
|
278
|
+
|
|
279
|
+
|
|
280
|
+
async def pvget_with_timeout(pv: str, timeout: float) -> Any:
    """Get the current structure of *pv*, raising `NotConnected` on timeout.

    The underlying timeout is logged at debug level so connection failures
    surface uniformly as NotConnected with the pva:// source string.
    """
    try:
        return await asyncio.wait_for(context().get(pv), timeout=timeout)
    except asyncio.TimeoutError as exc:
        logging.debug(f"signal pva://{pv} timed out", exc_info=True)
        raise NotConnected(f"pva://{pv}") from exc
|
|
286
|
+
|
|
287
|
+
|
|
288
|
+
def _pva_request_string(fields: Sequence[str]) -> str:
    """Converts a list of requested fields into a PVA request string which can be
    passed to p4p.
    """
    joined_fields = ",".join(fields)
    return f"field({joined_fields})"
|
|
293
|
+
|
|
294
|
+
|
|
295
|
+
class PvaSignalBackend(EpicsSignalBackend[SignalDatatypeT]):
    """Signal backend that talks to one or two PVs over PVAccess via p4p.

    Until `connect()` succeeds the converter is a `DisconnectedPvaConverter`,
    so any use of the backend raises.
    """

    def __init__(
        self,
        datatype: type[SignalDatatypeT] | None,
        read_pv: str = "",
        write_pv: str = "",
    ):
        # Placeholder converter; replaced with a real one in connect()
        self.converter: PvaConverter = DisconnectedPvaConverter(float)
        # Initial p4p Values keyed by PV name, used by make_converter and
        # as the reset value for put(None)
        self.initial_values: dict[str, Any] = {}
        # Active monitor subscription, if a callback is set
        self.subscription: Subscription | None = None
        super().__init__(datatype, read_pv, write_pv)

    def source(self, name: str, read: bool):
        """Return the pva:// source string for the read or write PV."""
        return f"pva://{self.read_pv if read else self.write_pv}"

    async def _store_initial_value(self, pv: str, timeout: float):
        # Cache the first get; also serves as the connection check
        self.initial_values[pv] = await pvget_with_timeout(pv, timeout)

    async def connect(self, timeout: float):
        """Connect to the PV(s) and build a converter from their structure."""
        if self.read_pv != self.write_pv:
            # Different, need to connect both
            await wait_for_connection(
                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
            )
        else:
            # The same, so only need to connect one
            await self._store_initial_value(self.read_pv, timeout=timeout)
        self.converter = make_converter(self.datatype, self.initial_values)

    def _make_reading(self, value: Any) -> Reading[SignalDatatypeT]:
        # Convert a p4p Value (with alarm and timeStamp fields) to a Reading
        ts = value["timeStamp"]
        sv = value["alarm"]["severity"]
        return {
            "value": self.converter.value(value),
            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
            # Clamp unknown severities (>2) to -1 per the Reading convention
            "alarm_severity": -1 if sv > 2 else sv,
        }

    async def put(self, value: SignalDatatypeT | None, wait: bool):
        """Put a value to the write PV; None re-puts the cached initial value."""
        if value is None:
            write_value = self.initial_values[self.write_pv]
        else:
            write_value = self.converter.write_value(value)
        await context().put(self.write_pv, {"value": write_value}, wait=wait)

    async def get_datakey(self, source: str) -> DataKey:
        """Build an event-model DataKey from the current PV structure."""
        value = await context().get(self.read_pv)
        metadata = _metadata_from_value(self.converter.datatype, value)
        return make_datakey(
            self.converter.datatype, self.converter.value(value), source, metadata
        )

    async def get_reading(self) -> Reading:
        """Get a Reading (value + timestamp + alarm) from the read PV."""
        request = _pva_request_string(
            self.converter.value_fields + self.converter.reading_fields
        )
        value = await context().get(self.read_pv, request=request)
        return self._make_reading(value)

    async def get_value(self) -> SignalDatatypeT:
        """Get just the converted value of the read PV."""
        request = _pva_request_string(self.converter.value_fields)
        value = await context().get(self.read_pv, request=request)
        return self.converter.value(value)

    async def get_setpoint(self) -> SignalDatatypeT:
        """Get just the converted value of the write PV."""
        request = _pva_request_string(self.converter.value_fields)
        value = await context().get(self.write_pv, request=request)
        return self.converter.value(value)

    def set_callback(self, callback: Callback[Reading[SignalDatatypeT]] | None) -> None:
        """Start (callback given) or stop (None) monitoring the read PV."""
        if callback:
            assert (
                not self.subscription
            ), "Cannot set a callback when one is already set"

            async def async_callback(v):
                callback(self._make_reading(v))

            request = _pva_request_string(
                self.converter.value_fields + self.converter.reading_fields
            )
            self.subscription = context().monitor(
                self.read_pv, async_callback, request=request
            )
        elif self.subscription:
            self.subscription.close()
            self.subscription = None
|
|
@@ -0,0 +1,92 @@
|
|
|
1
|
+
from __future__ import annotations
|
|
2
|
+
|
|
3
|
+
from unittest.mock import Mock
|
|
4
|
+
|
|
5
|
+
from ophyd_async.core import (
|
|
6
|
+
Device,
|
|
7
|
+
DeviceConnector,
|
|
8
|
+
DeviceFiller,
|
|
9
|
+
Signal,
|
|
10
|
+
SignalR,
|
|
11
|
+
SignalRW,
|
|
12
|
+
SignalX,
|
|
13
|
+
)
|
|
14
|
+
|
|
15
|
+
from ._epics_connector import fill_backend_with_prefix
|
|
16
|
+
from ._signal import PvaSignalBackend, pvget_with_timeout
|
|
17
|
+
|
|
18
|
+
Entry = dict[str, str]
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
def _get_signal_details(entry: Entry) -> tuple[type[Signal], str, str]:
|
|
22
|
+
match entry:
|
|
23
|
+
case {"r": read_pv}:
|
|
24
|
+
return SignalR, read_pv, read_pv
|
|
25
|
+
case {"r": read_pv, "w": write_pv}:
|
|
26
|
+
return SignalRW, read_pv, write_pv
|
|
27
|
+
case {"rw": read_write_pv}:
|
|
28
|
+
return SignalRW, read_write_pv, read_write_pv
|
|
29
|
+
case {"x": execute_pv}:
|
|
30
|
+
return SignalX, execute_pv, execute_pv
|
|
31
|
+
case _:
|
|
32
|
+
raise TypeError(f"Can't process entry {entry}")
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
class PviDeviceConnector(DeviceConnector):
    """Connector that discovers a device's children from its PVI structure PV.

    The PVI PV (``<prefix>PVI``) publishes a mapping of child names to PV
    entries, which is used at connect time to fill in signals and
    sub-devices created from the device's annotations.
    """

    def __init__(self, prefix: str = "") -> None:
        # TODO: what happens if we get a leading "pva://" here?
        self.prefix = prefix
        self.pvi_pv = prefix + "PVI"

    def create_children_from_annotations(self, device: Device):
        """Create (but don't yet fill) children declared on the device class."""
        # Guard so repeated calls reuse the same filler
        if not hasattr(self, "filler"):
            self.filler = DeviceFiller(
                device=device,
                signal_backend_factory=PvaSignalBackend,
                device_connector_factory=PviDeviceConnector,
            )
            # Devices will be created with unfilled PviDeviceConnectors
            list(self.filler.create_devices_from_annotations(filled=False))
            # Signals can be filled in with EpicsSignalSuffix and checked at runtime
            for backend, annotations in self.filler.create_signals_from_annotations(
                filled=False
            ):
                fill_backend_with_prefix(self.prefix, backend, annotations)
            self.filler.check_created()

    def _fill_child(self, name: str, entry: Entry, vector_index: int | None = None):
        # A {"d": pv} entry is a sub-device; anything else is a signal entry
        if set(entry) == {"d"}:
            connector = self.filler.fill_child_device(name, vector_index=vector_index)
            connector.pvi_pv = entry["d"]
        else:
            signal_type, read_pv, write_pv = _get_signal_details(entry)
            backend = self.filler.fill_child_signal(name, signal_type, vector_index)
            backend.read_pv = read_pv
            backend.write_pv = write_pv

    async def connect(
        self, device: Device, mock: bool | Mock, timeout: float, force_reconnect: bool
    ) -> None:
        """Fill children from the PVI PV (or mocks) then connect them all."""
        if mock:
            # Make 2 entries for each DeviceVector
            self.filler.create_device_vector_entries_to_mock(2)
        else:
            pvi_structure = await pvget_with_timeout(self.pvi_pv, timeout)
            entries: dict[str, Entry | list[Entry | None]] = pvi_structure[
                "value"
            ].todict()
            # Fill based on what PVI gives us
            for name, entry in entries.items():
                if isinstance(entry, dict):
                    # This is a child
                    self._fill_child(name, entry)
                else:
                    # This is a DeviceVector of children
                    for i, e in enumerate(entry):
                        if e:
                            self._fill_child(name, e, i)
            # Check that all the requested children have been filled
            self.filler.check_filled(f"{self.pvi_pv}: {entries}")
        # Set the name of the device to name all children
        device.set_name(device.name)
        return await super().connect(device, mock, timeout, force_reconnect)
|
|
@@ -0,0 +1,171 @@
|
|
|
1
|
+
"""EPICS Signals over CA or PVA"""
|
|
2
|
+
|
|
3
|
+
from __future__ import annotations
|
|
4
|
+
|
|
5
|
+
from enum import Enum
|
|
6
|
+
|
|
7
|
+
from ophyd_async.core import (
|
|
8
|
+
SignalBackend,
|
|
9
|
+
SignalDatatypeT,
|
|
10
|
+
SignalR,
|
|
11
|
+
SignalRW,
|
|
12
|
+
SignalW,
|
|
13
|
+
SignalX,
|
|
14
|
+
get_unique,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
from ._util import EpicsSignalBackend
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
class EpicsProtocol(Enum):
    """The wire protocol used to talk to an EPICS PV."""

    CA = "ca"
    PVA = "pva"
|
|
23
|
+
|
|
24
|
+
|
|
25
|
+
_default_epics_protocol = EpicsProtocol.CA
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def _make_unavailable_function(error: Exception):
    """Return a stand-in callable that raises when used.

    The original import *error* is chained as the cause so users can see
    why the transport is missing.
    """

    def transport_not_available(*args, **kwargs):
        raise NotImplementedError("Transport not available") from error

    return transport_not_available


def _make_unavailable_class(error: Exception) -> type[EpicsSignalBackend]:
    """Return a stand-in backend class whose constructor raises when used."""

    class TransportNotAvailable(EpicsSignalBackend):
        __init__ = _make_unavailable_function(error)

    return TransportNotAvailable


# Import each transport if its client library is installed, substituting
# raising stand-ins when it is not. Note the ordering: if both p4p and
# aioca are importable, the *last* successful import (CA) becomes the
# default protocol for PVs without an explicit scheme prefix.
try:
    from ._p4p import PvaSignalBackend, pvget_with_timeout
except ImportError as pva_error:
    PvaSignalBackend = _make_unavailable_class(pva_error)
    pvget_with_timeout = _make_unavailable_function(pva_error)
else:
    _default_epics_protocol = EpicsProtocol.PVA

try:
    from ._aioca import CaSignalBackend
except ImportError as ca_error:
    CaSignalBackend = _make_unavailable_class(ca_error)
else:
    _default_epics_protocol = EpicsProtocol.CA
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def split_protocol_from_pv(pv: str) -> tuple[EpicsProtocol, str]:
    """Split an optional "scheme://" prefix off a PV name.

    Returns the protocol (the module default when no prefix is given) and
    the bare PV name.
    """
    scheme, sep, rest = pv.partition("://")
    if sep:
        # e.g. "pva://mydevice" - an explicit comms mode was requested
        return EpicsProtocol(scheme), rest
    # No comms mode specified, use the default
    return _default_epics_protocol, pv
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def get_signal_backend_type(protocol: EpicsProtocol) -> type[EpicsSignalBackend]:
    """Return the backend class implementing the given EPICS protocol."""
    if protocol is EpicsProtocol.CA:
        return CaSignalBackend
    if protocol is EpicsProtocol.PVA:
        return PvaSignalBackend
|
|
76
|
+
|
|
77
|
+
|
|
78
|
+
def _epics_signal_backend(
    datatype: type[SignalDatatypeT] | None, read_pv: str, write_pv: str
) -> SignalBackend[SignalDatatypeT]:
    """Create an epics signal backend."""
    read_protocol, bare_read_pv = split_protocol_from_pv(read_pv)
    write_protocol, bare_write_pv = split_protocol_from_pv(write_pv)
    # Both PVs must agree on a single transport
    protocol = get_unique(
        {read_pv: read_protocol, write_pv: write_protocol}, "protocols"
    )
    backend_cls = get_signal_backend_type(protocol)
    return backend_cls(datatype, bare_read_pv, bare_write_pv)
|
|
87
|
+
|
|
88
|
+
|
|
89
|
+
def epics_signal_rw(
    datatype: type[SignalDatatypeT],
    read_pv: str,
    write_pv: str | None = None,
    name: str = "",
) -> SignalRW[SignalDatatypeT]:
    """Create a `SignalRW` backed by 1 or 2 EPICS PVs

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    write_pv:
        If given, use this PV to write to, otherwise use read_pv
    """
    # Fall back to the read PV when no write PV is supplied
    target_write_pv = write_pv or read_pv
    backend = _epics_signal_backend(datatype, read_pv, target_write_pv)
    return SignalRW(backend, name=name)
|
|
108
|
+
|
|
109
|
+
|
|
110
|
+
def epics_signal_rw_rbv(
    datatype: type[SignalDatatypeT],
    write_pv: str,
    read_suffix: str = "_RBV",
    name: str = "",
) -> SignalRW[SignalDatatypeT]:
    """Create a `SignalRW` backed by 1 or 2 EPICS PVs, with a suffix on the readback pv

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    read_suffix:
        Append this suffix to the write pv to create the readback pv
    """
    readback_pv = f"{write_pv}{read_suffix}"
    return epics_signal_rw(datatype, readback_pv, write_pv, name)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def epics_signal_r(
    datatype: type[SignalDatatypeT], read_pv: str, name: str = ""
) -> SignalR[SignalDatatypeT]:
    """Create a `SignalR` backed by 1 EPICS PV

    Parameters
    ----------
    datatype
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    """
    # Read-only: the single PV serves as both read and write target internally
    return SignalR(_epics_signal_backend(datatype, read_pv, read_pv), name=name)
|
|
144
|
+
|
|
145
|
+
|
|
146
|
+
def epics_signal_w(
    datatype: type[SignalDatatypeT], write_pv: str, name: str = ""
) -> SignalW[SignalDatatypeT]:
    """Create a `SignalW` backed by 1 EPICS PVs

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    """
    # Write-only: the single PV serves as both read and write target internally
    return SignalW(_epics_signal_backend(datatype, write_pv, write_pv), name=name)
|
|
160
|
+
|
|
161
|
+
|
|
162
|
+
def epics_signal_x(write_pv: str, name: str = "") -> SignalX:
    """Create a `SignalX` backed by 1 EPICS PVs

    Parameters
    ----------
    write_pv:
        The PV to write its initial value to on trigger
    """
    # datatype=None: the backend caches the initial value and re-puts it on trigger
    return SignalX(_epics_signal_backend(None, write_pv, write_pv), name=name)
|