ophyd_async-0.7.0-py3-none-any.whl → ophyd_async-0.8.0-py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +34 -9
- ophyd_async/core/_detector.py +5 -10
- ophyd_async/core/_device.py +170 -68
- ophyd_async/core/_device_filler.py +269 -0
- ophyd_async/core/_device_save_loader.py +6 -7
- ophyd_async/core/_mock_signal_backend.py +35 -40
- ophyd_async/core/_mock_signal_utils.py +25 -16
- ophyd_async/core/_protocol.py +28 -8
- ophyd_async/core/_readable.py +133 -134
- ophyd_async/core/_signal.py +219 -163
- ophyd_async/core/_signal_backend.py +131 -64
- ophyd_async/core/_soft_signal_backend.py +131 -194
- ophyd_async/core/_status.py +22 -6
- ophyd_async/core/_table.py +102 -100
- ophyd_async/core/_utils.py +143 -32
- ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
- ophyd_async/epics/adaravis/_aravis_io.py +8 -6
- ophyd_async/epics/adcore/_core_io.py +5 -7
- ophyd_async/epics/adcore/_core_logic.py +3 -1
- ophyd_async/epics/adcore/_hdf_writer.py +2 -2
- ophyd_async/epics/adcore/_single_trigger.py +6 -10
- ophyd_async/epics/adcore/_utils.py +15 -10
- ophyd_async/epics/adkinetix/__init__.py +2 -1
- ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
- ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
- ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
- ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
- ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
- ophyd_async/epics/advimba/__init__.py +4 -1
- ophyd_async/epics/advimba/_vimba_controller.py +6 -3
- ophyd_async/epics/advimba/_vimba_io.py +8 -9
- ophyd_async/epics/core/__init__.py +26 -0
- ophyd_async/epics/core/_aioca.py +323 -0
- ophyd_async/epics/core/_epics_connector.py +53 -0
- ophyd_async/epics/core/_epics_device.py +13 -0
- ophyd_async/epics/core/_p4p.py +383 -0
- ophyd_async/epics/core/_pvi_connector.py +91 -0
- ophyd_async/epics/core/_signal.py +171 -0
- ophyd_async/epics/core/_util.py +61 -0
- ophyd_async/epics/demo/_mover.py +4 -5
- ophyd_async/epics/demo/_sensor.py +14 -13
- ophyd_async/epics/eiger/_eiger.py +1 -2
- ophyd_async/epics/eiger/_eiger_controller.py +7 -2
- ophyd_async/epics/eiger/_eiger_io.py +3 -5
- ophyd_async/epics/eiger/_odin_io.py +5 -5
- ophyd_async/epics/motor.py +4 -5
- ophyd_async/epics/signal.py +11 -0
- ophyd_async/epics/testing/__init__.py +24 -0
- ophyd_async/epics/testing/_example_ioc.py +105 -0
- ophyd_async/epics/testing/_utils.py +78 -0
- ophyd_async/epics/testing/test_records.db +152 -0
- ophyd_async/epics/testing/test_records_pva.db +177 -0
- ophyd_async/fastcs/core.py +9 -0
- ophyd_async/fastcs/panda/__init__.py +4 -4
- ophyd_async/fastcs/panda/_block.py +18 -13
- ophyd_async/fastcs/panda/_control.py +3 -5
- ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
- ophyd_async/fastcs/panda/_table.py +30 -52
- ophyd_async/fastcs/panda/_trigger.py +8 -8
- ophyd_async/fastcs/panda/_writer.py +2 -5
- ophyd_async/plan_stubs/_ensure_connected.py +20 -13
- ophyd_async/plan_stubs/_fly.py +2 -2
- ophyd_async/plan_stubs/_nd_attributes.py +5 -4
- ophyd_async/py.typed +0 -0
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
- ophyd_async/sim/demo/_sim_motor.py +3 -4
- ophyd_async/tango/__init__.py +0 -45
- ophyd_async/tango/{signal → core}/__init__.py +9 -6
- ophyd_async/tango/core/_base_device.py +132 -0
- ophyd_async/tango/{signal → core}/_signal.py +42 -53
- ophyd_async/tango/{base_devices → core}/_tango_readable.py +3 -4
- ophyd_async/tango/{signal → core}/_tango_transport.py +38 -40
- ophyd_async/tango/demo/_counter.py +12 -23
- ophyd_async/tango/demo/_mover.py +13 -13
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/METADATA +52 -55
- ophyd_async-0.8.0.dist-info/RECORD +116 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/WHEEL +1 -1
- ophyd_async/epics/pvi/__init__.py +0 -3
- ophyd_async/epics/pvi/_pvi.py +0 -338
- ophyd_async/epics/signal/__init__.py +0 -21
- ophyd_async/epics/signal/_aioca.py +0 -378
- ophyd_async/epics/signal/_common.py +0 -57
- ophyd_async/epics/signal/_epics_transport.py +0 -34
- ophyd_async/epics/signal/_p4p.py +0 -518
- ophyd_async/epics/signal/_signal.py +0 -114
- ophyd_async/tango/base_devices/__init__.py +0 -4
- ophyd_async/tango/base_devices/_base_device.py +0 -225
- ophyd_async-0.7.0.dist-info/RECORD +0 -108
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/top_level.txt +0 -0
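The listing above shows the main restructure in this release: ophyd_async/epics/signal and ophyd_async/epics/pvi are deleted in favour of a consolidated ophyd_async/epics/core package (with ophyd_async/epics/signal.py kept as a small shim), and the tango signal and base_devices packages merge into ophyd_async/tango/core. A minimal sketch of the import change this implies for downstream code, assuming the signal factory functions keep their 0.7.0 names in the new package (the PV names are hypothetical, for illustration only):

    # ophyd-async 0.7.0 import path (module deleted in 0.8.0, see below):
    # from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw

    # ophyd-async 0.8.0 path, matching the new epics/core/_signal.py above:
    from ophyd_async.epics.core import epics_signal_r, epics_signal_rw

    # Hypothetical PVs; the transport prefix still selects CA or PVA.
    temperature = epics_signal_r(float, "ca://DEMO:TEMPERATURE_RBV")
    setpoint = epics_signal_rw(float, "pva://DEMO:TEMPERATURE")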
ophyd_async/epics/signal/_p4p.py
DELETED
@@ -1,518 +0,0 @@
import asyncio
import atexit
import inspect
import logging
import time
from collections.abc import Sequence
from dataclasses import dataclass
from enum import Enum
from math import isnan, nan
from typing import Any, get_origin

import numpy as np
from bluesky.protocols import Reading
from event_model import DataKey
from event_model.documents.event_descriptor import Dtype
from p4p import Value
from p4p.client.asyncio import Context, Subscription
from pydantic import BaseModel

from ophyd_async.core import (
    DEFAULT_TIMEOUT,
    NotConnected,
    ReadingValueCallback,
    RuntimeSubsetEnum,
    SignalBackend,
    T,
    get_dtype,
    get_unique,
    is_pydantic_model,
    wait_for_connection,
)

from ._common import LimitPair, Limits, common_meta, get_supported_values

# https://mdavidsaver.github.io/p4p/values.html
specifier_to_dtype: dict[str, Dtype] = {
    "?": "integer",  # bool
    "b": "integer",  # int8
    "B": "integer",  # uint8
    "h": "integer",  # int16
    "H": "integer",  # uint16
    "i": "integer",  # int32
    "I": "integer",  # uint32
    "l": "integer",  # int64
    "L": "integer",  # uint64
    "f": "number",  # float32
    "d": "number",  # float64
    "s": "string",
}

specifier_to_np_dtype: dict[str, str] = {
    "?": "<i2",  # bool
    "b": "|i1",  # int8
    "B": "|u1",  # uint8
    "h": "<i2",  # int16
    "H": "<u2",  # uint16
    "i": "<i4",  # int32
    "I": "<u4",  # uint32
    "l": "<i8",  # int64
    "L": "<u8",  # uint64
    "f": "<f4",  # float32
    "d": "<f8",  # float64
    "s": "|S40",
}


def _data_key_from_value(
    source: str,
    value: Value,
    *,
    shape: list[int] | None = None,
    choices: list[str] | None = None,
    dtype: Dtype | None = None,
) -> DataKey:
    """
    Args:
        value (Value): Description of the the return type of a DB record
        shape: Optional override shape when len(shape) > 1
        choices: Optional list of enum choices to pass as metadata in the datakey
        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans

    Returns:
        DataKey: A rich DataKey describing the DB record
    """
    shape = shape or []
    type_code = value.type().aspy("value")

    dtype = dtype or specifier_to_dtype[type_code]

    try:
        if isinstance(type_code, tuple):
            dtype_numpy = ""
            if type_code[1] == "enum_t":
                if dtype == "boolean":
                    dtype_numpy = "<i2"
                else:
                    for item in type_code[2]:
                        if item[0] == "choices":
                            dtype_numpy = specifier_to_np_dtype[item[1][1]]
        elif not type_code.startswith("a"):
            dtype_numpy = specifier_to_np_dtype[type_code]
        else:
            # Array type, use typecode of internal element
            dtype_numpy = specifier_to_np_dtype[type_code[1]]
    except KeyError:
        # Case where we can't determine dtype string from value
        dtype_numpy = ""

    display_data = getattr(value, "display", None)

    d = DataKey(
        source=source,
        dtype=dtype,
        # type ignore until https://github.com/bluesky/event-model/issues/308
        dtype_numpy=dtype_numpy,  # type: ignore
        shape=shape,
    )
    if display_data is not None:
        for key in common_meta:
            attr = getattr(display_data, key, nan)
            if isinstance(attr, str) or not isnan(attr):
                d[key] = attr

    if choices is not None:
        # type ignore until https://github.com/bluesky/event-model/issues/309
        d["choices"] = choices  # type: ignore

    if limits := _limits_from_value(value):
        # type ignore until https://github.com/bluesky/event-model/issues/309
        d["limits"] = limits  # type: ignore

    return d


def _limits_from_value(value: Value) -> Limits:
    def get_limits(
        substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
    ) -> LimitPair:
        substructure = getattr(value, substucture_name, None)
        low = getattr(substructure, low_name, nan)
        high = getattr(substructure, high_name, nan)
        return LimitPair(
            low=None if isnan(low) else low, high=None if isnan(high) else high
        )

    return Limits(
        alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
        control=get_limits("control"),
        display=get_limits("display"),
        warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
    )


class PvaConverter:
    def write_value(self, value):
        return value

    def value(self, value):
        return value["value"]

    def reading(self, value) -> Reading:
        ts = value["timeStamp"]
        sv = value["alarm"]["severity"]
        return {
            "value": self.value(value),
            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
            "alarm_severity": -1 if sv > 2 else sv,
        }

    def get_datakey(self, source: str, value) -> DataKey:
        return _data_key_from_value(source, value)

    def metadata_fields(self) -> list[str]:
        """
        Fields to request from PVA for metadata.
        """
        return ["alarm", "timeStamp"]

    def value_fields(self) -> list[str]:
        """
        Fields to request from PVA for the value.
        """
        return ["value"]


class PvaArrayConverter(PvaConverter):
    def get_datakey(self, source: str, value) -> DataKey:
        return _data_key_from_value(
            source, value, dtype="array", shape=[len(value["value"])]
        )


class PvaNDArrayConverter(PvaConverter):
    def metadata_fields(self) -> list[str]:
        return super().metadata_fields() + ["dimension"]

    def _get_dimensions(self, value) -> list[int]:
        dimensions: list[Value] = value["dimension"]
        dims = [dim.size for dim in dimensions]
        # Note: dimensions in NTNDArray are in fortran-like order
        # with first index changing fastest.
        #
        # Therefore we need to reverse the order of the dimensions
        # here to get back to a more usual C-like order with the
        # last index changing fastest.
        return dims[::-1]

    def value(self, value):
        dims = self._get_dimensions(value)
        return value["value"].reshape(dims)

    def get_datakey(self, source: str, value) -> DataKey:
        dims = self._get_dimensions(value)
        return _data_key_from_value(source, value, dtype="array", shape=dims)

    def write_value(self, value):
        # No clear use-case for writing directly to an NDArray, and some
        # complexities around flattening to 1-D - e.g. dimension-order.
        # Don't support this for now.
        raise TypeError("Writing to NDArray not supported")


@dataclass
class PvaEnumConverter(PvaConverter):
    """To prevent issues when a signal is restarted and returns with different enum
    values or orders, we put treat an Enum signal as a string, and cache the
    choices on this class.
    """

    def __init__(self, choices: dict[str, str]):
        self.choices = tuple(choices.values())

    def write_value(self, value: Enum | str):
        if isinstance(value, Enum):
            return value.value
        else:
            return value

    def value(self, value):
        return self.choices[value["value"]["index"]]

    def get_datakey(self, source: str, value) -> DataKey:
        return _data_key_from_value(
            source, value, choices=list(self.choices), dtype="string"
        )


class PvaEmumBoolConverter(PvaConverter):
    def value(self, value):
        return bool(value["value"]["index"])

    def get_datakey(self, source: str, value) -> DataKey:
        return _data_key_from_value(source, value, dtype="boolean")


class PvaTableConverter(PvaConverter):
    def value(self, value):
        return value["value"].todict()

    def get_datakey(self, source: str, value) -> DataKey:
        # This is wrong, but defer until we know how to actually describe a table
        return _data_key_from_value(source, value, dtype="object")  # type: ignore


class PvaPydanticModelConverter(PvaConverter):
    def __init__(self, datatype: BaseModel):
        self.datatype = datatype

    def value(self, value: Value):
        return self.datatype(**value.todict())  # type: ignore

    def write_value(self, value: BaseModel | dict[str, Any]):
        if isinstance(value, self.datatype):  # type: ignore
            return value.model_dump(mode="python")  # type: ignore
        return value


class PvaDictConverter(PvaConverter):
    def reading(self, value) -> Reading:
        ts = time.time()
        value = value.todict()
        # Alarm severity is vacuously 0 for a table
        return {"value": value, "timestamp": ts, "alarm_severity": 0}

    def value(self, value: Value):
        return value.todict()

    def get_datakey(self, source: str, value) -> DataKey:
        raise NotImplementedError("Describing Dict signals not currently supported")

    def metadata_fields(self) -> list[str]:
        """
        Fields to request from PVA for metadata.
        """
        return []

    def value_fields(self) -> list[str]:
        """
        Fields to request from PVA for the value.
        """
        return []


class DisconnectedPvaConverter(PvaConverter):
    def __getattribute__(self, __name: str) -> Any:
        raise NotImplementedError("No PV has been set as connect() has not been called")


def make_converter(datatype: type | None, values: dict[str, Any]) -> PvaConverter:
    pv = list(values)[0]
    typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
    typ = get_unique(
        {k: type(v.get("value")) for k, v in values.items()}, "value types"
    )
    if "NTScalarArray" in typeid and typ is list:
        # Waveform of strings, check we wanted this
        if datatype and datatype != Sequence[str]:
            raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
        return PvaArrayConverter()
    elif "NTScalarArray" in typeid or "NTNDArray" in typeid:
        pv_dtype = get_unique(
            {k: v["value"].dtype for k, v in values.items()}, "dtypes"
        )
        # This is an array
        if datatype:
            # Check we wanted an array of this type
            dtype = get_dtype(datatype)
            if not dtype:
                raise TypeError(f"{pv} has type [{pv_dtype}] not {datatype.__name__}")
            if dtype != pv_dtype:
                raise TypeError(f"{pv} has type [{pv_dtype}] not [{dtype}]")
        if "NTNDArray" in typeid:
            return PvaNDArrayConverter()
        else:
            return PvaArrayConverter()
    elif "NTEnum" in typeid and datatype is bool:
        # Wanted a bool, but database represents as an enum
        pv_choices_len = get_unique(
            {k: len(v["value"]["choices"]) for k, v in values.items()},
            "number of choices",
        )
        if pv_choices_len != 2:
            raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
        return PvaEmumBoolConverter()
    elif "NTEnum" in typeid:
        # This is an Enum
        pv_choices = get_unique(
            {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
        )
        return PvaEnumConverter(get_supported_values(pv, datatype, pv_choices))
    elif "NTScalar" in typeid:
        if (
            typ is str
            and inspect.isclass(datatype)
            and issubclass(datatype, RuntimeSubsetEnum)
        ):
            return PvaEnumConverter(
                get_supported_values(pv, datatype, datatype.choices)  # type: ignore
            )
        elif datatype and not issubclass(typ, datatype):
            # Allow int signals to represent float records when prec is 0
            is_prec_zero_float = typ is float and (
                get_unique(
                    {k: v["display"]["precision"] for k, v in values.items()},
                    "precision",
                )
                == 0
            )
            if not (datatype is int and is_prec_zero_float):
                raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
        return PvaConverter()
    elif "NTTable" in typeid:
        if is_pydantic_model(datatype):
            return PvaPydanticModelConverter(datatype)  # type: ignore
        return PvaTableConverter()
    elif "structure" in typeid:
        return PvaDictConverter()
    else:
        raise TypeError(f"{pv}: Unsupported typeid {typeid}")


class PvaSignalBackend(SignalBackend[T]):
    _ctxt: Context | None = None

    _ALLOWED_DATATYPES = (
        bool,
        int,
        float,
        str,
        Sequence,
        np.ndarray,
        Enum,
        RuntimeSubsetEnum,
        BaseModel,
        dict,
    )

    @classmethod
    def datatype_allowed(cls, dtype: Any) -> bool:
        stripped_origin = get_origin(dtype) or dtype
        if dtype is None:
            return True
        return inspect.isclass(stripped_origin) and issubclass(
            stripped_origin, cls._ALLOWED_DATATYPES
        )

    def __init__(self, datatype: type[T] | None, read_pv: str, write_pv: str):
        self.datatype = datatype
        if not PvaSignalBackend.datatype_allowed(self.datatype):
            raise TypeError(f"Given datatype {self.datatype} unsupported in PVA.")

        self.read_pv = read_pv
        self.write_pv = write_pv
        self.initial_values: dict[str, Any] = {}
        self.converter: PvaConverter = DisconnectedPvaConverter()
        self.subscription: Subscription | None = None

    def source(self, name: str):
        return f"pva://{self.read_pv}"

    @property
    def ctxt(self) -> Context:
        if PvaSignalBackend._ctxt is None:
            PvaSignalBackend._ctxt = Context("pva", nt=False)

            @atexit.register
            def _del_ctxt():
                # If we don't do this we get messages like this on close:
                #   Error in sys.excepthook:
                #   Original exception was:
                PvaSignalBackend._ctxt = None

        return PvaSignalBackend._ctxt

    async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
        try:
            self.initial_values[pv] = await asyncio.wait_for(
                self.ctxt.get(pv), timeout=timeout
            )
        except asyncio.TimeoutError as exc:
            logging.debug(f"signal pva://{pv} timed out", exc_info=True)
            raise NotConnected(f"pva://{pv}") from exc

    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
        if self.read_pv != self.write_pv:
            # Different, need to connect both
            await wait_for_connection(
                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
            )
        else:
            # The same, so only need to connect one
            await self._store_initial_value(self.read_pv, timeout=timeout)
        self.converter = make_converter(self.datatype, self.initial_values)

    async def put(self, value: T | None, wait=True, timeout=None):
        if value is None:
            write_value = self.initial_values[self.write_pv]
        else:
            write_value = self.converter.write_value(value)
        coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
        try:
            await asyncio.wait_for(coro, timeout)
        except asyncio.TimeoutError as exc:
            logging.debug(
                f"signal pva://{self.write_pv} timed out \
                put value: {write_value}",
                exc_info=True,
            )
            raise NotConnected(f"pva://{self.write_pv}") from exc

    async def get_datakey(self, source: str) -> DataKey:
        value = await self.ctxt.get(self.read_pv)
        return self.converter.get_datakey(source, value)

    def _pva_request_string(self, fields: list[str]) -> str:
        """
        Converts a list of requested fields into a PVA request string which can be
        passed to p4p.
        """
        return f"field({','.join(fields)})"

    async def get_reading(self) -> Reading:
        request: str = self._pva_request_string(
            self.converter.value_fields() + self.converter.metadata_fields()
        )
        value = await self.ctxt.get(self.read_pv, request=request)
        return self.converter.reading(value)

    async def get_value(self) -> T:
        request: str = self._pva_request_string(self.converter.value_fields())
        value = await self.ctxt.get(self.read_pv, request=request)
        return self.converter.value(value)

    async def get_setpoint(self) -> T:
        value = await self.ctxt.get(self.write_pv, "field(value)")
        return self.converter.value(value)

    def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
        if callback:
            assert (
                not self.subscription
            ), "Cannot set a callback when one is already set"

            async def async_callback(v):
                callback(self.converter.reading(v), self.converter.value(v))

            request: str = self._pva_request_string(
                self.converter.value_fields() + self.converter.metadata_fields()
            )

            self.subscription = self.ctxt.monitor(
                self.read_pv, async_callback, request=request
            )
        else:
            if self.subscription:
                self.subscription.close()
            self.subscription = None
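For context on the module removed above: PvaSignalBackend was the p4p-based transport selected for pva:// PVs, and make_converter picked a PvaConverter subclass from the connected PV's Normative Type. A minimal sketch of driving the deleted backend directly, assuming a reachable PV (DEMO:VALUE is a hypothetical name) and the 0.7.0 API shown above:

    import asyncio

    async def main() -> None:
        # Constructor signature as deleted above: (datatype, read_pv, write_pv)
        backend = PvaSignalBackend(float, "DEMO:VALUE", "DEMO:VALUE")
        # connect() caches initial values and picks a converter via make_converter()
        await backend.connect(timeout=5.0)
        print(await backend.get_value())    # converter.value(...)
        await backend.put(2.5, wait=True)   # converter.write_value(...) then ctxt.put
        print(await backend.get_reading())  # value, timestamp, alarm_severity

    asyncio.run(main())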
ophyd_async/epics/signal/_signal.py
DELETED
@@ -1,114 +0,0 @@
"""EPICS Signals over CA or PVA"""

from __future__ import annotations

from ophyd_async.core import (
    SignalBackend,
    SignalR,
    SignalRW,
    SignalW,
    SignalX,
    T,
    get_unique,
)

from ._epics_transport import _EpicsTransport

_default_epics_transport = _EpicsTransport.ca


def _transport_pv(pv: str) -> tuple[_EpicsTransport, str]:
    split = pv.split("://", 1)
    if len(split) > 1:
        # We got something like pva://mydevice, so use specified comms mode
        transport_str, pv = split
        transport = _EpicsTransport[transport_str]
    else:
        # No comms mode specified, use the default
        transport = _default_epics_transport
    return transport, pv


def _epics_signal_backend(
    datatype: type[T] | None, read_pv: str, write_pv: str
) -> SignalBackend[T]:
    """Create an epics signal backend."""
    r_transport, r_pv = _transport_pv(read_pv)
    w_transport, w_pv = _transport_pv(write_pv)
    transport = get_unique({read_pv: r_transport, write_pv: w_transport}, "transports")
    return transport.value(datatype, r_pv, w_pv)


def epics_signal_rw(
    datatype: type[T], read_pv: str, write_pv: str | None = None, name: str = ""
) -> SignalRW[T]:
    """Create a `SignalRW` backed by 1 or 2 EPICS PVs

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    write_pv:
        If given, use this PV to write to, otherwise use read_pv
    """
    backend = _epics_signal_backend(datatype, read_pv, write_pv or read_pv)
    return SignalRW(backend, name=name)


def epics_signal_rw_rbv(
    datatype: type[T], write_pv: str, read_suffix: str = "_RBV", name: str = ""
) -> SignalRW[T]:
    """Create a `SignalRW` backed by 1 or 2 EPICS PVs, with a suffix on the readback pv

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    read_suffix:
        Append this suffix to the write pv to create the readback pv
    """
    return epics_signal_rw(datatype, f"{write_pv}{read_suffix}", write_pv, name)


def epics_signal_r(datatype: type[T], read_pv: str, name: str = "") -> SignalR[T]:
    """Create a `SignalR` backed by 1 EPICS PV

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    """
    backend = _epics_signal_backend(datatype, read_pv, read_pv)
    return SignalR(backend, name=name)


def epics_signal_w(datatype: type[T], write_pv: str, name: str = "") -> SignalW[T]:
    """Create a `SignalW` backed by 1 EPICS PVs

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    """
    backend = _epics_signal_backend(datatype, write_pv, write_pv)
    return SignalW(backend, name=name)


def epics_signal_x(write_pv: str, name: str = "") -> SignalX:
    """Create a `SignalX` backed by 1 EPICS PVs

    Parameters
    ----------
    write_pv:
        The PV to write its initial value to on trigger
    """
    backend: SignalBackend = _epics_signal_backend(None, write_pv, write_pv)
    return SignalX(backend, name=name)
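These deleted factories were the public entry points for EPICS signals in 0.7.0: the transport prefix on the PV string picked the backend (_EpicsTransport.ca by default, pva:// for the PvaSignalBackend above). A short sketch of 0.7.0-era usage, with hypothetical PV names:

    from ophyd_async.epics.signal import epics_signal_rw_rbv, epics_signal_x

    # Writes MOTOR:VELO over CA (the default transport) and reads MOTOR:VELO_RBV
    velocity = epics_signal_rw_rbv(float, "MOTOR:VELO")

    # The pva:// prefix forces the PVA transport; triggering a SignalX writes the
    # PV's initial value back to it (see put(None) in _p4p.py above)
    stop = epics_signal_x("pva://MOTOR:STOP.PROC")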