ophyd-async 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +34 -9
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +170 -68
  5. ophyd_async/core/_device_filler.py +269 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +35 -40
  8. ophyd_async/core/_mock_signal_utils.py +25 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +133 -134
  11. ophyd_async/core/_signal.py +219 -163
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +131 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +102 -100
  16. ophyd_async/core/_utils.py +143 -32
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +8 -6
  19. ophyd_async/epics/adcore/_core_io.py +5 -7
  20. ophyd_async/epics/adcore/_core_logic.py +3 -1
  21. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  22. ophyd_async/epics/adcore/_single_trigger.py +6 -10
  23. ophyd_async/epics/adcore/_utils.py +15 -10
  24. ophyd_async/epics/adkinetix/__init__.py +2 -1
  25. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  26. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
  27. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  28. ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
  29. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  30. ophyd_async/epics/advimba/__init__.py +4 -1
  31. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  32. ophyd_async/epics/advimba/_vimba_io.py +8 -9
  33. ophyd_async/epics/core/__init__.py +26 -0
  34. ophyd_async/epics/core/_aioca.py +323 -0
  35. ophyd_async/epics/core/_epics_connector.py +53 -0
  36. ophyd_async/epics/core/_epics_device.py +13 -0
  37. ophyd_async/epics/core/_p4p.py +383 -0
  38. ophyd_async/epics/core/_pvi_connector.py +91 -0
  39. ophyd_async/epics/core/_signal.py +171 -0
  40. ophyd_async/epics/core/_util.py +61 -0
  41. ophyd_async/epics/demo/_mover.py +4 -5
  42. ophyd_async/epics/demo/_sensor.py +14 -13
  43. ophyd_async/epics/eiger/_eiger.py +1 -2
  44. ophyd_async/epics/eiger/_eiger_controller.py +7 -2
  45. ophyd_async/epics/eiger/_eiger_io.py +3 -5
  46. ophyd_async/epics/eiger/_odin_io.py +5 -5
  47. ophyd_async/epics/motor.py +4 -5
  48. ophyd_async/epics/signal.py +11 -0
  49. ophyd_async/epics/testing/__init__.py +24 -0
  50. ophyd_async/epics/testing/_example_ioc.py +105 -0
  51. ophyd_async/epics/testing/_utils.py +78 -0
  52. ophyd_async/epics/testing/test_records.db +152 -0
  53. ophyd_async/epics/testing/test_records_pva.db +177 -0
  54. ophyd_async/fastcs/core.py +9 -0
  55. ophyd_async/fastcs/panda/__init__.py +4 -4
  56. ophyd_async/fastcs/panda/_block.py +18 -13
  57. ophyd_async/fastcs/panda/_control.py +3 -5
  58. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  59. ophyd_async/fastcs/panda/_table.py +30 -52
  60. ophyd_async/fastcs/panda/_trigger.py +8 -8
  61. ophyd_async/fastcs/panda/_writer.py +2 -5
  62. ophyd_async/plan_stubs/_ensure_connected.py +20 -13
  63. ophyd_async/plan_stubs/_fly.py +2 -2
  64. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  65. ophyd_async/py.typed +0 -0
  66. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  67. ophyd_async/sim/demo/_sim_motor.py +3 -4
  68. ophyd_async/tango/__init__.py +0 -45
  69. ophyd_async/tango/{signal → core}/__init__.py +9 -6
  70. ophyd_async/tango/core/_base_device.py +132 -0
  71. ophyd_async/tango/{signal → core}/_signal.py +42 -53
  72. ophyd_async/tango/{base_devices → core}/_tango_readable.py +3 -4
  73. ophyd_async/tango/{signal → core}/_tango_transport.py +38 -40
  74. ophyd_async/tango/demo/_counter.py +12 -23
  75. ophyd_async/tango/demo/_mover.py +13 -13
  76. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/METADATA +52 -55
  77. ophyd_async-0.8.0.dist-info/RECORD +116 -0
  78. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/WHEEL +1 -1
  79. ophyd_async/epics/pvi/__init__.py +0 -3
  80. ophyd_async/epics/pvi/_pvi.py +0 -338
  81. ophyd_async/epics/signal/__init__.py +0 -21
  82. ophyd_async/epics/signal/_aioca.py +0 -378
  83. ophyd_async/epics/signal/_common.py +0 -57
  84. ophyd_async/epics/signal/_epics_transport.py +0 -34
  85. ophyd_async/epics/signal/_p4p.py +0 -518
  86. ophyd_async/epics/signal/_signal.py +0 -114
  87. ophyd_async/tango/base_devices/__init__.py +0 -4
  88. ophyd_async/tango/base_devices/_base_device.py +0 -225
  89. ophyd_async-0.7.0.dist-info/RECORD +0 -108
  90. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/LICENSE +0 -0
  91. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/entry_points.txt +0 -0
  92. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,13 @@
1
+ from ophyd_async.core import Device
2
+
3
+ from ._epics_connector import EpicsDeviceConnector
4
+ from ._pvi_connector import PviDeviceConnector
5
+
6
+
7
class EpicsDevice(Device):
    """A `Device` whose children are EPICS signals.

    The connector is chosen at construction time: a PVI-introspecting
    `PviDeviceConnector` when ``with_pvi`` is True, otherwise a plain
    prefix-based `EpicsDeviceConnector`.
    """

    def __init__(self, prefix: str, with_pvi: bool = False, name: str = ""):
        connector = (
            PviDeviceConnector(prefix) if with_pvi else EpicsDeviceConnector(prefix)
        )
        super().__init__(name=name, connector=connector)
@@ -0,0 +1,383 @@
1
+ from __future__ import annotations
2
+
3
+ import asyncio
4
+ import atexit
5
+ import logging
6
+ from collections.abc import Mapping, Sequence
7
+ from math import isnan, nan
8
+ from typing import Any, Generic
9
+
10
+ import numpy as np
11
+ from bluesky.protocols import Reading
12
+ from event_model import DataKey, Limits, LimitsRange
13
+ from p4p import Value
14
+ from p4p.client.asyncio import Context, Subscription
15
+ from pydantic import BaseModel
16
+
17
+ from ophyd_async.core import (
18
+ Array1D,
19
+ Callback,
20
+ NotConnected,
21
+ SignalDatatype,
22
+ SignalDatatypeT,
23
+ SignalMetadata,
24
+ StrictEnum,
25
+ Table,
26
+ get_enum_cls,
27
+ get_unique,
28
+ make_datakey,
29
+ wait_for_connection,
30
+ )
31
+
32
+ from ._util import EpicsSignalBackend, format_datatype, get_supported_values
33
+
34
+
35
def _limits_from_value(value: Any) -> Limits:
    """Extract alarm/control/display/warning limit ranges from a PVA value.

    Substructures that are absent, or whose low and high bounds are both NaN,
    are omitted from the returned `Limits`.
    """

    # Fix: parameter was misspelled "substucture_name"; also return None
    # explicitly instead of falling off the end of the function.
    def get_limits(
        substructure_name: str,
        low_name: str = "limitLow",
        high_name: str = "limitHigh",
    ) -> LimitsRange | None:
        substructure = getattr(value, substructure_name, None)
        low = getattr(substructure, low_name, nan)
        high = getattr(substructure, high_name, nan)
        if not (isnan(low) and isnan(high)):
            # At least one bound is present; a NaN bound becomes None
            return LimitsRange(
                low=None if isnan(low) else low,
                high=None if isnan(high) else high,
            )
        return None

    limits = Limits()
    if limits_range := get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"):
        limits["alarm"] = limits_range
    if limits_range := get_limits("control"):
        limits["control"] = limits_range
    if limits_range := get_limits("display"):
        limits["display"] = limits_range
    if limits_range := get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"):
        limits["warning"] = limits_range
    return limits
58
+
59
+
60
def _metadata_from_value(datatype: type[SignalDatatype], value: Any) -> SignalMetadata:
    """Assemble `SignalMetadata` from the display/value substructures of *value*.

    Fills in units, precision, limits, and (for string/enum datatypes) choices,
    skipping any field the PV does not provide.
    """
    metadata = SignalMetadata()
    display = getattr(value, "display", None)
    if hasattr(display, "units"):
        metadata["units"] = display.units
    precision = getattr(display, "precision", nan)
    if not isnan(precision):
        metadata["precision"] = precision
    if limits := _limits_from_value(value):
        metadata["limits"] = limits
    if datatype is str or issubclass(datatype, StrictEnum):
        # Choices live on display for string scalars, on value for NTEnum
        inner_value = getattr(value, "value", None)
        if hasattr(display, "choices"):
            metadata["choices"] = display.choices
        elif hasattr(inner_value, "choices"):
            metadata["choices"] = inner_value.choices
    return metadata
77
+
78
+
79
class PvaConverter(Generic[SignalDatatypeT]):
    """Convert between p4p structures and a native python ``datatype``.

    ``value_fields`` and ``reading_fields`` name the PVA substructures that
    must be requested to produce a value or a full Reading respectively.
    """

    value_fields = ("value",)
    reading_fields = ("alarm", "timeStamp")

    def __init__(self, datatype: type[SignalDatatypeT]):
        self.datatype = datatype

    def value(self, value: Any) -> SignalDatatypeT:
        # For a plain NTScalar the native value lives in the "value" field
        return value["value"]

    def write_value(self, value: Any) -> Any:
        # The pva library does the conversion for us, so pass through unchanged
        return value
95
+
96
+
97
class DisconnectedPvaConverter(PvaConverter):
    """Placeholder converter installed before ``connect()`` has been called.

    Every attribute access raises, so accidental use of an unconnected
    backend is caught immediately.
    """

    def __getattribute__(self, __name: str) -> Any:
        raise NotImplementedError("No PV has been set as connect() has not been called")
100
+
101
+
102
class PvaNDArrayConverter(PvaConverter[SignalDatatypeT]):
    """Converter for NTNDArray PVs, reshaping the flat data to its dimensions."""

    value_fields = ("value", "dimension")

    def _get_dimensions(self, value) -> list[int]:
        # NTNDArray stores dimensions in fortran-like order with the first
        # index changing fastest, so reverse them to get the usual C-like
        # shape (last index fastest) that numpy expects.
        dimensions: list[Value] = value["dimension"]
        return [dim.size for dim in dimensions][::-1]

    def value(self, value: Any) -> SignalDatatypeT:
        return value["value"].reshape(self._get_dimensions(value))

    def write_value(self, value: Any) -> Any:
        # No clear use-case for writing directly to an NDArray, and some
        # complexities around flattening to 1-D (e.g. dimension order), so
        # writing is deliberately unsupported.
        raise TypeError("Writing to NDArray not supported")
125
+
126
+
127
class PvaEnumConverter(PvaConverter[str]):
    """Converter for NTEnum PVs, yielding the choice string for the index.

    If ``supported_values`` is given, the PV's choice string is mapped through
    it, e.g. to rename choices to match a user-supplied enum class.
    """

    def __init__(
        self,
        datatype: type[str] = str,
        supported_values: Mapping[str, str] | None = None,
    ):
        # Fix: avoid a mutable default argument; None means "no remapping".
        # Passing an explicit mapping (including {}) behaves as before.
        self.supported_values = supported_values or {}
        super().__init__(datatype)

    def value(self, value: Any) -> str:
        str_value = value["value"]["choices"][value["value"]["index"]]
        if self.supported_values:
            return self.supported_values[str_value]
        return str_value
140
+
141
+
142
class PvaEnumBoolConverter(PvaConverter[bool]):
    """Converter treating a two-choice NTEnum as a bool."""

    def __init__(self):
        super().__init__(bool)

    def value(self, value: Any) -> bool:
        # Index 0 -> False, index 1 -> True
        return bool(value["value"]["index"])
148
+
149
+
150
class PvaTableConverter(PvaConverter[Table]):
    """Converter for NTTable PVs, mapping to/from a `Table` pydantic model."""

    def value(self, value) -> Table:
        return self.datatype(**value["value"].todict())

    def write_value(self, value: BaseModel | dict[str, Any]) -> Any:
        # Dump instances of our own datatype to plain python; dicts and other
        # values pass straight through for p4p to convert.
        if isinstance(value, self.datatype):
            return value.model_dump(mode="python")
        return value
158
+
159
+
160
# Map (typeid, value-field specifier) -> (inferred datatype, converter class).
# Specifier codes are documented at https://mdavidsaver.github.io/p4p/values.html
_datatype_converter_from_typeid: dict[
    tuple[str, str], tuple[type[SignalDatatype], type[PvaConverter]]
] = {
    # NTScalar: all integer widths map to python int, both float widths to float
    ("epics:nt/NTScalar:1.0", "?"): (bool, PvaConverter),
    ("epics:nt/NTScalar:1.0", "b"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "B"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "h"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "H"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "i"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "I"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "l"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "L"): (int, PvaConverter),
    ("epics:nt/NTScalar:1.0", "f"): (float, PvaConverter),
    ("epics:nt/NTScalar:1.0", "d"): (float, PvaConverter),
    ("epics:nt/NTScalar:1.0", "s"): (str, PvaConverter),
    # NTEnum maps to its choice string by default
    ("epics:nt/NTEnum:1.0", "S"): (str, PvaEnumConverter),
    # NTScalarArray: 1-D numpy arrays of the matching dtype
    ("epics:nt/NTScalarArray:1.0", "a?"): (Array1D[np.bool_], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ab"): (Array1D[np.int8], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aB"): (Array1D[np.uint8], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ah"): (Array1D[np.int16], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aH"): (Array1D[np.uint16], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ai"): (Array1D[np.int32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aI"): (Array1D[np.uint32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "al"): (Array1D[np.int64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "aL"): (Array1D[np.uint64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "af"): (Array1D[np.float32], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "ad"): (Array1D[np.float64], PvaConverter),
    ("epics:nt/NTScalarArray:1.0", "as"): (Sequence[str], PvaConverter),
    # Structured types
    ("epics:nt/NTTable:1.0", "S"): (Table, PvaTableConverter),
    ("epics:nt/NTNDArray:1.0", "v"): (np.ndarray, PvaNDArrayConverter),
    ("epics:nt/NTNDArray:1.0", "U"): (np.ndarray, PvaNDArrayConverter),
}
193
+
194
+
195
def _get_specifier(value: Value):
    """Return the type specifier code of the "value" field of *value*."""
    typ = value.type("value").aspy()
    # Structured value fields come back as a tuple whose first element is
    # the specifier code; scalars come back as the code itself.
    return typ[0] if isinstance(typ, tuple) else str(typ)
201
+
202
+
203
def make_converter(datatype: type | None, values: dict[str, Any]) -> PvaConverter:
    """Pick the right `PvaConverter` for the PV(s) in *values*.

    Parameters
    ----------
    datatype:
        The datatype requested by the user, or None to infer from the PV.
    values:
        Mapping of pv name to the initial p4p Value received for it.

    Raises
    ------
    TypeError
        If the requested datatype cannot be coerced from the PV's wire type.
    """
    # Fix: next(iter(...)) instead of list(values)[0] (no throwaway list)
    pv = next(iter(values))
    typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
    specifier = get_unique(
        {k: _get_specifier(v) for k, v in values.items()},
        "value type specifiers",
    )
    # Infer a datatype and converter from the typeid and specifier
    inferred_datatype, converter_cls = _datatype_converter_from_typeid[
        (typeid, specifier)
    ]
    # Some override cases
    if datatype is bool and typeid == "epics:nt/NTEnum:1.0":
        # Database can't do bools, so they are often represented as enums of len 2
        pv_num_choices = get_unique(
            {k: len(v["value"]["choices"]) for k, v in values.items()},
            "number of choices",
        )
        if pv_num_choices != 2:
            raise TypeError(f"{pv} has {pv_num_choices} choices, can't map to bool")
        return PvaEnumBoolConverter()
    elif typeid == "epics:nt/NTEnum:1.0":
        pv_choices = get_unique(
            {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
        )
        if enum_cls := get_enum_cls(datatype):
            # We were given an enum class, so map the PV choices onto it
            return PvaEnumConverter(
                supported_values=get_supported_values(pv, enum_cls, pv_choices)
            )
        elif datatype in (None, str):
            # Still use the Enum converter, but make choices from what it has
            return PvaEnumConverter()
    elif (
        inferred_datatype is float
        and datatype is int
        and get_unique(
            {k: v["display"]["precision"] for k, v in values.items()}, "precision"
        )
        == 0
    ):
        # Allow int signals to represent float records when prec is 0
        return PvaConverter(int)
    elif inferred_datatype is str and get_enum_cls(datatype):
        # Allow strings to be used as enums until QSRV supports this
        # (fix: the walrus binding here was unused, so just test truthiness)
        return PvaConverter(str)
    elif inferred_datatype is Table and datatype and issubclass(datatype, Table):
        # Use a custom table class
        return PvaTableConverter(datatype)
    elif datatype in (None, inferred_datatype):
        # If datatype matches what we are given then allow it and use inferred converter
        return converter_cls(inferred_datatype)
    raise TypeError(
        f"{pv} with inferred datatype {format_datatype(inferred_datatype)}"
        f" from {typeid=} {specifier=}"
        f" cannot be coerced to {format_datatype(datatype)}"
    )
260
+
261
+
262
# Process-wide p4p Context, created lazily by context()
_context: Context | None = None


def context() -> Context:
    """Return the shared p4p pva `Context`, creating it on first use."""
    global _context
    if _context is not None:
        return _context
    _context = Context("pva", nt=False)

    @atexit.register
    def _del_ctxt():
        # Drop the module-level reference at interpreter exit, otherwise p4p
        # emits noise like "Error in sys.excepthook: ... Original exception
        # was:" while shutting down.
        global _context
        del _context

    return _context
279
+
280
+
281
async def pvget_with_timeout(pv: str, timeout: float) -> Any:
    """Get *pv* via the shared context, raising `NotConnected` on timeout."""
    try:
        return await asyncio.wait_for(context().get(pv), timeout=timeout)
    except asyncio.TimeoutError as exc:
        # Keep the traceback at debug level; callers get the uniform error
        logging.debug(f"signal pva://{pv} timed out", exc_info=True)
        raise NotConnected(f"pva://{pv}") from exc
287
+
288
+
289
def _pva_request_string(fields: Sequence[str]) -> str:
    """Convert a list of requested fields into a PVA request string for p4p."""
    joined = ",".join(fields)
    return f"field({joined})"
294
+
295
+
296
class PvaSignalBackend(EpicsSignalBackend[SignalDatatypeT]):
    """`EpicsSignalBackend` that talks to PVs over PV Access using p4p."""

    def __init__(
        self,
        datatype: type[SignalDatatypeT] | None,
        read_pv: str = "",
        write_pv: str = "",
    ):
        # Replaced by a real converter once connect() has inspected the PV
        self.converter: PvaConverter = DisconnectedPvaConverter(float)
        self.initial_values: dict[str, Any] = {}
        self.subscription: Subscription | None = None
        super().__init__(datatype, read_pv, write_pv)

    def source(self, name: str, read: bool):
        pv = self.read_pv if read else self.write_pv
        return f"pva://{pv}"

    async def _store_initial_value(self, pv: str, timeout: float):
        self.initial_values[pv] = await pvget_with_timeout(pv, timeout)

    async def connect(self, timeout: float):
        if self.read_pv == self.write_pv:
            # One PV serves both directions, so a single get will do
            await self._store_initial_value(self.read_pv, timeout=timeout)
        else:
            # Two distinct PVs: connect them concurrently
            await wait_for_connection(
                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
            )
        self.converter = make_converter(self.datatype, self.initial_values)

    def _reading_request(self) -> str:
        # Request string covering both the value and the reading metadata
        return _pva_request_string(
            self.converter.value_fields + self.converter.reading_fields
        )

    def _make_reading(self, value: Any) -> Reading[SignalDatatypeT]:
        timestamp = value["timeStamp"]
        severity = value["alarm"]["severity"]
        return {
            "value": self.converter.value(value),
            "timestamp": (
                timestamp["secondsPastEpoch"] + timestamp["nanoseconds"] * 1e-9
            ),
            # Severities above MAJOR (2) are reported as invalid (-1)
            "alarm_severity": -1 if severity > 2 else severity,
        }

    async def put(self, value: SignalDatatypeT | None, wait: bool):
        if value is None:
            # Restore the value the PV had when we connected
            write_value = self.initial_values[self.write_pv]
        else:
            write_value = self.converter.write_value(value)
        await context().put(self.write_pv, {"value": write_value}, wait=wait)

    async def get_datakey(self, source: str) -> DataKey:
        value = await context().get(self.read_pv)
        metadata = _metadata_from_value(self.converter.datatype, value)
        return make_datakey(
            self.converter.datatype, self.converter.value(value), source, metadata
        )

    async def get_reading(self) -> Reading:
        value = await context().get(self.read_pv, request=self._reading_request())
        return self._make_reading(value)

    async def get_value(self) -> SignalDatatypeT:
        request = _pva_request_string(self.converter.value_fields)
        value = await context().get(self.read_pv, request=request)
        return self.converter.value(value)

    async def get_setpoint(self) -> SignalDatatypeT:
        request = _pva_request_string(self.converter.value_fields)
        value = await context().get(self.write_pv, request=request)
        return self.converter.value(value)

    def set_callback(self, callback: Callback[Reading[SignalDatatypeT]] | None) -> None:
        if callback:
            assert (
                not self.subscription
            ), "Cannot set a callback when one is already set"

            async def async_callback(v):
                callback(self._make_reading(v))

            self.subscription = context().monitor(
                self.read_pv, async_callback, request=self._reading_request()
            )
        elif self.subscription:
            self.subscription.close()
            self.subscription = None
@@ -0,0 +1,91 @@
1
+ from __future__ import annotations
2
+
3
+ from ophyd_async.core import (
4
+ Device,
5
+ DeviceConnector,
6
+ DeviceFiller,
7
+ Signal,
8
+ SignalR,
9
+ SignalRW,
10
+ SignalX,
11
+ )
12
+ from ophyd_async.core._utils import LazyMock
13
+
14
+ from ._epics_connector import fill_backend_with_prefix
15
+ from ._signal import PvaSignalBackend, pvget_with_timeout
16
+
17
+ Entry = dict[str, str]
18
+
19
+
20
+ def _get_signal_details(entry: Entry) -> tuple[type[Signal], str, str]:
21
+ match entry:
22
+ case {"r": read_pv}:
23
+ return SignalR, read_pv, read_pv
24
+ case {"r": read_pv, "w": write_pv}:
25
+ return SignalRW, read_pv, write_pv
26
+ case {"rw": read_write_pv}:
27
+ return SignalRW, read_write_pv, read_write_pv
28
+ case {"x": execute_pv}:
29
+ return SignalX, execute_pv, execute_pv
30
+ case _:
31
+ raise TypeError(f"Can't process entry {entry}")
32
+
33
+
34
class PviDeviceConnector(DeviceConnector):
    """`DeviceConnector` that discovers a device's children via its PVI PV."""

    def __init__(self, prefix: str = "") -> None:
        # TODO: what happens if we get a leading "pva://" here?
        self.prefix = prefix
        self.pvi_pv = prefix + "PVI"

    def create_children_from_annotations(self, device: Device):
        # Only build the filler once; repeated calls are no-ops
        if not hasattr(self, "filler"):
            self.filler = DeviceFiller(
                device=device,
                signal_backend_factory=PvaSignalBackend,
                device_connector_factory=PviDeviceConnector,
            )
            # Devices will be created with unfilled PviDeviceConnectors
            list(self.filler.create_devices_from_annotations(filled=False))
            # Signals can be filled in with EpicsSignalSuffix and checked at runtime
            for backend, annotations in self.filler.create_signals_from_annotations(
                filled=False
            ):
                fill_backend_with_prefix(self.prefix, backend, annotations)
            self.filler.check_created()

    def _fill_child(self, name: str, entry: Entry, vector_index: int | None = None):
        if set(entry) == {"d"}:
            # A sub-device: give its connector the PVI PV reported by PVI
            connector = self.filler.fill_child_device(name, vector_index=vector_index)
            connector.pvi_pv = entry["d"]
        else:
            # A signal: point its backend at the reported read/write PVs
            signal_type, read_pv, write_pv = _get_signal_details(entry)
            backend = self.filler.fill_child_signal(name, signal_type, vector_index)
            backend.read_pv = read_pv
            backend.write_pv = write_pv

    async def connect_mock(self, device: Device, mock: LazyMock):
        # No PVI structure in mock mode, so mock a couple of vector entries
        self.filler.create_device_vector_entries_to_mock(2)
        # Set the name of the device to name all children
        device.set_name(device.name)
        return await super().connect_mock(device, mock)

    async def connect_real(
        self, device: Device, timeout: float, force_reconnect: bool
    ) -> None:
        pvi_structure = await pvget_with_timeout(self.pvi_pv, timeout)
        entries: dict[str, Entry | list[Entry | None]] = pvi_structure["value"].todict()
        # Fill based on what PVI gives us
        for name, entry in entries.items():
            if isinstance(entry, dict):
                # A single child
                self._fill_child(name, entry)
            else:
                # A DeviceVector of children, indexed by position
                for i, sub_entry in enumerate(entry):
                    if sub_entry:
                        self._fill_child(name, sub_entry, i)
        # Check that all the requested children have been filled
        self.filler.check_filled(f"{self.pvi_pv}: {entries}")
        # Set the name of the device to name all children
        device.set_name(device.name)
        return await super().connect_real(device, timeout, force_reconnect)
@@ -0,0 +1,171 @@
1
+ """EPICS Signals over CA or PVA"""
2
+
3
+ from __future__ import annotations
4
+
5
+ from enum import Enum
6
+
7
+ from ophyd_async.core import (
8
+ SignalBackend,
9
+ SignalDatatypeT,
10
+ SignalR,
11
+ SignalRW,
12
+ SignalW,
13
+ SignalX,
14
+ get_unique,
15
+ )
16
+
17
+ from ._util import EpicsSignalBackend
18
+
19
+
20
class EpicsProtocol(Enum):
    """Transport used to talk to an EPICS PV."""

    CA = "ca"
    PVA = "pva"


# Fallback when no scheme is given; overridden below by whichever
# transport libraries are importable.
_default_epics_protocol = EpicsProtocol.CA
26
+
27
+
28
def _make_unavailable_function(error: Exception):
    """Return a stand-in callable that raises, chaining the import *error*."""

    def transport_not_available(*args, **kwargs):
        # Chain the original ImportError so the user sees why it's missing
        raise NotImplementedError("Transport not available") from error

    return transport_not_available
33
+
34
+
35
def _make_unavailable_class(error: Exception) -> type[EpicsSignalBackend]:
    """Return a backend class whose constructor raises, chaining *error*."""

    class TransportNotAvailable(EpicsSignalBackend):
        # Instantiation fails immediately with the chained import error
        __init__ = _make_unavailable_function(error)

    return TransportNotAvailable
40
+
41
+
42
# Try each transport library in turn, substituting raising placeholders for
# any that fail to import. The default protocol ends up as CA if aioca is
# importable, else PVA if p4p is importable, else CA (placeholders raise).
try:
    from ._p4p import PvaSignalBackend, pvget_with_timeout
except ImportError as pva_error:
    PvaSignalBackend = _make_unavailable_class(pva_error)
    pvget_with_timeout = _make_unavailable_function(pva_error)
else:
    _default_epics_protocol = EpicsProtocol.PVA

try:
    from ._aioca import CaSignalBackend
except ImportError as ca_error:
    CaSignalBackend = _make_unavailable_class(ca_error)
else:
    _default_epics_protocol = EpicsProtocol.CA
56
+
57
+
58
def split_protocol_from_pv(pv: str) -> tuple[EpicsProtocol, str]:
    """Split an optional ``ca://``/``pva://`` scheme off *pv*.

    Returns the protocol (falling back to the module default when no scheme
    is present) and the bare PV name.
    """
    scheme, sep, rest = pv.partition("://")
    if sep:
        # An explicit scheme like pva://mydevice selects the comms mode
        return EpicsProtocol(scheme), rest
    # No comms mode specified, use the default
    return _default_epics_protocol, pv
68
+
69
+
70
def get_signal_backend_type(protocol: EpicsProtocol) -> type[EpicsSignalBackend]:
    """Return the backend class implementing the given *protocol*."""
    if protocol is EpicsProtocol.CA:
        return CaSignalBackend
    return PvaSignalBackend
76
+
77
+
78
def _epics_signal_backend(
    datatype: type[SignalDatatypeT] | None, read_pv: str, write_pv: str
) -> SignalBackend[SignalDatatypeT]:
    """Create an EPICS signal backend, requiring both PVs to share a protocol."""
    r_protocol, r_pv = split_protocol_from_pv(read_pv)
    w_protocol, w_pv = split_protocol_from_pv(write_pv)
    # get_unique raises if the two PVs name different transports
    protocol = get_unique({read_pv: r_protocol, write_pv: w_protocol}, "protocols")
    backend_cls = get_signal_backend_type(protocol)
    return backend_cls(datatype, r_pv, w_pv)
87
+
88
+
89
def epics_signal_rw(
    datatype: type[SignalDatatypeT],
    read_pv: str,
    write_pv: str | None = None,
    name: str = "",
) -> SignalRW[SignalDatatypeT]:
    """Create a `SignalRW` backed by 1 or 2 EPICS PVs.

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    write_pv:
        If given, use this PV to write to, otherwise use read_pv
    """
    backend = _epics_signal_backend(datatype, read_pv, write_pv or read_pv)
    return SignalRW(backend, name=name)
108
+
109
+
110
def epics_signal_rw_rbv(
    datatype: type[SignalDatatypeT],
    write_pv: str,
    read_suffix: str = "_RBV",
    name: str = "",
) -> SignalRW[SignalDatatypeT]:
    """Create a `SignalRW` whose readback PV is the write PV plus a suffix.

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    read_suffix:
        Append this suffix to the write pv to create the readback pv
    """
    read_pv = f"{write_pv}{read_suffix}"
    return epics_signal_rw(datatype, read_pv, write_pv, name)
128
+
129
+
130
def epics_signal_r(
    datatype: type[SignalDatatypeT], read_pv: str, name: str = ""
) -> SignalR[SignalDatatypeT]:
    """Create a `SignalR` backed by 1 EPICS PV.

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    read_pv:
        The PV to read and monitor
    """
    # Read-only signals use the same PV for both backend slots
    backend = _epics_signal_backend(datatype, read_pv, read_pv)
    return SignalR(backend, name=name)
144
+
145
+
146
def epics_signal_w(
    datatype: type[SignalDatatypeT], write_pv: str, name: str = ""
) -> SignalW[SignalDatatypeT]:
    """Create a `SignalW` backed by 1 EPICS PV.

    Parameters
    ----------
    datatype:
        Check that the PV is of this type
    write_pv:
        The PV to write to
    """
    backend = _epics_signal_backend(datatype, write_pv, write_pv)
    return SignalW(backend, name=name)
160
+
161
+
162
def epics_signal_x(write_pv: str, name: str = "") -> SignalX:
    """Create a `SignalX` backed by 1 EPICS PV.

    Parameters
    ----------
    write_pv:
        The PV to write its initial value to on trigger
    """
    # No datatype: triggering writes back whatever the PV's initial value was
    backend = _epics_signal_backend(None, write_pv, write_pv)
    return SignalX(backend, name=name)