ophyd-async 0.7.0a1__py3-none-any.whl → 0.8.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +30 -9
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +146 -67
  5. ophyd_async/core/_device_filler.py +269 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +32 -40
  8. ophyd_async/core/_mock_signal_utils.py +22 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +133 -134
  11. ophyd_async/core/_signal.py +140 -152
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +125 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +97 -100
  16. ophyd_async/core/_utils.py +79 -18
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +8 -6
  19. ophyd_async/epics/adcore/_core_io.py +5 -7
  20. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  21. ophyd_async/epics/adcore/_single_trigger.py +4 -9
  22. ophyd_async/epics/adcore/_utils.py +15 -10
  23. ophyd_async/epics/adkinetix/__init__.py +2 -1
  24. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  25. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
  26. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  27. ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
  28. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  29. ophyd_async/epics/advimba/__init__.py +4 -1
  30. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  31. ophyd_async/epics/advimba/_vimba_io.py +8 -9
  32. ophyd_async/epics/core/__init__.py +26 -0
  33. ophyd_async/epics/core/_aioca.py +323 -0
  34. ophyd_async/epics/core/_epics_connector.py +53 -0
  35. ophyd_async/epics/core/_epics_device.py +13 -0
  36. ophyd_async/epics/core/_p4p.py +382 -0
  37. ophyd_async/epics/core/_pvi_connector.py +92 -0
  38. ophyd_async/epics/core/_signal.py +171 -0
  39. ophyd_async/epics/core/_util.py +61 -0
  40. ophyd_async/epics/demo/_mover.py +4 -5
  41. ophyd_async/epics/demo/_sensor.py +14 -13
  42. ophyd_async/epics/eiger/_eiger.py +1 -2
  43. ophyd_async/epics/eiger/_eiger_controller.py +1 -1
  44. ophyd_async/epics/eiger/_eiger_io.py +3 -5
  45. ophyd_async/epics/eiger/_odin_io.py +5 -5
  46. ophyd_async/epics/motor.py +4 -5
  47. ophyd_async/epics/signal.py +11 -0
  48. ophyd_async/fastcs/core.py +9 -0
  49. ophyd_async/fastcs/panda/__init__.py +4 -4
  50. ophyd_async/fastcs/panda/_block.py +23 -11
  51. ophyd_async/fastcs/panda/_control.py +3 -5
  52. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  53. ophyd_async/fastcs/panda/_table.py +29 -51
  54. ophyd_async/fastcs/panda/_trigger.py +8 -8
  55. ophyd_async/fastcs/panda/_writer.py +4 -7
  56. ophyd_async/plan_stubs/_ensure_connected.py +3 -1
  57. ophyd_async/plan_stubs/_fly.py +2 -2
  58. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  59. ophyd_async/py.typed +0 -0
  60. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  61. ophyd_async/sim/demo/_sim_motor.py +3 -4
  62. ophyd_async/tango/__init__.py +2 -4
  63. ophyd_async/tango/base_devices/_base_device.py +76 -144
  64. ophyd_async/tango/demo/_counter.py +8 -18
  65. ophyd_async/tango/demo/_mover.py +5 -6
  66. ophyd_async/tango/signal/__init__.py +2 -4
  67. ophyd_async/tango/signal/_signal.py +29 -50
  68. ophyd_async/tango/signal/_tango_transport.py +38 -40
  69. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/METADATA +8 -12
  70. ophyd_async-0.8.0a3.dist-info/RECORD +112 -0
  71. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/WHEEL +1 -1
  72. ophyd_async/epics/pvi/__init__.py +0 -3
  73. ophyd_async/epics/pvi/_pvi.py +0 -338
  74. ophyd_async/epics/signal/__init__.py +0 -21
  75. ophyd_async/epics/signal/_aioca.py +0 -378
  76. ophyd_async/epics/signal/_common.py +0 -57
  77. ophyd_async/epics/signal/_epics_transport.py +0 -34
  78. ophyd_async/epics/signal/_p4p.py +0 -518
  79. ophyd_async/epics/signal/_signal.py +0 -114
  80. ophyd_async-0.7.0a1.dist-info/RECORD +0 -108
  81. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/LICENSE +0 -0
  82. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/entry_points.txt +0 -0
  83. {ophyd_async-0.7.0a1.dist-info → ophyd_async-0.8.0a3.dist-info}/top_level.txt +0 -0
@@ -1,378 +0,0 @@
1
- import inspect
2
- import logging
3
- import sys
4
- from collections.abc import Sequence
5
- from dataclasses import dataclass
6
- from enum import Enum
7
- from math import isnan, nan
8
- from typing import Any, get_origin
9
-
10
- import numpy as np
11
- from aioca import (
12
- FORMAT_CTRL,
13
- FORMAT_RAW,
14
- FORMAT_TIME,
15
- CANothing,
16
- Subscription,
17
- caget,
18
- camonitor,
19
- caput,
20
- )
21
- from aioca.types import AugmentedValue, Dbr, Format
22
- from bluesky.protocols import Reading
23
- from epicscorelibs.ca import dbr
24
- from event_model import DataKey
25
- from event_model.documents.event_descriptor import Dtype
26
-
27
- from ophyd_async.core import (
28
- DEFAULT_TIMEOUT,
29
- NotConnected,
30
- ReadingValueCallback,
31
- RuntimeSubsetEnum,
32
- SignalBackend,
33
- T,
34
- get_dtype,
35
- get_unique,
36
- wait_for_connection,
37
- )
38
-
39
- from ._common import LimitPair, Limits, common_meta, get_supported_values
40
-
41
- dbr_to_dtype: dict[Dbr, Dtype] = {
42
- dbr.DBR_STRING: "string",
43
- dbr.DBR_SHORT: "integer",
44
- dbr.DBR_FLOAT: "number",
45
- dbr.DBR_CHAR: "string",
46
- dbr.DBR_LONG: "integer",
47
- dbr.DBR_DOUBLE: "number",
48
- }
49
-
50
-
51
- def _data_key_from_augmented_value(
52
- value: AugmentedValue,
53
- *,
54
- choices: list[str] | None = None,
55
- dtype: Dtype | None = None,
56
- ) -> DataKey:
57
- """Use the return value of get with FORMAT_CTRL to construct a DataKey
58
- describing the signal. See docstring of AugmentedValue for expected
59
- value fields by DBR type.
60
-
61
- Args:
62
- value (AugmentedValue): Description of the the return type of a DB record
63
- choices: Optional list of enum choices to pass as metadata in the datakey
64
- dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
65
-
66
- Returns:
67
- DataKey: A rich DataKey describing the DB record
68
- """
69
- source = f"ca://{value.name}"
70
- assert value.ok, f"Error reading {source}: {value}"
71
-
72
- scalar = value.element_count == 1
73
- dtype = dtype or dbr_to_dtype[value.datatype] # type: ignore
74
-
75
- dtype_numpy = np.dtype(dbr.DbrCodeToType[value.datatype].dtype).descr[0][1]
76
-
77
- d = DataKey(
78
- source=source,
79
- dtype=dtype if scalar else "array",
80
- # Ignore until https://github.com/bluesky/event-model/issues/308
81
- dtype_numpy=dtype_numpy, # type: ignore
82
- # strictly value.element_count >= len(value)
83
- shape=[] if scalar else [len(value)],
84
- )
85
- for key in common_meta:
86
- attr = getattr(value, key, nan)
87
- if isinstance(attr, str) or not isnan(attr):
88
- d[key] = attr
89
-
90
- if choices is not None:
91
- d["choices"] = choices # type: ignore
92
-
93
- if limits := _limits_from_augmented_value(value):
94
- d["limits"] = limits # type: ignore
95
-
96
- return d
97
-
98
-
99
- def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
100
- def get_limits(limit: str) -> LimitPair:
101
- low = getattr(value, f"lower_{limit}_limit", nan)
102
- high = getattr(value, f"upper_{limit}_limit", nan)
103
- return LimitPair(
104
- low=None if isnan(low) else low, high=None if isnan(high) else high
105
- )
106
-
107
- return Limits(
108
- alarm=get_limits("alarm"),
109
- control=get_limits("ctrl"),
110
- display=get_limits("disp"),
111
- warning=get_limits("warning"),
112
- )
113
-
114
-
115
- @dataclass
116
- class CaConverter:
117
- read_dbr: Dbr | None
118
- write_dbr: Dbr | None
119
-
120
- def write_value(self, value) -> Any:
121
- return value
122
-
123
- def value(self, value: AugmentedValue):
124
- # for channel access ca_xxx classes, this
125
- # invokes __pos__ operator to return an instance of
126
- # the builtin base class
127
- return +value # type: ignore
128
-
129
- def reading(self, value: AugmentedValue) -> Reading:
130
- return {
131
- "value": self.value(value),
132
- "timestamp": value.timestamp,
133
- "alarm_severity": -1 if value.severity > 2 else value.severity,
134
- }
135
-
136
- def get_datakey(self, value: AugmentedValue) -> DataKey:
137
- return _data_key_from_augmented_value(value)
138
-
139
-
140
- class CaLongStrConverter(CaConverter):
141
- def __init__(self):
142
- return super().__init__(dbr.DBR_CHAR_STR, dbr.DBR_CHAR_STR)
143
-
144
- def write_value(self, value: str):
145
- # Add a null in here as this is what the commandline caput does
146
- # TODO: this should be in the server so check if it can be pushed to asyn
147
- return value + "\0"
148
-
149
-
150
- class CaArrayConverter(CaConverter):
151
- def value(self, value: AugmentedValue):
152
- return np.array(value, copy=False)
153
-
154
-
155
- @dataclass
156
- class CaEnumConverter(CaConverter):
157
- """To prevent issues when a signal is restarted and returns with different enum
158
- values or orders, we put treat an Enum signal as a string, and cache the
159
- choices on this class.
160
- """
161
-
162
- choices: dict[str, str]
163
-
164
- def write_value(self, value: Enum | str):
165
- if isinstance(value, Enum):
166
- return value.value
167
- else:
168
- return value
169
-
170
- def value(self, value: AugmentedValue):
171
- return self.choices[value] # type: ignore
172
-
173
- def get_datakey(self, value: AugmentedValue) -> DataKey:
174
- # Sometimes DBR_TYPE returns as String, must pass choices still
175
- return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))
176
-
177
-
178
- @dataclass
179
- class CaBoolConverter(CaConverter):
180
- def value(self, value: AugmentedValue) -> bool:
181
- return bool(value)
182
-
183
- def get_datakey(self, value: AugmentedValue) -> DataKey:
184
- return _data_key_from_augmented_value(value, dtype="boolean")
185
-
186
-
187
- class DisconnectedCaConverter(CaConverter):
188
- def __getattribute__(self, __name: str) -> Any:
189
- raise NotImplementedError("No PV has been set as connect() has not been called")
190
-
191
-
192
- def make_converter(
193
- datatype: type | None, values: dict[str, AugmentedValue]
194
- ) -> CaConverter:
195
- pv = list(values)[0]
196
- pv_dbr = get_unique({k: v.datatype for k, v in values.items()}, "datatypes")
197
- is_array = bool([v for v in values.values() if v.element_count > 1])
198
- if is_array and datatype is str and pv_dbr == dbr.DBR_CHAR:
199
- # Override waveform of chars to be treated as string
200
- return CaLongStrConverter()
201
- elif is_array and pv_dbr == dbr.DBR_STRING:
202
- # Waveform of strings, check we wanted this
203
- if datatype:
204
- datatype_dtype = get_dtype(datatype)
205
- if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
206
- raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
207
- return CaArrayConverter(pv_dbr, None)
208
- elif is_array:
209
- pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes") # type: ignore
210
- # This is an array
211
- if datatype:
212
- # Check we wanted an array of this type
213
- dtype = get_dtype(datatype)
214
- if not dtype:
215
- raise TypeError(f"{pv} has type [{pv_dtype}] not {datatype.__name__}")
216
- if dtype != pv_dtype:
217
- raise TypeError(f"{pv} has type [{pv_dtype}] not [{dtype}]")
218
- return CaArrayConverter(pv_dbr, None) # type: ignore
219
- elif pv_dbr == dbr.DBR_ENUM and datatype is bool:
220
- # Database can't do bools, so are often representated as enums,
221
- # CA can do int
222
- pv_choices_len = get_unique(
223
- {k: len(v.enums) for k, v in values.items()}, "number of choices"
224
- )
225
- if pv_choices_len != 2:
226
- raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
227
- return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
228
- elif pv_dbr == dbr.DBR_ENUM:
229
- # This is an Enum
230
- pv_choices = get_unique(
231
- {k: tuple(v.enums) for k, v in values.items()}, "choices"
232
- )
233
- supported_values = get_supported_values(pv, datatype, pv_choices)
234
- return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
235
- else:
236
- value = list(values.values())[0]
237
- # Done the dbr check, so enough to check one of the values
238
- if datatype and not isinstance(value, datatype):
239
- # Allow int signals to represent float records when prec is 0
240
- is_prec_zero_float = (
241
- isinstance(value, float)
242
- and get_unique({k: v.precision for k, v in values.items()}, "precision")
243
- == 0
244
- )
245
- if not (datatype is int and is_prec_zero_float):
246
- raise TypeError(
247
- f"{pv} has type {type(value).__name__.replace('ca_', '')} "
248
- + f"not {datatype.__name__}"
249
- )
250
- return CaConverter(pv_dbr, None) # type: ignore
251
-
252
-
253
- _tried_pyepics = False
254
-
255
-
256
- def _use_pyepics_context_if_imported():
257
- global _tried_pyepics
258
- if not _tried_pyepics:
259
- ca = sys.modules.get("epics.ca", None)
260
- if ca:
261
- ca.use_initial_context()
262
- _tried_pyepics = True
263
-
264
-
265
- class CaSignalBackend(SignalBackend[T]):
266
- _ALLOWED_DATATYPES = (
267
- bool,
268
- int,
269
- float,
270
- str,
271
- Sequence,
272
- Enum,
273
- RuntimeSubsetEnum,
274
- np.ndarray,
275
- )
276
-
277
- @classmethod
278
- def datatype_allowed(cls, dtype: Any) -> bool:
279
- stripped_origin = get_origin(dtype) or dtype
280
- if dtype is None:
281
- return True
282
-
283
- return inspect.isclass(stripped_origin) and issubclass(
284
- stripped_origin, cls._ALLOWED_DATATYPES
285
- )
286
-
287
- def __init__(self, datatype: type[T] | None, read_pv: str, write_pv: str):
288
- self.datatype = datatype
289
- if not CaSignalBackend.datatype_allowed(self.datatype):
290
- raise TypeError(f"Given datatype {self.datatype} unsupported in CA.")
291
- self.read_pv = read_pv
292
- self.write_pv = write_pv
293
- self.initial_values: dict[str, AugmentedValue] = {}
294
- self.converter: CaConverter = DisconnectedCaConverter(None, None)
295
- self.subscription: Subscription | None = None
296
-
297
- def source(self, name: str):
298
- return f"ca://{self.read_pv}"
299
-
300
- async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
301
- try:
302
- self.initial_values[pv] = await caget(
303
- pv, format=FORMAT_CTRL, timeout=timeout
304
- )
305
- except CANothing as exc:
306
- logging.debug(f"signal ca://{pv} timed out")
307
- raise NotConnected(f"ca://{pv}") from exc
308
-
309
- async def connect(self, timeout: float = DEFAULT_TIMEOUT):
310
- _use_pyepics_context_if_imported()
311
- if self.read_pv != self.write_pv:
312
- # Different, need to connect both
313
- await wait_for_connection(
314
- read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
315
- write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
316
- )
317
- else:
318
- # The same, so only need to connect one
319
- await self._store_initial_value(self.read_pv, timeout=timeout)
320
- self.converter = make_converter(self.datatype, self.initial_values)
321
-
322
- async def put(self, value: T | None, wait=True, timeout=None):
323
- if value is None:
324
- write_value = self.initial_values[self.write_pv]
325
- else:
326
- write_value = self.converter.write_value(value)
327
- await caput(
328
- self.write_pv,
329
- write_value,
330
- datatype=self.converter.write_dbr,
331
- wait=wait,
332
- timeout=timeout,
333
- )
334
-
335
- async def _caget(self, format: Format) -> AugmentedValue:
336
- return await caget(
337
- self.read_pv,
338
- datatype=self.converter.read_dbr,
339
- format=format,
340
- timeout=None,
341
- )
342
-
343
- async def get_datakey(self, source: str) -> DataKey:
344
- value = await self._caget(FORMAT_CTRL)
345
- return self.converter.get_datakey(value)
346
-
347
- async def get_reading(self) -> Reading:
348
- value = await self._caget(FORMAT_TIME)
349
- return self.converter.reading(value)
350
-
351
- async def get_value(self) -> T:
352
- value = await self._caget(FORMAT_RAW)
353
- return self.converter.value(value)
354
-
355
- async def get_setpoint(self) -> T:
356
- value = await caget(
357
- self.write_pv,
358
- datatype=self.converter.read_dbr,
359
- format=FORMAT_RAW,
360
- timeout=None,
361
- )
362
- return self.converter.value(value)
363
-
364
- def set_callback(self, callback: ReadingValueCallback[T] | None) -> None:
365
- if callback:
366
- assert (
367
- not self.subscription
368
- ), "Cannot set a callback when one is already set"
369
- self.subscription = camonitor(
370
- self.read_pv,
371
- lambda v: callback(self.converter.reading(v), self.converter.value(v)),
372
- datatype=self.converter.read_dbr,
373
- format=FORMAT_TIME,
374
- )
375
- else:
376
- if self.subscription:
377
- self.subscription.close()
378
- self.subscription = None
@@ -1,57 +0,0 @@
1
- import inspect
2
- from enum import Enum
3
-
4
- from typing_extensions import TypedDict
5
-
6
- from ophyd_async.core import RuntimeSubsetEnum
7
-
8
- common_meta = {
9
- "units",
10
- "precision",
11
- }
12
-
13
-
14
- class LimitPair(TypedDict):
15
- high: float | None
16
- low: float | None
17
-
18
-
19
- class Limits(TypedDict):
20
- alarm: LimitPair
21
- control: LimitPair
22
- display: LimitPair
23
- warning: LimitPair
24
-
25
-
26
- def get_supported_values(
27
- pv: str,
28
- datatype: type[str] | None,
29
- pv_choices: tuple[str, ...],
30
- ) -> dict[str, str]:
31
- if inspect.isclass(datatype) and issubclass(datatype, RuntimeSubsetEnum):
32
- if not set(datatype.choices).issubset(set(pv_choices)):
33
- raise TypeError(
34
- f"{pv} has choices {pv_choices}, "
35
- f"which is not a superset of {str(datatype)}."
36
- )
37
- return {x: x or "_" for x in pv_choices}
38
- elif inspect.isclass(datatype) and issubclass(datatype, Enum):
39
- if not issubclass(datatype, str):
40
- raise TypeError(
41
- f"{pv} is type Enum but {datatype} does not inherit from String."
42
- )
43
-
44
- choices = tuple(v.value for v in datatype)
45
- if set(choices) != set(pv_choices):
46
- raise TypeError(
47
- f"{pv} has choices {pv_choices}, "
48
- f"which do not match {datatype}, which has {choices}."
49
- )
50
- return {x: datatype(x) if x else "_" for x in pv_choices}
51
- elif datatype is None or datatype is str:
52
- return {x: x or "_" for x in pv_choices}
53
-
54
- raise TypeError(
55
- f"{pv} has choices {pv_choices}. "
56
- "Use an Enum or SubsetEnum to represent this."
57
- )
@@ -1,34 +0,0 @@
1
- """EPICS Signals over CA or PVA"""
2
-
3
- from __future__ import annotations
4
-
5
- from enum import Enum
6
-
7
-
8
- def _make_unavailable_class(error: Exception) -> type:
9
- class TransportNotAvailable:
10
- def __init__(*args, **kwargs):
11
- raise NotImplementedError("Transport not available") from error
12
-
13
- return TransportNotAvailable
14
-
15
-
16
- try:
17
- from ._aioca import CaSignalBackend
18
- except ImportError as ca_error:
19
- CaSignalBackend = _make_unavailable_class(ca_error)
20
-
21
-
22
- try:
23
- from ._p4p import PvaSignalBackend
24
- except ImportError as pva_error:
25
- PvaSignalBackend = _make_unavailable_class(pva_error)
26
-
27
-
28
- class _EpicsTransport(Enum):
29
- """The sorts of transport EPICS support"""
30
-
31
- #: Use Channel Access (using aioca library)
32
- ca = CaSignalBackend
33
- #: Use PVAccess (using p4p library)
34
- pva = PvaSignalBackend