ophyd-async 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +2 -2
  3. ophyd_async/core/__init__.py +91 -19
  4. ophyd_async/core/_providers.py +68 -0
  5. ophyd_async/core/async_status.py +90 -42
  6. ophyd_async/core/detector.py +341 -0
  7. ophyd_async/core/device.py +226 -0
  8. ophyd_async/core/device_save_loader.py +286 -0
  9. ophyd_async/core/flyer.py +85 -0
  10. ophyd_async/core/mock_signal_backend.py +82 -0
  11. ophyd_async/core/mock_signal_utils.py +145 -0
  12. ophyd_async/core/{_device/_signal/signal.py → signal.py} +249 -61
  13. ophyd_async/core/{_device/_backend/signal_backend.py → signal_backend.py} +12 -5
  14. ophyd_async/core/{_device/_backend/sim_signal_backend.py → soft_signal_backend.py} +54 -48
  15. ophyd_async/core/standard_readable.py +261 -0
  16. ophyd_async/core/utils.py +127 -30
  17. ophyd_async/epics/_backend/_aioca.py +62 -43
  18. ophyd_async/epics/_backend/_p4p.py +100 -52
  19. ophyd_async/epics/_backend/common.py +25 -0
  20. ophyd_async/epics/areadetector/__init__.py +16 -15
  21. ophyd_async/epics/areadetector/aravis.py +63 -0
  22. ophyd_async/epics/areadetector/controllers/__init__.py +5 -0
  23. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +52 -0
  24. ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
  25. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  26. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +61 -0
  27. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  28. ophyd_async/epics/areadetector/drivers/__init__.py +21 -0
  29. ophyd_async/epics/areadetector/drivers/ad_base.py +107 -0
  30. ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
  31. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  32. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +21 -0
  33. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  34. ophyd_async/epics/areadetector/kinetix.py +46 -0
  35. ophyd_async/epics/areadetector/pilatus.py +45 -0
  36. ophyd_async/epics/areadetector/single_trigger_det.py +18 -10
  37. ophyd_async/epics/areadetector/utils.py +91 -13
  38. ophyd_async/epics/areadetector/vimba.py +43 -0
  39. ophyd_async/epics/areadetector/writers/__init__.py +5 -0
  40. ophyd_async/epics/areadetector/writers/_hdfdataset.py +10 -0
  41. ophyd_async/epics/areadetector/writers/_hdffile.py +54 -0
  42. ophyd_async/epics/areadetector/writers/hdf_writer.py +142 -0
  43. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +40 -0
  44. ophyd_async/epics/areadetector/writers/nd_plugin.py +38 -0
  45. ophyd_async/epics/demo/__init__.py +78 -51
  46. ophyd_async/epics/demo/demo_ad_sim_detector.py +35 -0
  47. ophyd_async/epics/motion/motor.py +67 -52
  48. ophyd_async/epics/pvi/__init__.py +3 -0
  49. ophyd_async/epics/pvi/pvi.py +318 -0
  50. ophyd_async/epics/signal/__init__.py +8 -3
  51. ophyd_async/epics/signal/signal.py +27 -10
  52. ophyd_async/log.py +130 -0
  53. ophyd_async/panda/__init__.py +24 -7
  54. ophyd_async/panda/_common_blocks.py +49 -0
  55. ophyd_async/panda/_hdf_panda.py +48 -0
  56. ophyd_async/panda/_panda_controller.py +37 -0
  57. ophyd_async/panda/_table.py +158 -0
  58. ophyd_async/panda/_trigger.py +39 -0
  59. ophyd_async/panda/_utils.py +15 -0
  60. ophyd_async/panda/writers/__init__.py +3 -0
  61. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  62. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  63. ophyd_async/plan_stubs/__init__.py +13 -0
  64. ophyd_async/plan_stubs/ensure_connected.py +22 -0
  65. ophyd_async/plan_stubs/fly.py +149 -0
  66. ophyd_async/protocols.py +126 -0
  67. ophyd_async/sim/__init__.py +11 -0
  68. ophyd_async/sim/demo/__init__.py +3 -0
  69. ophyd_async/sim/demo/sim_motor.py +103 -0
  70. ophyd_async/sim/pattern_generator.py +318 -0
  71. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  72. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  73. ophyd_async/sim/sim_pattern_generator.py +37 -0
  74. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +35 -67
  75. ophyd_async-0.3.0.dist-info/RECORD +86 -0
  76. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
  77. ophyd_async/core/_device/__init__.py +0 -0
  78. ophyd_async/core/_device/_backend/__init__.py +0 -0
  79. ophyd_async/core/_device/_signal/__init__.py +0 -0
  80. ophyd_async/core/_device/device.py +0 -60
  81. ophyd_async/core/_device/device_collector.py +0 -121
  82. ophyd_async/core/_device/device_vector.py +0 -14
  83. ophyd_async/core/_device/standard_readable.py +0 -72
  84. ophyd_async/epics/areadetector/ad_driver.py +0 -18
  85. ophyd_async/epics/areadetector/directory_provider.py +0 -18
  86. ophyd_async/epics/areadetector/hdf_streamer_det.py +0 -167
  87. ophyd_async/epics/areadetector/nd_file_hdf.py +0 -22
  88. ophyd_async/epics/areadetector/nd_plugin.py +0 -13
  89. ophyd_async/epics/signal/pvi_get.py +0 -22
  90. ophyd_async/panda/panda.py +0 -332
  91. ophyd_async-0.1.0.dist-info/RECORD +0 -45
  92. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
  93. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
  94. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
ophyd_async/core/standard_readable.py ADDED
@@ -0,0 +1,261 @@
+ import warnings
+ from contextlib import contextmanager
+ from typing import (
+     Callable,
+     Dict,
+     Generator,
+     Optional,
+     Sequence,
+     Tuple,
+     Type,
+     Union,
+ )
+
+ from bluesky.protocols import DataKey, HasHints, Hints, Reading
+
+ from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageable
+
+ from .async_status import AsyncStatus
+ from .device import Device, DeviceVector
+ from .signal import SignalR
+ from .utils import merge_gathered_dicts
+
+ ReadableChild = Union[AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints]
+ ReadableChildWrapper = Union[
+     Callable[[ReadableChild], ReadableChild], Type["ConfigSignal"], Type["HintedSignal"]
+ ]
+
+
+ class StandardReadable(
+     Device, AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints
+ ):
+     """Device that owns its children and provides useful default behavior.
+
+     - When its name is set it renames child Devices
+     - Signals can be registered for read() and read_configuration()
+     - These signals will be subscribed for read() between stage() and unstage()
+     """
+
+     # These must be immutable types to avoid accidental sharing between
+     # different instances of the class
+     _readables: Tuple[AsyncReadable, ...] = ()
+     _configurables: Tuple[AsyncConfigurable, ...] = ()
+     _stageables: Tuple[AsyncStageable, ...] = ()
+     _has_hints: Tuple[HasHints, ...] = ()
+
+     def set_readable_signals(
+         self,
+         read: Sequence[SignalR] = (),
+         config: Sequence[SignalR] = (),
+         read_uncached: Sequence[SignalR] = (),
+     ):
+         """
+         Parameters
+         ----------
+         read:
+             Signals to make up :meth:`~StandardReadable.read`
+         conf:
+             Signals to make up :meth:`~StandardReadable.read_configuration`
+         read_uncached:
+             Signals to make up :meth:`~StandardReadable.read` that won't be cached
+         """
+         warnings.warn(
+             DeprecationWarning(
+                 "Migrate to `add_children_as_readables` context manager or "
+                 "`add_readables` method"
+             )
+         )
+         self.add_readables(read, wrapper=HintedSignal)
+         self.add_readables(config, wrapper=ConfigSignal)
+         self.add_readables(read_uncached, wrapper=HintedSignal.uncached)
+
+     @AsyncStatus.wrap
+     async def stage(self) -> None:
+         for sig in self._stageables:
+             await sig.stage().task
+
+     @AsyncStatus.wrap
+     async def unstage(self) -> None:
+         for sig in self._stageables:
+             await sig.unstage().task
+
+     async def describe_configuration(self) -> Dict[str, DataKey]:
+         return await merge_gathered_dicts(
+             [sig.describe_configuration() for sig in self._configurables]
+         )
+
+     async def read_configuration(self) -> Dict[str, Reading]:
+         return await merge_gathered_dicts(
+             [sig.read_configuration() for sig in self._configurables]
+         )
+
+     async def describe(self) -> Dict[str, DataKey]:
+         return await merge_gathered_dicts([sig.describe() for sig in self._readables])
+
+     async def read(self) -> Dict[str, Reading]:
+         return await merge_gathered_dicts([sig.read() for sig in self._readables])
+
+     @property
+     def hints(self) -> Hints:
+         hints: Hints = {}
+         for new_hint in self._has_hints:
+             # Merge the existing and new hints, based on the type of the value.
+             # This avoids default dict merge behaviour that overrides the values;
+             # we want to combine them when they are Sequences, and ensure they are
+             # identical when string values.
+             for key, value in new_hint.hints.items():
+                 if isinstance(value, str):
+                     if key in hints:
+                         assert (
+                             hints[key] == value  # type: ignore[literal-required]
+                         ), f"Hints key {key} value may not be overridden"
+                     else:
+                         hints[key] = value  # type: ignore[literal-required]
+                 elif isinstance(value, Sequence):
+                     if key in hints:
+                         for new_val in value:
+                             assert (
+                                 new_val not in hints[key]  # type: ignore[literal-required]
+                             ), f"Hint {key} {new_val} overrides existing hint"
+                         hints[key] = (  # type: ignore[literal-required]
+                             hints[key] + value  # type: ignore[literal-required]
+                         )
+                     else:
+                         hints[key] = value  # type: ignore[literal-required]
+                 else:
+                     raise TypeError(
+                         f"{new_hint.name}: Unknown type for value '{value}' "
+                         f" for key '{key}'"
+                     )
+
+         return hints
+
+     @contextmanager
+     def add_children_as_readables(
+         self,
+         wrapper: Optional[ReadableChildWrapper] = None,
+     ) -> Generator[None, None, None]:
+         """Context manager to wrap adding Devices
+
+         Add Devices to this class instance inside the Context Manager to automatically
+         add them to the correct fields, based on the Device's interfaces.
+
+         The provided wrapper class will be applied to all Devices and can be used to
+         specify their behaviour.
+
+         Parameters
+         ----------
+         wrapper:
+             Wrapper class to apply to all Devices created inside the context manager.
+
+         See Also
+         --------
+         :func:`~StandardReadable.add_readables`
+         :class:`ConfigSignal`
+         :class:`HintedSignal`
+         :meth:`HintedSignal.uncached`
+         """
+
+         dict_copy = self.__dict__.copy()
+
+         yield
+
+         # Set symmetric difference operator gives all newly added keys
+         new_keys = dict_copy.keys() ^ self.__dict__.keys()
+         new_values = [self.__dict__[key] for key in new_keys]
+
+         flattened_values = []
+         for value in new_values:
+             if isinstance(value, DeviceVector):
+                 children = value.children()
+                 flattened_values.extend([x[1] for x in children])
+             else:
+                 flattened_values.append(value)
+
+         new_devices = list(filter(lambda x: isinstance(x, Device), flattened_values))
+         self.add_readables(new_devices, wrapper)
+
+     def add_readables(
+         self,
+         devices: Sequence[Device],
+         wrapper: Optional[ReadableChildWrapper] = None,
+     ) -> None:
+         """Add the given devices to the lists of known Devices
+
+         Add the provided Devices to the relevant fields, based on the Signal's
+         interfaces.
+
+         The provided wrapper class will be applied to all Devices and can be used to
+         specify their behaviour.
+
+         Parameters
+         ----------
+         devices:
+             The devices to be added
+         wrapper:
+             Wrapper class to apply to all Devices created inside the context manager.
+
+         See Also
+         --------
+         :func:`~StandardReadable.add_children_as_readables`
+         :class:`ConfigSignal`
+         :class:`HintedSignal`
+         :meth:`HintedSignal.uncached`
+         """
+
+         for readable in devices:
+             obj = readable
+             if wrapper:
+                 obj = wrapper(readable)
+
+             if isinstance(obj, AsyncReadable):
+                 self._readables += (obj,)
+
+             if isinstance(obj, AsyncConfigurable):
+                 self._configurables += (obj,)
+
+             if isinstance(obj, AsyncStageable):
+                 self._stageables += (obj,)
+
+             if isinstance(obj, HasHints):
+                 self._has_hints += (obj,)
+
+
+ class ConfigSignal(AsyncConfigurable):
+     def __init__(self, signal: ReadableChild) -> None:
+         assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+         self.signal = signal
+
+     async def read_configuration(self) -> Dict[str, Reading]:
+         return await self.signal.read()
+
+     async def describe_configuration(self) -> Dict[str, DataKey]:
+         return await self.signal.describe()
+
+
+ class HintedSignal(HasHints, AsyncReadable):
+     def __init__(self, signal: ReadableChild, allow_cache: bool = True) -> None:
+         assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+         self.signal = signal
+         self.cached = None if allow_cache else allow_cache
+         if allow_cache:
+             self.stage = signal.stage
+             self.unstage = signal.unstage
+
+     async def read(self) -> Dict[str, Reading]:
+         return await self.signal.read(cached=self.cached)
+
+     async def describe(self) -> Dict[str, DataKey]:
+         return await self.signal.describe()
+
+     @property
+     def name(self) -> str:
+         return self.signal.name
+
+     @property
+     def hints(self) -> Hints:
+         return {"fields": [self.signal.name]}
+
+     @classmethod
+     def uncached(cls, signal: ReadableChild) -> "HintedSignal":
+         return cls(signal, allow_cache=False)
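
The listing above replaces the 0.1.0 set_readable_signals() call (now deprecated) with the add_children_as_readables context manager and the add_readables method. A minimal sketch of how a device might register signals against this API; the ExampleSensor class, PV suffixes, and choice of the epics_signal_r/epics_signal_rw helpers are illustrative assumptions, not part of this diff:

from ophyd_async.core.standard_readable import (
    ConfigSignal,
    HintedSignal,
    StandardReadable,
)
from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw


class ExampleSensor(StandardReadable):
    """Hypothetical device showing the new registration API."""

    def __init__(self, prefix: str, name: str = "") -> None:
        # Wrapped in HintedSignal: included in read() and hinted for plotting
        with self.add_children_as_readables(HintedSignal):
            self.value = epics_signal_r(float, prefix + "Value")
        # Wrapped in ConfigSignal: included in read_configuration() instead
        with self.add_children_as_readables(ConfigSignal):
            self.gain = epics_signal_rw(float, prefix + "Gain")
        super().__init__(name=name)

Because add_children_as_readables diffs self.__dict__ before and after the with block, any Device or DeviceVector assigned inside it is picked up automatically; set_readable_signals still works but now emits a DeprecationWarning.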
ophyd_async/core/utils.py CHANGED
@@ -1,56 +1,132 @@
+ from __future__ import annotations
+
  import asyncio
- from typing import Awaitable, Callable, Dict, Iterable, List, Optional, Type, TypeVar
+ import logging
+ from dataclasses import dataclass
+ from typing import (
+     Awaitable,
+     Callable,
+     Dict,
+     Generic,
+     Iterable,
+     List,
+     Optional,
+     ParamSpec,
+     Type,
+     TypeVar,
+     Union,
+ )

  import numpy as np
  from bluesky.protocols import Reading

  T = TypeVar("T")
+ P = ParamSpec("P")
  Callback = Callable[[T], None]

  #: A function that will be called with the Reading and value when the
  #: monitor updates
  ReadingValueCallback = Callable[[Reading, T], None]
  DEFAULT_TIMEOUT = 10.0
+ ErrorText = Union[str, Dict[str, Exception]]
+
+
+ class CalculateTimeout:
+     """Sentinel class used to implement ``myfunc(timeout=CalculateTimeout)``
+
+     This signifies that the function should calculate a suitable non-zero
+     timeout itself
+     """
+
+
+ CalculatableTimeout = float | None | Type[CalculateTimeout]


  class NotConnected(Exception):
      """Exception to be raised if a `Device.connect` is cancelled"""

-     def __init__(self, *lines: str):
-         self.lines = list(lines)
+     _indent_width = "    "
+
+     def __init__(self, errors: ErrorText):
+         """
+         NotConnected holds a mapping of device/signal names to
+         errors.
+
+         Parameters
+         ----------
+         errors: ErrorText
+             Mapping of device name to Exception or another NotConnected.
+             Alternatively a string with the signal error text.
+         """
+
+         self._errors = errors
+
+     def _format_sub_errors(self, name: str, error: Exception, indent="") -> str:
+         if isinstance(error, NotConnected):
+             error_txt = ":" + error.format_error_string(indent + self._indent_width)
+         elif isinstance(error, Exception):
+             error_txt = ": " + err_str + "\n" if (err_str := str(error)) else "\n"
+         else:
+             raise RuntimeError(
+                 f"Unexpected type `{type(error)}`, expected an Exception"
+             )
+
+         string = f"{indent}{name}: {type(error).__name__}" + error_txt
+         return string
+
+     def format_error_string(self, indent="") -> str:
+         if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
+             raise RuntimeError(
+                 f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
+             )
+
+         if isinstance(self._errors, str):
+             return " " + self._errors + "\n"
+
+         string = "\n"
+         for name, error in self._errors.items():
+             string += self._format_sub_errors(name, error, indent=indent)
+         return string

      def __str__(self) -> str:
-         return "\n".join(self.lines)
+         return self.format_error_string(indent="")
+
+
+ @dataclass(frozen=True)
+ class WatcherUpdate(Generic[T]):
+     """A dataclass such that, when expanded, it provides the kwargs for a watcher"""
+
+     current: T
+     initial: T
+     target: T
+     name: str | None = None
+     unit: str | None = None
+     precision: float | None = None
+     fraction: float | None = None
+     time_elapsed: float | None = None
+     time_remaining: float | None = None


  async def wait_for_connection(**coros: Awaitable[None]):
-     """Call many underlying signals, accumulating `NotConnected` exceptions
+     """Call many underlying signals, accumulating exceptions and returning them

-     Raises
-     ------
-     `NotConnected` if cancelled
+     Expected kwargs should be a mapping of names to coroutine tasks to execute.
      """
-     ts = {k: asyncio.create_task(c) for (k, c) in coros.items()}  # type: ignore
-     try:
-         done, pending = await asyncio.wait(ts.values())
-     except asyncio.CancelledError:
-         for t in ts.values():
-             t.cancel()
-         lines: List[str] = []
-         for k, t in ts.items():
-             try:
-                 await t
-             except NotConnected as e:
-                 if len(e.lines) == 1:
-                     lines.append(f"{k}: {e.lines[0]}")
-                 else:
-                     lines.append(f"{k}:")
-                     lines += [f" {line}" for line in e.lines]
-         raise NotConnected(*lines)
-     else:
-         # Wait for everything to foreground the exceptions
-         for f in list(done) + list(pending):
-             await f
+     results = await asyncio.gather(*coros.values(), return_exceptions=True)
+     exceptions = {}
+
+     for name, result in zip(coros, results):
+         if isinstance(result, Exception):
+             exceptions[name] = result
+             if not isinstance(result, NotConnected):
+                 logging.exception(
+                     f"device `{name}` raised unexpected exception "
+                     f"{type(result).__name__}",
+                     exc_info=result,
+                 )
+
+     if exceptions:
+         raise NotConnected(exceptions)


  def get_dtype(typ: Type) -> Optional[np.dtype]:
@@ -86,7 +162,7 @@ def get_unique(values: Dict[str, T], types: str) -> T:


  async def merge_gathered_dicts(
-     coros: Iterable[Awaitable[Dict[str, T]]]
+     coros: Iterable[Awaitable[Dict[str, T]]],
  ) -> Dict[str, T]:
      """Merge dictionaries produced by a sequence of coroutines.

@@ -98,3 +174,24 @@ async def merge_gathered_dicts(
      for result in await asyncio.gather(*coros):
          ret.update(result)
      return ret
+
+
+ async def gather_list(coros: Iterable[Awaitable[T]]) -> List[T]:
+     return await asyncio.gather(*coros)
+
+
+ def in_micros(t: float) -> int:
+     """
+     Converts between a positive number of seconds and an equivalent
+     number of microseconds.
+
+     Args:
+         t (float): A time in seconds
+     Raises:
+         ValueError: if t < 0
+     Returns:
+         t (int): A time in microseconds, rounded up to the nearest whole microsecond,
+     """
+     if t < 0:
+         raise ValueError(f"Expected a positive time in seconds, got {t!r}")
+     return int(np.ceil(t * 1e6))
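
wait_for_connection now gathers every connect coroutine and raises a single NotConnected holding a name-to-error mapping, instead of joining preformatted strings. A small sketch of the new behaviour using only the utilities shown above; the ok/broken coroutines and the ca://DEMO:Missing text are illustrative:

import asyncio

from ophyd_async.core.utils import NotConnected, in_micros, wait_for_connection


async def demo() -> None:
    async def ok() -> None:
        pass  # stands in for a Device.connect() that succeeds

    async def broken() -> None:
        raise NotConnected("ca://DEMO:Missing")  # stands in for a failed connect

    try:
        await wait_for_connection(det=ok(), missing=broken())
    except NotConnected as e:
        # __str__ walks the name -> error mapping and indents nested errors
        print(e)


asyncio.run(demo())
print(in_micros(0.5))  # in_micros rounds seconds up to whole microseconds: 500000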
ophyd_async/epics/_backend/_aioca.py CHANGED
@@ -1,5 +1,5 @@
+ import logging
  import sys
- from asyncio import CancelledError
  from dataclasses import dataclass
  from enum import Enum
  from typing import Any, Dict, Optional, Sequence, Type, Union
@@ -8,17 +8,17 @@ from aioca import (
      FORMAT_CTRL,
      FORMAT_RAW,
      FORMAT_TIME,
+     CANothing,
      Subscription,
      caget,
      camonitor,
      caput,
  )
  from aioca.types import AugmentedValue, Dbr, Format
- from bluesky.protocols import Descriptor, Dtype, Reading
+ from bluesky.protocols import DataKey, Dtype, Reading
  from epicscorelibs.ca import dbr

  from ophyd_async.core import (
-     NotConnected,
      ReadingValueCallback,
      SignalBackend,
      T,
@@ -26,6 +26,9 @@ from ophyd_async.core import (
      get_unique,
      wait_for_connection,
  )
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+ from .common import get_supported_values

  dbr_to_dtype: Dict[Dbr, Dtype] = {
      dbr.DBR_STRING: "string",
@@ -49,24 +52,34 @@ class CaConverter:
          return value

      def reading(self, value: AugmentedValue):
-         return dict(
-             value=self.value(value),
-             timestamp=value.timestamp,
-             alarm_severity=-1 if value.severity > 2 else value.severity,
-         )
+         return {
+             "value": self.value(value),
+             "timestamp": value.timestamp,
+             "alarm_severity": -1 if value.severity > 2 else value.severity,
+         }
+
+     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+         return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
+

-     def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
-         return dict(source=source, dtype=dbr_to_dtype[value.datatype], shape=[])
+ class CaLongStrConverter(CaConverter):
+     def __init__(self):
+         return super().__init__(dbr.DBR_CHAR_STR, dbr.DBR_CHAR_STR)
+
+     def write_value(self, value: str):
+         # Add a null in here as this is what the commandline caput does
+         # TODO: this should be in the server so check if it can be pushed to asyn
+         return value + "\0"


  class CaArrayConverter(CaConverter):
-     def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
-         return dict(source=source, dtype="array", shape=[len(value)])
+     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+         return {"source": source, "dtype": "array", "shape": [len(value)]}


  @dataclass
  class CaEnumConverter(CaConverter):
-     enum_class: Type[Enum]
+     choices: dict[str, str]

      def write_value(self, value: Union[Enum, str]):
          if isinstance(value, Enum):
@@ -75,13 +88,15 @@ class CaEnumConverter(CaConverter):
          return value

      def value(self, value: AugmentedValue):
-         return self.enum_class(value)
+         return self.choices[value]

-     def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
-         choices = [e.value for e in self.enum_class]
-         return dict(
-             source=source, dtype="string", shape=[], choices=choices
-         )  # type: ignore
+     def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+         return {
+             "source": source,
+             "dtype": "string",
+             "shape": [],
+             "choices": list(self.choices),
+         }


  class DisconnectedCaConverter(CaConverter):
@@ -97,7 +112,7 @@ def make_converter(
      is_array = bool([v for v in values.values() if v.element_count > 1])
      if is_array and datatype is str and pv_dbr == dbr.DBR_CHAR:
          # Override waveform of chars to be treated as string
-         return CaConverter(dbr.DBR_CHAR_STR, dbr.DBR_CHAR_STR)
+         return CaLongStrConverter()
      elif is_array and pv_dbr == dbr.DBR_STRING:
          # Waveform of strings, check we wanted this
          if datatype and datatype != Sequence[str]:
@@ -127,18 +142,8 @@ def make_converter(
          pv_choices = get_unique(
              {k: tuple(v.enums) for k, v in values.items()}, "choices"
          )
-         if datatype:
-             if not issubclass(datatype, Enum):
-                 raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
-             choices = tuple(v.value for v in datatype)
-             if set(choices) != set(pv_choices):
-                 raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
-             enum_class = datatype
-         else:
-             enum_class = Enum(  # type: ignore
-                 "GeneratedChoices", {x: x for x in pv_choices}, type=str
-             )
-         return CaEnumConverter(dbr.DBR_STRING, None, enum_class)
+         supported_values = get_supported_values(pv, datatype, pv_choices)
+         return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
      else:
          value = list(values.values())[0]
          # Done the dbr check, so enough to check one of the values
@@ -169,26 +174,31 @@ class CaSignalBackend(SignalBackend[T]):
          self.write_pv = write_pv
          self.initial_values: Dict[str, AugmentedValue] = {}
          self.converter: CaConverter = DisconnectedCaConverter(None, None)
-         self.source = f"ca://{self.read_pv}"
          self.subscription: Optional[Subscription] = None

-     async def _store_initial_value(self, pv):
+     def source(self, name: str):
+         return f"ca://{self.read_pv}"
+
+     async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
          try:
-             self.initial_values[pv] = await caget(pv, format=FORMAT_CTRL, timeout=None)
-         except CancelledError:
-             raise NotConnected(self.source)
+             self.initial_values[pv] = await caget(
+                 pv, format=FORMAT_CTRL, timeout=timeout
+             )
+         except CANothing as exc:
+             logging.debug(f"signal ca://{pv} timed out")
+             raise NotConnected(f"ca://{pv}") from exc

-     async def connect(self):
+     async def connect(self, timeout: float = DEFAULT_TIMEOUT):
          _use_pyepics_context_if_imported()
          if self.read_pv != self.write_pv:
              # Different, need to connect both
              await wait_for_connection(
-                 read_pv=self._store_initial_value(self.read_pv),
-                 write_pv=self._store_initial_value(self.write_pv),
+                 read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+                 write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
              )
          else:
              # The same, so only need to connect one
-             await self._store_initial_value(self.read_pv)
+             await self._store_initial_value(self.read_pv, timeout=timeout)
          self.converter = make_converter(self.datatype, self.initial_values)

      async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -212,9 +222,9 @@ class CaSignalBackend(SignalBackend[T]):
              timeout=None,
          )

-     async def get_descriptor(self) -> Descriptor:
+     async def get_datakey(self, source: str) -> DataKey:
          value = await self._caget(FORMAT_CTRL)
-         return self.converter.descriptor(self.source, value)
+         return self.converter.get_datakey(source, value)

      async def get_reading(self) -> Reading:
          value = await self._caget(FORMAT_TIME)
@@ -224,6 +234,15 @@ class CaSignalBackend(SignalBackend[T]):
          value = await self._caget(FORMAT_RAW)
          return self.converter.value(value)

+     async def get_setpoint(self) -> T:
+         value = await caget(
+             self.write_pv,
+             datatype=self.converter.read_dbr,
+             format=FORMAT_RAW,
+             timeout=None,
+         )
+         return self.converter.value(value)
+
      def set_callback(self, callback: Optional[ReadingValueCallback[T]]) -> None:
          if callback:
              assert (
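
The converter changes above rename descriptor() to get_datakey() and swap the generated Enum class in CaEnumConverter for a plain choices mapping. A rough sketch of what a converter now produces; the choices mapping and PV name are invented, and a real backend passes an aioca AugmentedValue where a bare string is used here:

from epicscorelibs.ca import dbr

from ophyd_async.epics._backend._aioca import CaEnumConverter

# Hypothetical mapping, standing in for the output of get_supported_values()
converter = CaEnumConverter(dbr.DBR_STRING, None, {"Off": "Off", "On": "On"})

# value() now looks the state up in the choices dict and returns a plain str,
# rather than constructing a member of a generated Enum class
print(converter.value("On"))  # -> On

# get_datakey() replaces descriptor() and is handed the source by the caller,
# matching the CaSignalBackend.get_datakey(source) change above
print(converter.get_datakey("ca://DEMO:Mode", "On"))
# -> {'source': 'ca://DEMO:Mode', 'dtype': 'string', 'shape': [], 'choices': ['Off', 'On']}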