ophyd-async 0.3a2__py3-none-any.whl → 0.3a4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (53)
  1. ophyd_async/_version.py +1 -1
  2. ophyd_async/core/__init__.py +35 -11
  3. ophyd_async/core/async_status.py +2 -0
  4. ophyd_async/core/detector.py +8 -9
  5. ophyd_async/core/device.py +22 -9
  6. ophyd_async/core/flyer.py +2 -2
  7. ophyd_async/core/mock_signal_backend.py +86 -0
  8. ophyd_async/core/mock_signal_utils.py +149 -0
  9. ophyd_async/core/signal.py +140 -49
  10. ophyd_async/core/signal_backend.py +2 -2
  11. ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py} +29 -39
  12. ophyd_async/core/standard_readable.py +211 -24
  13. ophyd_async/epics/_backend/_aioca.py +17 -13
  14. ophyd_async/epics/_backend/_p4p.py +28 -18
  15. ophyd_async/epics/_backend/common.py +17 -17
  16. ophyd_async/epics/areadetector/__init__.py +4 -4
  17. ophyd_async/epics/areadetector/aravis.py +7 -9
  18. ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
  19. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  20. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  21. ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
  22. ophyd_async/epics/areadetector/drivers/ad_base.py +12 -10
  23. ophyd_async/epics/areadetector/drivers/aravis_driver.py +7 -5
  24. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  25. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +5 -2
  26. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  27. ophyd_async/epics/areadetector/kinetix.py +46 -0
  28. ophyd_async/epics/areadetector/pilatus.py +7 -12
  29. ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
  30. ophyd_async/epics/areadetector/utils.py +2 -12
  31. ophyd_async/epics/areadetector/vimba.py +43 -0
  32. ophyd_async/epics/areadetector/writers/hdf_writer.py +6 -3
  33. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +21 -18
  34. ophyd_async/epics/areadetector/writers/nd_plugin.py +6 -7
  35. ophyd_async/epics/demo/__init__.py +19 -22
  36. ophyd_async/epics/motion/motor.py +16 -13
  37. ophyd_async/epics/pvi/pvi.py +11 -11
  38. ophyd_async/epics/signal/signal.py +1 -1
  39. ophyd_async/log.py +130 -0
  40. ophyd_async/panda/_hdf_panda.py +3 -3
  41. ophyd_async/panda/writers/_hdf_writer.py +3 -3
  42. ophyd_async/protocols.py +26 -3
  43. ophyd_async/sim/demo/sim_motor.py +14 -12
  44. ophyd_async/sim/pattern_generator.py +9 -9
  45. ophyd_async/sim/sim_pattern_detector_writer.py +2 -2
  46. ophyd_async/sim/sim_pattern_generator.py +2 -2
  47. {ophyd_async-0.3a2.dist-info → ophyd_async-0.3a4.dist-info}/METADATA +20 -3
  48. ophyd_async-0.3a4.dist-info/RECORD +85 -0
  49. ophyd_async-0.3a2.dist-info/RECORD +0 -76
  50. {ophyd_async-0.3a2.dist-info → ophyd_async-0.3a4.dist-info}/LICENSE +0 -0
  51. {ophyd_async-0.3a2.dist-info → ophyd_async-0.3a4.dist-info}/WHEEL +0 -0
  52. {ophyd_async-0.3a2.dist-info → ophyd_async-0.3a4.dist-info}/entry_points.txt +0 -0
  53. {ophyd_async-0.3a2.dist-info → ophyd_async-0.3a4.dist-info}/top_level.txt +0 -0
ophyd_async/core/standard_readable.py

@@ -1,16 +1,34 @@
-from typing import Dict, Sequence, Tuple
+import warnings
+from contextlib import contextmanager
+from typing import (
+    Callable,
+    Dict,
+    Generator,
+    Optional,
+    Sequence,
+    Tuple,
+    Type,
+    Union,
+)
 
-from bluesky.protocols import Descriptor, Reading, Stageable
+from bluesky.protocols import DataKey, HasHints, Hints, Reading
 
-from ophyd_async.protocols import AsyncConfigurable, AsyncReadable
+from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageable
 
 from .async_status import AsyncStatus
-from .device import Device
+from .device import Device, DeviceVector
 from .signal import SignalR
 from .utils import merge_gathered_dicts
 
+ReadableChild = Union[AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints]
+ReadableChildWrapper = Union[
+    Callable[[ReadableChild], ReadableChild], Type["ConfigSignal"], Type["HintedSignal"]
+]
 
-class StandardReadable(Device, AsyncReadable, AsyncConfigurable, Stageable):
+
+class StandardReadable(
+    Device, AsyncReadable, AsyncConfigurable, AsyncStageable, HasHints
+):
     """Device that owns its children and provides useful default behavior.
 
     - When its name is set it renames child Devices
@@ -18,9 +36,12 @@ class StandardReadable(Device, AsyncReadable, AsyncConfigurable, Stageable):
     - These signals will be subscribed for read() between stage() and unstage()
     """
 
-    _read_signals: Tuple[SignalR, ...] = ()
-    _configuration_signals: Tuple[SignalR, ...] = ()
-    _read_uncached_signals: Tuple[SignalR, ...] = ()
+    # These must be immutable types to avoid accidental sharing between
+    # different instances of the class
+    _readables: Tuple[AsyncReadable, ...] = ()
+    _configurables: Tuple[AsyncConfigurable, ...] = ()
+    _stageables: Tuple[AsyncStageable, ...] = ()
+    _has_hints: Tuple[HasHints, ...] = ()
 
     def set_readable_signals(
         self,
@@ -38,37 +59,203 @@ class StandardReadable(Device, AsyncReadable, AsyncConfigurable, Stageable):
         read_uncached:
             Signals to make up :meth:`~StandardReadable.read` that won't be cached
         """
-        self._read_signals = tuple(read)
-        self._configuration_signals = tuple(config)
-        self._read_uncached_signals = tuple(read_uncached)
+        warnings.warn(
+            DeprecationWarning(
+                "Migrate to `add_children_as_readables` context manager or "
+                "`add_readables` method"
+            )
+        )
+        self.add_readables(read, wrapper=HintedSignal)
+        self.add_readables(config, wrapper=ConfigSignal)
+        self.add_readables(read_uncached, wrapper=HintedSignal.uncached)
 
     @AsyncStatus.wrap
     async def stage(self) -> None:
-        for sig in self._read_signals + self._configuration_signals:
+        for sig in self._stageables:
             await sig.stage().task
 
     @AsyncStatus.wrap
     async def unstage(self) -> None:
-        for sig in self._read_signals + self._configuration_signals:
+        for sig in self._stageables:
             await sig.unstage().task
 
-    async def describe_configuration(self) -> Dict[str, Descriptor]:
+    async def describe_configuration(self) -> Dict[str, DataKey]:
         return await merge_gathered_dicts(
-            [sig.describe() for sig in self._configuration_signals]
+            [sig.describe_configuration() for sig in self._configurables]
         )
 
     async def read_configuration(self) -> Dict[str, Reading]:
         return await merge_gathered_dicts(
-            [sig.read() for sig in self._configuration_signals]
+            [sig.read_configuration() for sig in self._configurables]
         )
 
-    async def describe(self) -> Dict[str, Descriptor]:
-        return await merge_gathered_dicts(
-            [sig.describe() for sig in self._read_signals + self._read_uncached_signals]
-        )
+    async def describe(self) -> Dict[str, DataKey]:
+        return await merge_gathered_dicts([sig.describe() for sig in self._readables])
 
     async def read(self) -> Dict[str, Reading]:
-        return await merge_gathered_dicts(
-            [sig.read() for sig in self._read_signals]
-            + [sig.read(cached=False) for sig in self._read_uncached_signals]
-        )
+        return await merge_gathered_dicts([sig.read() for sig in self._readables])
+
+    @property
+    def hints(self) -> Hints:
+        hints: Hints = {}
+        for new_hint in self._has_hints:
+            # Merge the existing and new hints, based on the type of the value.
+            # This avoids default dict merge behaviour that overrides the values;
+            # we want to combine them when they are Sequences, and ensure they are
+            # identical when string values.
+            for key, value in new_hint.hints.items():
+                if isinstance(value, str):
+                    if key in hints:
+                        assert (
+                            hints[key] == value  # type: ignore[literal-required]
+                        ), f"Hints key {key} value may not be overridden"
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                elif isinstance(value, Sequence):
+                    if key in hints:
+                        for new_val in value:
+                            assert (
+                                new_val not in hints[key]  # type: ignore[literal-required]
+                            ), f"Hint {key} {new_val} overrides existing hint"
+                        hints[key] = (  # type: ignore[literal-required]
+                            hints[key] + value  # type: ignore[literal-required]
+                        )
+                    else:
+                        hints[key] = value  # type: ignore[literal-required]
+                else:
+                    raise TypeError(
+                        f"{new_hint.name}: Unknown type for value '{value}' "
+                        f" for key '{key}'"
+                    )
+
+        return hints
+
+    @contextmanager
+    def add_children_as_readables(
+        self,
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> Generator[None, None, None]:
+        """Context manager to wrap adding Devices
+
+        Add Devices to this class instance inside the Context Manager to automatically
+        add them to the correct fields, based on the Device's interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        dict_copy = self.__dict__.copy()
+
+        yield
+
+        # Set symmetric difference operator gives all newly added keys
+        new_keys = dict_copy.keys() ^ self.__dict__.keys()
+        new_values = [self.__dict__[key] for key in new_keys]
+
+        flattened_values = []
+        for value in new_values:
+            if isinstance(value, DeviceVector):
+                children = value.children()
+                flattened_values.extend([x[1] for x in children])
+            else:
+                flattened_values.append(value)
+
+        new_devices = list(filter(lambda x: isinstance(x, Device), flattened_values))
+        self.add_readables(new_devices, wrapper)
+
+    def add_readables(
+        self,
+        devices: Sequence[Device],
+        wrapper: Optional[ReadableChildWrapper] = None,
+    ) -> None:
+        """Add the given devices to the lists of known Devices
+
+        Add the provided Devices to the relevant fields, based on the Signal's
+        interfaces.
+
+        The provided wrapper class will be applied to all Devices and can be used to
+        specify their behaviour.
+
+        Parameters
+        ----------
+        devices:
+            The devices to be added
+        wrapper:
+            Wrapper class to apply to all Devices created inside the context manager.
+
+        See Also
+        --------
+        :func:`~StandardReadable.add_children_as_readables`
+        :class:`ConfigSignal`
+        :class:`HintedSignal`
+        :meth:`HintedSignal.uncached`
+        """
+
+        for readable in devices:
+            obj = readable
+            if wrapper:
+                obj = wrapper(readable)
+
+            if isinstance(obj, AsyncReadable):
+                self._readables += (obj,)
+
+            if isinstance(obj, AsyncConfigurable):
+                self._configurables += (obj,)
+
+            if isinstance(obj, AsyncStageable):
+                self._stageables += (obj,)
+
+            if isinstance(obj, HasHints):
+                self._has_hints += (obj,)
+
+
+class ConfigSignal(AsyncConfigurable):
+    def __init__(self, signal: ReadableChild) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+
+    async def read_configuration(self) -> Dict[str, Reading]:
+        return await self.signal.read()
+
+    async def describe_configuration(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+
+class HintedSignal(HasHints, AsyncReadable):
+    def __init__(self, signal: ReadableChild, allow_cache: bool = True) -> None:
+        assert isinstance(signal, SignalR), f"Expected signal, got {signal}"
+        self.signal = signal
+        self.cached = None if allow_cache else allow_cache
+        if allow_cache:
+            self.stage = signal.stage
+            self.unstage = signal.unstage
+
+    async def read(self) -> Dict[str, Reading]:
+        return await self.signal.read(cached=self.cached)
+
+    async def describe(self) -> Dict[str, DataKey]:
+        return await self.signal.describe()
+
+    @property
+    def name(self) -> str:
+        return self.signal.name
+
+    @property
+    def hints(self) -> Hints:
+        return {"fields": [self.signal.name]}
+
+    @classmethod
+    def uncached(cls, signal: ReadableChild) -> "HintedSignal":
+        return cls(signal, allow_cache=False)
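
For context, a minimal sketch of how the new `add_children_as_readables` / wrapper API replaces `set_readable_signals`. The `ExampleSensor` class, PV strings and child signals below are illustrative only, not part of the package:

# Hypothetical device; class name, PV suffixes and signals are examples only.
from ophyd_async.core import StandardReadable
from ophyd_async.core.standard_readable import ConfigSignal, HintedSignal
from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw


class ExampleSensor(StandardReadable):
    def __init__(self, prefix: str, name: str = "") -> None:
        # Children created inside these blocks are wrapped and sorted into
        # _readables / _configurables / _stageables / _has_hints.
        with self.add_children_as_readables(HintedSignal):
            self.value = epics_signal_r(float, prefix + "Value")
        with self.add_children_as_readables(ConfigSignal):
            self.mode = epics_signal_rw(str, prefix + "Mode")
        super().__init__(name=name)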
ophyd_async/epics/_backend/_aioca.py

@@ -15,7 +15,7 @@ from aioca import (
     caput,
 )
 from aioca.types import AugmentedValue, Dbr, Format
-from bluesky.protocols import Descriptor, Dtype, Reading
+from bluesky.protocols import DataKey, Dtype, Reading
 from epicscorelibs.ca import dbr
 
 from ophyd_async.core import (
@@ -28,7 +28,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_enum_class
+from .common import get_supported_values
 
 dbr_to_dtype: Dict[Dbr, Dtype] = {
     dbr.DBR_STRING: "string",
@@ -58,7 +58,7 @@ class CaConverter:
             "alarm_severity": -1 if value.severity > 2 else value.severity,
         }
 
-    def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
         return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
 
 
@@ -73,13 +73,13 @@ class CaLongStrConverter(CaConverter):
 
 
 class CaArrayConverter(CaConverter):
-    def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
         return {"source": source, "dtype": "array", "shape": [len(value)]}
 
 
 @dataclass
 class CaEnumConverter(CaConverter):
-    enum_class: Type[Enum]
+    choices: dict[str, str]
 
     def write_value(self, value: Union[Enum, str]):
         if isinstance(value, Enum):
@@ -88,11 +88,15 @@ class CaEnumConverter(CaConverter):
         return value
 
     def value(self, value: AugmentedValue):
-        return self.enum_class(value)
+        return self.choices[value]
 
-    def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
-        choices = [e.value for e in self.enum_class]
-        return {"source": source, "dtype": "string", "shape": [], "choices": choices}
+    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+        return {
+            "source": source,
+            "dtype": "string",
+            "shape": [],
+            "choices": list(self.choices),
+        }
 
 
 class DisconnectedCaConverter(CaConverter):
@@ -138,8 +142,8 @@ def make_converter(
         pv_choices = get_unique(
             {k: tuple(v.enums) for k, v in values.items()}, "choices"
         )
-        enum_class = get_supported_enum_class(pv, datatype, pv_choices)
-        return CaEnumConverter(dbr.DBR_STRING, None, enum_class)
+        supported_values = get_supported_values(pv, datatype, pv_choices)
+        return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
     else:
         value = list(values.values())[0]
         # Done the dbr check, so enough to check one of the values
@@ -218,9 +222,9 @@ class CaSignalBackend(SignalBackend[T]):
             timeout=None,
         )
 
-    async def get_descriptor(self, source: str) -> Descriptor:
+    async def get_datakey(self, source: str) -> DataKey:
         value = await self._caget(FORMAT_CTRL)
-        return self.converter.descriptor(source, value)
+        return self.converter.get_datakey(source, value)
 
     async def get_reading(self) -> Reading:
         value = await self._caget(FORMAT_TIME)
ophyd_async/epics/_backend/_p4p.py

@@ -6,7 +6,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, List, Optional, Sequence, Type, Union
 
-from bluesky.protocols import Descriptor, Dtype, Reading
+from bluesky.protocols import DataKey, Dtype, Reading
 from p4p import Value
 from p4p.client.asyncio import Context, Subscription
 
@@ -20,7 +20,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_enum_class
+from .common import get_supported_values
 
 # https://mdavidsaver.github.io/p4p/values.html
 specifier_to_dtype: Dict[str, Dtype] = {
@@ -55,7 +55,7 @@ class PvaConverter:
             "alarm_severity": -1 if sv > 2 else sv,
         }
 
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         dtype = specifier_to_dtype[value.type().aspy("value")]
         return {"source": source, "dtype": dtype, "shape": []}
 
@@ -73,7 +73,7 @@ class PvaConverter:
 
 
 class PvaArrayConverter(PvaConverter):
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         return {"source": source, "dtype": "array", "shape": [len(value["value"])]}
 
 
@@ -96,7 +96,7 @@ class PvaNDArrayConverter(PvaConverter):
         dims = self._get_dimensions(value)
         return value["value"].reshape(dims)
 
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         dims = self._get_dimensions(value)
         return {"source": source, "dtype": "array", "shape": dims}
 
@@ -109,7 +109,8 @@ class PvaNDArrayConverter(PvaConverter):
 
 @dataclass
 class PvaEnumConverter(PvaConverter):
-    enum_class: Type[Enum]
+    def __init__(self, choices: dict[str, str]):
+        self.choices = tuple(choices.values())
 
     def write_value(self, value: Union[Enum, str]):
         if isinstance(value, Enum):
@@ -118,18 +119,22 @@ class PvaEnumConverter(PvaConverter):
         return value
 
     def value(self, value):
-        return list(self.enum_class)[value["value"]["index"]]
+        return self.choices[value["value"]["index"]]
 
-    def descriptor(self, source: str, value) -> Descriptor:
-        choices = [e.value for e in self.enum_class]
-        return {"source": source, "dtype": "string", "shape": [], "choices": choices}
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {
+            "source": source,
+            "dtype": "string",
+            "shape": [],
+            "choices": list(self.choices),
+        }
 
 
 class PvaEnumBoolConverter(PvaConverter):
     def value(self, value):
         return value["value"]["index"]
 
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         return {"source": source, "dtype": "integer", "shape": []}
 
 
@@ -137,7 +142,7 @@ class PvaTableConverter(PvaConverter):
     def value(self, value):
         return value["value"].todict()
 
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         # This is wrong, but defer until we know how to actually describe a table
         return {"source": source, "dtype": "object", "shape": []}  # type: ignore
 
@@ -152,7 +157,7 @@ class PvaDictConverter(PvaConverter):
     def value(self, value: Value):
         return value.todict()
 
-    def descriptor(self, source: str, value) -> Descriptor:
+    def get_datakey(self, source: str, value) -> DataKey:
         raise NotImplementedError("Describing Dict signals not currently supported")
 
     def metadata_fields(self) -> List[str]:
@@ -214,9 +219,15 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
         pv_choices = get_unique(
             {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
         )
-        return PvaEnumConverter(get_supported_enum_class(pv, datatype, pv_choices))
+        return PvaEnumConverter(get_supported_values(pv, datatype, pv_choices))
     elif "NTScalar" in typeid:
-        if datatype and not issubclass(typ, datatype):
+        if (
+            datatype
+            and not issubclass(typ, datatype)
+            and not (
+                typ is float and datatype is int
+            )  # Allow float -> int since prec can be 0
+        ):
             raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
         return PvaConverter()
     elif "NTTable" in typeid:
@@ -238,7 +249,6 @@ class PvaSignalBackend(SignalBackend[T]):
         self.converter: PvaConverter = DisconnectedPvaConverter()
         self.subscription: Optional[Subscription] = None
 
-    @property
     def source(self, name: str):
         return f"pva://{self.read_pv}"
 
@@ -293,9 +303,9 @@ class PvaSignalBackend(SignalBackend[T]):
             )
             raise NotConnected(f"pva://{self.write_pv}") from exc
 
-    async def get_descriptor(self, source: str) -> Descriptor:
+    async def get_datakey(self, source: str) -> DataKey:
         value = await self.ctxt.get(self.read_pv)
-        return self.converter.descriptor(source, value)
+        return self.converter.get_datakey(source, value)
 
     def _pva_request_string(self, fields: List[str]) -> str:
         """
ophyd_async/epics/_backend/common.py

@@ -1,25 +1,25 @@
 from enum import Enum
-from typing import Any, Optional, Tuple, Type
+from typing import Dict, Optional, Tuple, Type
 
 
-def get_supported_enum_class(
+def get_supported_values(
     pv: str,
-    datatype: Optional[Type[Enum]],
-    pv_choices: Tuple[Any, ...],
-) -> Type[Enum]:
+    datatype: Optional[Type[str]],
+    pv_choices: Tuple[str, ...],
+) -> Dict[str, str]:
     if not datatype:
-        return Enum("GeneratedChoices", {x or "_": x for x in pv_choices}, type=str)  # type: ignore
+        return {x: x or "_" for x in pv_choices}
 
-    if not issubclass(datatype, Enum):
-        raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
     if not issubclass(datatype, str):
-        raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
-    choices = tuple(v.value for v in datatype)
-    if set(choices) != set(pv_choices):
-        raise TypeError(
-            (
-                f"{pv} has choices {pv_choices}, "
-                f"which do not match {datatype}, which has {choices}"
+        raise TypeError(f"{pv} is type Enum but doesn't inherit from String")
+    if issubclass(datatype, Enum):
+        choices = tuple(v.value for v in datatype)
+        if set(choices) != set(pv_choices):
+            raise TypeError(
+                (
+                    f"{pv} has choices {pv_choices}, "
+                    f"which do not match {datatype}, which has {choices}"
+                )
            )
-        )
-    return datatype
+        return {x: datatype(x) for x in pv_choices}
+    return {x: x for x in pv_choices}
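
A short sketch of the new `get_supported_values` mapping behaviour; the PV name and `OnOff` enum below are made-up examples:

from enum import Enum

from ophyd_async.epics._backend.common import get_supported_values

# With no datatype, PV choices map to themselves (an empty choice maps to "_"):
assert get_supported_values("PV:ENUM", None, ("Off", "On")) == {"Off": "Off", "On": "On"}


class OnOff(str, Enum):
    OFF = "Off"
    ON = "On"


# With a str-based Enum whose values match the PV choices, each choice maps
# to the corresponding Enum member:
assert get_supported_values("PV:ENUM", OnOff, ("Off", "On")) == {"Off": OnOff.OFF, "On": OnOff.ON}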
ophyd_async/epics/areadetector/__init__.py

@@ -1,4 +1,5 @@
 from .aravis import AravisDetector
+from .kinetix import KinetixDetector
 from .pilatus import PilatusDetector
 from .single_trigger_det import SingleTriggerDet
 from .utils import (
@@ -6,17 +7,16 @@ from .utils import (
     ImageMode,
     NDAttributeDataType,
     NDAttributesXML,
-    ad_r,
-    ad_rw,
 )
+from .vimba import VimbaDetector
 
 __all__ = [
     "AravisDetector",
+    "KinetixDetector",
+    "VimbaDetector",
     "SingleTriggerDet",
     "FileWriteMode",
     "ImageMode",
-    "ad_r",
-    "ad_rw",
     "NDAttributeDataType",
     "NDAttributesXML",
     "PilatusDetector",
ophyd_async/epics/areadetector/aravis.py

@@ -23,16 +23,15 @@ class AravisDetector(StandardDetector, HasHints):
 
     def __init__(
         self,
-        name: str,
+        prefix: str,
         directory_provider: DirectoryProvider,
-        driver: AravisDriver,
-        hdf: NDFileHDF,
+        drv_suffix="cam1:",
+        hdf_suffix="HDF1:",
+        name="",
         gpio_number: AravisController.GPIO_NUMBER = 1,
-        **scalar_sigs: str,
     ):
-        # Must be child of Detector to pick up connect()
-        self.drv = driver
-        self.hdf = hdf
+        self.drv = AravisDriver(prefix + drv_suffix)
+        self.hdf = NDFileHDF(prefix + hdf_suffix)
 
         super().__init__(
             AravisController(self.drv, gpio_number=gpio_number),
@@ -41,9 +40,8 @@
                 directory_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
-                **scalar_sigs,
             ),
-            config_sigs=(self.drv.acquire_time, self.drv.acquire),
+            config_sigs=(self.drv.acquire_time,),
             name=name,
         )
 
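A hedged sketch of the reworked `AravisDetector` constructor, which now builds its own driver and HDF writer from a PV prefix plus suffixes instead of taking ready-made instances; the prefix, suffixes and directory provider arguments below are illustrative:

# Illustrative only: the PV prefix and directory provider arguments are
# placeholders; the detector would normally be created and connected inside
# a DeviceCollector against a reachable IOC.
from ophyd_async.core import StaticDirectoryProvider
from ophyd_async.epics.areadetector import AravisDetector

directory_provider = StaticDirectoryProvider("/data", "aravis")
det = AravisDetector(
    "BL01T-EA-DET-01:",  # PV prefix; drv/hdf suffixes default to "cam1:" / "HDF1:"
    directory_provider,
    name="det",
)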
ophyd_async/epics/areadetector/controllers/__init__.py

@@ -1,4 +1,5 @@
 from .ad_sim_controller import ADSimController
+from .aravis_controller import AravisController
 from .pilatus_controller import PilatusController
 
-__all__ = ["PilatusController", "ADSimController"]
+__all__ = ["PilatusController", "ADSimController", "AravisController"]
ophyd_async/epics/areadetector/controllers/kinetix_controller.py

@@ -0,0 +1,49 @@
+import asyncio
+from typing import Optional
+
+from ophyd_async.core import AsyncStatus, DetectorControl, DetectorTrigger
+from ophyd_async.epics.areadetector.drivers.ad_base import (
+    start_acquiring_driver_and_ensure_status,
+)
+
+from ..drivers.kinetix_driver import KinetixDriver, KinetixTriggerMode
+from ..utils import ImageMode, stop_busy_record
+
+KINETIX_TRIGGER_MODE_MAP = {
+    DetectorTrigger.internal: KinetixTriggerMode.internal,
+    DetectorTrigger.constant_gate: KinetixTriggerMode.gate,
+    DetectorTrigger.variable_gate: KinetixTriggerMode.gate,
+    DetectorTrigger.edge_trigger: KinetixTriggerMode.edge,
+}
+
+
+class KinetixController(DetectorControl):
+    def __init__(
+        self,
+        driver: KinetixDriver,
+    ) -> None:
+        self._drv = driver
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.001
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        await asyncio.gather(
+            self._drv.trigger_mode.set(KINETIX_TRIGGER_MODE_MAP[trigger]),
+            self._drv.num_images.set(num),
+            self._drv.image_mode.set(ImageMode.multiple),
+        )
+        if exposure is not None and trigger not in [
+            DetectorTrigger.variable_gate,
+            DetectorTrigger.constant_gate,
+        ]:
+            await self._drv.acquire_time.set(exposure)
+        return await start_acquiring_driver_and_ensure_status(self._drv)
+
+    async def disarm(self):
+        await stop_busy_record(self._drv.acquire, False, timeout=1)
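
A hedged sketch of driving the new `KinetixController`; the PV prefix, driver suffix and helper coroutine below are illustrative and assume a reachable IOC:

# Illustrative only: prefix and suffix are placeholders; KinetixDriver and
# KinetixController are the classes added in this release.
from ophyd_async.core import DetectorTrigger
from ophyd_async.epics.areadetector.controllers.kinetix_controller import KinetixController
from ophyd_async.epics.areadetector.drivers.kinetix_driver import KinetixDriver


async def arm_kinetix(prefix: str, num: int, exposure: float) -> None:
    drv = KinetixDriver(prefix + "cam1:")
    await drv.connect()
    controller = KinetixController(drv)
    # edge_trigger resolves to KinetixTriggerMode.edge via KINETIX_TRIGGER_MODE_MAP
    status = await controller.arm(num, trigger=DetectorTrigger.edge_trigger, exposure=exposure)
    await status  # wait for acquisition to start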