ophyd-async 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +2 -2
  3. ophyd_async/core/__init__.py +52 -19
  4. ophyd_async/core/_providers.py +38 -5
  5. ophyd_async/core/async_status.py +86 -40
  6. ophyd_async/core/detector.py +214 -72
  7. ophyd_async/core/device.py +91 -50
  8. ophyd_async/core/device_save_loader.py +96 -23
  9. ophyd_async/core/flyer.py +32 -246
  10. ophyd_async/core/mock_signal_backend.py +82 -0
  11. ophyd_async/core/mock_signal_utils.py +145 -0
  12. ophyd_async/core/signal.py +225 -58
  13. ophyd_async/core/signal_backend.py +8 -5
  14. ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py} +51 -49
  15. ophyd_async/core/standard_readable.py +212 -23
  16. ophyd_async/core/utils.py +123 -30
  17. ophyd_async/epics/_backend/_aioca.py +42 -44
  18. ophyd_async/epics/_backend/_p4p.py +96 -52
  19. ophyd_async/epics/_backend/common.py +25 -0
  20. ophyd_async/epics/areadetector/__init__.py +8 -4
  21. ophyd_async/epics/areadetector/aravis.py +63 -0
  22. ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
  23. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
  24. ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
  25. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  26. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +37 -25
  27. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  28. ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
  29. ophyd_async/epics/areadetector/drivers/ad_base.py +8 -12
  30. ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
  31. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  32. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +8 -5
  33. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  34. ophyd_async/epics/areadetector/kinetix.py +46 -0
  35. ophyd_async/epics/areadetector/pilatus.py +45 -0
  36. ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
  37. ophyd_async/epics/areadetector/utils.py +2 -12
  38. ophyd_async/epics/areadetector/vimba.py +43 -0
  39. ophyd_async/epics/areadetector/writers/_hdffile.py +21 -7
  40. ophyd_async/epics/areadetector/writers/hdf_writer.py +32 -17
  41. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +19 -18
  42. ophyd_async/epics/areadetector/writers/nd_plugin.py +15 -7
  43. ophyd_async/epics/demo/__init__.py +75 -49
  44. ophyd_async/epics/motion/motor.py +67 -53
  45. ophyd_async/epics/pvi/__init__.py +3 -0
  46. ophyd_async/epics/pvi/pvi.py +318 -0
  47. ophyd_async/epics/signal/__init__.py +8 -3
  48. ophyd_async/epics/signal/signal.py +26 -9
  49. ophyd_async/log.py +130 -0
  50. ophyd_async/panda/__init__.py +21 -5
  51. ophyd_async/panda/_common_blocks.py +49 -0
  52. ophyd_async/panda/_hdf_panda.py +48 -0
  53. ophyd_async/panda/_panda_controller.py +37 -0
  54. ophyd_async/panda/_trigger.py +39 -0
  55. ophyd_async/panda/_utils.py +15 -0
  56. ophyd_async/panda/writers/__init__.py +3 -0
  57. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  58. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  59. ophyd_async/plan_stubs/__init__.py +13 -0
  60. ophyd_async/plan_stubs/ensure_connected.py +22 -0
  61. ophyd_async/plan_stubs/fly.py +149 -0
  62. ophyd_async/protocols.py +126 -0
  63. ophyd_async/sim/__init__.py +11 -0
  64. ophyd_async/sim/demo/__init__.py +3 -0
  65. ophyd_async/sim/demo/sim_motor.py +103 -0
  66. ophyd_async/sim/pattern_generator.py +318 -0
  67. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  68. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  69. ophyd_async/sim/sim_pattern_generator.py +37 -0
  70. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +31 -70
  71. ophyd_async-0.3.0.dist-info/RECORD +86 -0
  72. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
  73. ophyd_async/epics/signal/pvi_get.py +0 -22
  74. ophyd_async/panda/panda.py +0 -294
  75. ophyd_async-0.2.0.dist-info/RECORD +0 -53
  76. /ophyd_async/panda/{table.py → _table.py} +0 -0
  77. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
  78. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
  79. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
ophyd_async/epics/_backend/_aioca.py
@@ -1,5 +1,5 @@
+ import logging
  import sys
- from asyncio import CancelledError
  from dataclasses import dataclass
  from enum import Enum
  from typing import Any, Dict, Optional, Sequence, Type, Union
@@ -8,17 +8,17 @@ from aioca import (
  FORMAT_CTRL,
  FORMAT_RAW,
  FORMAT_TIME,
+ CANothing,
  Subscription,
  caget,
  camonitor,
  caput,
  )
  from aioca.types import AugmentedValue, Dbr, Format
- from bluesky.protocols import Descriptor, Dtype, Reading
+ from bluesky.protocols import DataKey, Dtype, Reading
  from epicscorelibs.ca import dbr

  from ophyd_async.core import (
- NotConnected,
  ReadingValueCallback,
  SignalBackend,
  T,
@@ -26,6 +26,9 @@ from ophyd_async.core import (
  get_unique,
  wait_for_connection,
  )
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+ from .common import get_supported_values

  dbr_to_dtype: Dict[Dbr, Dtype] = {
  dbr.DBR_STRING: "string",
@@ -49,14 +52,14 @@ class CaConverter:
  return value

  def reading(self, value: AugmentedValue):
- return dict(
- value=self.value(value),
- timestamp=value.timestamp,
- alarm_severity=-1 if value.severity > 2 else value.severity,
- )
+ return {
+ "value": self.value(value),
+ "timestamp": value.timestamp,
+ "alarm_severity": -1 if value.severity > 2 else value.severity,
+ }

- def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
- return dict(source=source, dtype=dbr_to_dtype[value.datatype], shape=[])
+ def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+ return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}


  class CaLongStrConverter(CaConverter):
@@ -70,13 +73,13 @@ class CaLongStrConverter(CaConverter):


  class CaArrayConverter(CaConverter):
- def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
- return dict(source=source, dtype="array", shape=[len(value)])
+ def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+ return {"source": source, "dtype": "array", "shape": [len(value)]}


  @dataclass
  class CaEnumConverter(CaConverter):
- enum_class: Type[Enum]
+ choices: dict[str, str]

  def write_value(self, value: Union[Enum, str]):
  if isinstance(value, Enum):
@@ -85,13 +88,15 @@ class CaEnumConverter(CaConverter):
  return value

  def value(self, value: AugmentedValue):
- return self.enum_class(value)
+ return self.choices[value]

- def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
- choices = [e.value for e in self.enum_class]
- return dict(
- source=source, dtype="string", shape=[], choices=choices
- ) # type: ignore
+ def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
+ return {
+ "source": source,
+ "dtype": "string",
+ "shape": [],
+ "choices": list(self.choices),
+ }


  class DisconnectedCaConverter(CaConverter):
@@ -137,20 +142,8 @@ def make_converter(
  pv_choices = get_unique(
  {k: tuple(v.enums) for k, v in values.items()}, "choices"
  )
- if datatype:
- if not issubclass(datatype, Enum):
- raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
- if not issubclass(datatype, str):
- raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
- choices = tuple(v.value for v in datatype)
- if set(choices) != set(pv_choices):
- raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
- enum_class = datatype
- else:
- enum_class = Enum( # type: ignore
- "GeneratedChoices", {x: x for x in pv_choices}, type=str
- )
- return CaEnumConverter(dbr.DBR_STRING, None, enum_class)
+ supported_values = get_supported_values(pv, datatype, pv_choices)
+ return CaEnumConverter(dbr.DBR_STRING, None, supported_values)
  else:
  value = list(values.values())[0]
  # Done the dbr check, so enough to check one of the values
@@ -181,26 +174,31 @@ class CaSignalBackend(SignalBackend[T]):
  self.write_pv = write_pv
  self.initial_values: Dict[str, AugmentedValue] = {}
  self.converter: CaConverter = DisconnectedCaConverter(None, None)
- self.source = f"ca://{self.read_pv}"
  self.subscription: Optional[Subscription] = None

- async def _store_initial_value(self, pv):
+ def source(self, name: str):
+ return f"ca://{self.read_pv}"
+
+ async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
  try:
- self.initial_values[pv] = await caget(pv, format=FORMAT_CTRL, timeout=None)
- except CancelledError:
- raise NotConnected(self.source)
+ self.initial_values[pv] = await caget(
+ pv, format=FORMAT_CTRL, timeout=timeout
+ )
+ except CANothing as exc:
+ logging.debug(f"signal ca://{pv} timed out")
+ raise NotConnected(f"ca://{pv}") from exc

- async def connect(self):
+ async def connect(self, timeout: float = DEFAULT_TIMEOUT):
  _use_pyepics_context_if_imported()
  if self.read_pv != self.write_pv:
  # Different, need to connect both
  await wait_for_connection(
- read_pv=self._store_initial_value(self.read_pv),
- write_pv=self._store_initial_value(self.write_pv),
+ read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+ write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
  )
  else:
  # The same, so only need to connect one
- await self._store_initial_value(self.read_pv)
+ await self._store_initial_value(self.read_pv, timeout=timeout)
  self.converter = make_converter(self.datatype, self.initial_values)

  async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -224,9 +222,9 @@ class CaSignalBackend(SignalBackend[T]):
  timeout=None,
  )

- async def get_descriptor(self) -> Descriptor:
+ async def get_datakey(self, source: str) -> DataKey:
  value = await self._caget(FORMAT_CTRL)
- return self.converter.descriptor(self.source, value)
+ return self.converter.get_datakey(source, value)

  async def get_reading(self) -> Reading:
  value = await self._caget(FORMAT_TIME)
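
In 0.2.0 the CA backend exposed source as an attribute and described itself via get_descriptor(); in 0.3.0 source(name) is a method, describing goes through get_datakey(source), and connect() takes a timeout that is surfaced as NotConnected. A minimal sketch of how this looks from the signal level, assuming the epics_signal_rw helper from ophyd_async.epics.signal and a made-up PV name:

    import asyncio

    from ophyd_async.epics.signal import epics_signal_rw


    async def main() -> None:
        # "SIM:VALUE" is a placeholder PV; any reachable ai/ao record would do
        sig = epics_signal_rw(float, "SIM:VALUE")
        await sig.connect(timeout=5.0)  # timeout is forwarded to CaSignalBackend.connect
        print(sig.source)               # "ca://SIM:VALUE", built by the backend's source()
        print(await sig.describe())     # datakeys come from CaConverter.get_datakey()


    asyncio.run(main())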
ophyd_async/epics/_backend/_p4p.py
@@ -1,16 +1,16 @@
  import asyncio
  import atexit
- from asyncio import CancelledError
+ import logging
+ import time
  from dataclasses import dataclass
  from enum import Enum
  from typing import Any, Dict, List, Optional, Sequence, Type, Union

- from bluesky.protocols import Descriptor, Dtype, Reading
+ from bluesky.protocols import DataKey, Dtype, Reading
  from p4p import Value
  from p4p.client.asyncio import Context, Subscription

  from ophyd_async.core import (
- NotConnected,
  ReadingValueCallback,
  SignalBackend,
  T,
@@ -18,6 +18,9 @@ from ophyd_async.core import (
  get_unique,
  wait_for_connection,
  )
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+ from .common import get_supported_values

  # https://mdavidsaver.github.io/p4p/values.html
  specifier_to_dtype: Dict[str, Dtype] = {
@@ -46,15 +49,15 @@ class PvaConverter:
  def reading(self, value):
  ts = value["timeStamp"]
  sv = value["alarm"]["severity"]
- return dict(
- value=self.value(value),
- timestamp=ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
- alarm_severity=-1 if sv > 2 else sv,
- )
+ return {
+ "value": self.value(value),
+ "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
+ "alarm_severity": -1 if sv > 2 else sv,
+ }

- def descriptor(self, source: str, value) -> Descriptor:
+ def get_datakey(self, source: str, value) -> DataKey:
  dtype = specifier_to_dtype[value.type().aspy("value")]
- return dict(source=source, dtype=dtype, shape=[])
+ return {"source": source, "dtype": dtype, "shape": []}

  def metadata_fields(self) -> List[str]:
  """
@@ -70,8 +73,8 @@ class PvaConverter:


  class PvaArrayConverter(PvaConverter):
- def descriptor(self, source: str, value) -> Descriptor:
- return dict(source=source, dtype="array", shape=[len(value["value"])])
+ def get_datakey(self, source: str, value) -> DataKey:
+ return {"source": source, "dtype": "array", "shape": [len(value["value"])]}


  class PvaNDArrayConverter(PvaConverter):
@@ -93,9 +96,9 @@ class PvaNDArrayConverter(PvaConverter):
  dims = self._get_dimensions(value)
  return value["value"].reshape(dims)

- def descriptor(self, source: str, value) -> Descriptor:
+ def get_datakey(self, source: str, value) -> DataKey:
  dims = self._get_dimensions(value)
- return dict(source=source, dtype="array", shape=dims)
+ return {"source": source, "dtype": "array", "shape": dims}

  def write_value(self, value):
  # No clear use-case for writing directly to an NDArray, and some
@@ -106,7 +109,8 @@ class PvaNDArrayConverter(PvaConverter):

  @dataclass
  class PvaEnumConverter(PvaConverter):
- enum_class: Type[Enum]
+ def __init__(self, choices: dict[str, str]):
+ self.choices = tuple(choices.values())

  def write_value(self, value: Union[Enum, str]):
  if isinstance(value, Enum):
@@ -115,30 +119,58 @@ class PvaEnumConverter(PvaConverter):
  return value

  def value(self, value):
- return list(self.enum_class)[value["value"]["index"]]
+ return self.choices[value["value"]["index"]]

- def descriptor(self, source: str, value) -> Descriptor:
- choices = [e.value for e in self.enum_class]
- return dict(
- source=source, dtype="string", shape=[], choices=choices
- ) # type: ignore
+ def get_datakey(self, source: str, value) -> DataKey:
+ return {
+ "source": source,
+ "dtype": "string",
+ "shape": [],
+ "choices": list(self.choices),
+ }


  class PvaEnumBoolConverter(PvaConverter):
  def value(self, value):
  return value["value"]["index"]

- def descriptor(self, source: str, value) -> Descriptor:
- return dict(source=source, dtype="integer", shape=[])
+ def get_datakey(self, source: str, value) -> DataKey:
+ return {"source": source, "dtype": "integer", "shape": []}


  class PvaTableConverter(PvaConverter):
  def value(self, value):
  return value["value"].todict()

- def descriptor(self, source: str, value) -> Descriptor:
+ def get_datakey(self, source: str, value) -> DataKey:
  # This is wrong, but defer until we know how to actually describe a table
- return dict(source=source, dtype="object", shape=[]) # type: ignore
+ return {"source": source, "dtype": "object", "shape": []} # type: ignore
+
+
+ class PvaDictConverter(PvaConverter):
+ def reading(self, value):
+ ts = time.time()
+ value = value.todict()
+ # Alarm severity is vacuously 0 for a table
+ return {"value": value, "timestamp": ts, "alarm_severity": 0}
+
+ def value(self, value: Value):
+ return value.todict()
+
+ def get_datakey(self, source: str, value) -> DataKey:
+ raise NotImplementedError("Describing Dict signals not currently supported")
+
+ def metadata_fields(self) -> List[str]:
+ """
+ Fields to request from PVA for metadata.
+ """
+ return []
+
+ def value_fields(self) -> List[str]:
+ """
+ Fields to request from PVA for the value.
+ """
+ return []


  class DisconnectedPvaConverter(PvaConverter):
@@ -149,7 +181,9 @@ class DisconnectedPvaConverter(PvaConverter):
  def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
  pv = list(values)[0]
  typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
- typ = get_unique({k: type(v["value"]) for k, v in values.items()}, "value types")
+ typ = get_unique(
+ {k: type(v.get("value")) for k, v in values.items()}, "value types"
+ )
  if "NTScalarArray" in typeid and typ == list:
  # Waveform of strings, check we wanted this
  if datatype and datatype != Sequence[str]:
@@ -185,24 +219,21 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
  pv_choices = get_unique(
  {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
  )
- if datatype:
- if not issubclass(datatype, Enum):
- raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
- choices = tuple(v.value for v in datatype)
- if set(choices) != set(pv_choices):
- raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
- enum_class = datatype
- else:
- enum_class = Enum( # type: ignore
- "GeneratedChoices", {x or "_": x for x in pv_choices}, type=str
- )
- return PvaEnumConverter(enum_class)
+ return PvaEnumConverter(get_supported_values(pv, datatype, pv_choices))
  elif "NTScalar" in typeid:
- if datatype and not issubclass(typ, datatype):
+ if (
+ datatype
+ and not issubclass(typ, datatype)
+ and not (
+ typ is float and datatype is int
+ ) # Allow float -> int since prec can be 0
+ ):
  raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
  return PvaConverter()
  elif "NTTable" in typeid:
  return PvaTableConverter()
+ elif "structure" in typeid:
+ return PvaDictConverter()
  else:
  raise TypeError(f"{pv}: Unsupported typeid {typeid}")

@@ -216,9 +247,11 @@ class PvaSignalBackend(SignalBackend[T]):
  self.write_pv = write_pv
  self.initial_values: Dict[str, Any] = {}
  self.converter: PvaConverter = DisconnectedPvaConverter()
- self.source = f"pva://{self.read_pv}"
  self.subscription: Optional[Subscription] = None

+ def source(self, name: str):
+ return f"pva://{self.read_pv}"
+
  @property
  def ctxt(self) -> Context:
  if PvaSignalBackend._ctxt is None:
@@ -233,22 +266,25 @@ class PvaSignalBackend(SignalBackend[T]):

  return PvaSignalBackend._ctxt

- async def _store_initial_value(self, pv):
+ async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
  try:
- self.initial_values[pv] = await self.ctxt.get(pv)
- except CancelledError:
- raise NotConnected(self.source)
+ self.initial_values[pv] = await asyncio.wait_for(
+ self.ctxt.get(pv), timeout=timeout
+ )
+ except asyncio.TimeoutError as exc:
+ logging.debug(f"signal pva://{pv} timed out", exc_info=True)
+ raise NotConnected(f"pva://{pv}") from exc

- async def connect(self):
+ async def connect(self, timeout: float = DEFAULT_TIMEOUT):
  if self.read_pv != self.write_pv:
  # Different, need to connect both
  await wait_for_connection(
- read_pv=self._store_initial_value(self.read_pv),
- write_pv=self._store_initial_value(self.write_pv),
+ read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+ write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
  )
  else:
  # The same, so only need to connect one
- await self._store_initial_value(self.read_pv)
+ await self._store_initial_value(self.read_pv, timeout=timeout)
  self.converter = make_converter(self.datatype, self.initial_values)

  async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -256,12 +292,20 @@ class PvaSignalBackend(SignalBackend[T]):
  write_value = self.initial_values[self.write_pv]
  else:
  write_value = self.converter.write_value(value)
- coro = self.ctxt.put(self.write_pv, dict(value=write_value), wait=wait)
- await asyncio.wait_for(coro, timeout)
+ coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
+ try:
+ await asyncio.wait_for(coro, timeout)
+ except asyncio.TimeoutError as exc:
+ logging.debug(
+ f"signal pva://{self.write_pv} timed out \
+ put value: {write_value}",
+ exc_info=True,
+ )
+ raise NotConnected(f"pva://{self.write_pv}") from exc

- async def get_descriptor(self) -> Descriptor:
+ async def get_datakey(self, source: str) -> DataKey:
  value = await self.ctxt.get(self.read_pv)
- return self.converter.descriptor(self.source, value)
+ return self.converter.get_datakey(source, value)

  def _pva_request_string(self, fields: List[str]) -> str:
  """
ophyd_async/epics/_backend/common.py
@@ -0,0 +1,25 @@
+ from enum import Enum
+ from typing import Dict, Optional, Tuple, Type
+
+
+ def get_supported_values(
+ pv: str,
+ datatype: Optional[Type[str]],
+ pv_choices: Tuple[str, ...],
+ ) -> Dict[str, str]:
+ if not datatype:
+ return {x: x or "_" for x in pv_choices}
+
+ if not issubclass(datatype, str):
+ raise TypeError(f"{pv} is type Enum but doesn't inherit from String")
+ if issubclass(datatype, Enum):
+ choices = tuple(v.value for v in datatype)
+ if set(choices) != set(pv_choices):
+ raise TypeError(
+ (
+ f"{pv} has choices {pv_choices}, "
+ f"which do not match {datatype}, which has {choices}"
+ )
+ )
+ return {x: datatype(x) for x in pv_choices}
+ return {x: x for x in pv_choices}
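
The new get_supported_values helper centralises the enum-choice validation that the CA and PVA converters previously duplicated: with no datatype it passes the PV's choices straight through (mapping empty strings to "_"), with a matching str-based Enum it wraps each choice in that Enum, and a mismatch raises TypeError. A small illustration, using a made-up PV name and Enum:

    from enum import Enum

    from ophyd_async.epics._backend.common import get_supported_values


    class OnOff(str, Enum):
        ON = "On"
        OFF = "Off"


    # No datatype: choices pass through, empty strings are replaced with "_"
    print(get_supported_values("DEMO:PV", None, ("On", "Off", "")))

    # A str/Enum subclass whose values match the PV choices: values become Enum members
    print(get_supported_values("DEMO:PV", OnOff, ("On", "Off")))

    # Choices that do not match the supplied Enum raise TypeError
    try:
        get_supported_values("DEMO:PV", OnOff, ("Yes", "No"))
    except TypeError as exc:
        print(exc)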
ophyd_async/epics/areadetector/__init__.py
@@ -1,19 +1,23 @@
+ from .aravis import AravisDetector
+ from .kinetix import KinetixDetector
+ from .pilatus import PilatusDetector
  from .single_trigger_det import SingleTriggerDet
  from .utils import (
  FileWriteMode,
  ImageMode,
  NDAttributeDataType,
  NDAttributesXML,
- ad_r,
- ad_rw,
  )
+ from .vimba import VimbaDetector

  __all__ = [
+ "AravisDetector",
+ "KinetixDetector",
+ "VimbaDetector",
  "SingleTriggerDet",
  "FileWriteMode",
  "ImageMode",
- "ad_r",
- "ad_rw",
  "NDAttributeDataType",
  "NDAttributesXML",
+ "PilatusDetector",
  ]
ophyd_async/epics/areadetector/aravis.py
@@ -0,0 +1,63 @@
+ from typing import get_args
+
+ from bluesky.protocols import HasHints, Hints
+
+ from ophyd_async.core import DirectoryProvider, StandardDetector
+ from ophyd_async.epics.areadetector.controllers.aravis_controller import (
+ AravisController,
+ )
+ from ophyd_async.epics.areadetector.drivers import ADBaseShapeProvider
+ from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver
+ from ophyd_async.epics.areadetector.writers import HDFWriter, NDFileHDF
+
+
+ class AravisDetector(StandardDetector, HasHints):
+ """
+ Ophyd-async implementation of an ADAravis Detector.
+ The detector may be configured for an external trigger on a GPIO port,
+ which must be done prior to preparing the detector
+ """
+
+ _controller: AravisController
+ _writer: HDFWriter
+
+ def __init__(
+ self,
+ prefix: str,
+ directory_provider: DirectoryProvider,
+ drv_suffix="cam1:",
+ hdf_suffix="HDF1:",
+ name="",
+ gpio_number: AravisController.GPIO_NUMBER = 1,
+ ):
+ self.drv = AravisDriver(prefix + drv_suffix)
+ self.hdf = NDFileHDF(prefix + hdf_suffix)
+
+ super().__init__(
+ AravisController(self.drv, gpio_number=gpio_number),
+ HDFWriter(
+ self.hdf,
+ directory_provider,
+ lambda: self.name,
+ ADBaseShapeProvider(self.drv),
+ ),
+ config_sigs=(self.drv.acquire_time,),
+ name=name,
+ )
+
+ def get_external_trigger_gpio(self):
+ return self._controller.gpio_number
+
+ def set_external_trigger_gpio(self, gpio_number: AravisController.GPIO_NUMBER):
+ supported_gpio_numbers = get_args(AravisController.GPIO_NUMBER)
+ if gpio_number not in supported_gpio_numbers:
+ raise ValueError(
+ f"{self.__class__.__name__} only supports the following GPIO "
+ f"indices: {supported_gpio_numbers} but was asked to "
+ f"use {gpio_number}"
+ )
+ self._controller.gpio_number = gpio_number
+
+ @property
+ def hints(self) -> Hints:
+ return self._writer.hints
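
A sketch of constructing the new AravisDetector; the PV prefix and directory are placeholders, and StaticDirectoryProvider is just one DirectoryProvider implementation from ophyd_async.core:

    from ophyd_async.core import StaticDirectoryProvider
    from ophyd_async.epics.areadetector import AravisDetector

    # Placeholder data directory and file prefix
    provider = StaticDirectoryProvider("/data/visit", "aravis")

    # Placeholder PV prefix; drv_suffix/hdf_suffix keep their "cam1:"/"HDF1:" defaults
    det = AravisDetector("BL01T-DI-CAM-01:", provider, name="det", gpio_number=1)

    # External triggering can be moved to another GPIO line before prepare()
    det.set_external_trigger_gpio(3)
    print(det.get_external_trigger_gpio())  # 3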
ophyd_async/epics/areadetector/controllers/__init__.py
@@ -1,4 +1,5 @@
  from .ad_sim_controller import ADSimController
+ from .aravis_controller import AravisController
  from .pilatus_controller import PilatusController

- __all__ = ["PilatusController", "ADSimController"]
+ __all__ = ["PilatusController", "ADSimController", "AravisController"]
ophyd_async/epics/areadetector/controllers/ad_sim_controller.py
@@ -30,8 +30,8 @@ class ADSimController(DetectorControl):

  async def arm(
  self,
+ num: int,
  trigger: DetectorTrigger = DetectorTrigger.internal,
- num: int = 0,
  exposure: Optional[float] = None,
  ) -> AsyncStatus:
  assert (
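
num is now the first, required argument to arm, ahead of trigger, so callers must state the frame count explicitly instead of relying on the old num=0 default. A hedged sketch against a simulated areaDetector IOC (the prefix is made up):

    import asyncio

    from ophyd_async.core import DetectorTrigger
    from ophyd_async.epics.areadetector.controllers import ADSimController
    from ophyd_async.epics.areadetector.drivers import ADBase


    async def main() -> None:
        drv = ADBase("SIM:CAM:")   # placeholder prefix for an ADSimDetector IOC
        controller = ADSimController(drv)
        await drv.connect()
        # 0.2.0: arm(trigger=..., num=0, ...); 0.3.0: num comes first and is required
        status = await controller.arm(5, trigger=DetectorTrigger.internal, exposure=0.1)
        await status  # the returned AsyncStatus is awaitable


    asyncio.run(main())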
ophyd_async/epics/areadetector/controllers/aravis_controller.py
@@ -0,0 +1,78 @@
+ import asyncio
+ from typing import Literal, Optional, Tuple
+
+ from ophyd_async.core import (
+ AsyncStatus,
+ DetectorControl,
+ DetectorTrigger,
+ set_and_wait_for_value,
+ )
+ from ophyd_async.epics.areadetector.drivers.aravis_driver import (
+ AravisDriver,
+ AravisTriggerMode,
+ AravisTriggerSource,
+ )
+ from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record
+
+ # The deadtime of an ADaravis controller varies depending on the exact model of camera.
+ # Ideally we would maximize performance by dynamically retrieving the deadtime at
+ # runtime. See https://github.com/bluesky/ophyd-async/issues/308
+ _HIGHEST_POSSIBLE_DEADTIME = 1961e-6
+
+
+ class AravisController(DetectorControl):
+ GPIO_NUMBER = Literal[1, 2, 3, 4]
+
+ def __init__(self, driver: AravisDriver, gpio_number: GPIO_NUMBER) -> None:
+ self._drv = driver
+ self.gpio_number = gpio_number
+
+ def get_deadtime(self, exposure: float) -> float:
+ return _HIGHEST_POSSIBLE_DEADTIME
+
+ async def arm(
+ self,
+ num: int = 0,
+ trigger: DetectorTrigger = DetectorTrigger.internal,
+ exposure: Optional[float] = None,
+ ) -> AsyncStatus:
+ if num == 0:
+ image_mode = ImageMode.continuous
+ else:
+ image_mode = ImageMode.multiple
+ if exposure is not None:
+ await self._drv.acquire_time.set(exposure)
+
+ trigger_mode, trigger_source = self._get_trigger_info(trigger)
+ # trigger mode must be set first and on it's own!
+ await self._drv.trigger_mode.set(trigger_mode)
+
+ await asyncio.gather(
+ self._drv.trigger_source.set(trigger_source),
+ self._drv.num_images.set(num),
+ self._drv.image_mode.set(image_mode),
+ )
+
+ status = await set_and_wait_for_value(self._drv.acquire, True)
+ return status
+
+ def _get_trigger_info(
+ self, trigger: DetectorTrigger
+ ) -> Tuple[AravisTriggerMode, AravisTriggerSource]:
+ supported_trigger_types = (
+ DetectorTrigger.constant_gate,
+ DetectorTrigger.edge_trigger,
+ )
+ if trigger not in supported_trigger_types:
+ raise ValueError(
+ f"{self.__class__.__name__} only supports the following trigger "
+ f"types: {supported_trigger_types} but was asked to "
+ f"use {trigger}"
+ )
+ if trigger == DetectorTrigger.internal:
+ return AravisTriggerMode.off, "Freerun"
+ else:
+ return (AravisTriggerMode.on, f"Line{self.gpio_number}")
+
+ async def disarm(self):
+ await stop_busy_record(self._drv.acquire, False, timeout=1)
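
For completeness, a sketch of driving the new AravisController directly: its supported trigger types are constant_gate and edge_trigger, so a scan would typically arm it for edge triggers on the configured GPIO line (the prefix below is a placeholder):

    import asyncio

    from ophyd_async.core import DetectorTrigger
    from ophyd_async.epics.areadetector.controllers import AravisController
    from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver


    async def main() -> None:
        drv = AravisDriver("BL01T-DI-CAM-01:cam1:")   # placeholder prefix
        controller = AravisController(drv, gpio_number=2)
        await drv.connect()
        print(controller.get_deadtime(0.01))          # always 1961e-6 for now
        # Sets TriggerMode first, then TriggerSource ("Line2"), NumImages and ImageMode
        await controller.arm(10, trigger=DetectorTrigger.edge_trigger)
        # ... hardware triggers arrive on GPIO line 2 here ...
        await controller.disarm()


    asyncio.run(main())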