ophyd-async 0.3.1a1__py3-none-any.whl → 0.3.3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,8 +2,10 @@ import logging
 import sys
 from dataclasses import dataclass
 from enum import Enum
-from typing import Any, Dict, Optional, Sequence, Type, Union
+from math import isnan, nan
+from typing import Any, Dict, List, Optional, Type, Union
 
+import numpy as np
 from aioca import (
     FORMAT_CTRL,
     FORMAT_RAW,
@@ -28,7 +30,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_values
+from .common import LimitPair, Limits, common_meta, get_supported_values
 
 dbr_to_dtype: Dict[Dbr, Dtype] = {
     dbr.DBR_STRING: "string",
@@ -40,6 +42,66 @@ dbr_to_dtype: Dict[Dbr, Dtype] = {
 }
 
 
+def _data_key_from_augmented_value(
+    value: AugmentedValue,
+    *,
+    choices: Optional[List[str]] = None,
+    dtype: Optional[str] = None,
+) -> DataKey:
+    """Use the return value of get with FORMAT_CTRL to construct a DataKey
+    describing the signal. See docstring of AugmentedValue for expected
+    value fields by DBR type.
+
+    Args:
+        value (AugmentedValue): Description of the return type of a DB record
+        choices: Optional list of enum choices to pass as metadata in the datakey
+        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+    Returns:
+        DataKey: A rich DataKey describing the DB record
+    """
+    source = f"ca://{value.name}"
+    assert value.ok, f"Error reading {source}: {value}"
+
+    scalar = value.element_count == 1
+    dtype = dtype or dbr_to_dtype[value.datatype]
+
+    d = DataKey(
+        source=source,
+        dtype=dtype if scalar else "array",
+        # strictly value.element_count >= len(value)
+        shape=[] if scalar else [len(value)],
+    )
+    for key in common_meta:
+        attr = getattr(value, key, nan)
+        if isinstance(attr, str) or not isnan(attr):
+            d[key] = attr
+
+    if choices is not None:
+        d["choices"] = choices
+
+    if limits := _limits_from_augmented_value(value):
+        d["limits"] = limits
+
+    return d
+
+
+def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
+    def get_limits(limit: str) -> LimitPair:
+        low = getattr(value, f"lower_{limit}_limit", nan)
+        high = getattr(value, f"upper_{limit}_limit", nan)
+        return LimitPair(
+            low=None if isnan(low) else low, high=None if isnan(high) else high
+        )
+
+    return Limits(
+        alarm=get_limits("alarm"),
+        control=get_limits("ctrl"),
+        display=get_limits("disp"),
+        warning=get_limits("warning"),
+    )
+
+
 @dataclass
 class CaConverter:
     read_dbr: Optional[Dbr]
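
For orientation, here is a minimal sketch (not from the package) of the richer DataKey this helper now produces for a scalar numeric CA signal; the field names follow the code above, while the PV name and all concrete values are invented:

```python
# Hypothetical output of _data_key_from_augmented_value for a scalar double.
example_datakey = {
    "source": "ca://SIM:TEMP",  # f"ca://{value.name}"
    "dtype": "number",          # dbr_to_dtype[value.datatype]
    "shape": [],                # element_count == 1, so scalar
    "units": "K",               # from common_meta
    "precision": 3,             # from common_meta
    "limits": {                 # nan limits become None via LimitPair
        "alarm": {"low": 10.0, "high": 350.0},
        "control": {"low": None, "high": None},
        "display": {"low": 0.0, "high": 400.0},
        "warning": {"low": 20.0, "high": 300.0},
    },
}
```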
@@ -49,7 +111,10 @@ class CaConverter:
         return value
 
     def value(self, value: AugmentedValue):
-        return value
+        # for channel access ca_xxx classes, this
+        # invokes the __pos__ operator to return an instance of
+        # the builtin base class
+        return +value
 
     def reading(self, value: AugmentedValue):
         return {
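
The `+value` above relies on a unary-plus convention: aioca's `AugmentedValue` subclasses a builtin type and carries extra metadata, and `__pos__` strips it back down to the plain builtin. A self-contained sketch of the pattern, with an invented `MetaFloat` standing in for `AugmentedValue`:

```python
class MetaFloat(float):
    """Toy AugmentedValue-like class: a float carrying extra metadata."""

    def __new__(cls, value: float, name: str):
        obj = super().__new__(cls, value)
        obj.name = name
        return obj

    def __pos__(self) -> float:
        # Unary plus returns an instance of the builtin base class,
        # dropping the metadata - the same trick CaConverter.value uses.
        return float(self)


v = MetaFloat(3.5, name="SIM:VALUE")
assert type(+v) is float and +v == 3.5
```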
@@ -58,8 +123,8 @@
             "alarm_severity": -1 if value.severity > 2 else value.severity,
         }
 
-    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-        return {"source": source, "dtype": dbr_to_dtype[value.datatype], "shape": []}
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        return _data_key_from_augmented_value(value)
 
 
 class CaLongStrConverter(CaConverter):
@@ -73,12 +138,17 @@ class CaLongStrConverter(CaConverter):
 
 
 class CaArrayConverter(CaConverter):
-    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-        return {"source": source, "dtype": "array", "shape": [len(value)]}
+    def value(self, value: AugmentedValue):
+        return np.array(value, copy=False)
 
 
 @dataclass
 class CaEnumConverter(CaConverter):
+    """To prevent issues when a signal is restarted and returns with different enum
+    values or orders, we treat an Enum signal as a string, and cache the
+    choices on this class.
+    """
+
     choices: dict[str, str]
 
     def write_value(self, value: Union[Enum, str]):
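
Since CA waveform values subclass `numpy.ndarray`, the `np.array(value, copy=False)` in the new `CaArrayConverter.value` re-wraps the same buffer as a plain `ndarray` rather than copying it. A quick demonstration of that numpy behaviour with an invented subclass:

```python
import numpy as np


class TaggedArray(np.ndarray):
    """Toy stand-in for an array-like AugmentedValue."""


tagged = np.arange(5).view(TaggedArray)
plain = np.array(tagged, copy=False)  # plain ndarray, no data copy
assert type(plain) is np.ndarray
assert np.shares_memory(plain, tagged)  # same underlying buffer
```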
@@ -90,13 +160,18 @@ class CaEnumConverter(CaConverter):
     def value(self, value: AugmentedValue):
         return self.choices[value]
 
-    def get_datakey(self, source: str, value: AugmentedValue) -> DataKey:
-        return {
-            "source": source,
-            "dtype": "string",
-            "shape": [],
-            "choices": list(self.choices),
-        }
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        # Sometimes DBR_TYPE returns as String, must pass choices still
+        return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))
+
+
+@dataclass
+class CaBoolConverter(CaConverter):
+    def value(self, value: AugmentedValue) -> bool:
+        return bool(value)
+
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        return _data_key_from_augmented_value(value, dtype="bool")
 
 
 class DisconnectedCaConverter(CaConverter):
@@ -115,8 +190,10 @@ def make_converter(
         return CaLongStrConverter()
     elif is_array and pv_dbr == dbr.DBR_STRING:
         # Waveform of strings, check we wanted this
-        if datatype and datatype != Sequence[str]:
-            raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
+        if datatype:
+            datatype_dtype = get_dtype(datatype)
+            if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
+                raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
         return CaArrayConverter(pv_dbr, None)
     elif is_array:
         pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
@@ -136,7 +213,7 @@
         )
         if pv_choices_len != 2:
             raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-        return CaConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
+        return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
     elif pv_dbr == dbr.DBR_ENUM:
         # This is an Enum
         pv_choices = get_unique(
@@ -224,7 +301,7 @@ class CaSignalBackend(SignalBackend[T]):
 
     async def get_datakey(self, source: str) -> DataKey:
         value = await self._caget(FORMAT_CTRL)
-        return self.converter.get_datakey(source, value)
+        return self.converter.get_datakey(value)
 
     async def get_reading(self) -> Reading:
         value = await self._caget(FORMAT_TIME)
@@ -4,6 +4,7 @@ import logging
 import time
 from dataclasses import dataclass
 from enum import Enum
+from math import isnan, nan
 from typing import Any, Dict, List, Optional, Sequence, Type, Union
 
 from bluesky.protocols import DataKey, Dtype, Reading
@@ -20,7 +21,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_values
+from .common import LimitPair, Limits, common_meta, get_supported_values
 
 # https://mdavidsaver.github.io/p4p/values.html
 specifier_to_dtype: Dict[str, Dtype] = {
@@ -39,6 +40,67 @@ specifier_to_dtype: Dict[str, Dtype] = {
 }
 
 
+def _data_key_from_value(
+    source: str,
+    value: Value,
+    *,
+    shape: Optional[list[int]] = None,
+    choices: Optional[list[str]] = None,
+    dtype: Optional[str] = None,
+) -> DataKey:
+    """
+    Args:
+        value (Value): Description of the return type of a DB record
+        shape: Optional override shape when len(shape) > 1
+        choices: Optional list of enum choices to pass as metadata in the datakey
+        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+    Returns:
+        DataKey: A rich DataKey describing the DB record
+    """
+    shape = shape or []
+    dtype = dtype or specifier_to_dtype[value.type().aspy("value")]
+    display_data = getattr(value, "display", None)
+
+    d = DataKey(
+        source=source,
+        dtype=dtype,
+        shape=shape,
+    )
+    if display_data is not None:
+        for key in common_meta:
+            attr = getattr(display_data, key, nan)
+            if isinstance(attr, str) or not isnan(attr):
+                d[key] = attr
+
+    if choices is not None:
+        d["choices"] = choices
+
+    if limits := _limits_from_value(value):
+        d["limits"] = limits
+
+    return d
+
+
+def _limits_from_value(value: Value) -> Limits:
+    def get_limits(
+        substructure_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
+    ) -> LimitPair:
+        substructure = getattr(value, substructure_name, None)
+        low = getattr(substructure, low_name, nan)
+        high = getattr(substructure, high_name, nan)
+        return LimitPair(
+            low=None if isnan(low) else low, high=None if isnan(high) else high
+        )
+
+    return Limits(
+        alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
+        control=get_limits("control"),
+        display=get_limits("display"),
+        warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
+    )
+
+
 class PvaConverter:
     def write_value(self, value):
         return value
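
`_limits_from_value` walks the normative-type substructures of a p4p `Value` (`display`, `control` and `valueAlarm`). A rough stand-in using `SimpleNamespace` shows the same traversal outside of p4p; all numbers are invented:

```python
from math import isnan, nan
from types import SimpleNamespace

# Fake NTScalar-like value exposing only the substructures the helper reads.
fake_value = SimpleNamespace(
    display=SimpleNamespace(limitLow=0.0, limitHigh=400.0),
    control=SimpleNamespace(limitLow=nan, limitHigh=nan),  # becomes None/None
    valueAlarm=SimpleNamespace(
        lowAlarmLimit=10.0,
        highAlarmLimit=350.0,
        lowWarningLimit=20.0,
        highWarningLimit=300.0,
    ),
)


def pair(sub, low_name="limitLow", high_name="limitHigh"):
    low = getattr(sub, low_name, nan)
    high = getattr(sub, high_name, nan)
    return {
        "low": None if isnan(low) else low,
        "high": None if isnan(high) else high,
    }


limits = {
    "alarm": pair(fake_value.valueAlarm, "lowAlarmLimit", "highAlarmLimit"),
    "control": pair(fake_value.control),
    "display": pair(fake_value.display),
    "warning": pair(fake_value.valueAlarm, "lowWarningLimit", "highWarningLimit"),
}
assert limits["control"] == {"low": None, "high": None}
```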
@@ -56,8 +118,7 @@ class PvaConverter:
         }
 
     def get_datakey(self, source: str, value) -> DataKey:
-        dtype = specifier_to_dtype[value.type().aspy("value")]
-        return {"source": source, "dtype": dtype, "shape": []}
+        return _data_key_from_value(source, value)
 
     def metadata_fields(self) -> List[str]:
         """
@@ -74,7 +135,9 @@
 
 class PvaArrayConverter(PvaConverter):
     def get_datakey(self, source: str, value) -> DataKey:
-        return {"source": source, "dtype": "array", "shape": [len(value["value"])]}
+        return _data_key_from_value(
+            source, value, dtype="array", shape=[len(value["value"])]
+        )
 
 
 class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +161,7 @@ class PvaNDArrayConverter(PvaConverter):
 
     def get_datakey(self, source: str, value) -> DataKey:
         dims = self._get_dimensions(value)
-        return {"source": source, "dtype": "array", "shape": dims}
+        return _data_key_from_value(source, value, dtype="array", shape=dims)
 
     def write_value(self, value):
         # No clear use-case for writing directly to an NDArray, and some
@@ -109,6 +172,11 @@ class PvaNDArrayConverter(PvaConverter):
 
 @dataclass
 class PvaEnumConverter(PvaConverter):
+    """To prevent issues when a signal is restarted and returns with different enum
+    values or orders, we treat an Enum signal as a string, and cache the
+    choices on this class.
+    """
+
     def __init__(self, choices: dict[str, str]):
         self.choices = tuple(choices.values())
 
@@ -122,20 +190,17 @@ class PvaEnumConverter(PvaConverter):
         return self.choices[value["value"]["index"]]
 
     def get_datakey(self, source: str, value) -> DataKey:
-        return {
-            "source": source,
-            "dtype": "string",
-            "shape": [],
-            "choices": list(self.choices),
-        }
+        return _data_key_from_value(
+            source, value, choices=list(self.choices), dtype="string"
+        )
 
 
-class PvaEnumBoolConverter(PvaConverter):
+class PvaEmumBoolConverter(PvaConverter):
     def value(self, value):
-        return value["value"]["index"]
+        return bool(value["value"]["index"])
 
     def get_datakey(self, source: str, value) -> DataKey:
-        return {"source": source, "dtype": "integer", "shape": []}
+        return _data_key_from_value(source, value, dtype="bool")
 
 
 class PvaTableConverter(PvaConverter):
@@ -144,7 +209,7 @@ class PvaTableConverter(PvaConverter):
 
     def get_datakey(self, source: str, value) -> DataKey:
         # This is wrong, but defer until we know how to actually describe a table
-        return {"source": source, "dtype": "object", "shape": []}  # type: ignore
+        return _data_key_from_value(source, value, dtype="object")
 
 
 class PvaDictConverter(PvaConverter):
@@ -213,7 +278,7 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
         )
         if pv_choices_len != 2:
             raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-        return PvaEnumBoolConverter()
+        return PvaEmumBoolConverter()
     elif "NTEnum" in typeid:
         # This is an Enum
         pv_choices = get_unique(
@@ -1,5 +1,31 @@
+import inspect
 from enum import Enum
-from typing import Dict, Optional, Tuple, Type
+from typing import Dict, Optional, Tuple, Type, TypedDict
+
+from ophyd_async.core.signal_backend import RuntimeSubsetEnum
+
+common_meta = {
+    "units",
+    "precision",
+}
+
+
+class LimitPair(TypedDict):
+    high: float | None
+    low: float | None
+
+    def __bool__(self) -> bool:
+        return self.low is None and self.high is None
+
+
+class Limits(TypedDict):
+    alarm: LimitPair
+    control: LimitPair
+    display: LimitPair
+    warning: LimitPair
+
+    def __bool__(self) -> bool:
+        return any(self.alarm, self.control, self.display, self.warning)
 
 
 def get_supported_values(
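
One runtime detail worth knowing when reading the `LimitPair` and `Limits` definitions above: `TypedDict` classes construct plain `dict`s, so the `__bool__` methods in their bodies are never bound to instances; truthiness is ordinary dict truthiness. A small demonstration (Python 3.10+ for the `|` syntax):

```python
from typing import TypedDict


class LimitPair(TypedDict):
    high: float | None
    low: float | None

    def __bool__(self) -> bool:  # never invoked on instances
        return self.low is None and self.high is None


pair = LimitPair(low=None, high=None)
assert type(pair) is dict  # TypedDict calls build plain dicts
assert bool(pair) is True  # non-empty dict; the __bool__ above is unused
```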
@@ -7,19 +33,30 @@ def get_supported_values(
     datatype: Optional[Type[str]],
     pv_choices: Tuple[str, ...],
 ) -> Dict[str, str]:
-    if not datatype:
+    if inspect.isclass(datatype) and issubclass(datatype, RuntimeSubsetEnum):
+        if not set(datatype.choices).issubset(set(pv_choices)):
+            raise TypeError(
+                f"{pv} has choices {pv_choices}, "
+                f"which is not a superset of {str(datatype)}."
+            )
         return {x: x or "_" for x in pv_choices}
+    elif inspect.isclass(datatype) and issubclass(datatype, Enum):
+        if not issubclass(datatype, str):
+            raise TypeError(
+                f"{pv} is type Enum but {datatype} does not inherit from String."
+            )
 
-    if not issubclass(datatype, str):
-        raise TypeError(f"{pv} is type Enum but doesn't inherit from String")
-    if issubclass(datatype, Enum):
         choices = tuple(v.value for v in datatype)
         if set(choices) != set(pv_choices):
             raise TypeError(
-                (
-                    f"{pv} has choices {pv_choices}, "
-                    f"which do not match {datatype}, which has {choices}"
-                )
+                f"{pv} has choices {pv_choices}, "
+                f"which do not match {datatype}, which has {choices}."
             )
-        return {x: datatype(x) for x in pv_choices}
-    return {x: x for x in pv_choices}
+        return {x: datatype(x) if x else "_" for x in pv_choices}
+    elif datatype is None:
+        return {x: x or "_" for x in pv_choices}
+
+    raise TypeError(
+        f"{pv} has choices {pv_choices}. "
+        "Use an Enum or SubsetEnum to represent this."
+    )
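
A sketch of the `str`-Enum branch of the rewritten function, assuming `get_supported_values` is in scope from the module above; the `Mirror` enum and PV name are invented:

```python
from enum import Enum


class Mirror(str, Enum):  # inherits str, so the Enum branch accepts it
    IN = "In"
    OUT = "Out"


# The PV's reported choices must exactly match the Enum's values,
# otherwise a TypeError is raised.
supported = get_supported_values("SIM:MIRROR", Mirror, ("In", "Out"))
assert supported == {"In": Mirror.IN, "Out": Mirror.OUT}
```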
@@ -5,6 +5,7 @@ from ophyd_async.core import DEFAULT_TIMEOUT, wait_for_value
 from ophyd_async.core.async_status import AsyncStatus
 from ophyd_async.core.detector import DetectorControl, DetectorTrigger
 from ophyd_async.epics.areadetector.drivers.ad_base import (
+    set_exposure_time_and_acquire_period_if_supplied,
     start_acquiring_driver_and_ensure_status,
 )
 from ophyd_async.epics.areadetector.drivers.pilatus_driver import (
@@ -39,7 +40,9 @@ class PilatusController(DetectorControl):
         exposure: Optional[float] = None,
     ) -> AsyncStatus:
         if exposure is not None:
-            await self._drv.acquire_time.set(exposure)
+            await set_exposure_time_and_acquire_period_if_supplied(
+                self, self._drv, exposure
+            )
         await asyncio.gather(
             self._drv.trigger_mode.set(self._get_trigger_mode(trigger)),
             self._drv.num_images.set(999_999 if num == 0 else num),
@@ -2,6 +2,7 @@ from .ad_base import (
     ADBase,
     ADBaseShapeProvider,
     DetectorState,
+    set_exposure_time_and_acquire_period_if_supplied,
     start_acquiring_driver_and_ensure_status,
 )
 from .aravis_driver import AravisDriver
@@ -17,5 +18,6 @@ __all__ = [
     "KinetixDriver",
     "VimbaDriver",
     "start_acquiring_driver_and_ensure_status",
+    "set_exposure_time_and_acquire_period_if_supplied",
    "DetectorState",
 ]
@@ -5,6 +5,7 @@ from typing import FrozenSet, Sequence, Set
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
+    DetectorControl,
     ShapeProvider,
     set_and_wait_for_value,
 )
@@ -44,6 +45,7 @@ class ADBase(NDArrayBase):
     def __init__(self, prefix: str, name: str = "") -> None:
         # Define some signals
         self.acquire_time = epics_signal_rw_rbv(float, prefix + "AcquireTime")
+        self.acquire_period = epics_signal_rw_rbv(float, prefix + "AcquirePeriod")
         self.num_images = epics_signal_rw_rbv(int, prefix + "NumImages")
         self.image_mode = epics_signal_rw_rbv(ImageMode, prefix + "ImageMode")
         self.detector_state = epics_signal_r(
@@ -52,6 +54,36 @@ class ADBase(NDArrayBase):
         super().__init__(prefix, name=name)
 
 
+async def set_exposure_time_and_acquire_period_if_supplied(
+    controller: DetectorControl,
+    driver: ADBase,
+    exposure: float | None = None,
+    timeout: float = DEFAULT_TIMEOUT,
+) -> None:
+    """
+    Sets the exposure time if it is not None and the acquire period to the
+    exposure time plus the deadtime. This is expected behavior for most
+    AreaDetectors, but some may require more specialized handling.
+
+    Parameters
+    ----------
+    controller:
+        Controller that can supply a deadtime.
+    driver:
+        The driver to start acquiring. Must subclass ADBase.
+    exposure:
+        Desired exposure time; this is a no-op if it is None.
+    timeout:
+        How long to wait for the exposure time and acquire period to be set.
+    """
+    if exposure is not None:
+        full_frame_time = exposure + controller.get_deadtime(exposure)
+        await asyncio.gather(
+            driver.acquire_time.set(exposure, timeout=timeout),
+            driver.acquire_period.set(full_frame_time, timeout=timeout),
+        )
+
+
 async def start_acquiring_driver_and_ensure_status(
     driver: ADBase,
     good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES),
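
The helper's arithmetic is simply `acquire_period = exposure + deadtime`. A toy illustration with an invented controller supplying a fixed deadtime:

```python
class ToyController:
    """Stand-in for a DetectorControl that reports a fixed deadtime."""

    def get_deadtime(self, exposure: float) -> float:
        return 0.002  # 2 ms readout deadtime, invented for the example


# For exposure = 0.1 s the helper would set acquire_time = 0.1 and
# acquire_period = 0.1 + 0.002 = 0.102.
exposure = 0.1
full_frame_time = exposure + ToyController().get_deadtime(exposure)
assert abs(full_frame_time - 0.102) < 1e-12
```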
@@ -95,5 +95,3 @@ class Motor(StandardReadable, Movable, Stoppable):
         # Put with completion will never complete as we are waiting for completion on
         # the move above, so need to pass wait=False
         await self.motor_stop.trigger(wait=False)
-        # Trigger any callbacks
-        await self.user_readback._backend.put(await self.user_readback.get_value())
@@ -179,7 +179,15 @@ def _mock_common_blocks(device: Device, stripped_type: Optional[Type] = None):
             sub_device_2 = device_cls(SoftSignalBackend(signal_dtype))
             sub_device = DeviceVector({1: sub_device_1, 2: sub_device_2})
         else:
-            sub_device = DeviceVector({1: device_cls(), 2: device_cls()})
+            if hasattr(device, device_name):
+                sub_device = getattr(device, device_name)
+            else:
+                sub_device = DeviceVector(
+                    {
+                        1: device_cls(),
+                        2: device_cls(),
+                    }
+                )
 
         for sub_device_in_vector in sub_device.values():
             _mock_common_blocks(sub_device_in_vector, stripped_type=device_cls)
@@ -296,7 +304,9 @@ async def fill_pvi_entries(
 
 
 def create_children_from_annotations(
-    device: Device, included_optional_fields: Tuple[str, ...] = ()
+    device: Device,
+    included_optional_fields: Tuple[str, ...] = (),
+    device_vectors: Optional[Dict[str, int]] = None,
 ):
     """For initializing blocks at __init__ of ``device``."""
     for name, device_type in get_type_hints(type(device)).items():
@@ -307,12 +317,22 @@ def create_children_from_annotations(
             continue
         is_device_vector, device_type = _strip_device_vector(device_type)
         if (
-            is_device_vector
+            (is_device_vector and (not device_vectors or name not in device_vectors))
             or ((origin := get_origin(device_type)) and issubclass(origin, Signal))
             or (isclass(device_type) and issubclass(device_type, Signal))
         ):
             continue
 
-        sub_device = device_type()
-        setattr(device, name, sub_device)
-        create_children_from_annotations(sub_device)
+        if is_device_vector:
+            n_device_vector = DeviceVector(
+                {i: device_type() for i in range(1, device_vectors[name] + 1)}
+            )
+            setattr(device, name, n_device_vector)
+            for sub_device in n_device_vector.values():
+                create_children_from_annotations(
+                    sub_device, device_vectors=device_vectors
+                )
+        else:
+            sub_device = device_type()
+            setattr(device, name, sub_device)
+            create_children_from_annotations(sub_device, device_vectors=device_vectors)
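
The heart of the new `device_vectors` branch is the 1-based dictionary comprehension. A minimal sketch with an invented block type, showing the numbering a caller gets for `device_vectors={"pulse": 4}`:

```python
device_vectors = {"pulse": 4}  # invented mapping of vector name to size
name = "pulse"


class PulseBlock:
    """Toy stand-in for the real block class."""


# Same comprehension as in create_children_from_annotations above:
n_device_vector = {i: PulseBlock() for i in range(1, device_vectors[name] + 1)}
assert sorted(n_device_vector) == [1, 2, 3, 4]  # blocks are numbered from 1
```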
@@ -3,7 +3,7 @@ from __future__ import annotations
 from enum import Enum
 
 from ophyd_async.core import Device, DeviceVector, SignalR, SignalRW
-from ophyd_async.panda._table import SeqTable
+from ophyd_async.panda._table import DatasetTable, SeqTable
 
 
 class DataBlock(Device):
@@ -14,6 +14,7 @@ class DataBlock(Device):
     num_captured: SignalR[int]
     capture: SignalRW[bool]
     flush_period: SignalRW[float]
+    datasets: SignalR[DatasetTable]
 
 
 class PulseBlock(Device):
@@ -28,9 +28,7 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
         create_children_from_annotations(self)
         controller = PandaPcapController(pcap=self.pcap)
         writer = PandaHDFWriter(
-            prefix=prefix,
             directory_provider=directory_provider,
-            name_provider=lambda: name,
             panda_device=self,
         )
         super().__init__(
@@ -6,6 +6,16 @@ import numpy as np
 import numpy.typing as npt
 
 
+class PandaHdf5DatasetType(str, Enum):
+    FLOAT_64 = "float64"
+    UINT_32 = "uint32"
+
+
+class DatasetTable(TypedDict):
+    name: npt.NDArray[np.str_]
+    hdf5_type: Sequence[PandaHdf5DatasetType]
+
+
 class SeqTrigger(str, Enum):
     IMMEDIATE = "Immediate"
     BITA_0 = "BITA=0"
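
A sketch of constructing the new `DatasetTable`, assuming the two definitions above are in scope; the dataset names are invented:

```python
import numpy as np

table = DatasetTable(
    name=np.array(["x_centre", "y_centre"]),
    hdf5_type=[
        PandaHdf5DatasetType.FLOAT_64,
        PandaHdf5DatasetType.UINT_32,
    ],
)
assert list(table["name"]) == ["x_centre", "y_centre"]
```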