ophyd-async 0.5.0__py3-none-any.whl → 0.5.1__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Files changed (36)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +2 -0
  3. ophyd_async/core/_detector.py +38 -28
  4. ophyd_async/core/_hdf_dataset.py +1 -5
  5. ophyd_async/core/_mock_signal_utils.py +4 -3
  6. ophyd_async/core/_providers.py +24 -37
  7. ophyd_async/core/_signal.py +73 -28
  8. ophyd_async/epics/adcore/__init__.py +14 -3
  9. ophyd_async/epics/adcore/_core_io.py +29 -5
  10. ophyd_async/epics/adcore/_hdf_writer.py +45 -21
  11. ophyd_async/epics/adcore/_utils.py +69 -70
  12. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -1
  13. ophyd_async/epics/adpilatus/_pilatus_controller.py +1 -1
  14. ophyd_async/epics/adpilatus/_pilatus_io.py +1 -1
  15. ophyd_async/epics/advimba/_vimba_controller.py +3 -3
  16. ophyd_async/epics/advimba/_vimba_io.py +6 -4
  17. ophyd_async/epics/motor.py +15 -2
  18. ophyd_async/epics/signal/_aioca.py +12 -5
  19. ophyd_async/epics/signal/_common.py +1 -1
  20. ophyd_async/epics/signal/_p4p.py +14 -11
  21. ophyd_async/fastcs/panda/__init__.py +3 -3
  22. ophyd_async/fastcs/panda/{_common_blocks.py → _block.py} +2 -0
  23. ophyd_async/fastcs/panda/{_panda_controller.py → _control.py} +1 -1
  24. ophyd_async/fastcs/panda/_hdf_panda.py +4 -4
  25. ophyd_async/fastcs/panda/_trigger.py +1 -1
  26. ophyd_async/fastcs/panda/{_hdf_writer.py → _writer.py} +29 -22
  27. ophyd_async/plan_stubs/__init__.py +3 -0
  28. ophyd_async/plan_stubs/_nd_attributes.py +63 -0
  29. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +5 -2
  30. ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +1 -3
  31. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/METADATA +46 -44
  32. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/RECORD +36 -35
  33. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/WHEEL +1 -1
  34. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/LICENSE +0 -0
  35. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/entry_points.txt +0 -0
  36. {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.1.dist-info}/top_level.txt +0 -0
ophyd_async/epics/adcore/_hdf_writer.py

@@ -1,6 +1,7 @@
 import asyncio
 from pathlib import Path
 from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional
+from xml.etree import ElementTree as ET
 
 from bluesky.protocols import DataKey, Hints, StreamAsset
 
@@ -18,8 +19,13 @@ from ophyd_async.core import (
     wait_for_value,
 )
 
-from ._core_io import NDFileHDFIO
-from ._utils import FileWriteMode, convert_ad_dtype_to_np
+from ._core_io import NDArrayBaseIO, NDFileHDFIO
+from ._utils import (
+    FileWriteMode,
+    convert_ad_dtype_to_np,
+    convert_param_dtype_to_np,
+    convert_pv_dtype_to_np,
+)
 
 
 class ADHDFWriter(DetectorWriter):
@@ -29,13 +35,14 @@ class ADHDFWriter(DetectorWriter):
         path_provider: PathProvider,
         name_provider: NameProvider,
         shape_provider: ShapeProvider,
-        **scalar_datasets_paths: str,
+        *plugins: NDArrayBaseIO,
     ) -> None:
         self.hdf = hdf
         self._path_provider = path_provider
         self._name_provider = name_provider
         self._shape_provider = shape_provider
-        self._scalar_datasets_paths = scalar_datasets_paths
+
+        self._plugins = plugins
         self._capture_status: Optional[AsyncStatus] = None
         self._datasets: List[HDFDataset] = []
         self._file: Optional[HDFFile] = None
@@ -44,16 +51,19 @@ class ADHDFWriter(DetectorWriter):
     async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
         self._file = None
         info = self._path_provider(device_name=self.hdf.name)
-        file_path = str(info.root / info.resource_dir)
+
+        # Set the directory creation depth first, since dir creation callback happens
+        # when directory path PV is processed.
+        await self.hdf.create_directory.set(info.create_dir_depth)
+
         await asyncio.gather(
             self.hdf.num_extra_dims.set(0),
             self.hdf.lazy_open.set(True),
             self.hdf.swmr_mode.set(True),
             # See https://github.com/bluesky/ophyd-async/issues/122
-            self.hdf.file_path.set(file_path),
+            self.hdf.file_path.set(str(info.directory_path)),
             self.hdf.file_name.set(info.filename),
             self.hdf.file_template.set("%s/%s.h5"),
-            self.hdf.create_dir_depth.set(info.create_dir_depth),
             self.hdf.file_write_mode.set(FileWriteMode.stream),
             # Never use custom xml layout file but use the one defined
             # in the source code file NDFileHDF5LayoutXML.cpp
@@ -62,7 +72,7 @@ class ADHDFWriter(DetectorWriter):
 
         assert (
             await self.hdf.file_path_exists.get_value()
-        ), f"File path {file_path} for hdf plugin does not exist"
+        ), f"File path {info.directory_path} for hdf plugin does not exist"
 
         # Overwrite num_capture to go forever
         await self.hdf.num_capture.set(0)
@@ -90,16 +100,31 @@ class ADHDFWriter(DetectorWriter):
             )
         ]
         # And all the scalar datasets
-        for ds_name, ds_path in self._scalar_datasets_paths.items():
-            self._datasets.append(
-                HDFDataset(
-                    f"{name}-{ds_name}",
-                    f"/entry/instrument/NDAttributes/{ds_path}",
-                    (),
-                    "",
-                    multiplier,
-                )
-            )
+        for plugin in self._plugins:
+            maybe_xml = await plugin.nd_attributes_file.get_value()
+            # This is the check that ADCore does to see if it is an XML string
+            # rather than a filename to parse
+            if "<Attributes>" in maybe_xml:
+                root = ET.fromstring(maybe_xml)
+                for child in root:
+                    datakey = child.attrib["name"]
+                    if child.attrib.get("type", "EPICS_PV") == "EPICS_PV":
+                        np_datatype = convert_pv_dtype_to_np(
+                            child.attrib.get("dbrtype", "DBR_NATIVE")
+                        )
+                    else:
+                        np_datatype = convert_param_dtype_to_np(
+                            child.attrib.get("datatype", "INT")
+                        )
+                    self._datasets.append(
+                        HDFDataset(
+                            datakey,
+                            f"/entry/instrument/NDAttributes/{datakey}",
+                            (),
+                            np_datatype,
+                            multiplier,
+                        )
+                    )
 
         describe = {
             ds.data_key: DataKey(
@@ -133,7 +158,6 @@ class ADHDFWriter(DetectorWriter):
         if not self._file:
             path = Path(await self.hdf.full_file_name.get_value())
             self._file = HDFFile(
-                self._path_provider(),
                 # See https://github.com/bluesky/ophyd-async/issues/122
                 path,
                 self._datasets,
@@ -149,8 +173,8 @@ class ADHDFWriter(DetectorWriter):
 
     async def close(self):
         # Already done a caput callback in _capture_status, so can't do one here
-        await self.hdf.capture.set(0, wait=False)
-        await wait_for_value(self.hdf.capture, 0, DEFAULT_TIMEOUT)
+        await self.hdf.capture.set(False, wait=False)
+        await wait_for_value(self.hdf.capture, False, DEFAULT_TIMEOUT)
         if self._capture_status:
             # We kicked off an open, so wait for it to return
             await self._capture_status
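The rewritten `open()` replaces the old `**scalar_datasets_paths` mapping: each plugin passed to the writer has its `nd_attributes_file` PV read, and when that PV holds inline NDAttributes XML (rather than a filename), every `<Attribute>` element becomes an extra `HDFDataset`. A minimal sketch of just that parsing step, with invented attribute names and PVs:

```python
from xml.etree import ElementTree as ET

# Invented inline NDAttributes XML, in the shape ADCore accepts
maybe_xml = """<Attributes>
    <Attribute name="Temperature" type="EPICS_PV" source="BL01:TEMP" dbrtype="DBR_DOUBLE"/>
    <Attribute name="CamModel" type="PARAM" source="MODEL" datatype="STRING"/>
</Attributes>"""

# Same check ADCore does to tell inline XML from a filename
if "<Attributes>" in maybe_xml:
    for child in ET.fromstring(maybe_xml):
        # EPICS_PV attributes carry a dbrtype, PARAM attributes a datatype
        kind = child.attrib.get("type", "EPICS_PV")
        dtype = child.attrib.get("dbrtype") or child.attrib.get("datatype")
        print(child.attrib["name"], kind, dtype)
```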
ophyd_async/epics/adcore/_utils.py

@@ -1,8 +1,9 @@
+from dataclasses import dataclass
 from enum import Enum
 from typing import Optional
-from xml.etree import cElementTree as ET
 
 from ophyd_async.core import DEFAULT_TIMEOUT, SignalRW, T, wait_for_value
+from ophyd_async.core._signal import SignalR
 
 
 class ADBaseDataType(str, Enum):
@@ -34,6 +35,45 @@ def convert_ad_dtype_to_np(ad_dtype: ADBaseDataType) -> str:
     return ad_dtype_to_np_dtype[ad_dtype]
 
 
+def convert_pv_dtype_to_np(datatype: str) -> str:
+    _pvattribute_to_ad_datatype = {
+        "DBR_SHORT": ADBaseDataType.Int16,
+        "DBR_ENUM": ADBaseDataType.Int16,
+        "DBR_INT": ADBaseDataType.Int32,
+        "DBR_LONG": ADBaseDataType.Int32,
+        "DBR_FLOAT": ADBaseDataType.Float32,
+        "DBR_DOUBLE": ADBaseDataType.Float64,
+    }
+    if datatype in ["DBR_STRING", "DBR_CHAR"]:
+        np_datatype = "s40"
+    elif datatype == "DBR_NATIVE":
+        raise ValueError("Don't support DBR_NATIVE yet")
+    else:
+        try:
+            np_datatype = convert_ad_dtype_to_np(_pvattribute_to_ad_datatype[datatype])
+        except KeyError:
+            raise ValueError(f"Invalid dbr type {datatype}")
+    return np_datatype
+
+
+def convert_param_dtype_to_np(datatype: str) -> str:
+    _paramattribute_to_ad_datatype = {
+        "INT": ADBaseDataType.Int32,
+        "INT64": ADBaseDataType.Int64,
+        "DOUBLE": ADBaseDataType.Float64,
+    }
+    if datatype in ["STRING"]:
+        np_datatype = "s40"
+    else:
+        try:
+            np_datatype = convert_ad_dtype_to_np(
+                _paramattribute_to_ad_datatype[datatype]
+            )
+        except KeyError:
+            raise ValueError(f"Invalid datatype {datatype}")
+    return np_datatype
+
+
 class FileWriteMode(str, Enum):
     single = "Single"
     capture = "Capture"
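The two new helpers translate NDAttributes type strings into numpy dtype strings via `ADBaseDataType`. Some illustrative calls (the string cases are literal in the code above; numeric results come from `convert_ad_dtype_to_np`, whose mapping is not shown in this diff):

```python
convert_pv_dtype_to_np("DBR_STRING")   # "s40": strings become fixed-width
convert_param_dtype_to_np("STRING")    # "s40"
convert_pv_dtype_to_np("DBR_DOUBLE")   # whatever convert_ad_dtype_to_np gives Float64
convert_pv_dtype_to_np("DBR_NATIVE")   # ValueError: "Don't support DBR_NATIVE yet"
convert_param_dtype_to_np("FLOAT")     # ValueError: invalid datatype
```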
@@ -52,75 +92,34 @@ class NDAttributeDataType(str, Enum):
     STRING = "STRING"
 
 
-class NDAttributesXML:
-    """Helper to make NDAttributesFile XML for areaDetector"""
-
-    _dbr_types = {
-        None: "DBR_NATIVE",
-        NDAttributeDataType.INT: "DBR_LONG",
-        NDAttributeDataType.DOUBLE: "DBR_DOUBLE",
-        NDAttributeDataType.STRING: "DBR_STRING",
-    }
-
-    def __init__(self):
-        self._root = ET.Element("Attributes")
-
-    def add_epics_pv(
-        self,
-        name: str,
-        pv: str,
-        datatype: Optional[NDAttributeDataType] = None,
-        description: str = "",
-    ):
-        """Add a PV to the attribute list
-
-        Args:
-            name: The attribute name
-            pv: The pv to get from
-            datatype: An override datatype, otherwise will use native EPICS type
-            description: A description that appears in the HDF file as an attribute
-        """
-        ET.SubElement(
-            self._root,
-            "Attribute",
-            name=name,
-            type="EPICS_PV",
-            source=pv,
-            datatype=self._dbr_types[datatype],
-            description=description,
-        )
-
-    def add_param(
-        self,
-        name: str,
-        param: str,
-        datatype: NDAttributeDataType,
-        addr: int = 0,
-        description: str = "",
-    ):
-        """Add a driver or plugin parameter to the attribute list
-
-        Args:
-            name: The attribute name
-            param: The parameter string as seen in the INP link of the record
-            datatype: The datatype of the parameter
-            description: A description that appears in the HDF file as an attribute
-        """
-        ET.SubElement(
-            self._root,
-            "Attribute",
-            name=name,
-            type="PARAM",
-            source=param,
-            addr=str(addr),
-            datatype=datatype.value,
-            description=description,
-        )
-
-    def __str__(self) -> str:
-        """Output the XML pretty printed"""
-        ET.indent(self._root, space=" ", level=0)
-        return ET.tostring(self._root, xml_declaration=True, encoding="utf-8").decode()
+class NDAttributePvDbrType(str, Enum):
+    DBR_SHORT = "DBR_SHORT"
+    DBR_ENUM = "DBR_ENUM"
+    DBR_INT = "DBR_INT"
+    DBR_LONG = "DBR_LONG"
+    DBR_FLOAT = "DBR_FLOAT"
+    DBR_DOUBLE = "DBR_DOUBLE"
+    DBR_STRING = "DBR_STRING"
+    DBR_CHAR = "DBR_CHAR"
+
+
+@dataclass
+class NDAttributePv:
+    name: str  # name of attribute stamped on array, also scientifically useful name
+    # when appended to device.name
+    signal: SignalR  # caget the pv given by signal.source and attach to each frame
+    dbrtype: NDAttributePvDbrType
+    description: str = ""  # A description that appears in the HDF file as an attribute
+
+
+@dataclass
+class NDAttributeParam:
+    name: str  # name of attribute stamped on array, also scientifically useful name
+    # when appended to device.name
+    param: str  # The parameter string as seen in the INP link of the record
+    datatype: NDAttributeDataType  # The datatype of the parameter
+    addr: int = 0  # The address as seen in the INP link of the record
+    description: str = ""  # A description that appears in the HDF file as an attribute
 
 
 async def stop_busy_record(
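With `NDAttributesXML` removed, attributes are now described declaratively by these two dataclasses (presumably consumed by the new `ophyd_async/plan_stubs/_nd_attributes.py` listed above, whose diff is not shown in this section). A hypothetical usage sketch with made-up PV and parameter names:

```python
from ophyd_async.epics.signal import epics_signal_r

# An attribute caget from an EPICS PV and stamped on every frame
temperature = NDAttributePv(
    name="Temperature",
    signal=epics_signal_r(float, "BL01:TEMP"),  # made-up PV
    dbrtype=NDAttributePvDbrType.DBR_DOUBLE,
    description="Sample temperature",
)

# An attribute sourced from a driver/plugin parameter
unique_id = NDAttributeParam(
    name="UniqueId",
    param="NDUniqueId",  # made-up parameter string from an INP link
    datatype=NDAttributeDataType.INT,
    description="Unique id of the frame",
)
```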
ophyd_async/epics/adkinetix/_kinetix_io.py

@@ -14,6 +14,7 @@ class KinetixReadoutMode(str, Enum):
     sensitivity = 1
     speed = 2
     dynamic_range = 3
+    sub_electron = 4
 
 
 class KinetixDriverIO(adcore.ADBaseIO):
@@ -24,5 +25,7 @@ class KinetixDriverIO(adcore.ADBaseIO):
         self.trigger_mode = epics_signal_rw_rbv(
             KinetixTriggerMode, prefix + "TriggerMode"
         )
-        self.mode = epics_signal_rw_rbv(KinetixReadoutMode, prefix + "ReadoutPortIdx")
+        self.readout_port_idx = epics_signal_rw_rbv(
+            KinetixReadoutMode, prefix + "ReadoutPortIdx"
+        )
         super().__init__(prefix, name)
ophyd_async/epics/adpilatus/_pilatus_controller.py

@@ -54,7 +54,7 @@ class PilatusController(DetectorControl):
         # is actually ready. Should wait for that too or we risk dropping
         # a frame
         await wait_for_value(
-            self._drv.armed_for_triggers,
+            self._drv.armed,
             True,
             timeout=DEFAULT_TIMEOUT,
         )
ophyd_async/epics/adpilatus/_pilatus_io.py

@@ -19,5 +19,5 @@ class PilatusDriverIO(adcore.ADBaseIO):
         self.trigger_mode = epics_signal_rw_rbv(
             PilatusTriggerMode, prefix + "TriggerMode"
         )
-        self.armed_for_triggers = epics_signal_r(bool, prefix + "Armed")
+        self.armed = epics_signal_r(bool, prefix + "Armed")
         super().__init__(prefix, name)
ophyd_async/epics/advimba/_vimba_controller.py

@@ -39,7 +39,7 @@ class VimbaController(DetectorControl):
     ) -> AsyncStatus:
         await asyncio.gather(
             self._drv.trigger_mode.set(TRIGGER_MODE[trigger]),
-            self._drv.expose_mode.set(EXPOSE_OUT_MODE[trigger]),
+            self._drv.exposure_mode.set(EXPOSE_OUT_MODE[trigger]),
            self._drv.num_images.set(num),
             self._drv.image_mode.set(adcore.ImageMode.multiple),
         )
@@ -49,9 +49,9 @@ class VimbaController(DetectorControl):
         ]:
             await self._drv.acquire_time.set(exposure)
         if trigger != DetectorTrigger.internal:
-            self._drv.trig_source.set(VimbaTriggerSource.line1)
+            self._drv.trigger_source.set(VimbaTriggerSource.line1)
         else:
-            self._drv.trig_source.set(VimbaTriggerSource.freerun)
+            self._drv.trigger_source.set(VimbaTriggerSource.freerun)
         return await adcore.start_acquiring_driver_and_ensure_status(self._drv)
 
     async def disarm(self):
ophyd_async/epics/advimba/_vimba_io.py

@@ -50,15 +50,17 @@ class VimbaDriverIO(adcore.ADBaseIO):
 
     def __init__(self, prefix: str, name: str = "") -> None:
         # self.pixel_format = epics_signal_rw_rbv(PixelFormat, prefix + "PixelFormat")
-        self.convert_format = epics_signal_rw_rbv(
+        self.convert_pixel_format = epics_signal_rw_rbv(
             VimbaConvertFormat, prefix + "ConvertPixelFormat"
         )  # Pixel format of data outputted to AD
-        self.trig_source = epics_signal_rw_rbv(
+        self.trigger_source = epics_signal_rw_rbv(
             VimbaTriggerSource, prefix + "TriggerSource"
         )
         self.trigger_mode = epics_signal_rw_rbv(VimbaOnOff, prefix + "TriggerMode")
-        self.overlap = epics_signal_rw_rbv(VimbaOverlap, prefix + "TriggerOverlap")
-        self.expose_mode = epics_signal_rw_rbv(
+        self.trigger_overlap = epics_signal_rw_rbv(
+            VimbaOverlap, prefix + "TriggerOverlap"
+        )
+        self.exposure_mode = epics_signal_rw_rbv(
             VimbaExposeOutMode, prefix + "ExposureMode"
         )
         super().__init__(prefix, name)
ophyd_async/epics/motor.py

@@ -1,7 +1,13 @@
 import asyncio
 from typing import Optional
 
-from bluesky.protocols import Flyable, Movable, Preparable, Stoppable
+from bluesky.protocols import (
+    Flyable,
+    Locatable,
+    Location,
+    Preparable,
+    Stoppable,
+)
 from pydantic import BaseModel, Field
 
 from ophyd_async.core import (
@@ -51,7 +57,7 @@ class FlyMotorInfo(BaseModel):
     timeout: CalculatableTimeout = Field(frozen=True, default=CalculateTimeout)
 
 
-class Motor(StandardReadable, Movable, Stoppable, Flyable, Preparable):
+class Motor(StandardReadable, Locatable, Stoppable, Flyable, Preparable):
     """Device that moves a motor record"""
 
     def __init__(self, prefix: str, name="") -> None:
@@ -193,6 +199,13 @@ class Motor(StandardReadable, Movable, Stoppable, Flyable, Preparable):
         await self.velocity.set(abs(max_speed))
         return fly_velocity
 
+    async def locate(self) -> Location[float]:
+        location: Location = {
+            "setpoint": await self.user_setpoint.get_value(),
+            "readback": await self.user_readback.get_value(),
+        }
+        return location
+
     async def _prepare_motor_path(
         self, fly_velocity: float, start_position: float, end_position: float
     ) -> float:
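`Motor` now implements `Locatable` instead of plain `Movable` (in bluesky's protocols `Locatable` extends `Movable`, so `set()` is unaffected), letting plans ask both where the motor was demanded to go and where it actually is. A small sketch using the mock backend so no IOC is needed (the PV prefix is invented):

```python
import asyncio

from ophyd_async.epics.motor import Motor


async def main() -> None:
    motor = Motor("BL01:MTR01:", name="motor")  # invented prefix
    await motor.connect(mock=True)  # mock signal backend, no IOC required
    location = await motor.locate()
    print(location["setpoint"], location["readback"])


asyncio.run(main())
```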
ophyd_async/epics/signal/_aioca.py

@@ -47,7 +47,7 @@ def _data_key_from_augmented_value(
     value: AugmentedValue,
     *,
     choices: Optional[List[str]] = None,
-    dtype: Optional[str] = None,
+    dtype: Optional[Dtype] = None,
 ) -> DataKey:
     """Use the return value of get with FORMAT_CTRL to construct a DataKey
     describing the signal. See docstring of AugmentedValue for expected
@@ -175,7 +175,7 @@ class CaBoolConverter(CaConverter):
         return bool(value)
 
     def get_datakey(self, value: AugmentedValue) -> DataKey:
-        return _data_key_from_augmented_value(value, dtype="bool")
+        return _data_key_from_augmented_value(value, dtype="boolean")
 
 
 class DisconnectedCaConverter(CaConverter):
@@ -229,10 +229,17 @@ def make_converter(
         value = list(values.values())[0]
         # Done the dbr check, so enough to check one of the values
         if datatype and not isinstance(value, datatype):
-            raise TypeError(
-                f"{pv} has type {type(value).__name__.replace('ca_', '')} "
-                + f"not {datatype.__name__}"
+            # Allow int signals to represent float records when prec is 0
+            is_prec_zero_float = (
+                isinstance(value, float)
+                and get_unique({k: v.precision for k, v in values.items()}, "precision")
+                == 0
             )
+            if not (datatype is int and is_prec_zero_float):
+                raise TypeError(
+                    f"{pv} has type {type(value).__name__.replace('ca_', '')} "
+                    + f"not {datatype.__name__}"
+                )
         return CaConverter(pv_dbr, None)
 
 
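The relaxed check means an `int`-typed signal pointed at a float record is now accepted when the record's display precision is 0, instead of raising `TypeError` at connect time; the PVA converter in `_p4p.py` below gains the same allowance. A sketch with an invented PV:

```python
from ophyd_async.epics.signal import epics_signal_rw

# Suppose BL01:DET:COUNTS is an ai/ao (float) record with PREC=0: its values
# are always whole numbers, so an int signal is now allowed to wrap it.
counts = epics_signal_rw(int, "BL01:DET:COUNTS")  # invented PV
```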
ophyd_async/epics/signal/_common.py

@@ -55,7 +55,7 @@ def get_supported_values(
             f"which do not match {datatype}, which has {choices}."
         )
         return {x: datatype(x) if x else "_" for x in pv_choices}
-    elif datatype is None:
+    elif datatype is None or datatype is str:
         return {x: x or "_" for x in pv_choices}
 
     raise TypeError(
ophyd_async/epics/signal/_p4p.py

@@ -64,7 +64,7 @@ def _data_key_from_value(
     *,
     shape: Optional[list[int]] = None,
     choices: Optional[list[str]] = None,
-    dtype: Optional[str] = None,
+    dtype: Optional[Dtype] = None,
 ) -> DataKey:
     """
     Args:
@@ -85,7 +85,7 @@ def _data_key_from_value(
     if isinstance(type_code, tuple):
         dtype_numpy = ""
         if type_code[1] == "enum_t":
-            if dtype == "bool":
+            if dtype == "boolean":
                 dtype_numpy = "<i2"
             else:
                 for item in type_code[2]:
@@ -241,7 +241,7 @@ class PvaEmumBoolConverter(PvaConverter):
         return bool(value["value"]["index"])
 
     def get_datakey(self, source: str, value) -> DataKey:
-        return _data_key_from_value(source, value, dtype="bool")
+        return _data_key_from_value(source, value, dtype="boolean")
 
 
 class PvaTableConverter(PvaConverter):
@@ -335,14 +335,17 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
             return PvaEnumConverter(
                 get_supported_values(pv, datatype, datatype.choices)
             )
-        elif (
-            datatype
-            and not issubclass(typ, datatype)
-            and not (
-                typ is float and datatype is int
-            )  # Allow float -> int since prec can be 0
-        ):
-            raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
+        elif datatype and not issubclass(typ, datatype):
+            # Allow int signals to represent float records when prec is 0
+            is_prec_zero_float = typ is float and (
+                get_unique(
+                    {k: v["display"]["precision"] for k, v in values.items()},
+                    "precision",
+                )
+                == 0
+            )
+            if not (datatype is int and is_prec_zero_float):
+                raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
         return PvaConverter()
     elif "NTTable" in typeid:
         return PvaTableConverter()
ophyd_async/fastcs/panda/__init__.py

@@ -1,4 +1,4 @@
-from ._common_blocks import (
+from ._block import (
     CommonPandaBlocks,
     DataBlock,
     EnableDisableOptions,
@@ -9,9 +9,8 @@ from ._common_blocks import (
     SeqBlock,
     TimeUnits,
 )
+from ._control import PandaPcapController
 from ._hdf_panda import HDFPanda
-from ._hdf_writer import PandaHDFWriter
-from ._panda_controller import PandaPcapController
 from ._table import (
     DatasetTable,
     PandaHdf5DatasetType,
@@ -28,6 +27,7 @@ from ._trigger import (
     StaticSeqTableTriggerLogic,
 )
 from ._utils import phase_sorter
+from ._writer import PandaHDFWriter
 
 __all__ = [
     "CommonPandaBlocks",
ophyd_async/fastcs/panda/{_common_blocks.py → _block.py}

@@ -13,6 +13,8 @@ class DataBlock(Device):
     hdf_file_name: SignalRW[str]
     num_capture: SignalRW[int]
     num_captured: SignalR[int]
+    create_directory: SignalRW[int]
+    directory_exists: SignalR[bool]
     capture: SignalRW[bool]
     flush_period: SignalRW[float]
     datasets: SignalR[DatasetTable]
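The two new `DataBlock` signals give the PandA writer the same directory-creation handshake the AD HDF writer gained above; presumably `PandaHDFWriter` (its diff is not shown in this section) drives them the same way. A hedged sketch, assuming `panda` is a connected `HDFPanda`:

```python
from ophyd_async.fastcs.panda import HDFPanda


async def ensure_output_directory(panda: HDFPanda, depth: int) -> None:
    # Ask the IOC to create up to `depth` levels of the output directory,
    # then check it reports the directory as existing before capturing.
    await panda.data.create_directory.set(depth)
    assert await panda.data.directory_exists.get_value(), "directory not created"
```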
ophyd_async/fastcs/panda/{_panda_controller.py → _control.py}

@@ -8,7 +8,7 @@ from ophyd_async.core import (
     wait_for_value,
 )
 
-from ._common_blocks import PcapBlock
+from ._block import PcapBlock
 
 
 class PandaPcapController(DetectorControl):
ophyd_async/fastcs/panda/_hdf_panda.py

@@ -5,9 +5,9 @@ from typing import Sequence
 from ophyd_async.core import DEFAULT_TIMEOUT, PathProvider, SignalR, StandardDetector
 from ophyd_async.epics.pvi import create_children_from_annotations, fill_pvi_entries
 
-from ._common_blocks import CommonPandaBlocks
-from ._hdf_writer import PandaHDFWriter
-from ._panda_controller import PandaPcapController
+from ._block import CommonPandaBlocks
+from ._control import PandaPcapController
+from ._writer import PandaHDFWriter
 
 
 class HDFPanda(CommonPandaBlocks, StandardDetector):
@@ -26,7 +26,7 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
             prefix=prefix,
             path_provider=path_provider,
             name_provider=lambda: name,
-            panda_device=self,
+            panda_data_block=self.data,
         )
         super().__init__(
             controller=controller,
ophyd_async/fastcs/panda/_trigger.py

@@ -5,7 +5,7 @@ from pydantic import BaseModel, Field
 
 from ophyd_async.core import TriggerLogic, wait_for_value
 
-from ._common_blocks import PcompBlock, PcompDirectionOptions, SeqBlock, TimeUnits
+from ._block import PcompBlock, PcompDirectionOptions, SeqBlock, TimeUnits
 from ._table import SeqTable
 
 