ophyd-async 0.10.0a2__py3-none-any.whl → 0.10.0a3__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +0 -2
  3. ophyd_async/core/_derived_signal.py +5 -5
  4. ophyd_async/core/_derived_signal_backend.py +4 -4
  5. ophyd_async/core/_detector.py +69 -55
  6. ophyd_async/core/_device.py +3 -3
  7. ophyd_async/core/_hdf_dataset.py +1 -5
  8. ophyd_async/core/_providers.py +0 -8
  9. ophyd_async/core/_readable.py +13 -1
  10. ophyd_async/epics/adandor/_andor_controller.py +1 -1
  11. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  12. ophyd_async/epics/adcore/_core_logic.py +3 -3
  13. ophyd_async/epics/adcore/_core_writer.py +22 -29
  14. ophyd_async/epics/adcore/_hdf_writer.py +17 -15
  15. ophyd_async/epics/adcore/_jpeg_writer.py +1 -3
  16. ophyd_async/epics/adcore/_tiff_writer.py +1 -3
  17. ophyd_async/epics/adkinetix/_kinetix_controller.py +1 -1
  18. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  19. ophyd_async/epics/adpilatus/_pilatus_io.py +1 -1
  20. ophyd_async/epics/advimba/_vimba_controller.py +1 -1
  21. ophyd_async/epics/core/_p4p.py +1 -1
  22. ophyd_async/epics/core/_pvi_connector.py +5 -3
  23. ophyd_async/epics/eiger/__init__.py +2 -4
  24. ophyd_async/epics/eiger/_odin_io.py +57 -34
  25. ophyd_async/epics/testing/_example_ioc.py +1 -0
  26. ophyd_async/epics/testing/test_records.db +5 -0
  27. ophyd_async/fastcs/eiger/__init__.py +13 -0
  28. ophyd_async/{epics → fastcs}/eiger/_eiger.py +5 -5
  29. ophyd_async/{epics → fastcs}/eiger/_eiger_controller.py +12 -26
  30. ophyd_async/fastcs/eiger/_eiger_io.py +53 -0
  31. ophyd_async/fastcs/panda/_block.py +2 -0
  32. ophyd_async/fastcs/panda/_hdf_panda.py +0 -1
  33. ophyd_async/fastcs/panda/_writer.py +23 -22
  34. ophyd_async/plan_stubs/_fly.py +2 -2
  35. ophyd_async/sim/_blob_detector.py +0 -1
  36. ophyd_async/sim/_blob_detector_controller.py +1 -1
  37. ophyd_async/sim/_blob_detector_writer.py +15 -19
  38. ophyd_async/sim/_pattern_generator.py +2 -0
  39. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a3.dist-info}/METADATA +1 -1
  40. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a3.dist-info}/RECORD +43 -42
  41. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a3.dist-info}/WHEEL +1 -1
  42. ophyd_async/epics/eiger/_eiger_io.py +0 -42
  43. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a3.dist-info}/licenses/LICENSE +0 -0
  44. {ophyd_async-0.10.0a2.dist-info → ophyd_async-0.10.0a3.dist-info}/top_level.txt +0 -0
@@ -5,12 +5,12 @@ from xml.etree import ElementTree as ET
 
 from bluesky.protocols import StreamAsset
 from event_model import DataKey
+from pydantic import PositiveInt
 
 from ophyd_async.core import (
     DatasetDescriber,
     HDFDatasetDescription,
     HDFDocumentComposer,
-    NameProvider,
     PathProvider,
 )
 
@@ -31,14 +31,12 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
         self,
         fileio: NDFileHDFIO,
         path_provider: PathProvider,
-        name_provider: NameProvider,
         dataset_describer: DatasetDescriber,
         plugins: dict[str, NDPluginBaseIO] | None = None,
     ) -> None:
         super().__init__(
             fileio,
             path_provider,
-            name_provider,
             dataset_describer,
             plugins=plugins,
             file_extension=".h5",
@@ -48,7 +46,9 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
         self._composer: HDFDocumentComposer | None = None
         self._filename_template = "%s%s"
 
-    async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
+    async def open(
+        self, name: str, exposures_per_event: PositiveInt = 1
+    ) -> dict[str, DataKey]:
         self._composer = None
 
         # Setting HDF writer specific signals
@@ -64,13 +64,13 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
         )
 
         # Set common AD file plugin params, begin capturing
-        await self.begin_capture()
+        await self.begin_capture(name)
 
-        name = self._name_provider()
         detector_shape = await self._dataset_describer.shape()
         np_dtype = await self._dataset_describer.np_datatype()
-        self._multiplier = multiplier
-        outer_shape = (multiplier,) if multiplier > 1 else ()
+
+        # Used by the base class
+        self._exposures_per_event = exposures_per_event
 
         # Determine number of frames that will be saved per HDF chunk
         frames_per_chunk = await self.fileio.num_frames_chunks.get_value()
@@ -80,9 +80,8 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
             HDFDatasetDescription(
                 data_key=name,
                 dataset="/entry/data/data",
-                shape=detector_shape,
+                shape=(exposures_per_event, *detector_shape),
                 dtype_numpy=np_dtype,
-                multiplier=multiplier,
                 chunk_shape=(frames_per_chunk, *detector_shape),
             )
         ]
@@ -107,20 +106,23 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
                 HDFDatasetDescription(
                     data_key=data_key,
                     dataset=f"/entry/instrument/NDAttributes/{data_key}",
-                    shape=(),
+                    shape=(exposures_per_event,)
+                    if exposures_per_event > 1
+                    else (),
                     dtype_numpy=np_datatype,
                     # NDAttributes appear to always be configured with
                     # this chunk size
                     chunk_shape=(16384,),
-                    multiplier=multiplier,
                 )
             )
 
         describe = {
             ds.data_key: DataKey(
                 source=self.fileio.full_file_name.source,
-                shape=list(outer_shape + tuple(ds.shape)),
-                dtype="array" if ds.shape else "number",
+                shape=list(ds.shape),
+                dtype="array"
+                if exposures_per_event > 1 or len(ds.shape) > 1
+                else "number",
                 dtype_numpy=ds.dtype_numpy,
                 external="STREAM:",
             )
@@ -129,7 +131,7 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
         return describe
 
     async def collect_stream_docs(
-        self, indices_written: int
+        self, name: str, indices_written: int
     ) -> AsyncIterator[StreamAsset]:
         # TODO: fail if we get dropped frames
         await self.fileio.flush_now.set(True)
@@ -1,4 +1,4 @@
-from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
+from ophyd_async.core import DatasetDescriber, PathProvider
 
 from ._core_io import NDFileIO, NDPluginBaseIO
 from ._core_writer import ADWriter
@@ -11,14 +11,12 @@ class ADJPEGWriter(ADWriter[NDFileIO]):
         self,
         fileio: NDFileIO,
         path_provider: PathProvider,
-        name_provider: NameProvider,
         dataset_describer: DatasetDescriber,
         plugins: dict[str, NDPluginBaseIO] | None = None,
     ) -> None:
         super().__init__(
             fileio,
             path_provider,
-            name_provider,
             dataset_describer,
             plugins=plugins,
             file_extension=".jpg",
@@ -1,4 +1,4 @@
-from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
+from ophyd_async.core import DatasetDescriber, PathProvider
 
 from ._core_io import NDFileIO, NDPluginBaseIO
 from ._core_writer import ADWriter
@@ -11,14 +11,12 @@ class ADTIFFWriter(ADWriter[NDFileIO]):
         self,
         fileio: NDFileIO,
         path_provider: PathProvider,
-        name_provider: NameProvider,
         dataset_describer: DatasetDescriber,
         plugins: dict[str, NDPluginBaseIO] | None = None,
     ) -> None:
         super().__init__(
             fileio,
             path_provider,
-            name_provider,
             dataset_describer,
             plugins=plugins,
             file_extension=".tiff",
@@ -34,7 +34,7 @@ class KinetixController(adcore.ADBaseController[KinetixDriverIO]):
             self.driver.trigger_mode.set(
                 KINETIX_TRIGGER_MODE_MAP[trigger_info.trigger]
             ),
-            self.driver.num_images.set(trigger_info.total_number_of_triggers),
+            self.driver.num_images.set(trigger_info.total_number_of_exposures),
             self.driver.image_mode.set(adcore.ADImageMode.MULTIPLE),
         )
         if trigger_info.livetime is not None and trigger_info.trigger not in [
@@ -52,8 +52,8 @@ class PilatusController(adcore.ADBaseController[PilatusDriverIO]):
             self.driver.trigger_mode.set(self._get_trigger_mode(trigger_info.trigger)),
             self.driver.num_images.set(
                 999_999
-                if trigger_info.total_number_of_triggers == 0
-                else trigger_info.total_number_of_triggers
+                if trigger_info.total_number_of_exposures == 0
+                else trigger_info.total_number_of_exposures
             ),
             self.driver.image_mode.set(adcore.ADImageMode.MULTIPLE),
         )
@@ -21,4 +21,4 @@ class PilatusDriverIO(adcore.ADBaseIO):
     """This mirrors the interface provided by ADPilatus/db/pilatus.template."""
     """See HTML docs at https://areadetector.github.io/areaDetector/ADPilatus/pilatusDoc.html"""
     trigger_mode: A[SignalRW[PilatusTriggerMode], PvSuffix.rbv("TriggerMode")]
-    armed: A[SignalR[bool], PvSuffix.rbv("Armed_RBV")]
+    armed: A[SignalR[bool], PvSuffix("Armed")]
@@ -40,7 +40,7 @@ class VimbaController(adcore.ADBaseController[VimbaDriverIO]):
         await asyncio.gather(
             self.driver.trigger_mode.set(TRIGGER_MODE[trigger_info.trigger]),
             self.driver.exposure_mode.set(EXPOSE_OUT_MODE[trigger_info.trigger]),
-            self.driver.num_images.set(trigger_info.total_number_of_triggers),
+            self.driver.num_images.set(trigger_info.total_number_of_exposures),
             self.driver.image_mode.set(adcore.ADImageMode.MULTIPLE),
         )
         if trigger_info.livetime is not None and trigger_info.trigger not in [
@@ -383,7 +383,7 @@ class PvaSignalBackend(EpicsSignalBackend[SignalDatatypeT]):
 
     async def put(self, value: SignalDatatypeT | None, wait: bool):
         if value is None:
-            write_value = self.initial_values[self.write_pv]
+            write_value = self.initial_values[self.write_pv]["value"]
         else:
             write_value = self.converter.write_value(value)
         await context().put(self.write_pv, {"value": write_value}, wait=wait)
@@ -43,10 +43,10 @@ FastCSPVIVector = dict[Literal["d"], Entry]
 
 def _get_signal_details(entry: Entry) -> tuple[type[Signal], str, str]:
     match entry:
-        case {"r": read_pv}:
-            return SignalR, read_pv, read_pv
         case {"r": read_pv, "w": write_pv}:
             return SignalRW, read_pv, write_pv
+        case {"r": read_pv}:
+            return SignalR, read_pv, read_pv
         case {"w": write_pv}:
             return SignalW, write_pv, write_pv
         case {"rw": read_write_pv}:
@@ -77,6 +77,8 @@ class PviDeviceConnector(DeviceConnector):
     hinted Signals are not present.
     """
 
+    mock_device_vector_len: int = 2
+
     def __init__(self, prefix: str = "", error_hint: str = "") -> None:
         # TODO: what happens if we get a leading "pva://" here?
         self.prefix = prefix
@@ -110,7 +112,7 @@ class PviDeviceConnector(DeviceConnector):
             backend.write_pv = write_pv
 
     async def connect_mock(self, device: Device, mock: LazyMock):
-        self.filler.create_device_vector_entries_to_mock(2)
+        self.filler.create_device_vector_entries_to_mock(self.mock_device_vector_len)
         # Set the name of the device to name all children
         device.set_name(device.name)
         return await super().connect_mock(device, mock)
@@ -1,5 +1,3 @@
-from ._eiger import EigerDetector, EigerTriggerInfo
-from ._eiger_controller import EigerController
-from ._eiger_io import EigerDriverIO
+from ._odin_io import Odin, OdinWriter, Writing
 
-__all__ = ["EigerDetector", "EigerController", "EigerDriverIO", "EigerTriggerInfo"]
+__all__ = ["Odin", "OdinWriter", "Writing"]
@@ -2,17 +2,19 @@ import asyncio
 from collections.abc import AsyncGenerator, AsyncIterator
 
 from bluesky.protocols import StreamAsset
-from event_model import DataKey
+from event_model import DataKey  # type: ignore
 
 from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
     DetectorWriter,
     Device,
     DeviceVector,
-    NameProvider,
     PathProvider,
     StrictEnum,
     observe_value,
+    set_and_wait_for_other_value,
     set_and_wait_for_value,
+    wait_for_value,
 )
 from ophyd_async.epics.core import (
     epics_signal_r,
@@ -22,44 +24,53 @@ from ophyd_async.epics.core import (
 
 
 class Writing(StrictEnum):
-    ON = "ON"
-    OFF = "OFF"
+    CAPTURE = "Capture"
+    DONE = "Done"
 
 
 class OdinNode(Device):
     def __init__(self, prefix: str, name: str = "") -> None:
-        self.writing = epics_signal_r(Writing, f"{prefix}HDF:Writing")
-        self.connected = epics_signal_r(bool, f"{prefix}Connected")
+        self.writing = epics_signal_r(str, f"{prefix}Writing_RBV")
+        self.frames_dropped = epics_signal_r(int, f"{prefix}FramesDropped_RBV")
+        self.frames_time_out = epics_signal_r(int, f"{prefix}FramesTimedOut_RBV")
+        self.error_status = epics_signal_r(str, f"{prefix}FPErrorState_RBV")
+        self.fp_initialised = epics_signal_r(int, f"{prefix}FPProcessConnected_RBV")
+        self.fr_initialised = epics_signal_r(int, f"{prefix}FRProcessConnected_RBV")
+        self.num_captured = epics_signal_r(int, f"{prefix}NumCaptured_RBV")
+        self.clear_errors = epics_signal_rw(int, f"{prefix}FPClearErrors")
+        self.error_message = epics_signal_rw(str, f"{prefix}FPErrorMessage_RBV")
 
         super().__init__(name)
 
 
 class Odin(Device):
     def __init__(self, prefix: str, name: str = "") -> None:
-        self.nodes = DeviceVector({i: OdinNode(f"{prefix}FP{i}:") for i in range(4)})
-
-        self.capture = epics_signal_rw(
-            Writing, f"{prefix}Writing", f"{prefix}ConfigHdfWrite"
+        self.nodes = DeviceVector(
+            {i: OdinNode(f"{prefix[:-1]}{i + 1}:") for i in range(4)}
         )
-        self.num_captured = epics_signal_r(int, f"{prefix}FramesWritten")
-        self.num_to_capture = epics_signal_rw_rbv(int, f"{prefix}ConfigHdfFrames")
 
-        self.start_timeout = epics_signal_rw_rbv(int, f"{prefix}TimeoutTimerPeriod")
+        self.capture = epics_signal_rw(Writing, f"{prefix}Capture")
+        self.capture_rbv = epics_signal_r(str, prefix + "Capture_RBV")
+        self.num_captured = epics_signal_r(int, f"{prefix}NumCapture_RBV")
+        self.num_to_capture = epics_signal_rw_rbv(int, f"{prefix}NumCapture")
 
-        self.image_height = epics_signal_rw_rbv(int, f"{prefix}DatasetDataDims0")
-        self.image_width = epics_signal_rw_rbv(int, f"{prefix}DatasetDataDims1")
+        self.start_timeout = epics_signal_rw(str, f"{prefix}StartTimeout")
+        self.timeout_active_rbv = epics_signal_r(str, f"{prefix}TimeoutActive_RBV")
 
-        self.num_row_chunks = epics_signal_rw_rbv(int, f"{prefix}DatasetDataChunks1")
-        self.num_col_chunks = epics_signal_rw_rbv(int, f"{prefix}DatasetDataChunks2")
+        self.image_height = epics_signal_rw_rbv(int, f"{prefix}ImageHeight")
+        self.image_width = epics_signal_rw_rbv(int, f"{prefix}ImageWidth")
 
-        self.file_path = epics_signal_rw_rbv(str, f"{prefix}ConfigHdfFilePath")
-        self.file_name = epics_signal_rw_rbv(str, f"{prefix}ConfigHdfFilePrefix")
+        self.num_row_chunks = epics_signal_rw_rbv(int, f"{prefix}NumRowChunks")
+        self.num_col_chunks = epics_signal_rw_rbv(int, f"{prefix}NumColChunks")
 
-        self.acquisition_id = epics_signal_rw_rbv(
-            str, f"{prefix}ConfigHdfAcquisitionId"
-        )
+        self.file_path = epics_signal_rw_rbv(str, f"{prefix}FilePath")
+        self.file_name = epics_signal_rw_rbv(str, f"{prefix}FileName")
+
+        self.num_frames_chunks = epics_signal_rw(int, prefix + "NumFramesChunks")
+        self.meta_active = epics_signal_r(str, prefix + "META:AcquisitionActive_RBV")
+        self.meta_writing = epics_signal_r(str, prefix + "META:Writing_RBV")
 
-        self.data_type = epics_signal_rw_rbv(str, f"{prefix}DatasetDataDatatype")
+        self.data_type = epics_signal_rw_rbv(str, f"{prefix}DataType")
 
         super().__init__(name)
 
@@ -68,27 +79,37 @@ class OdinWriter(DetectorWriter):
     def __init__(
         self,
        path_provider: PathProvider,
-        name_provider: NameProvider,
        odin_driver: Odin,
    ) -> None:
        self._drv = odin_driver
        self._path_provider = path_provider
-        self._name_provider = name_provider
        super().__init__()
 
-    async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
-        info = self._path_provider(device_name=self._name_provider())
+    async def open(self, name: str, exposures_per_event: int = 1) -> dict[str, DataKey]:
+        info = self._path_provider(device_name=name)
+        self._exposures_per_event = exposures_per_event
 
        await asyncio.gather(
            self._drv.file_path.set(str(info.directory_path)),
            self._drv.file_name.set(info.filename),
            self._drv.data_type.set(
-                "uint16"
+                "UInt16"
            ),  # TODO: Get from eiger https://github.com/bluesky/ophyd-async/issues/529
            self._drv.num_to_capture.set(0),
        )
 
-        await self._drv.capture.set(Writing.ON)
+        await wait_for_value(self._drv.meta_active, "Active", timeout=DEFAULT_TIMEOUT)
+
+        await set_and_wait_for_other_value(
+            self._drv.capture,
+            Writing.CAPTURE,
+            self._drv.capture_rbv,
+            "Capturing",
+            set_timeout=None,
+            wait_for_set_completion=False,
+        )  # TODO: Investigate why we do not get a put callback when setting capture pv https://github.com/bluesky/ophyd-async/issues/866
+
+        await wait_for_value(self._drv.meta_writing, "Writing", timeout=DEFAULT_TIMEOUT)
 
        return await self._describe()
 
@@ -100,7 +121,7 @@ class OdinWriter(DetectorWriter):
         return {
             "data": DataKey(
                 source=self._drv.file_name.source,
-                shape=list(data_shape),
+                shape=[self._exposures_per_event, *data_shape],
                 dtype="array",
                 # TODO: Use correct type based on eiger https://github.com/bluesky/ophyd-async/issues/529
                 dtype_numpy="<u2",
@@ -112,14 +133,16 @@
         self, timeout: float
     ) -> AsyncGenerator[int, None]:
         async for num_captured in observe_value(self._drv.num_captured, timeout):
-            yield num_captured
+            yield num_captured // self._exposures_per_event
 
     async def get_indices_written(self) -> int:
-        return await self._drv.num_captured.get_value()
+        return await self._drv.num_captured.get_value() // self._exposures_per_event
 
-    def collect_stream_docs(self, indices_written: int) -> AsyncIterator[StreamAsset]:
+    def collect_stream_docs(
+        self, name: str, indices_written: int
+    ) -> AsyncIterator[StreamAsset]:
         # TODO: Correctly return stream https://github.com/bluesky/ophyd-async/issues/530
         raise NotImplementedError()
 
     async def close(self) -> None:
-        await set_and_wait_for_value(self._drv.capture, Writing.OFF)
+        await set_and_wait_for_value(self._drv.capture, Writing.DONE)
@@ -48,6 +48,7 @@ class EpicsTestCaDevice(EpicsDevice):
     longstr: A[SignalRW[str], PvSuffix("longstr")]
     longstr2: A[SignalRW[str], PvSuffix("longstr2.VAL$")]
     a_bool: A[SignalRW[bool], PvSuffix("bool")]
+    slowseq: A[SignalRW[int], PvSuffix("slowseq")]
     enum: A[SignalRW[EpicsTestEnum], PvSuffix("enum")]
     enum2: A[SignalRW[EpicsTestEnum], PvSuffix("enum2")]
     subset_enum: A[SignalRW[EpicsTestSubsetEnum], PvSuffix("subset_enum")]
@@ -112,6 +112,11 @@ record(mbbo, "$(device)enum_str_fallback") {
     field(PINI, "YES")
 }
 
+record(seq, "$(device)slowseq") {
+    field(DLY1, "0.5")
+    field(LNK1, "$(device)slowseq.DESC")
+}
+
 record(waveform, "$(device)uint8a") {
     field(NELM, "3")
     field(FTVL, "UCHAR")
@@ -0,0 +1,13 @@
+from ._eiger import EigerDetector, EigerTriggerInfo
+from ._eiger_controller import EigerController
+from ._eiger_io import EigerDetectorIO, EigerDriverIO, EigerMonitorIO, EigerStreamIO
+
+__all__ = [
+    "EigerDetector",
+    "EigerController",
+    "EigerDriverIO",
+    "EigerTriggerInfo",
+    "EigerDetectorIO",
+    "EigerMonitorIO",
+    "EigerStreamIO",
+]
@@ -1,10 +1,10 @@
 from pydantic import Field
 
 from ophyd_async.core import AsyncStatus, PathProvider, StandardDetector, TriggerInfo
+from ophyd_async.epics.eiger import Odin, OdinWriter
 
 from ._eiger_controller import EigerController
 from ._eiger_io import EigerDriverIO
-from ._odin_io import Odin, OdinWriter
 
 
 class EigerTriggerInfo(TriggerInfo):
@@ -15,22 +15,22 @@ class EigerDetector(StandardDetector):
     """Ophyd-async implementation of an Eiger Detector."""
 
     _controller: EigerController
-    _writer: Odin
+    _writer: OdinWriter
 
     def __init__(
         self,
         prefix: str,
         path_provider: PathProvider,
         drv_suffix="-EA-EIGER-01:",
-        hdf_suffix="-EA-ODIN-01:",
+        hdf_suffix="-EA-EIGER-01:OD:",
         name="",
     ):
         self.drv = EigerDriverIO(prefix + drv_suffix)
-        self.odin = Odin(prefix + hdf_suffix + "FP:")
+        self.odin = Odin(prefix + hdf_suffix)
 
         super().__init__(
             EigerController(self.drv),
-            OdinWriter(path_provider, lambda: self.name, self.odin),
+            OdinWriter(path_provider, self.odin),
             name=name,
         )
 
@@ -2,11 +2,9 @@ import asyncio
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
-    AsyncStatus,
     DetectorController,
     DetectorTrigger,
     TriggerInfo,
-    set_and_wait_for_other_value,
 )
 
 from ._eiger_io import EigerDriverIO, EigerTriggerMode
@@ -20,59 +18,47 @@ EIGER_TRIGGER_MODE_MAP = {
 
 
 class EigerController(DetectorController):
-    """Controller for the Eiger detector."""
-
     def __init__(
         self,
         driver: EigerDriverIO,
     ) -> None:
         self._drv = driver
-        self._arm_status: AsyncStatus | None = None
 
     def get_deadtime(self, exposure: float | None) -> float:
         # See https://media.dectris.com/filer_public/30/14/3014704e-5f3b-43ba-8ccf-8ef720e60d2a/240202_usermanual_eiger2.pdf
         return 0.0001
 
     async def set_energy(self, energy: float, tolerance: float = 0.1):
-        """Change photon energy if outside tolerance.
+        """Set photon energy."""
+        """Changing photon energy takes some time so only do so if the current energy is
+        outside the tolerance."""
 
-        It takes some time so don't do it unless it is outside tolerance.
-        """
-        current_energy = await self._drv.photon_energy.get_value()
+        current_energy = await self._drv.detector.photon_energy.get_value()
         if abs(current_energy - energy) > tolerance:
-            await self._drv.photon_energy.set(energy)
+            await self._drv.detector.photon_energy.set(energy)
 
     async def prepare(self, trigger_info: TriggerInfo):
         coros = [
-            self._drv.trigger_mode.set(
+            self._drv.detector.trigger_mode.set(
                 EIGER_TRIGGER_MODE_MAP[trigger_info.trigger].value
             ),
-            self._drv.num_images.set(trigger_info.total_number_of_triggers),
+            self._drv.detector.nimages.set(trigger_info.total_number_of_exposures),
         ]
         if trigger_info.livetime is not None:
             coros.extend(
                 [
-                    self._drv.acquire_time.set(trigger_info.livetime),
-                    self._drv.acquire_period.set(trigger_info.livetime),
+                    self._drv.detector.count_time.set(trigger_info.livetime),
+                    self._drv.detector.frame_time.set(trigger_info.livetime),
                 ]
             )
         await asyncio.gather(*coros)
 
     async def arm(self):
-        # TODO: Detector state should be an enum see https://github.com/DiamondLightSource/eiger-fastcs/issues/43
-        self._arm_status = await set_and_wait_for_other_value(
-            self._drv.arm,
-            1,
-            self._drv.state,
-            "ready",
-            timeout=DEFAULT_TIMEOUT,
-            wait_for_set_completion=False,
-        )
+        self._arm_status = self._drv.detector.arm.trigger(timeout=DEFAULT_TIMEOUT)
 
     async def wait_for_idle(self):
-        if self._arm_status and not self._arm_status.done:
+        if self._arm_status:
             await self._arm_status
-        self._arm_status = None
 
     async def disarm(self):
-        await self._drv.disarm.set(1)
+        await self._drv.detector.disarm.trigger()
@@ -0,0 +1,53 @@
+from ophyd_async.core import (
+    Device,
+    SignalR,
+    SignalRW,
+    SignalX,
+    StrictEnum,
+)
+from ophyd_async.fastcs.core import fastcs_connector
+
+
+class EigerTriggerMode(StrictEnum):
+    INTERNAL = "ints"
+    EDGE = "exts"
+    GATE = "exte"
+
+
+class EigerMonitorIO(Device):
+    pass
+
+
+class EigerStreamIO(Device):
+    pass
+
+
+class EigerDetectorIO(Device):
+    bit_depth_readout: SignalR[int]
+    state: SignalR[str]
+    count_time: SignalRW[float]
+    frame_time: SignalRW[float]
+    nimages: SignalRW[int]
+    nexpi: SignalRW[int]
+    trigger_mode: SignalRW[str]
+    roi_mode: SignalRW[str]
+    photon_energy: SignalRW[float]
+    beam_center_x: SignalRW[float]
+    beam_center_y: SignalRW[float]
+    detector_distance: SignalRW[float]
+    omega_start: SignalRW[float]
+    omega_increment: SignalRW[float]
+    arm: SignalX
+    disarm: SignalX
+
+
+class EigerDriverIO(Device):
+    """Contains signals for handling IO on the Eiger detector."""
+
+    stale_parameters: SignalR[bool]
+    monitor: EigerMonitorIO
+    stream: EigerStreamIO
+    detector: EigerDetectorIO
+
+    def __init__(self, uri: str, name: str = ""):
+        super().__init__(name=name, connector=fastcs_connector(self, uri))
@@ -38,6 +38,8 @@ class PulseBlock(Device):
     """Used for configuring pulses in the PandA."""
 
     delay: SignalRW[float]
+    pulses: SignalRW[int]
+    step: SignalRW[float]
     width: SignalRW[float]
 
 
@@ -30,7 +30,6 @@ class HDFPanda(
         controller = PandaPcapController(pcap=self.pcap)
         writer = PandaHDFWriter(
             path_provider=path_provider,
-            name_provider=lambda: name,
             panda_data_block=self.data,
         )
         super().__init__(