dls-dodal 1.57.0__py3-none-any.whl → 1.59.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (63)
  1. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/METADATA +2 -1
  2. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/RECORD +63 -46
  3. dodal/_version.py +2 -2
  4. dodal/beamlines/b07.py +10 -5
  5. dodal/beamlines/b07_1.py +10 -5
  6. dodal/beamlines/b21.py +22 -0
  7. dodal/beamlines/i02_1.py +80 -0
  8. dodal/beamlines/i03.py +7 -4
  9. dodal/beamlines/i04.py +20 -3
  10. dodal/beamlines/i09.py +10 -9
  11. dodal/beamlines/i09_1.py +10 -5
  12. dodal/beamlines/i10-1.py +25 -0
  13. dodal/beamlines/i10.py +17 -1
  14. dodal/beamlines/i11.py +0 -17
  15. dodal/beamlines/i19_2.py +20 -0
  16. dodal/beamlines/i21.py +27 -0
  17. dodal/beamlines/i22.py +12 -2
  18. dodal/beamlines/i24.py +32 -3
  19. dodal/beamlines/k07.py +31 -0
  20. dodal/beamlines/p60.py +10 -9
  21. dodal/common/beamlines/commissioning_mode.py +33 -0
  22. dodal/common/watcher_utils.py +1 -1
  23. dodal/devices/apple2_undulator.py +18 -142
  24. dodal/devices/attenuator/attenuator.py +48 -2
  25. dodal/devices/attenuator/filter.py +3 -0
  26. dodal/devices/attenuator/filter_selections.py +26 -0
  27. dodal/devices/baton.py +4 -0
  28. dodal/devices/eiger.py +2 -1
  29. dodal/devices/electron_analyser/__init__.py +4 -0
  30. dodal/devices/electron_analyser/abstract/base_driver_io.py +30 -18
  31. dodal/devices/electron_analyser/energy_sources.py +101 -0
  32. dodal/devices/electron_analyser/specs/detector.py +6 -6
  33. dodal/devices/electron_analyser/specs/driver_io.py +7 -15
  34. dodal/devices/electron_analyser/vgscienta/detector.py +6 -6
  35. dodal/devices/electron_analyser/vgscienta/driver_io.py +7 -14
  36. dodal/devices/fast_grid_scan.py +130 -64
  37. dodal/devices/focusing_mirror.py +30 -0
  38. dodal/devices/i02_1/__init__.py +0 -0
  39. dodal/devices/i02_1/fast_grid_scan.py +61 -0
  40. dodal/devices/i02_1/sample_motors.py +19 -0
  41. dodal/devices/i04/murko_results.py +69 -23
  42. dodal/devices/i10/i10_apple2.py +282 -140
  43. dodal/devices/i19/backlight.py +17 -0
  44. dodal/devices/i21/__init__.py +3 -0
  45. dodal/devices/i21/enums.py +8 -0
  46. dodal/devices/i22/nxsas.py +2 -0
  47. dodal/devices/i24/commissioning_jungfrau.py +114 -0
  48. dodal/devices/smargon.py +0 -56
  49. dodal/devices/temperture_controller/__init__.py +3 -0
  50. dodal/devices/temperture_controller/lakeshore/__init__.py +0 -0
  51. dodal/devices/temperture_controller/lakeshore/lakeshore.py +204 -0
  52. dodal/devices/temperture_controller/lakeshore/lakeshore_io.py +112 -0
  53. dodal/devices/tetramm.py +38 -16
  54. dodal/devices/undulator.py +13 -9
  55. dodal/devices/v2f.py +39 -0
  56. dodal/devices/xbpm_feedback.py +12 -6
  57. dodal/devices/zebra/zebra.py +1 -0
  58. dodal/devices/zebra/zebra_constants_mapping.py +1 -1
  59. dodal/parameters/experiment_parameter_base.py +1 -5
  60. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/WHEEL +0 -0
  61. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/entry_points.txt +0 -0
  62. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/licenses/LICENSE +0 -0
  63. {dls_dodal-1.57.0.dist-info → dls_dodal-1.59.1.dist-info}/top_level.txt +0 -0
dodal/devices/i19/backlight.py ADDED
@@ -0,0 +1,17 @@
+ from bluesky.protocols import Movable
+ from ophyd_async.core import AsyncStatus, StandardReadable
+ from ophyd_async.epics.core import epics_signal_rw
+
+ from dodal.common.enums import InOutUpper
+
+
+ class BacklightPosition(StandardReadable, Movable[InOutUpper]):
+     """Device moves backlight to the IN or OUT position since controls side manages switching the light on/off"""
+
+     def __init__(self, prefix: str, name: str = "") -> None:
+         self.position = epics_signal_rw(InOutUpper, f"{prefix}AD1:choiceButton")
+         super().__init__(name)
+
+     @AsyncStatus.wrap
+     async def set(self, value: InOutUpper):
+         await self.position.set(value, wait=True)
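A minimal usage sketch for the new device (not part of the diff; it assumes InOutUpper exposes IN/OUT members and uses a hypothetical PV prefix):

    from bluesky import plan_stubs as bps

    from dodal.common.enums import InOutUpper
    from dodal.devices.i19.backlight import BacklightPosition

    # Hypothetical prefix; the i19 beamline module supplies the real one.
    backlight = BacklightPosition("BL19I-EA-BL-01:", name="backlight")

    def backlight_in():
        # Movable protocol: set() writes the choiceButton PV and waits for completion.
        # InOutUpper.IN is assumed; check dodal.common.enums for the actual member names.
        yield from bps.abs_set(backlight, InOutUpper.IN, wait=True)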
dodal/devices/i21/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from dodal.devices.i21.enums import Grating
+
+ __all__ = ["Grating"]
dodal/devices/i21/enums.py ADDED
@@ -0,0 +1,8 @@
+ from ophyd_async.core import StrictEnum
+
+
+ class Grating(StrictEnum):
+     VPG1 = "VPG1"
+     VPG2 = "VPG2"
+     VPG3 = "VPG3"
+     VPG4 = "VPG4"
dodal/devices/i22/nxsas.py CHANGED
@@ -7,6 +7,7 @@ from bluesky.protocols import Reading
  from event_model.documents.event_descriptor import DataKey
  from ophyd_async.core import PathProvider
  from ophyd_async.epics.adaravis import AravisDetector
+ from ophyd_async.epics.adcore import NDPluginBaseIO
  from ophyd_async.epics.adpilatus import PilatusDetector
 
  ValueAndUnits = tuple[float, str]
@@ -106,6 +107,7 @@ class NXSasPilatus(PilatusDetector):
          fileio_suffix: str,
          metadata_holder: NXSasMetadataHolder,
          name: str = "",
+         plugins: dict[str, NDPluginBaseIO] | None = None,
      ):
          """Extends detector with configuration metadata required or desired
          to comply with the NXsas application definition.
dodal/devices/i24/commissioning_jungfrau.py ADDED
@@ -0,0 +1,114 @@
+ import asyncio
+ from collections.abc import AsyncGenerator, AsyncIterator
+ from pathlib import Path
+
+ from bluesky.protocols import StreamAsset
+ from event_model import DataKey  # type: ignore
+ from ophyd_async.core import (
+     AutoIncrementingPathProvider,
+     DetectorWriter,
+     StandardDetector,
+     StandardReadable,
+     StaticPathProvider,
+     observe_value,
+     wait_for_value,
+ )
+ from ophyd_async.epics.core import epics_signal_r, epics_signal_rw, epics_signal_rw_rbv
+ from ophyd_async.fastcs.jungfrau._controller import JungfrauController
+ from ophyd_async.fastcs.jungfrau._signals import JungfrauDriverIO
+
+ from dodal.log import LOGGER
+
+
+ class JunfrauCommissioningWriter(DetectorWriter, StandardReadable):
+     """Implementation of the temporary filewriter used for Jungfrau commissioning on i24.
+
+     The PVs on this device are responsible for writing files of a specified name
+     to a specified path, marking itself as "ready to write", and having a counter of
+     frames written, which must be zero'd at the ophyd level
+     """
+
+     def __init__(
+         self,
+         prefix,
+         path_provider: AutoIncrementingPathProvider | StaticPathProvider,
+         name="",
+     ) -> None:
+         with self.add_children_as_readables():
+             self._path_info = path_provider
+             self.frame_counter = epics_signal_rw(int, f"{prefix}NumCaptured")
+             self.file_name = epics_signal_rw_rbv(str, f"{prefix}FileName")
+             self.file_path = epics_signal_rw_rbv(str, f"{prefix}FilePath")
+             self.writer_ready = epics_signal_r(int, f"{prefix}Ready_RBV")
+         super().__init__(name)
+
+     async def open(self, name: str, exposures_per_event: int = 1) -> dict[str, DataKey]:
+         self._exposures_per_event = exposures_per_event
+         _path_info = self._path_info()
+
+         # Commissioning Jungfrau plans allow you to override path, so check to see if file exists
+         requested_filepath = Path(_path_info.directory_path) / _path_info.filename
+         if requested_filepath.exists():
+             raise FileExistsError(
+                 f"Jungfrau was requested to write to {requested_filepath}, but this file already exists!"
+             )
+
+         await asyncio.gather(
+             self.file_name.set(_path_info.filename),
+             self.file_path.set(str(_path_info.directory_path)),
+             self.frame_counter.set(0),
+         )
+         LOGGER.info(
+             f"Jungfrau writing to folder {_path_info.directory_path} with filename {_path_info.filename}"
+         )
+         await wait_for_value(self.writer_ready, 1, timeout=10)
+         return await self._describe()
+
+     async def _describe(self) -> dict[str, DataKey]:
+         # Dummy function, doesn't actually describe the dataset
+
+         return {
+             "data": DataKey(
+                 source="Commissioning writer",
+                 shape=[-1],
+                 dtype="array",
+                 dtype_numpy="<u2",
+                 external="STREAM:",
+             )
+         }
+
+     async def observe_indices_written(
+         self, timeout: float
+     ) -> AsyncGenerator[int, None]:
+         timeout = timeout * 2  # This filewriter is slow
+         async for num_captured in observe_value(self.frame_counter, timeout):
+             yield num_captured // (self._exposures_per_event)
+
+     async def get_indices_written(self) -> int:
+         return await self.frame_counter.get_value() // self._exposures_per_event
+
+     def collect_stream_docs(
+         self, name: str, indices_written: int
+     ) -> AsyncIterator[StreamAsset]:
+         raise NotImplementedError()
+
+     async def close(self) -> None: ...
+
+
+ class CommissioningJungfrau(
+     StandardDetector[JungfrauController, JunfrauCommissioningWriter]
+ ):
+     """Ophyd-async implementation of a Jungfrau 9M Detector, using a temporary
+     filewriter in place of Odin"""
+
+     def __init__(
+         self,
+         prefix: str,
+         writer_prefix: str,
+         path_provider: AutoIncrementingPathProvider | StaticPathProvider,
+         name="",
+     ):
+         self.drv = JungfrauDriverIO(prefix)
+         writer = JunfrauCommissioningWriter(writer_prefix, path_provider)
+         controller = JungfrauController(self.drv)
+         super().__init__(controller, writer, name=name)
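A rough instantiation sketch (not from the diff), assuming ophyd-async's StaticFilenameProvider/StaticPathProvider helpers and using hypothetical PV prefixes and paths:

    from pathlib import Path

    from ophyd_async.core import StaticFilenameProvider, StaticPathProvider

    from dodal.devices.i24.commissioning_jungfrau import CommissioningJungfrau

    # Hypothetical prefixes and directory; the i24 beamline module wires up the real ones.
    path_provider = StaticPathProvider(
        StaticFilenameProvider("jf_commissioning_run"),
        Path("/tmp/jungfrau_commissioning"),
    )
    jungfrau = CommissioningJungfrau(
        prefix="BL24I-EA-JNGFR-01:",
        writer_prefix="BL24I-EA-JNGFR-01:FW:",
        path_provider=path_provider,
        name="jungfrau",
    )

Note that the writer's open() refuses to overwrite an existing file at the requested path, so each run needs a fresh filename or directory.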
dodal/devices/smargon.py CHANGED
@@ -1,13 +1,9 @@
  import asyncio
- from collections.abc import Collection, Generator
- from dataclasses import dataclass
  from enum import Enum
  from math import isclose
  from typing import TypedDict, cast
 
- from bluesky import plan_stubs as bps
  from bluesky.protocols import Movable
- from bluesky.utils import Msg
  from ophyd_async.core import (
      AsyncStatus,
      Device,
@@ -66,40 +62,6 @@ class StubOffsets(Device):
          await self.to_robot_load.set(1)
 
 
- @dataclass
- class AxisLimit:
-     """Represents the minimum and maximum allowable values on an axis"""
-
-     min_value: float
-     max_value: float
-
-     def contains(self, pos: float):
-         """Determine if the specified value is within limits.
-
-         Args:
-             pos: the value to check
-
-         Returns:
-             True if the value does not exceed the limits
-         """
-         return self.min_value <= pos <= self.max_value
-
-
- @dataclass
- class XYZLimits:
-     """The limits of the smargon x, y, z axes."""
-
-     x: AxisLimit
-     y: AxisLimit
-     z: AxisLimit
-
-     def position_valid(self, pos: Collection[float]) -> bool:
-         return all(
-             axis_limits.contains(value)
-             for axis_limits, value in zip([self.x, self.y, self.z], pos, strict=False)
-         )
-
-
  class DeferMoves(StrictEnum):
      ON = "Defer On"
      OFF = "Defer Off"
@@ -144,24 +106,6 @@ class Smargon(XYZStage, Movable):
 
          super().__init__(prefix, name)
 
-     def get_xyz_limits(self) -> Generator[Msg, None, XYZLimits]:
-         """Obtain a plan stub that returns the smargon XYZ axis limits
-
-         Yields:
-             Bluesky messages
-
-         Returns:
-             the axis limits
-         """
-         limits = {}
-         for name, pv in [
-             (attr_name, getattr(self, attr_name)) for attr_name in ["x", "y", "z"]
-         ]:
-             min_value = yield from bps.rd(pv.low_limit_travel)
-             max_value = yield from bps.rd(pv.high_limit_travel)
-             limits[name] = AxisLimit(min_value, max_value)
-         return XYZLimits(**limits)
-
      @AsyncStatus.wrap
      async def set(self, value: CombinedMove):
          """This will move all motion together in a deferred move.
dodal/devices/temperture_controller/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .lakeshore.lakeshore import Lakeshore, Lakeshore336, Lakeshore340
+
+ __all__ = ["Lakeshore336", "Lakeshore340", "Lakeshore"]
dodal/devices/temperture_controller/lakeshore/lakeshore.py ADDED
@@ -0,0 +1,204 @@
+ from asyncio import gather
+
+ from bluesky.protocols import Movable
+ from ophyd_async.core import (
+     AsyncStatus,
+     SignalDatatypeT,
+     StandardReadable,
+     StandardReadableFormat,
+     StrictEnum,
+     derived_signal_rw,
+     soft_signal_rw,
+ )
+
+ from .lakeshore_io import (
+     LakeshoreBaseIO,
+ )
+
+
+ class Heater336Settings(StrictEnum):
+     OFF = "Off"
+     LOW = "Low"
+     MEDIUM = "Medium"
+     HIGH = "High"
+
+
+ class Lakeshore(LakeshoreBaseIO, StandardReadable, Movable[float]):
+     """
+     Device for controlling and reading from a Lakeshore temperature controller.
+     It supports multiple channels and PID control.
+
+     Attributes
+     ----------
+     temperature : LakeshoreBaseIO
+         Temperature IO interface.
+     PID : PIDBaseIO
+         PID IO interface.
+     control_channel : derived_signal_rw
+         Signal for selecting the control channel,
+         optional readback as hinted signal
+         (default readback channel is the same as control channel).
+
+     temperature_high_limit: soft_signal_rw
+         Signal to store the soft high temperature limit.
+     temperature_low_limit: soft_signal_rw
+         Signal to store the soft low temperature limit.
+
+
+     Methods
+     -------
+     set(value: float)
+         Set the temperature setpoint for the selected control channel.
+     """
+
+     def __init__(
+         self,
+         prefix: str,
+         num_readback_channel: int,
+         heater_setting: type[SignalDatatypeT],
+         control_channel: int = 1,
+         single_control_channel: bool = False,
+         name: str = "",
+     ):
+         """
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         no_channels : int
+             Number of temperature channels.
+         heater_setting : type[SignalDatatypeT]
+             Enum type for heater settings.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         single_control_channel : bool, optional
+             Whether to use a single control channel (default is False).
+         name : str, optional
+             Name of the device.
+         """
+         self._control_channel = soft_signal_rw(int, initial_value=control_channel)
+         self.temperature_high_limit = soft_signal_rw(float, initial_value=400)
+         self.temperature_low_limit = soft_signal_rw(float, initial_value=0)
+
+         self.control_channel = derived_signal_rw(
+             raw_to_derived=self._get_control_channel,
+             set_derived=self._set_control_channel,
+             current_channel=self._control_channel,
+         )
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=num_readback_channel,
+             heater_setting=heater_setting,
+             name=name,
+             single_control_channel=single_control_channel,
+         )
+
+         self.add_readables(
+             [setpoint.user_setpoint for setpoint in self.control_channels.values()]
+         )
+         self.add_readables(
+             list(self.readback.values()), format=StandardReadableFormat.HINTED_SIGNAL
+         )
+
+         self.add_readables(
+             [
+                 self._control_channel,
+                 self.control_channels[control_channel].p,
+                 self.control_channels[control_channel].i,
+                 self.control_channels[control_channel].d,
+                 self.control_channels[control_channel].heater_output_range,
+             ],
+             StandardReadableFormat.CONFIG_SIGNAL,
+         )
+
+     @AsyncStatus.wrap
+     async def set(self, value: float) -> None:
+         """
+         Set the temperature setpoint for the active control channel.
+         """
+         high, low = await gather(
+             self.temperature_high_limit.get_value(),
+             self.temperature_low_limit.get_value(),
+         )
+         if high >= value >= low:
+             await self.control_channels[
+                 await self.control_channel.get_value()
+             ].user_setpoint.set(value)
+         else:
+             raise ValueError(
+                 f"{self.name} requested temperature {value} is outside limits: {low}, {high}"
+             )
+
+     def _get_control_channel(self, current_channel: int) -> int:
+         return current_channel
+
+     async def _set_control_channel(self, value: int) -> None:
+         if value < 1 or value > len(self.control_channels):
+             raise ValueError(
+                 f"Control channel must be between 1 and {len(self.control_channels)}."
+             )
+         await self._control_channel.set(value)
+         self._read_config_funcs = (
+             self._control_channel.read,
+             self.control_channels[value].user_setpoint.read,
+             self.control_channels[value].p.read,
+             self.control_channels[value].i.read,
+             self.control_channels[value].d.read,
+             self.control_channels[value].heater_output_range.read,
+         )
+
+
+ class Lakeshore336(Lakeshore):
+     def __init__(
+         self,
+         prefix: str,
+         control_channel: int = 1,
+         name: str = "",
+     ):
+         """
+         Lakeshore 336 temperature controller. With 4 readback and control channels.
+         Heater settings are: Off, Low, Medium, High.
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         """
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=4,
+             heater_setting=Heater336Settings,
+             control_channel=control_channel,
+             single_control_channel=False,
+             name=name,
+         )
+
+
+ class Lakeshore340(Lakeshore):
+     def __init__(
+         self,
+         prefix: str,
+         control_channel: int = 1,
+         name: str = "",
+     ):
+         """Lakeshore 340 temperature controller. With 4 readback channels and a single
+         control channel.
+         Heater settings are in power from 0 to 5. 0 is 0 watt, 5 is 50 watt.
+
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         """
+
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=4,
+             heater_setting=float,
+             control_channel=control_channel,
+             single_control_channel=True,
+             name=name,
+         )
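A minimal usage sketch for the new controller (not part of the diff), with a hypothetical PV prefix; in practice the device would be created through the owning beamline module:

    import asyncio

    from dodal.devices.temperture_controller import Lakeshore336

    async def main():
        lakeshore = Lakeshore336("BL21B-EA-TCTRL-01:", name="lakeshore")  # hypothetical prefix
        await lakeshore.connect()

        # Select control channel 2, then request a setpoint; values outside the
        # temperature_low_limit/temperature_high_limit soft signals raise ValueError.
        await lakeshore.control_channel.set(2)
        await lakeshore.set(295.0)

    asyncio.run(main())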
dodal/devices/temperture_controller/lakeshore/lakeshore_io.py ADDED
@@ -0,0 +1,112 @@
+ from ophyd_async.core import Device, DeviceVector, SignalDatatypeT
+ from ophyd_async.epics.core import epics_signal_r, epics_signal_rw
+
+
+ class LakeshoreControlChannel(Device):
+     """
+     Single control channel for a Lakeshore temperature controller.
+
+     Provides access to setpoint, ramp rate, ramp enable, heater output, heater output range,
+     PID parameters (P, I, D), and manual output for the channel.
+     """
+
+     def __init__(
+         self,
+         prefix: str,
+         suffix: str,
+         heater_type: type[SignalDatatypeT],
+         name: str = "",
+     ):
+         """Initialize the LakeshoreControlChannel device.
+         Parameters
+         ----------
+         prefix: str
+             The EPICS prefix for the Lakeshore device.
+         suffix: str
+             Suffix for the channel, used to differentiate multiple channels.
+         heater_type: SignalDatatypeT
+             Type of the heater output range.
+         name: str
+             Optional name for the device.
+         """
+
+         def channel_rw(channel_type, pv_name):
+             return epics_signal_rw(
+                 channel_type,
+                 f"{prefix}{pv_name}{suffix}",
+                 f"{prefix}{pv_name}_S{suffix}",
+             )
+
+         self.user_setpoint = channel_rw(channel_type=float, pv_name="SETP")
+         self.ramp_rate = channel_rw(channel_type=float, pv_name="RAMP")
+         self.ramp_enable = channel_rw(channel_type=int, pv_name="RAMPST")
+         self.heater_output_range = channel_rw(channel_type=heater_type, pv_name="RANGE")
+         self.p = channel_rw(channel_type=float, pv_name="P")
+         self.i = channel_rw(channel_type=float, pv_name="I")
+         self.d = channel_rw(channel_type=float, pv_name="D")
+         self.manual_output = channel_rw(channel_type=float, pv_name="MOUT")
+         self.heater_output = epics_signal_r(float, f"{prefix}{'HTR'}{suffix}")
+
+         super().__init__(name=name)
+
+
+ class LakeshoreBaseIO(Device):
+     """Base class for Lakeshore temperature controller IO.
+
+     Provides access to control channels and readback channels for setpoint, ramp rate, heater output,
+     and PID parameters. Supports both single and multiple control channel configurations.
+     Note:
+         Almost all models have a controller for each readback channel but some models
+         only has a single controller for multiple readback channels.
+     """
+
+     def __init__(
+         self,
+         prefix: str,
+         num_readback_channel: int,
+         heater_setting: type[SignalDatatypeT],
+         name: str = "",
+         single_control_channel: bool = False,
+     ):
+         """Initialize the LakeshoreBaseIO device.
+
+         Parameters
+         -----------
+         prefix: str
+             The EPICS prefix for the Lakeshore device.
+         num_readback_channel: int
+             Number of readback channels to create.
+         heater_setting: SignalDatatypeT
+             Type of the heater setting.
+         name: str
+             Optional name for the device.
+         single_control_channel: bool
+             If True, use a single control channel for all readback.
+         """
+
+         suffixes = (
+             [""]
+             if single_control_channel
+             else map(str, range(1, num_readback_channel + 1))
+         )
+         self.control_channels = DeviceVector(
+             {
+                 i: LakeshoreControlChannel(
+                     prefix=prefix, suffix=suffix, heater_type=heater_setting
+                 )
+                 for i, suffix in enumerate(suffixes, start=1)
+             }
+         )
+
+         self.readback = DeviceVector(
+             {
+                 i: epics_signal_r(
+                     float,
+                     read_pv=f"{prefix}KRDG{i - 1}",
+                 )
+                 for i in range(1, num_readback_channel + 1)
+             }
+         )
+         super().__init__(
+             name=name,
+         )
dodal/devices/tetramm.py CHANGED
@@ -16,6 +16,7 @@ from ophyd_async.core import (
      TriggerInfo,
      set_and_wait_for_value,
      soft_signal_r_and_setter,
+     wait_for_value,
  )
  from ophyd_async.epics.adcore import (
      ADHDFWriter,
@@ -23,7 +24,7 @@ from ophyd_async.epics.adcore import (
      NDFileHDFIO,
      NDPluginBaseIO,
  )
- from ophyd_async.epics.core import PvSuffix, stop_busy_record
+ from ophyd_async.epics.core import PvSuffix, epics_signal_r
 
 
  class TetrammRange(StrictEnum):
@@ -77,6 +78,7 @@ class TetrammController(DetectorController):
      _supported_trigger_types = {
          DetectorTrigger.EDGE_TRIGGER: TetrammTrigger.EXT_TRIGGER,
          DetectorTrigger.CONSTANT_GATE: TetrammTrigger.EXT_TRIGGER,
+         DetectorTrigger.VARIABLE_GATE: TetrammTrigger.EXT_TRIGGER,
      }
      """"On the TetrAMM ASCII mode requires a minimum value of ValuesPerRead of 500,
      [...] binary mode the minimum value of ValuesPerRead is 5."
@@ -86,11 +88,9 @@ class TetrammController(DetectorController):
      """The TetrAMM always digitizes at 100 kHz"""
      _base_sample_rate: int = 100_000
 
-     def __init__(
-         self,
-         driver: TetrammDriver,
-     ) -> None:
+     def __init__(self, driver: TetrammDriver, file_io: NDFileHDFIO) -> None:
          self.driver = driver
+         self._file_io = file_io
          self._arm_status: AsyncStatus | None = None
 
      def get_deadtime(self, exposure: float | None) -> float:
@@ -107,13 +107,19 @@ class TetrammController(DetectorController):
          if trigger_info.livetime is None:
              raise ValueError(f"{self.__class__.__name__} requires that livetime is set")
 
+         current_trig_status = await self.driver.trigger_mode.get_value()
+
+         if current_trig_status == TetrammTrigger.FREE_RUN:  # if freerun turn off first
+             await self.disarm()
+
          # trigger mode must be set first and on its own!
          await self.driver.trigger_mode.set(
              self._supported_trigger_types[trigger_info.trigger]
          )
+
          await asyncio.gather(
-             self.driver.averaging_time.set(trigger_info.livetime),
              self.set_exposure(trigger_info.livetime),
+             self._file_io.num_capture.set(trigger_info.total_number_of_exposures),
          )
 
          # raise an error if asked to trigger faster than the max.
@@ -133,14 +139,18 @@ class TetrammController(DetectorController):
          self._arm_status = await self.start_acquiring_driver_and_ensure_status()
 
      async def wait_for_idle(self):
-         if self._arm_status and not self._arm_status.done:
-             await self._arm_status
-         self._arm_status = None
+         # tetramm never goes idle really, actually it is always acquiring
+         # so need to wait for the capture to finish instead
+         await wait_for_value(self._file_io.acquire, False, timeout=None)
+
+     async def unstage(self):
+         await self.disarm()
+         await self._file_io.acquire.set(False)
 
      async def disarm(self):
          # We can't use caput callback as we already used it in arm() and we can't have
          # 2 or they will deadlock
-         await stop_busy_record(self.driver.acquire, False, timeout=1)
+         await set_and_wait_for_value(self.driver.acquire, False, timeout=1)
 
      async def set_exposure(self, exposure: float) -> None:
          """Set the exposure time and acquire period.
@@ -164,7 +174,9 @@ class TetrammController(DetectorController):
                  "Tetramm exposure time must be at least "
                  f"{minimum_samples * sample_time}s, asked to set it to {exposure}s"
              )
-         await self.driver.averaging_time.set(samples_per_reading * sample_time)
+         await self.driver.averaging_time.set(
+             samples_per_reading * sample_time
+         )  # correct
 
      async def start_acquiring_driver_and_ensure_status(self) -> AsyncStatus:
          """Start acquiring driver, raising ValueError if the detector is in a bad state.
@@ -202,10 +214,7 @@ class TetrammDatasetDescriber(DatasetDescriber):
      async def shape(self) -> tuple[int, int]:
          return (
              int(await self._driver.num_channels.get_value()),
-             int(
-                 await self._driver.averaging_time.get_value()
-                 / await self._driver.sample_time.get_value(),
-             ),
+             int(await self._driver.to_average.get_value()),
          )
 
 
@@ -223,7 +232,20 @@ class TetrammDetector(StandardDetector):
      ):
          self.driver = TetrammDriver(prefix + drv_suffix)
          self.file_io = NDFileHDFIO(prefix + fileio_suffix)
-         controller = TetrammController(self.driver)
+         controller = TetrammController(self.driver, self.file_io)
+
+         self.current1 = epics_signal_r(float, prefix + "Cur1:MeanValue_RBV")
+         self.current2 = epics_signal_r(float, prefix + "Cur2:MeanValue_RBV")
+         self.current3 = epics_signal_r(float, prefix + "Cur3:MeanValue_RBV")
+         self.current4 = epics_signal_r(float, prefix + "Cur4:MeanValue_RBV")
+
+         self.sum_x = epics_signal_r(float, prefix + "SumX:MeanValue_RBV")
+         self.sum_y = epics_signal_r(float, prefix + "SumY:MeanValue_RBV")
+         self.sum_all = epics_signal_r(float, prefix + "SumAll:MeanValue_RBV")
+         self.diff_x = epics_signal_r(float, prefix + "DiffX:MeanValue_RBV")
+         self.diff_y = epics_signal_r(float, prefix + "DiffY:MeanValue_RBV")
+         self.pos_x = epics_signal_r(float, prefix + "PosX:MeanValue_RBV")
+         self.pos_y = epics_signal_r(float, prefix + "PosY:MeanValue_RBV")
 
          writer = ADHDFWriter(
              fileio=self.file_io,