dls-dodal 1.58.0__py3-none-any.whl → 1.60.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (71)
  1. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/METADATA +3 -3
  2. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/RECORD +71 -47
  3. dodal/_version.py +2 -2
  4. dodal/beamlines/__init__.py +1 -0
  5. dodal/beamlines/b07.py +10 -5
  6. dodal/beamlines/b07_1.py +10 -5
  7. dodal/beamlines/b21.py +22 -0
  8. dodal/beamlines/i02_1.py +80 -0
  9. dodal/beamlines/i03.py +5 -3
  10. dodal/beamlines/i04.py +5 -3
  11. dodal/beamlines/i09.py +10 -9
  12. dodal/beamlines/i09_1.py +10 -5
  13. dodal/beamlines/i10-1.py +25 -0
  14. dodal/beamlines/i10.py +17 -1
  15. dodal/beamlines/i11.py +0 -17
  16. dodal/beamlines/i15.py +242 -0
  17. dodal/beamlines/i15_1.py +156 -0
  18. dodal/beamlines/i19_1.py +3 -1
  19. dodal/beamlines/i19_2.py +12 -1
  20. dodal/beamlines/i21.py +27 -0
  21. dodal/beamlines/i22.py +12 -2
  22. dodal/beamlines/i24.py +32 -3
  23. dodal/beamlines/k07.py +31 -0
  24. dodal/beamlines/p60.py +10 -9
  25. dodal/common/watcher_utils.py +1 -1
  26. dodal/devices/apple2_undulator.py +18 -142
  27. dodal/devices/attenuator/attenuator.py +48 -2
  28. dodal/devices/attenuator/filter.py +3 -0
  29. dodal/devices/attenuator/filter_selections.py +26 -0
  30. dodal/devices/eiger.py +2 -1
  31. dodal/devices/electron_analyser/__init__.py +4 -0
  32. dodal/devices/electron_analyser/abstract/base_driver_io.py +30 -18
  33. dodal/devices/electron_analyser/energy_sources.py +101 -0
  34. dodal/devices/electron_analyser/specs/detector.py +6 -6
  35. dodal/devices/electron_analyser/specs/driver_io.py +7 -15
  36. dodal/devices/electron_analyser/vgscienta/detector.py +6 -6
  37. dodal/devices/electron_analyser/vgscienta/driver_io.py +7 -14
  38. dodal/devices/fast_grid_scan.py +130 -64
  39. dodal/devices/focusing_mirror.py +30 -0
  40. dodal/devices/i02_1/__init__.py +0 -0
  41. dodal/devices/i02_1/fast_grid_scan.py +61 -0
  42. dodal/devices/i02_1/sample_motors.py +19 -0
  43. dodal/devices/i04/murko_results.py +69 -23
  44. dodal/devices/i10/i10_apple2.py +282 -140
  45. dodal/devices/i15/dcm.py +77 -0
  46. dodal/devices/i15/focussing_mirror.py +71 -0
  47. dodal/devices/i15/jack.py +39 -0
  48. dodal/devices/i15/laue.py +18 -0
  49. dodal/devices/i15/motors.py +27 -0
  50. dodal/devices/i15/multilayer_mirror.py +25 -0
  51. dodal/devices/i15/rail.py +17 -0
  52. dodal/devices/i21/__init__.py +3 -0
  53. dodal/devices/i21/enums.py +8 -0
  54. dodal/devices/i22/nxsas.py +2 -0
  55. dodal/devices/i24/commissioning_jungfrau.py +114 -0
  56. dodal/devices/motors.py +52 -1
  57. dodal/devices/slits.py +18 -0
  58. dodal/devices/smargon.py +0 -56
  59. dodal/devices/temperture_controller/__init__.py +3 -0
  60. dodal/devices/temperture_controller/lakeshore/__init__.py +0 -0
  61. dodal/devices/temperture_controller/lakeshore/lakeshore.py +204 -0
  62. dodal/devices/temperture_controller/lakeshore/lakeshore_io.py +112 -0
  63. dodal/devices/tetramm.py +38 -16
  64. dodal/devices/v2f.py +39 -0
  65. dodal/devices/zebra/zebra.py +1 -0
  66. dodal/devices/zebra/zebra_constants_mapping.py +1 -1
  67. dodal/parameters/experiment_parameter_base.py +1 -5
  68. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/WHEEL +0 -0
  69. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/entry_points.txt +0 -0
  70. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/licenses/LICENSE +0 -0
  71. {dls_dodal-1.58.0.dist-info → dls_dodal-1.60.0.dist-info}/top_level.txt +0 -0
dodal/devices/i15/jack.py ADDED
@@ -0,0 +1,39 @@
+ from ophyd_async.core import StandardReadable
+ from ophyd_async.epics.motor import Motor
+
+
+ class JackX(StandardReadable):
+     """Focusing Mirror"""
+
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+     ):
+         with self.add_children_as_readables():
+             self.rotation = Motor(prefix + "Ry")
+             self.transx = Motor(prefix + "X")
+             self.y1 = Motor(prefix + "Y1")
+             self.y2 = Motor(prefix + "Y2")
+             self.y3 = Motor(prefix + "Y3")
+
+         super().__init__(name)
+
+
+ class JackY(StandardReadable):
+     """Focusing Mirror"""
+
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+     ):
+         with self.add_children_as_readables():
+             self.j1 = Motor(prefix + "J1")
+             self.j2 = Motor(prefix + "J2")
+             self.j3 = Motor(prefix + "J3")
+             self.pitch = Motor(prefix + "PITCH")
+             self.roll = Motor(prefix + "ROLL")
+             self.y = Motor(prefix + "Y")
+
+         super().__init__(name)
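For orientation, the new i15 jack devices are plain StandardReadable groupings of Motor children, so construction only needs an EPICS prefix and an optional name. A minimal sketch, assuming the module path from the file list above; the PV prefixes are illustrative placeholders, not taken from i15:

```python
# Hedged usage sketch; prefixes and names below are hypothetical.
from dodal.devices.i15.jack import JackX, JackY

jack_x = JackX(prefix="BL15I-MO-JACK-01:", name="jack_x")
jack_y = JackY(prefix="BL15I-MO-JACK-02:", name="jack_y")
```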
dodal/devices/i15/laue.py ADDED
@@ -0,0 +1,18 @@
+ from ophyd_async.core import StandardReadable
+ from ophyd_async.epics.motor import Motor
+
+
+ class LaueMonochrometer(StandardReadable):
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+     ):
+         with self.add_children_as_readables():
+             self.bend = Motor(prefix + "BENDER")
+             self.bragg = Motor(prefix + "PITCH")
+             self.roll = Motor(prefix + "ROLL")
+             self.yaw = Motor(prefix + "YAW")
+             self.y = Motor(prefix + "Y")
+
+         super().__init__(name)
dodal/devices/i15/motors.py ADDED
@@ -0,0 +1,27 @@
+ from ophyd_async.epics.motor import Motor
+
+ from dodal.devices.motors import Stage
+
+
+ class UpstreamDownstreamPair(Stage):
+     def __init__(self, prefix: str, name: str = ""):
+         with self.add_children_as_readables():
+             self.upstream = Motor(prefix + "US")
+             self.downstream = Motor(prefix + "DS")
+         super().__init__(name=name)
+
+
+ class NumberedTripleAxisStage(Stage):
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+         axis1_infix: str = "AXIS1",
+         axis2_infix: str = "AXIS2",
+         axis3_infix: str = "AXIS3",
+     ):
+         with self.add_children_as_readables():
+             self.axis1 = Motor(prefix + axis1_infix)
+             self.axis2 = Motor(prefix + axis2_infix)
+             self.axis3 = Motor(prefix + axis3_infix)
+         super().__init__(name=name)
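NumberedTripleAxisStage exposes its axis infixes as constructor arguments, so the same class can wrap motor records with different PV suffixes. A hedged sketch based only on the constructor shown above; the prefix and infix strings are illustrative, not real i15 PVs:

```python
# Hedged sketch; the prefix and infix overrides are hypothetical.
from dodal.devices.i15.motors import NumberedTripleAxisStage

table = NumberedTripleAxisStage(
    prefix="BL15I-MO-TABLE-01:",  # hypothetical PV prefix
    axis1_infix="X",              # override the default "AXIS1" suffix
    axis2_infix="Y",
    axis3_infix="Z",
    name="table",
)
```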
dodal/devices/i15/multilayer_mirror.py ADDED
@@ -0,0 +1,25 @@
+ from ophyd_async.core import StandardReadable
+ from ophyd_async.epics.motor import Motor
+
+
+ class MultiLayerMirror(StandardReadable):
+     """Multilayer Mirror"""
+
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+     ):
+         with self.add_children_as_readables():
+             self.ds_x = Motor(prefix + "X2")
+             self.ds_y = Motor(prefix + "J3")
+             self.ib_y = Motor(prefix + "J1")
+             self.ob_y = Motor(prefix + "J2")
+             self.pitch = Motor(prefix + "PITCH")
+             self.roll = Motor(prefix + "ROLL")
+             self.us_x = Motor(prefix + "X1")
+             self.x = Motor(prefix + "X")
+             self.y = Motor(prefix + "Y")
+             self.yaw = Motor(prefix + "YAW")
+
+         super().__init__(name)
dodal/devices/i15/rail.py ADDED
@@ -0,0 +1,17 @@
+ from ophyd_async.core import StandardReadable
+ from ophyd_async.epics.motor import Motor
+
+
+ class Rail(StandardReadable):
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+     ):
+         with self.add_children_as_readables():
+             self.pitch = Motor(prefix + "PITCH")
+             self.y = Motor(prefix + "Y")
+             self.y1 = Motor(prefix + "Y1")
+             self.y2 = Motor(prefix + "Y2")
+
+         super().__init__(name)
dodal/devices/i21/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from dodal.devices.i21.enums import Grating
+
+ __all__ = ["Grating"]
dodal/devices/i21/enums.py ADDED
@@ -0,0 +1,8 @@
+ from ophyd_async.core import StrictEnum
+
+
+ class Grating(StrictEnum):
+     VPG1 = "VPG1"
+     VPG2 = "VPG2"
+     VPG3 = "VPG3"
+     VPG4 = "VPG4"
dodal/devices/i22/nxsas.py CHANGED
@@ -7,6 +7,7 @@ from bluesky.protocols import Reading
  from event_model.documents.event_descriptor import DataKey
  from ophyd_async.core import PathProvider
  from ophyd_async.epics.adaravis import AravisDetector
+ from ophyd_async.epics.adcore import NDPluginBaseIO
  from ophyd_async.epics.adpilatus import PilatusDetector

  ValueAndUnits = tuple[float, str]
@@ -106,6 +107,7 @@ class NXSasPilatus(PilatusDetector):
          fileio_suffix: str,
          metadata_holder: NXSasMetadataHolder,
          name: str = "",
+         plugins: dict[str, NDPluginBaseIO] | None = None,
      ):
          """Extends detector with configuration metadata required or desired
          to comply with the NXsas application definition.
dodal/devices/i24/commissioning_jungfrau.py ADDED
@@ -0,0 +1,114 @@
+ import asyncio
+ from collections.abc import AsyncGenerator, AsyncIterator
+ from pathlib import Path
+
+ from bluesky.protocols import StreamAsset
+ from event_model import DataKey  # type: ignore
+ from ophyd_async.core import (
+     AutoIncrementingPathProvider,
+     DetectorWriter,
+     StandardDetector,
+     StandardReadable,
+     StaticPathProvider,
+     observe_value,
+     wait_for_value,
+ )
+ from ophyd_async.epics.core import epics_signal_r, epics_signal_rw, epics_signal_rw_rbv
+ from ophyd_async.fastcs.jungfrau._controller import JungfrauController
+ from ophyd_async.fastcs.jungfrau._signals import JungfrauDriverIO
+
+ from dodal.log import LOGGER
+
+
+ class JunfrauCommissioningWriter(DetectorWriter, StandardReadable):
+     """Implementation of the temporary filewriter used for Jungfrau commissioning on i24.
+
+     The PVs on this device are responsible for writing files of a specified name
+     to a specified path, marking itself as "ready to write", and having a counter of
+     frames written, which must be zero'd at the ophyd level
+     """
+
+     def __init__(
+         self,
+         prefix,
+         path_provider: AutoIncrementingPathProvider | StaticPathProvider,
+         name="",
+     ) -> None:
+         with self.add_children_as_readables():
+             self._path_info = path_provider
+             self.frame_counter = epics_signal_rw(int, f"{prefix}NumCaptured")
+             self.file_name = epics_signal_rw_rbv(str, f"{prefix}FileName")
+             self.file_path = epics_signal_rw_rbv(str, f"{prefix}FilePath")
+             self.writer_ready = epics_signal_r(int, f"{prefix}Ready_RBV")
+         super().__init__(name)
+
+     async def open(self, name: str, exposures_per_event: int = 1) -> dict[str, DataKey]:
+         self._exposures_per_event = exposures_per_event
+         _path_info = self._path_info()
+
+         # Commissioning Jungfrau plans allow you to override path, so check to see if file exists
+         requested_filepath = Path(_path_info.directory_path) / _path_info.filename
+         if requested_filepath.exists():
+             raise FileExistsError(
+                 f"Jungfrau was requested to write to {requested_filepath}, but this file already exists!"
+             )
+
+         await asyncio.gather(
+             self.file_name.set(_path_info.filename),
+             self.file_path.set(str(_path_info.directory_path)),
+             self.frame_counter.set(0),
+         )
+         LOGGER.info(
+             f"Jungfrau writing to folder {_path_info.directory_path} with filename {_path_info.filename}"
+         )
+         await wait_for_value(self.writer_ready, 1, timeout=10)
+         return await self._describe()
+
+     async def _describe(self) -> dict[str, DataKey]:
+         # Dummy function, doesn't actually describe the dataset
+
+         return {
+             "data": DataKey(
+                 source="Commissioning writer",
+                 shape=[-1],
+                 dtype="array",
+                 dtype_numpy="<u2",
+                 external="STREAM:",
+             )
+         }
+
+     async def observe_indices_written(
+         self, timeout: float
+     ) -> AsyncGenerator[int, None]:
+         timeout = timeout * 2  # This filewriter is slow
+         async for num_captured in observe_value(self.frame_counter, timeout):
+             yield num_captured // (self._exposures_per_event)
+
+     async def get_indices_written(self) -> int:
+         return await self.frame_counter.get_value() // self._exposures_per_event
+
+     def collect_stream_docs(
+         self, name: str, indices_written: int
+     ) -> AsyncIterator[StreamAsset]:
+         raise NotImplementedError()
+
+     async def close(self) -> None: ...
+
+
+ class CommissioningJungfrau(
+     StandardDetector[JungfrauController, JunfrauCommissioningWriter]
+ ):
+     """Ophyd-async implementation of a Jungfrau 9M Detector, using a temporary
+     filewriter in place of Odin"""
+
+     def __init__(
+         self,
+         prefix: str,
+         writer_prefix: str,
+         path_provider: AutoIncrementingPathProvider | StaticPathProvider,
+         name="",
+     ):
+         self.drv = JungfrauDriverIO(prefix)
+         writer = JunfrauCommissioningWriter(writer_prefix, path_provider)
+         controller = JungfrauController(self.drv)
+         super().__init__(controller, writer, name=name)
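The writer takes a path provider rather than a fixed path, so a commissioning setup would typically build one and pass it to the detector. A hedged construction sketch, assuming ophyd_async's StaticFilenameProvider/StaticPathProvider keep their usual signatures; all prefixes, filenames, and directories below are illustrative placeholders, and in practice the device would be created through a beamline module:

```python
# Hedged sketch only; PV prefixes and paths are hypothetical.
from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider

from dodal.devices.i24.commissioning_jungfrau import CommissioningJungfrau

path_provider = StaticPathProvider(
    StaticFilenameProvider("jf_commissioning_0001"),  # hypothetical filename
    Path("/dls/i24/data/2025/cm00000-1"),             # hypothetical visit directory
)
jungfrau = CommissioningJungfrau(
    prefix="BL24I-EA-JNGFR-01:",            # hypothetical detector prefix
    writer_prefix="BL24I-EA-JNGFR-01:FW:",  # hypothetical filewriter prefix
    path_provider=path_provider,
    name="jungfrau",
)
```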
dodal/devices/motors.py CHANGED
@@ -70,13 +70,27 @@ class XYZThetaStage(XYZStage):
          x_infix: str = _X,
          y_infix: str = _Y,
          z_infix: str = _Z,
-         theta_infix: str = _Z,
+         theta_infix: str = "THETA",
      ) -> None:
          with self.add_children_as_readables():
              self.theta = Motor(prefix + theta_infix)
          super().__init__(prefix, name, x_infix, y_infix, z_infix)


+ class XYPhiStage(XYStage):
+     def __init__(
+         self,
+         prefix: str,
+         x_infix: str = _X,
+         y_infix: str = _Y,
+         phi_infix: str = "PHI",
+         name: str = "",
+     ) -> None:
+         with self.add_children_as_readables():
+             self.phi = Motor(prefix + phi_infix)
+         super().__init__(prefix, name, x_infix, y_infix)
+
+
  class XYPitchStage(XYStage):
      def __init__(
          self,
@@ -91,6 +105,23 @@ class XYPitchStage(XYStage):
          super().__init__(prefix, name, x_infix, y_infix)


+ class XYZPitchYawStage(XYZStage):
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+         x_infix: str = _X,
+         y_infix: str = _Y,
+         z_infix: str = _Z,
+         pitch_infix="PITCH",
+         yaw_infix="YAW",
+     ):
+         with self.add_children_as_readables():
+             self.pitch = Motor(prefix + pitch_infix)
+             self.yaw = Motor(prefix + yaw_infix)
+         super().__init__(prefix, name, x_infix, y_infix, z_infix)
+
+
  class XYZPitchYawRollStage(XYZStage):
      def __init__(
          self,
@@ -136,6 +167,26 @@ class SixAxisGonio(XYZStage):
          )


+ class SixAxisGonioKappaPhi(XYZStage):
+     def __init__(
+         self,
+         prefix: str,
+         name: str = "",
+         x_infix: str = _X,
+         y_infix: str = _Y,
+         z_infix: str = _Z,
+         kappa_infix: str = "KAPPA",
+         phi_infix: str = "PHI",
+     ):
+         """Six-axis goniometer with a standard xyz stage and two axes of rotation:
+         kappa and phi.
+         """
+         with self.add_children_as_readables():
+             self.kappa = Motor(prefix + kappa_infix)
+             self.phi = Motor(prefix + phi_infix)
+         super().__init__(prefix, name, x_infix, y_infix, z_infix)
+
+
  class YZStage(Stage):
      def __init__(
          self, prefix: str, name: str = "", y_infix: str = _Y, z_infix: str = _Z
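The new stage classes follow the existing infix pattern in dodal/devices/motors.py, so beamline modules can override the per-axis PV suffixes at construction time. A hedged sketch using only the constructors shown above; the prefix and infix override are illustrative placeholders:

```python
# Hedged sketch; the prefixes and the "KAP" override are hypothetical.
from dodal.devices.motors import SixAxisGonioKappaPhi, XYZPitchYawStage

gonio = SixAxisGonioKappaPhi(
    prefix="BL02J-MO-GONIO-01:",  # hypothetical PV prefix
    kappa_infix="KAP",            # override the default "KAPPA" suffix
    name="gonio",
)
mirror_stage = XYZPitchYawStage(prefix="BL02J-MO-MIRR-01:", name="mirror_stage")
```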
dodal/devices/slits.py CHANGED
@@ -37,3 +37,21 @@ class Slits(MinimalSlits):
              self.x_centre = Motor(prefix + x_centre)
              self.y_centre = Motor(prefix + y_centre)
          super().__init__(prefix=prefix, x_gap=x_gap, y_gap=y_gap, name=name)
+
+
+ class SlitsY(StandardReadable):
+     """
+     Representation of a 2-blade slits.
+     """
+
+     def __init__(
+         self,
+         prefix: str,
+         y_gap: str = "Y:SIZE",
+         y_centre: str = "Y:CENTRE",
+         name: str = "",
+     ) -> None:
+         with self.add_children_as_readables():
+             self.y_gap = Motor(prefix + y_gap)
+             self.y_centre = Motor(prefix + y_centre)
+         super().__init__(name=name)
dodal/devices/smargon.py CHANGED
@@ -1,13 +1,9 @@
  import asyncio
- from collections.abc import Collection, Generator
- from dataclasses import dataclass
  from enum import Enum
  from math import isclose
  from typing import TypedDict, cast

- from bluesky import plan_stubs as bps
  from bluesky.protocols import Movable
- from bluesky.utils import Msg
  from ophyd_async.core import (
      AsyncStatus,
      Device,
@@ -66,40 +62,6 @@ class StubOffsets(Device):
          await self.to_robot_load.set(1)


- @dataclass
- class AxisLimit:
-     """Represents the minimum and maximum allowable values on an axis"""
-
-     min_value: float
-     max_value: float
-
-     def contains(self, pos: float):
-         """Determine if the specified value is within limits.
-
-         Args:
-             pos: the value to check
-
-         Returns:
-             True if the value does not exceed the limits
-         """
-         return self.min_value <= pos <= self.max_value
-
-
- @dataclass
- class XYZLimits:
-     """The limits of the smargon x, y, z axes."""
-
-     x: AxisLimit
-     y: AxisLimit
-     z: AxisLimit
-
-     def position_valid(self, pos: Collection[float]) -> bool:
-         return all(
-             axis_limits.contains(value)
-             for axis_limits, value in zip([self.x, self.y, self.z], pos, strict=False)
-         )
-
-
  class DeferMoves(StrictEnum):
      ON = "Defer On"
      OFF = "Defer Off"
@@ -144,24 +106,6 @@ class Smargon(XYZStage, Movable):

          super().__init__(prefix, name)

-     def get_xyz_limits(self) -> Generator[Msg, None, XYZLimits]:
-         """Obtain a plan stub that returns the smargon XYZ axis limits
-
-         Yields:
-             Bluesky messages
-
-         Returns:
-             the axis limits
-         """
-         limits = {}
-         for name, pv in [
-             (attr_name, getattr(self, attr_name)) for attr_name in ["x", "y", "z"]
-         ]:
-             min_value = yield from bps.rd(pv.low_limit_travel)
-             max_value = yield from bps.rd(pv.high_limit_travel)
-             limits[name] = AxisLimit(min_value, max_value)
-         return XYZLimits(**limits)
-
      @AsyncStatus.wrap
      async def set(self, value: CombinedMove):
          """This will move all motion together in a deferred move.
dodal/devices/temperture_controller/__init__.py ADDED
@@ -0,0 +1,3 @@
+ from .lakeshore.lakeshore import Lakeshore, Lakeshore336, Lakeshore340
+
+ __all__ = ["Lakeshore336", "Lakeshore340", "Lakeshore"]
dodal/devices/temperture_controller/lakeshore/lakeshore.py ADDED
@@ -0,0 +1,204 @@
+ from asyncio import gather
+
+ from bluesky.protocols import Movable
+ from ophyd_async.core import (
+     AsyncStatus,
+     SignalDatatypeT,
+     StandardReadable,
+     StandardReadableFormat,
+     StrictEnum,
+     derived_signal_rw,
+     soft_signal_rw,
+ )
+
+ from .lakeshore_io import (
+     LakeshoreBaseIO,
+ )
+
+
+ class Heater336Settings(StrictEnum):
+     OFF = "Off"
+     LOW = "Low"
+     MEDIUM = "Medium"
+     HIGH = "High"
+
+
+ class Lakeshore(LakeshoreBaseIO, StandardReadable, Movable[float]):
+     """
+     Device for controlling and reading from a Lakeshore temperature controller.
+     It supports multiple channels and PID control.
+
+     Attributes
+     ----------
+     temperature : LakeshoreBaseIO
+         Temperature IO interface.
+     PID : PIDBaseIO
+         PID IO interface.
+     control_channel : derived_signal_rw
+         Signal for selecting the control channel,
+         optional readback as hinted signal
+         (default readback channel is the same as control channel).
+
+     temperature_high_limit: soft_signal_rw
+         Signal to store the soft high temperature limit.
+     temperature_low_limit: soft_signal_rw
+         Signal to store the soft low temperature limit.
+
+
+     Methods
+     -------
+     set(value: float)
+         Set the temperature setpoint for the selected control channel.
+     """
+
+     def __init__(
+         self,
+         prefix: str,
+         num_readback_channel: int,
+         heater_setting: type[SignalDatatypeT],
+         control_channel: int = 1,
+         single_control_channel: bool = False,
+         name: str = "",
+     ):
+         """
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         no_channels : int
+             Number of temperature channels.
+         heater_setting : type[SignalDatatypeT]
+             Enum type for heater settings.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         single_control_channel : bool, optional
+             Whether to use a single control channel (default is False).
+         name : str, optional
+             Name of the device.
+         """
+         self._control_channel = soft_signal_rw(int, initial_value=control_channel)
+         self.temperature_high_limit = soft_signal_rw(float, initial_value=400)
+         self.temperature_low_limit = soft_signal_rw(float, initial_value=0)
+
+         self.control_channel = derived_signal_rw(
+             raw_to_derived=self._get_control_channel,
+             set_derived=self._set_control_channel,
+             current_channel=self._control_channel,
+         )
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=num_readback_channel,
+             heater_setting=heater_setting,
+             name=name,
+             single_control_channel=single_control_channel,
+         )
+
+         self.add_readables(
+             [setpoint.user_setpoint for setpoint in self.control_channels.values()]
+         )
+         self.add_readables(
+             list(self.readback.values()), format=StandardReadableFormat.HINTED_SIGNAL
+         )
+
+         self.add_readables(
+             [
+                 self._control_channel,
+                 self.control_channels[control_channel].p,
+                 self.control_channels[control_channel].i,
+                 self.control_channels[control_channel].d,
+                 self.control_channels[control_channel].heater_output_range,
+             ],
+             StandardReadableFormat.CONFIG_SIGNAL,
+         )
+
+     @AsyncStatus.wrap
+     async def set(self, value: float) -> None:
+         """
+         Set the temperature setpoint for the active control channel.
+         """
+         high, low = await gather(
+             self.temperature_high_limit.get_value(),
+             self.temperature_low_limit.get_value(),
+         )
+         if high >= value >= low:
+             await self.control_channels[
+                 await self.control_channel.get_value()
+             ].user_setpoint.set(value)
+         else:
+             raise ValueError(
+                 f"{self.name} requested temperature {value} is outside limits: {low}, {high}"
+             )
+
+     def _get_control_channel(self, current_channel: int) -> int:
+         return current_channel
+
+     async def _set_control_channel(self, value: int) -> None:
+         if value < 1 or value > len(self.control_channels):
+             raise ValueError(
+                 f"Control channel must be between 1 and {len(self.control_channels)}."
+             )
+         await self._control_channel.set(value)
+         self._read_config_funcs = (
+             self._control_channel.read,
+             self.control_channels[value].user_setpoint.read,
+             self.control_channels[value].p.read,
+             self.control_channels[value].i.read,
+             self.control_channels[value].d.read,
+             self.control_channels[value].heater_output_range.read,
+         )
+
+
+ class Lakeshore336(Lakeshore):
+     def __init__(
+         self,
+         prefix: str,
+         control_channel: int = 1,
+         name: str = "",
+     ):
+         """
+         Lakeshore 336 temperature controller. With 4 readback and control channels.
+         Heater settings are: Off, Low, Medium, High.
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         """
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=4,
+             heater_setting=Heater336Settings,
+             control_channel=control_channel,
+             single_control_channel=False,
+             name=name,
+         )
+
+
+ class Lakeshore340(Lakeshore):
+     def __init__(
+         self,
+         prefix: str,
+         control_channel: int = 1,
+         name: str = "",
+     ):
+         """Lakeshore 340 temperature controller. With 4 readback channels and a single
+         control channel.
+         Heater settings are in power from 0 to 5. 0 is 0 watt, 5 is 50 watt.
+
+         Parameters
+         ----------
+         prefix : str
+             The EPICS prefix for the device.
+         control_channel : int, optional
+             The initial control channel (default is 1).
+         """
+
+         super().__init__(
+             prefix=prefix,
+             num_readback_channel=4,
+             heater_setting=float,
+             control_channel=control_channel,
+             single_control_channel=True,
+             name=name,
+         )
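For orientation, a minimal usage sketch of the new Lakeshore devices, based only on the constructor, `control_channel` signal, and `set()` shown above. The EPICS prefix and temperatures are illustrative placeholders, and in practice the device would be created through a beamline module and connected before use:

```python
# Hedged sketch; the prefix, channel, and setpoint are hypothetical values.
import bluesky.plan_stubs as bps

from dodal.devices.temperture_controller import Lakeshore336

lakeshore = Lakeshore336(prefix="BL21B-EA-TCTRL-01:", name="lakeshore")

def set_temperature_plan():
    # Select control channel 2, then move the setpoint; Lakeshore.set() raises
    # if the request falls outside the temperature_high/low_limit soft signals.
    yield from bps.abs_set(lakeshore.control_channel, 2, wait=True)
    yield from bps.abs_set(lakeshore, 300.0, wait=True)
```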