dls-dodal 1.66.0__py3-none-any.whl → 1.68.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (83)
  1. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/METADATA +2 -2
  2. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/RECORD +75 -65
  3. dodal/_version.py +2 -2
  4. dodal/beamlines/b07.py +1 -1
  5. dodal/beamlines/b07_1.py +1 -1
  6. dodal/beamlines/i03.py +92 -208
  7. dodal/beamlines/i04.py +22 -1
  8. dodal/beamlines/i05.py +1 -1
  9. dodal/beamlines/i06.py +1 -1
  10. dodal/beamlines/i09.py +1 -1
  11. dodal/beamlines/i09_1.py +27 -3
  12. dodal/beamlines/i09_2.py +58 -2
  13. dodal/beamlines/i10_optics.py +44 -25
  14. dodal/beamlines/i16.py +23 -0
  15. dodal/beamlines/i17.py +7 -3
  16. dodal/beamlines/i19_1.py +26 -14
  17. dodal/beamlines/i19_2.py +49 -38
  18. dodal/beamlines/i21.py +61 -2
  19. dodal/beamlines/i22.py +16 -1
  20. dodal/beamlines/p60.py +1 -1
  21. dodal/beamlines/training_rig.py +0 -16
  22. dodal/cli.py +26 -12
  23. dodal/common/coordination.py +3 -2
  24. dodal/device_manager.py +604 -0
  25. dodal/devices/cryostream.py +28 -57
  26. dodal/devices/eiger.py +41 -27
  27. dodal/devices/electron_analyser/__init__.py +0 -33
  28. dodal/devices/electron_analyser/base/__init__.py +58 -0
  29. dodal/devices/electron_analyser/base/base_controller.py +73 -0
  30. dodal/devices/electron_analyser/base/base_detector.py +214 -0
  31. dodal/devices/electron_analyser/{abstract → base}/base_driver_io.py +23 -42
  32. dodal/devices/electron_analyser/{abstract → base}/base_region.py +47 -11
  33. dodal/devices/electron_analyser/{util.py → base/base_util.py} +1 -1
  34. dodal/devices/electron_analyser/{energy_sources.py → base/energy_sources.py} +1 -1
  35. dodal/devices/electron_analyser/specs/__init__.py +4 -4
  36. dodal/devices/electron_analyser/specs/specs_detector.py +46 -0
  37. dodal/devices/electron_analyser/specs/{driver_io.py → specs_driver_io.py} +23 -26
  38. dodal/devices/electron_analyser/specs/{region.py → specs_region.py} +4 -3
  39. dodal/devices/electron_analyser/vgscienta/__init__.py +4 -4
  40. dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py +52 -0
  41. dodal/devices/electron_analyser/vgscienta/{driver_io.py → vgscienta_driver_io.py} +25 -31
  42. dodal/devices/electron_analyser/vgscienta/{region.py → vgscienta_region.py} +6 -6
  43. dodal/devices/i04/max_pixel.py +38 -0
  44. dodal/devices/i09_1_shared/__init__.py +8 -1
  45. dodal/devices/i09_1_shared/hard_energy.py +112 -0
  46. dodal/devices/i09_2_shared/__init__.py +0 -0
  47. dodal/devices/i09_2_shared/i09_apple2.py +14 -0
  48. dodal/devices/i10/i10_apple2.py +24 -22
  49. dodal/devices/i17/i17_apple2.py +32 -20
  50. dodal/devices/i19/access_controlled/attenuator_motor_squad.py +61 -0
  51. dodal/devices/i19/access_controlled/blueapi_device.py +9 -1
  52. dodal/devices/i19/access_controlled/shutter.py +2 -4
  53. dodal/devices/i21/__init__.py +3 -1
  54. dodal/devices/insertion_device/__init__.py +58 -0
  55. dodal/devices/{apple2_undulator.py → insertion_device/apple2_undulator.py} +102 -44
  56. dodal/devices/insertion_device/energy_motor_lookup.py +88 -0
  57. dodal/devices/insertion_device/id_enum.py +17 -0
  58. dodal/devices/insertion_device/lookup_table_models.py +317 -0
  59. dodal/devices/motors.py +14 -0
  60. dodal/devices/robot.py +16 -11
  61. dodal/plans/__init__.py +1 -1
  62. dodal/plans/configure_arm_trigger_and_disarm_detector.py +2 -4
  63. dodal/testing/electron_analyser/device_factory.py +4 -4
  64. dodal/testing/fixtures/devices/__init__.py +0 -0
  65. dodal/testing/fixtures/devices/apple2.py +78 -0
  66. dodal/testing/fixtures/run_engine.py +4 -0
  67. dodal/utils.py +6 -3
  68. dodal/devices/electron_analyser/abstract/__init__.py +0 -25
  69. dodal/devices/electron_analyser/abstract/base_detector.py +0 -63
  70. dodal/devices/electron_analyser/abstract/types.py +0 -12
  71. dodal/devices/electron_analyser/detector.py +0 -143
  72. dodal/devices/electron_analyser/specs/detector.py +0 -34
  73. dodal/devices/electron_analyser/types.py +0 -57
  74. dodal/devices/electron_analyser/vgscienta/detector.py +0 -48
  75. dodal/devices/util/lookup_tables_apple2.py +0 -390
  76. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/WHEEL +0 -0
  77. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/entry_points.txt +0 -0
  78. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/licenses/LICENSE +0 -0
  79. {dls_dodal-1.66.0.dist-info → dls_dodal-1.68.0.dist-info}/top_level.txt +0 -0
  80. /dodal/devices/electron_analyser/{enums.py → base/base_enums.py} +0 -0
  81. /dodal/devices/electron_analyser/specs/{enums.py → specs_enums.py} +0 -0
  82. /dodal/devices/electron_analyser/vgscienta/{enums.py → vgscienta_enums.py} +0 -0
  83. /dodal/plans/{scanspec.py → spec_path.py} +0 -0
dodal/devices/insertion_device/lookup_table_models.py ADDED
@@ -0,0 +1,317 @@
+"""Apple2 lookup table utilities and CSV converter.
+
+This module provides helpers to read, validate and convert Apple2 insertion-device
+lookup tables (energy -> gap/phase polynomials) from CSV sources into an
+in-memory dictionary format used by the Apple2 controllers.
+
+Data format produced
+The lookup-table dictionary created by convert_csv_to_lookup() follows this
+structure:
+
+{
+    "POL_MODE": {
+        "energy_entries": [
+            {
+                "low": <float>,
+                "high": <float>,
+                "poly": <numpy.poly1d>
+            },
+            ...
+        ]
+    },
+    ...
+}
+"""
+
+import csv
+import io
+from collections.abc import Generator
+from typing import Annotated as A
+from typing import Any, NamedTuple, Self
+
+import numpy as np
+from pydantic import (
+    BaseModel,
+    ConfigDict,
+    Field,
+    RootModel,
+    field_serializer,
+    field_validator,
+)
+
+from dodal.devices.insertion_device.id_enum import Pol
+
+DEFAULT_POLY_DEG = [
+    "7th-order",
+    "6th-order",
+    "5th-order",
+    "4th-order",
+    "3rd-order",
+    "2nd-order",
+    "1st-order",
+    "b",
+]
+
+MODE_NAME_CONVERT = {"cr": "pc", "cl": "nc"}
+DEFAULT_GAP_FILE = "IDEnergy2GapCalibrations.csv"
+DEFAULT_PHASE_FILE = "IDEnergy2PhaseCalibrations.csv"
+
+ROW_PHASE_MOTOR_TOLERANCE = 0.004
+ROW_PHASE_CIRCULAR = 15
+MAXIMUM_ROW_PHASE_MOTOR_POSITION = 24.0
+MAXIMUM_GAP_MOTOR_POSITION = 100
+
+DEFAULT_POLY1D_PARAMETERS = {
+    Pol.LH: [0],
+    Pol.LV: [MAXIMUM_ROW_PHASE_MOTOR_POSITION],
+    Pol.PC: [ROW_PHASE_CIRCULAR],
+    Pol.NC: [-ROW_PHASE_CIRCULAR],
+    Pol.LH3: [0],
+}
+
+
+class Source(NamedTuple):
+    column: str
+    value: str
+
+
+class LookupTableColumnConfig(BaseModel):
+    """Configuration on how to process a csv file columns into a LookupTable data model."""
+
+    source: A[
+        Source | None,
+        Field(
+            description="If not None, only process the row if the source column name match the value."
+        ),
+    ] = None
+    mode: A[str, Field(description="Polarisation mode column name.")] = "Mode"
+    min_energy: A[str, Field(description="Minimum energy column name.")] = "MinEnergy"
+    max_energy: A[str, Field(description="Maximum energy column name.")] = "MaxEnergy"
+    poly_deg: list[str] = Field(
+        description="Polynomial column names.", default_factory=lambda: DEFAULT_POLY_DEG
+    )
+    mode_name_convert: dict[str, str] = Field(
+        description="When processing polarisation mode values, map their alias values to a real value.",
+        default_factory=lambda: MODE_NAME_CONVERT,
+    )
+    grating: A[
+        str | None, Field(description="Optional column name for entry grating.")
+    ] = None
+
+
+class EnergyCoverageEntry(BaseModel):
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True, frozen=True
+    )  # arbitrary_types_allowed is True so np.poly1d can be used.
+    min_energy: float
+    max_energy: float
+    poly: np.poly1d
+    grating: float | None = None
+
+    @field_validator("poly", mode="before")
+    @classmethod
+    def validate_and_convert_poly(
+        cls: type[Self], value: np.poly1d | list
+    ) -> np.poly1d:
+        """If reading from serialized data, it will be using a list. Convert to np.poly1d"""
+        if isinstance(value, list):
+            return np.poly1d(value)
+        return value
+
+    @field_serializer("poly", mode="plain")
+    def serialize_poly(self, value: np.poly1d) -> list:
+        """Allow np.poly1d to work when serializing."""
+        return value.coefficients.tolist()
+
+
+class EnergyCoverage(BaseModel):
+    model_config = ConfigDict(frozen=True)
+    energy_entries: tuple[EnergyCoverageEntry, ...]
+
+    @field_validator("energy_entries", mode="after")
+    @classmethod
+    def _prepare_energy_entries(
+        cls, value: tuple[EnergyCoverageEntry, ...]
+    ) -> tuple[EnergyCoverageEntry, ...]:
+        """Convert incoming energy_entries to a sorted, immutable tuple."""
+        return tuple(sorted(value, key=lambda e: e.min_energy))
+
+    @classmethod
+    def generate(
+        cls: type[Self],
+        min_energies: list[float],
+        max_energies: list[float],
+        poly1d_params: list[list[float]],
+    ) -> Self:
+        energy_entries = tuple(
+            EnergyCoverageEntry(
+                min_energy=min_energy,
+                max_energy=max_energy,
+                poly=np.poly1d(poly_params),
+            )
+            for min_energy, max_energy, poly_params in zip(
+                min_energies, max_energies, poly1d_params, strict=True
+            )
+        )
+        return cls(energy_entries=energy_entries)
+
+    @property
+    def min_energy(self) -> float:
+        return self.energy_entries[0].min_energy
+
+    @property
+    def max_energy(self) -> float:
+        return self.energy_entries[-1].max_energy
+
+    def get_poly(self, energy: float) -> np.poly1d:
+        """
+        Return the numpy.poly1d polynomial applicable for the given energy.
+
+        Parameters:
+        -----------
+        energy:
+            Energy value in the same units used to create the lookup table.
+        """
+
+        if not self.min_energy <= energy <= self.max_energy:
+            raise ValueError(
+                f"Demanding energy must lie between {self.min_energy} and {self.max_energy}!"
+            )
+
+        poly_index = self.get_energy_index(energy)
+        if poly_index is not None:
+            return self.energy_entries[poly_index].poly
+        raise ValueError(
+            "Cannot find polynomial coefficients for your requested energy."
+            + " There might be gap in the calibration lookup table."
+        )
+
+    def get_energy_index(self, energy: float) -> int | None:
+        """Binary search assumes self.energy_entries is sorted by min_energy.
+        Return index or None if not found."""
+        max_index = len(self.energy_entries) - 1
+        min_index = 0
+        while min_index <= max_index:
+            mid_index = (min_index + max_index) // 2
+            en_try = self.energy_entries[mid_index]
+            if en_try.min_energy <= energy <= en_try.max_energy:
+                return mid_index
+            elif energy < en_try.min_energy:
+                max_index = mid_index - 1
+            else:
+                min_index = mid_index + 1
+        return None
+
+
+class LookupTable(RootModel[dict[Pol, EnergyCoverage]]):
+    """
+    Specialised lookup table for insertion devices to relate the energy and polarisation
+    values to Apple2 motor positions.
+    """
+
+    model_config = ConfigDict(frozen=True)
+
+    # Allow to auto specify a dict if one not provided
+    def __init__(self, root: dict[Pol, EnergyCoverage] | None = None):
+        super().__init__(root=root or {})
+
+    @classmethod
+    def generate(
+        cls: type[Self],
+        pols: list[Pol],
+        energy_coverage: list[EnergyCoverage],
+    ) -> Self:
+        """Generate a LookupTable containing multiple EnergyCoverage
+        for provided polarisations."""
+        root_data = dict(zip(pols, energy_coverage, strict=False))
+        return cls(root=root_data)
+
+    def get_poly(
+        self,
+        energy: float,
+        pol: Pol,
+    ) -> np.poly1d:
+        """
+        Return the numpy.poly1d polynomial applicable for the given energy and polarisation.
+
+        Parameters:
+        -----------
+        energy:
+            Energy value in the same units used to create the lookup table.
+        pol:
+            Polarisation mode (Pol enum).
+        """
+        return self.root[pol].get_poly(energy)
+
+
+def convert_csv_to_lookup(
+    file_contents: str,
+    lut_config: LookupTableColumnConfig,
+    skip_line_start_with: str = "#",
+) -> LookupTable:
+    """
+    Convert CSV content into the Apple2 lookup-table dictionary.
+
+    Parameters:
+    -----------
+    file_contents:
+        The CSV file contents as string.
+    lut_config:
+        The configuration that how to process the file_contents into a LookupTable.
+    skip_line_start_with
+        Lines beginning with this prefix are skipped (default "#").
+
+    Returns:
+    -----------
+    LookupTable
+    """
+    temp_mode_entries: dict[Pol, list[EnergyCoverageEntry]] = {}
+
+    def process_row(row: dict[str, Any]) -> None:
+        """Process a single row from the CSV file and update the temporary entry list."""
+        raw_mode_value = str(row[lut_config.mode]).lower()
+        mode_value = Pol(
+            lut_config.mode_name_convert.get(raw_mode_value, raw_mode_value)
+        )
+
+        coefficients = np.poly1d([float(row[coef]) for coef in lut_config.poly_deg])
+
+        energy_entry = EnergyCoverageEntry(
+            min_energy=float(row[lut_config.min_energy]),
+            max_energy=float(row[lut_config.max_energy]),
+            poly=coefficients,
+        )
+
+        if mode_value not in temp_mode_entries:
+            temp_mode_entries[mode_value] = []
+
+        temp_mode_entries[mode_value].append(energy_entry)
+
+    reader = csv.DictReader(read_file_and_skip(file_contents, skip_line_start_with))
+
+    for row in reader:
+        source = lut_config.source
+        # If there are multiple source only convert requested.
+        if source is None or row[source.column] == source.value:
+            process_row(row=row)
+    # Check if our LookupTable is empty after processing, raise error if it is.
+    if not temp_mode_entries:
+        raise RuntimeError(
+            "LookupTable content is empty, failed to convert the file contents to "
+            "a LookupTable!"
+        )
+
+    final_lut_root: dict[Pol, EnergyCoverage] = {}
+    for pol, entries in temp_mode_entries.items():
+        final_lut_root[pol] = EnergyCoverage.model_validate({"energy_entries": entries})
+
+    return LookupTable(root=final_lut_root)
+
+
+def read_file_and_skip(file: str, skip_line_start_with: str = "#") -> Generator[str]:
+    """Yield non-comment lines from the CSV content string."""
+    for line in io.StringIO(file):
+        if line.startswith(skip_line_start_with):
+            continue
+        else:
+            yield line
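As a quick orientation to the new module above, here is a minimal usage sketch. It uses only the classes and signatures visible in this diff; the energies and polynomial coefficients are invented for illustration.

    from dodal.devices.insertion_device.id_enum import Pol
    from dodal.devices.insertion_device.lookup_table_models import (
        EnergyCoverage,
        LookupTable,
    )

    # Two energy ranges for linear-horizontal polarisation, each with its own
    # polynomial (coefficients here are made up for the example).
    coverage = EnergyCoverage.generate(
        min_energies=[500.0, 1000.0],
        max_energies=[1000.0, 2000.0],
        poly1d_params=[[0.001, 20.0], [0.0005, 21.0]],
    )
    lut = LookupTable.generate(pols=[Pol.LH], energy_coverage=[coverage])

    # get_poly() selects the entry covering 750 eV via binary search; calling
    # the returned np.poly1d evaluates the motor position at that energy.
    gap = lut.get_poly(750.0, Pol.LH)(750.0)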
dodal/devices/motors.py CHANGED
@@ -122,6 +122,20 @@ class XYPitchStage(XYStage):
         super().__init__(prefix, name, x_infix, y_infix)
 
 
+class XYRollStage(XYStage):
+    def __init__(
+        self,
+        prefix: str,
+        x_infix: str = _X,
+        y_infix: str = _Y,
+        roll_infix: str = "ROLL",
+        name: str = "",
+    ) -> None:
+        with self.add_children_as_readables():
+            self.roll = Motor(prefix + roll_infix)
+        super().__init__(prefix, name, x_infix, y_infix)
+
+
 class XYZPitchYawStage(XYZStage):
     def __init__(
         self,
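For reference, instantiating the new stage might look like the following sketch (the PV prefix is invented for illustration):

    from dodal.devices.motors import XYRollStage

    # Hypothetical prefix; the stage exposes x and y from XYStage plus roll.
    stage = XYRollStage("BLXX-MO-STAGE-01:", name="sample_stage")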
dodal/devices/robot.py CHANGED
@@ -71,6 +71,7 @@ class BartRobot(StandardReadable, Movable[SampleLocation | None]):
     LOAD_TIMEOUT = 60
 
     # Error codes that we do special things on
+    NO_ERROR = 0
     NO_PIN_ERROR_CODE = 25
     LIGHT_CURTAIN_TRIPPED = 40
 
@@ -115,21 +116,25 @@ class BartRobot(StandardReadable, Movable[SampleLocation | None]):
         )
         super().__init__(name=name)
 
-    async def pin_mounted_or_no_pin_found(self):
-        """This co-routine will finish when either a pin is detected or the robot gives
-        an error saying no pin was found (whichever happens first). In the case where no
-        pin was found a RobotLoadError error is raised.
+    async def pin_state_or_error(self, expected_state=PinMounted.PIN_MOUNTED):
+        """This co-routine will finish when either the pin sensor reaches the specified
+        state or the robot gives an error (whichever happens first). In the case where
+        there is an error a RobotLoadError error is raised.
         """
 
-        async def raise_if_no_pin():
-            await wait_for_value(self.prog_error.code, self.NO_PIN_ERROR_CODE, None)
-            raise RobotLoadError(self.NO_PIN_ERROR_CODE, "Pin was not detected")
+        async def raise_if_error():
+            await wait_for_value(
+                self.prog_error.code, lambda value: value != self.NO_ERROR, None
+            )
+            error_code = await self.prog_error.code.get_value()
+            error_msg = await self.prog_error.str.get_value()
+            raise RobotLoadError(error_code, error_msg)
 
         async def wfv():
-            await wait_for_value(self.gonio_pin_sensor, PinMounted.PIN_MOUNTED, None)
+            await wait_for_value(self.gonio_pin_sensor, expected_state, None)
 
         tasks = [
-            (Task(raise_if_no_pin())),
+            (Task(raise_if_error())),
             (Task(wfv())),
         ]
         try:
@@ -168,10 +173,10 @@ class BartRobot(StandardReadable, Movable[SampleLocation | None]):
         await self.load.trigger()
         if await self.gonio_pin_sensor.get_value() == PinMounted.PIN_MOUNTED:
             LOGGER.info(WAIT_FOR_OLD_PIN_MSG)
-            await wait_for_value(self.gonio_pin_sensor, PinMounted.NO_PIN_MOUNTED, None)
+            await self.pin_state_or_error(PinMounted.NO_PIN_MOUNTED)
         LOGGER.info(WAIT_FOR_NEW_PIN_MSG)
 
-        await self.pin_mounted_or_no_pin_found()
+        await self.pin_state_or_error()
 
     @AsyncStatus.wrap
     async def set(self, value: SampleLocation | None):
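The new pin_state_or_error() races an error watcher against the wait on the pin sensor. A standalone sketch of that first-completed pattern in plain asyncio (the names here are illustrative, not taken from dodal):

    import asyncio

    async def race(*coros):
        # Run the coroutines concurrently, cancel the rest as soon as one
        # finishes, and propagate its result or exception - mirroring how the
        # error watcher can pre-empt the sensor wait, and vice versa.
        tasks = [asyncio.ensure_future(c) for c in coros]
        done, pending = await asyncio.wait(tasks, return_when=asyncio.FIRST_COMPLETED)
        for task in pending:
            task.cancel()
        for task in done:
            task.result()  # re-raises e.g. RobotLoadError from the error watcher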
dodal/plans/__init__.py CHANGED
@@ -1,4 +1,4 @@
-from .scanspec import spec_scan
+from .spec_path import spec_scan
 from .wrapped import count
 
 __all__ = ["count", "spec_scan"]
dodal/plans/configure_arm_trigger_and_disarm_detector.py CHANGED
@@ -12,7 +12,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.fastcs.eiger import EigerDetector
 
-from dodal.beamlines.i03 import fastcs_eiger, set_path_provider
+from dodal.beamlines.i03 import fastcs_eiger
 from dodal.devices.detector import DetectorParams
 from dodal.log import LOGGER, do_default_logging_setup
 
@@ -163,9 +163,7 @@ if __name__ == "__main__":
         PurePath("/dls/i03/data/2025/cm40607-2/test_new_eiger/"),
     )
 
-    set_path_provider(path_provider)
-
-    eiger = fastcs_eiger(connect_immediately=True)
+    eiger = fastcs_eiger.build(connect_immediately=True, path_provider=path_provider)
     run_engine(
         configure_arm_trigger_and_disarm_detector(
             eiger=eiger,
dodal/testing/electron_analyser/device_factory.py CHANGED
@@ -1,16 +1,16 @@
 from typing import Any, get_args, get_origin
 
-from dodal.devices.electron_analyser.abstract import (
+from dodal.devices.electron_analyser.base.base_detector import TElectronAnalyserDetector
+from dodal.devices.electron_analyser.base.base_driver_io import (
     TAbstractAnalyserDriverIO,
 )
-from dodal.devices.electron_analyser.detector import TElectronAnalyserDetector
 from dodal.devices.electron_analyser.vgscienta import (
     VGScientaAnalyserDriverIO,
     VGScientaDetector,
 )
 
 
-async def create_driver(
+def create_driver(
     driver_class: type[TAbstractAnalyserDriverIO],
     **kwargs: Any,
 ) -> TAbstractAnalyserDriverIO:
@@ -34,7 +34,7 @@ async def create_driver(
     return driver_class(**(parameters | kwargs))
 
 
-async def create_detector(
+def create_detector(
     detector_class: type[TElectronAnalyserDetector],
     **kwargs: Any,
 ) -> TElectronAnalyserDetector:
dodal/testing/fixtures/devices/__init__.py ADDED
File without changes
dodal/testing/fixtures/devices/apple2.py ADDED
@@ -0,0 +1,78 @@
+from unittest.mock import MagicMock
+
+import pytest
+from daq_config_server.client import ConfigServer
+from ophyd_async.core import (
+    init_devices,
+    set_mock_value,
+)
+
+from dodal.devices.insertion_device import (
+    EnabledDisabledUpper,
+    UndulatorGap,
+    UndulatorGateStatus,
+    UndulatorJawPhase,
+    UndulatorPhaseAxes,
+)
+
+
+@pytest.fixture
+def mock_config_client() -> ConfigServer:
+    mock_config_client = ConfigServer()
+
+    mock_config_client.get_file_contents = MagicMock(spec=["get_file_contents"])
+
+    def my_side_effect(file_path, reset_cached_result) -> str:
+        assert reset_cached_result is True
+        with open(file_path) as f:
+            return f.read()
+
+    mock_config_client.get_file_contents.side_effect = my_side_effect
+    return mock_config_client
+
+
+@pytest.fixture
+async def mock_id_gap(prefix: str = "BLXX-EA-DET-007:") -> UndulatorGap:
+    async with init_devices(mock=True):
+        mock_id_gap = UndulatorGap(prefix, "mock_id_gap")
+    assert mock_id_gap.name == "mock_id_gap"
+    set_mock_value(mock_id_gap.gate, UndulatorGateStatus.CLOSE)
+    set_mock_value(mock_id_gap.velocity, 1)
+    set_mock_value(mock_id_gap.user_readback, 1)
+    set_mock_value(mock_id_gap.user_setpoint, "1")
+    set_mock_value(mock_id_gap.status, EnabledDisabledUpper.ENABLED)
+    return mock_id_gap
+
+
+@pytest.fixture
+async def mock_phase_axes(prefix: str = "BLXX-EA-DET-007:") -> UndulatorPhaseAxes:
+    async with init_devices(mock=True):
+        mock_phase_axes = UndulatorPhaseAxes(
+            prefix=prefix,
+            top_outer="RPQ1",
+            top_inner="RPQ2",
+            btm_outer="RPQ3",
+            btm_inner="RPQ4",
+        )
+    assert mock_phase_axes.name == "mock_phase_axes"
+    set_mock_value(mock_phase_axes.gate, UndulatorGateStatus.CLOSE)
+    set_mock_value(mock_phase_axes.top_outer.velocity, 2)
+    set_mock_value(mock_phase_axes.top_inner.velocity, 2)
+    set_mock_value(mock_phase_axes.btm_outer.velocity, 2)
+    set_mock_value(mock_phase_axes.btm_inner.velocity, 2)
+    set_mock_value(mock_phase_axes.status, EnabledDisabledUpper.ENABLED)
+    return mock_phase_axes
+
+
+@pytest.fixture
+async def mock_jaw_phase(prefix: str = "BLXX-EA-DET-007:") -> UndulatorJawPhase:
+    async with init_devices(mock=True):
+        mock_jaw_phase = UndulatorJawPhase(
+            prefix=prefix, move_pv="RPQ1", jaw_phase="JAW"
+        )
+    set_mock_value(mock_jaw_phase.gate, UndulatorGateStatus.CLOSE)
+    set_mock_value(mock_jaw_phase.jaw_phase.velocity, 2)
+    set_mock_value(mock_jaw_phase.jaw_phase.user_readback, 0)
+    set_mock_value(mock_jaw_phase.jaw_phase.user_setpoint_readback, 0)
+    set_mock_value(mock_jaw_phase.status, EnabledDisabledUpper.ENABLED)
+    return mock_jaw_phase
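A minimal sketch of consuming these shared fixtures from an external test suite; this assumes the fixtures are exposed via a conftest.py or pytest plugin and that an async test runner such as pytest-asyncio is configured.

    from dodal.devices.insertion_device import EnabledDisabledUpper

    async def test_mock_id_gap_defaults(mock_id_gap):
        # The values checked here are the ones seeded by the fixture above.
        assert await mock_id_gap.status.get_value() == EnabledDisabledUpper.ENABLED
        assert await mock_id_gap.user_readback.get_value() == 1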
dodal/testing/fixtures/run_engine.py CHANGED
@@ -3,6 +3,7 @@ Allow external repos to reuse these fixtures so defined in single place.
 """
 
 import asyncio
+import copy
 import os
 import threading
 import time
@@ -34,10 +35,13 @@ async def _ensure_running_bluesky_event_loop(_global_run_engine):
 
 @pytest.fixture()
 async def run_engine(_global_run_engine: RunEngine) -> AsyncGenerator[RunEngine, None]:
+    initial_md = copy.deepcopy(_global_run_engine.md)
     try:
         yield _global_run_engine
     finally:
+        # Clear subscriptions, cache, and reset metadata
         _global_run_engine.reset()
+        _global_run_engine.md = initial_md
 
 
 @pytest_asyncio.fixture(scope="session", loop_scope="session")
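The deep-copied metadata means edits made to RunEngine.md inside one test no longer leak into later tests. An illustrative (hypothetical) pair of tests using the fixture:

    async def test_that_mutates_metadata(run_engine):
        run_engine.md["visit"] = "cm12345-1"  # would leak without the restore step

    async def test_metadata_is_back_to_default(run_engine):
        assert "visit" not in run_engine.md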
dodal/utils.py CHANGED
@@ -11,7 +11,7 @@ from functools import update_wrapper, wraps
 from importlib import import_module
 from inspect import signature
 from os import environ
-from types import ModuleType
+from types import FunctionType, ModuleType
 from typing import (
     Any,
     Generic,
@@ -240,7 +240,8 @@ def make_device(
 def make_all_devices(
     module: str | ModuleType | None = None, include_skipped: bool = False, **kwargs
 ) -> tuple[dict[str, AnyDevice], dict[str, Exception]]:
-    """Makes all devices in the given beamline module.
+    """Makes all devices in the given beamline module, for those modules using device factories
+    as opposed to the DeviceManager.
 
     In cases of device interdependencies it ensures a device is created before any which
     depend on it.
@@ -413,7 +414,9 @@ def is_v2_device_factory(func: Callable) -> TypeGuard[V2DeviceFactory]:
 
 
 def is_any_device_factory(func: Callable) -> TypeGuard[AnyDeviceFactory]:
-    return is_v1_device_factory(func) or is_v2_device_factory(func)
+    return isinstance(func, FunctionType | DeviceInitializationController) and (
+        is_v1_device_factory(func) or is_v2_device_factory(func)
+    )
 
 
 def is_v2_device_type(obj: type[Any]) -> bool:
dodal/devices/electron_analyser/abstract/__init__.py DELETED
@@ -1,25 +0,0 @@
-from .base_detector import (
-    BaseElectronAnalyserDetector,
-)
-from .base_driver_io import AbstractAnalyserDriverIO, TAbstractAnalyserDriverIO
-from .base_region import (
-    AbstractBaseRegion,
-    AbstractBaseSequence,
-    TAbstractBaseRegion,
-    TAbstractBaseSequence,
-    TAcquisitionMode,
-    TLensMode,
-)
-
-__all__ = [
-    "AbstractBaseRegion",
-    "AbstractBaseSequence",
-    "TAbstractBaseRegion",
-    "TAbstractBaseSequence",
-    "TAcquisitionMode",
-    "TLensMode",
-    "AbstractAnalyserDriverIO",
-    "BaseElectronAnalyserDetector",
-    "AbstractAnalyserDriverIO",
-    "TAbstractAnalyserDriverIO",
-]
dodal/devices/electron_analyser/abstract/base_detector.py DELETED
@@ -1,63 +0,0 @@
-from typing import Generic
-
-from bluesky.protocols import Reading, Triggerable
-from event_model import DataKey
-from ophyd_async.core import (
-    AsyncConfigurable,
-    AsyncReadable,
-    AsyncStatus,
-    Device,
-)
-from ophyd_async.epics.adcore import ADBaseController
-
-from dodal.devices.electron_analyser.abstract.base_driver_io import (
-    TAbstractAnalyserDriverIO,
-)
-
-
-class BaseElectronAnalyserDetector(
-    Device,
-    Triggerable,
-    AsyncReadable,
-    AsyncConfigurable,
-    Generic[TAbstractAnalyserDriverIO],
-):
-    """
-    Detector for data acquisition of electron analyser. Can only acquire using settings
-    already configured for the device.
-
-    If possible, this should be changed to inherit from a StandardDetector. Currently,
-    StandardDetector forces you to use a file writer which doesn't apply here.
-    See issue https://github.com/bluesky/ophyd-async/issues/888
-    """
-
-    def __init__(
-        self,
-        controller: ADBaseController[TAbstractAnalyserDriverIO],
-        name: str = "",
-    ):
-        self._controller = controller
-        super().__init__(name)
-
-    @AsyncStatus.wrap
-    async def trigger(self) -> None:
-        await self._controller.arm()
-        await self._controller.wait_for_idle()
-
-    async def read(self) -> dict[str, Reading]:
-        return await self._controller.driver.read()
-
-    async def describe(self) -> dict[str, DataKey]:
-        data = await self._controller.driver.describe()
-        # Correct the shape for image
-        prefix = self._controller.driver.name + "-"
-        energy_size = len(await self._controller.driver.energy_axis.get_value())
-        angle_size = len(await self._controller.driver.angle_axis.get_value())
-        data[prefix + "image"]["shape"] = [angle_size, energy_size]
-        return data
-
-    async def read_configuration(self) -> dict[str, Reading]:
-        return await self._controller.driver.read_configuration()
-
-    async def describe_configuration(self) -> dict[str, DataKey]:
-        return await self._controller.driver.describe_configuration()
dodal/devices/electron_analyser/abstract/types.py DELETED
@@ -1,12 +0,0 @@
-from typing import TypeVar
-
-from ophyd_async.core import StrictEnum, SupersetEnum
-
-TAcquisitionMode = TypeVar("TAcquisitionMode", bound=StrictEnum)
-# Allow SupersetEnum. Specs analysers can connect to Lens and Psu mode separately to the
-# analyser which leaves the enum to either be "Not connected" OR the available enums
-# when connected.
-TLensMode = TypeVar("TLensMode", bound=SupersetEnum | StrictEnum)
-TPsuMode = TypeVar("TPsuMode", bound=SupersetEnum | StrictEnum)
-TPassEnergy = TypeVar("TPassEnergy", bound=StrictEnum | float)
-TPassEnergyEnum = TypeVar("TPassEnergyEnum", bound=StrictEnum)