dls-dodal 1.67.0-py3-none-any.whl → 1.68.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/METADATA +2 -2
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/RECORD +50 -50
- dodal/_version.py +2 -2
- dodal/beamlines/b07.py +1 -1
- dodal/beamlines/b07_1.py +1 -1
- dodal/beamlines/i05.py +1 -1
- dodal/beamlines/i06.py +1 -1
- dodal/beamlines/i09.py +1 -1
- dodal/beamlines/i09_1.py +1 -1
- dodal/beamlines/i09_2.py +5 -4
- dodal/beamlines/i10_optics.py +1 -1
- dodal/beamlines/i16.py +23 -0
- dodal/beamlines/i17.py +1 -1
- dodal/beamlines/i21.py +61 -2
- dodal/beamlines/p60.py +1 -1
- dodal/devices/eiger.py +15 -9
- dodal/devices/electron_analyser/__init__.py +0 -33
- dodal/devices/electron_analyser/base/__init__.py +58 -0
- dodal/devices/electron_analyser/base/base_controller.py +73 -0
- dodal/devices/electron_analyser/base/base_detector.py +214 -0
- dodal/devices/electron_analyser/{abstract → base}/base_driver_io.py +23 -42
- dodal/devices/electron_analyser/{abstract → base}/base_region.py +47 -11
- dodal/devices/electron_analyser/{util.py → base/base_util.py} +1 -1
- dodal/devices/electron_analyser/{energy_sources.py → base/energy_sources.py} +1 -1
- dodal/devices/electron_analyser/specs/__init__.py +4 -4
- dodal/devices/electron_analyser/specs/specs_detector.py +46 -0
- dodal/devices/electron_analyser/specs/{driver_io.py → specs_driver_io.py} +23 -26
- dodal/devices/electron_analyser/specs/{region.py → specs_region.py} +4 -3
- dodal/devices/electron_analyser/vgscienta/__init__.py +4 -4
- dodal/devices/electron_analyser/vgscienta/vgscienta_detector.py +52 -0
- dodal/devices/electron_analyser/vgscienta/{driver_io.py → vgscienta_driver_io.py} +25 -31
- dodal/devices/electron_analyser/vgscienta/{region.py → vgscienta_region.py} +6 -6
- dodal/devices/i09_2_shared/i09_apple2.py +0 -72
- dodal/devices/i10/i10_apple2.py +2 -2
- dodal/devices/i21/__init__.py +3 -1
- dodal/devices/insertion_device/__init__.py +58 -0
- dodal/devices/insertion_device/apple2_undulator.py +66 -16
- dodal/devices/insertion_device/energy_motor_lookup.py +1 -1
- dodal/devices/insertion_device/id_enum.py +17 -0
- dodal/devices/insertion_device/lookup_table_models.py +65 -35
- dodal/testing/electron_analyser/device_factory.py +4 -4
- dodal/testing/fixtures/devices/apple2.py +1 -1
- dodal/testing/fixtures/run_engine.py +4 -0
- dodal/devices/electron_analyser/abstract/__init__.py +0 -25
- dodal/devices/electron_analyser/abstract/base_detector.py +0 -63
- dodal/devices/electron_analyser/abstract/types.py +0 -12
- dodal/devices/electron_analyser/detector.py +0 -143
- dodal/devices/electron_analyser/specs/detector.py +0 -34
- dodal/devices/electron_analyser/types.py +0 -57
- dodal/devices/electron_analyser/vgscienta/detector.py +0 -48
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/WHEEL +0 -0
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/entry_points.txt +0 -0
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/licenses/LICENSE +0 -0
- {dls_dodal-1.67.0.dist-info → dls_dodal-1.68.0.dist-info}/top_level.txt +0 -0
- /dodal/devices/electron_analyser/{enums.py → base/base_enums.py} +0 -0
- /dodal/devices/electron_analyser/specs/{enums.py → specs_enums.py} +0 -0
- /dodal/devices/electron_analyser/vgscienta/{enums.py → vgscienta_enums.py} +0 -0
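The most visible change in 1.68.0 is the electron_analyser reorganisation: the abstract package and the top-level detector.py and types.py modules are deleted in favour of a new base package, and the specs and vgscienta modules gain a package prefix. Using only import paths that actually appear in the hunks below, the updates look roughly like this (an illustrative sketch, not a complete migration guide; whatever the new base/__init__.py re-exports is not visible in this diff):

```python
# 1.67.0 layout (modules removed in this release):
# from dodal.devices.electron_analyser.detector import TElectronAnalyserDetector
# from dodal.devices.electron_analyser.abstract.base_driver_io import TAbstractAnalyserDriverIO

# 1.68.0 layout (paths taken from the device_factory.py hunk below):
from dodal.devices.electron_analyser.base.base_detector import TElectronAnalyserDetector
from dodal.devices.electron_analyser.base.base_driver_io import TAbstractAnalyserDriverIO

# The insertion-device polarisation enum also moves into its own module
# (see the lookup_table_models.py hunk below):
from dodal.devices.insertion_device.id_enum import Pol
```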
**dodal/devices/insertion_device/lookup_table_models.py** (some long removed lines are truncated in the source view and are reproduced here as captured)

```diff
@@ -39,7 +39,7 @@ from pydantic import (
     field_validator,
 )
 
-from dodal.devices.insertion_device.
+from dodal.devices.insertion_device.id_enum import Pol
 
 DEFAULT_POLY_DEG = [
     "7th-order",
@@ -94,13 +94,19 @@ class LookupTableColumnConfig(BaseModel):
         description="When processing polarisation mode values, map their alias values to a real value.",
         default_factory=lambda: MODE_NAME_CONVERT,
     )
+    grating: A[
+        str | None, Field(description="Optional column name for entry grating.")
+    ] = None
 
 
 class EnergyCoverageEntry(BaseModel):
-    model_config = ConfigDict(
+    model_config = ConfigDict(
+        arbitrary_types_allowed=True, frozen=True
+    )  # arbitrary_types_allowed is True so np.poly1d can be used.
     min_energy: float
     max_energy: float
     poly: np.poly1d
+    grating: float | None = None
 
     @field_validator("poly", mode="before")
     @classmethod
@@ -119,7 +125,16 @@ class EnergyCoverageEntry(BaseModel):
 
 
 class EnergyCoverage(BaseModel):
-
+    model_config = ConfigDict(frozen=True)
+    energy_entries: tuple[EnergyCoverageEntry, ...]
+
+    @field_validator("energy_entries", mode="after")
+    @classmethod
+    def _prepare_energy_entries(
+        cls, value: tuple[EnergyCoverageEntry, ...]
+    ) -> tuple[EnergyCoverageEntry, ...]:
+        """Convert incoming energy_entries to a sorted, immutable tuple."""
+        return tuple(sorted(value, key=lambda e: e.min_energy))
 
     @classmethod
     def generate(
```
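The pattern introduced above, a frozen pydantic model carrying an np.poly1d (hence arbitrary_types_allowed) plus an after-mode validator that normalises the entries into a sorted tuple, can be reproduced in isolation as follows. This is a minimal standalone sketch with invented class names, not dodal code:

```python
import numpy as np
from pydantic import BaseModel, ConfigDict, field_validator


class Entry(BaseModel):
    # frozen=True makes instances immutable after construction;
    # arbitrary_types_allowed lets a non-pydantic type such as np.poly1d be a field.
    model_config = ConfigDict(arbitrary_types_allowed=True, frozen=True)
    min_energy: float
    max_energy: float
    poly: np.poly1d


class Coverage(BaseModel):
    model_config = ConfigDict(frozen=True)
    energy_entries: tuple[Entry, ...]

    @field_validator("energy_entries", mode="after")
    @classmethod
    def _sort_entries(cls, value: tuple[Entry, ...]) -> tuple[Entry, ...]:
        # Normalise to ascending min_energy so lookups can binary-search later.
        return tuple(sorted(value, key=lambda e: e.min_energy))


cov = Coverage(
    energy_entries=(
        Entry(min_energy=700.0, max_energy=1000.0, poly=np.poly1d([2.0, 0.0])),
        Entry(min_energy=400.0, max_energy=700.0, poly=np.poly1d([1.0, 5.0])),
    )
)
assert [e.min_energy for e in cov.energy_entries] == [400.0, 700.0]
```

Sorting at validation time is what makes the binary-search lookup added further down valid.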
**dodal/devices/insertion_device/lookup_table_models.py** (continued)

```diff
@@ -128,7 +143,7 @@ class EnergyCoverage(BaseModel):
         max_energies: list[float],
         poly1d_params: list[list[float]],
     ) -> Self:
-        energy_entries =
+        energy_entries = tuple(
             EnergyCoverageEntry(
                 min_energy=min_energy,
                 max_energy=max_energy,
@@ -137,16 +152,16 @@ class EnergyCoverage(BaseModel):
             for min_energy, max_energy, poly_params in zip(
                 min_energies, max_energies, poly1d_params, strict=True
             )
-
+        )
         return cls(energy_entries=energy_entries)
 
     @property
     def min_energy(self) -> float:
-        return
+        return self.energy_entries[0].min_energy
 
     @property
     def max_energy(self) -> float:
-        return
+        return self.energy_entries[-1].max_energy
 
     def get_poly(self, energy: float) -> np.poly1d:
         """
@@ -157,25 +172,36 @@ class EnergyCoverage(BaseModel):
             energy:
                 Energy value in the same units used to create the lookup table.
         """
-
-        min_energy
-        max_energy = self.max_energy
-        if energy < min_energy or energy > max_energy:
+
+        if not self.min_energy <= energy <= self.max_energy:
             raise ValueError(
-                f"Demanding energy must lie between {min_energy} and {max_energy}!"
+                f"Demanding energy must lie between {self.min_energy} and {self.max_energy}!"
             )
-
-
-
-
-                and energy < energy_coverage.max_energy
-            ):
-                return energy_coverage.poly
+
+        poly_index = self.get_energy_index(energy)
+        if poly_index is not None:
+            return self.energy_entries[poly_index].poly
         raise ValueError(
             "Cannot find polynomial coefficients for your requested energy."
             + " There might be gap in the calibration lookup table."
         )
 
+    def get_energy_index(self, energy: float) -> int | None:
+        """Binary search assumes self.energy_entries is sorted by min_energy.
+        Return index or None if not found."""
+        max_index = len(self.energy_entries) - 1
+        min_index = 0
+        while min_index <= max_index:
+            mid_index = (min_index + max_index) // 2
+            en_try = self.energy_entries[mid_index]
+            if en_try.min_energy <= energy <= en_try.max_energy:
+                return mid_index
+            elif energy < en_try.min_energy:
+                max_index = mid_index - 1
+            else:
+                min_index = mid_index + 1
+        return None
+
 
 class LookupTable(RootModel[dict[Pol, EnergyCoverage]]):
     """
```
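The new get_energy_index hand-rolls a binary search over entries sorted by min_energy and returns None when the requested energy falls into a gap between calibration ranges. The same lookup written against the standard library's bisect module, as a rough standalone sketch with invented names (Python 3.10+ for the key= argument):

```python
from bisect import bisect_right


def find_entry_index(
    entries: list[tuple[float, float]],  # (min_energy, max_energy), sorted by min_energy
    energy: float,
) -> int | None:
    # bisect_right finds the first entry whose min_energy is greater than `energy`,
    # so the only candidate interval is the one immediately before it.
    i = bisect_right(entries, energy, key=lambda e: e[0]) - 1
    if i >= 0 and entries[i][0] <= energy <= entries[i][1]:
        return i
    return None  # energy sits in a gap, or outside the table entirely


entries = [(400.0, 700.0), (700.0, 1000.0), (1200.0, 1500.0)]
assert find_entry_index(entries, 650.0) == 0
assert find_entry_index(entries, 1100.0) is None  # gap in the lookup table
```

Both forms are O(log n) in the number of calibration rows.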
**dodal/devices/insertion_device/lookup_table_models.py** (continued)

```diff
@@ -183,6 +209,8 @@ class LookupTable(RootModel[dict[Pol, EnergyCoverage]]):
     values to Apple2 motor positions.
     """
 
+    model_config = ConfigDict(frozen=True)
+
     # Allow to auto specify a dict if one not provided
     def __init__(self, root: dict[Pol, EnergyCoverage] | None = None):
         super().__init__(root=root or {})
@@ -195,10 +223,8 @@ class LookupTable(RootModel[dict[Pol, EnergyCoverage]]):
     ) -> Self:
         """Generate a LookupTable containing multiple EnergyCoverage
        for provided polarisations."""
-
-
-        lut.root[pols[i]] = energy_coverage[i]
-        return lut
+        root_data = dict(zip(pols, energy_coverage, strict=False))
+        return cls(root=root_data)
 
     def get_poly(
         self,
@@ -239,15 +265,15 @@ def convert_csv_to_lookup(
     -----------
     LookupTable
     """
+    temp_mode_entries: dict[Pol, list[EnergyCoverageEntry]] = {}
 
-    def process_row(row: dict[str, Any]
-        """Process a single row from the CSV file and update the
+    def process_row(row: dict[str, Any]) -> None:
+        """Process a single row from the CSV file and update the temporary entry list."""
         raw_mode_value = str(row[lut_config.mode]).lower()
         mode_value = Pol(
             lut_config.mode_name_convert.get(raw_mode_value, raw_mode_value)
         )
 
-        # Create polynomial object for energy-to-gap/phase conversion
         coefficients = np.poly1d([float(row[coef]) for coef in lut_config.poly_deg])
 
         energy_entry = EnergyCoverageEntry(
@@ -255,27 +281,31 @@ def convert_csv_to_lookup(
             max_energy=float(row[lut_config.max_energy]),
             poly=coefficients,
         )
-
-
-
-
+
+        if mode_value not in temp_mode_entries:
+            temp_mode_entries[mode_value] = []
+
+        temp_mode_entries[mode_value].append(energy_entry)
 
     reader = csv.DictReader(read_file_and_skip(file_contents, skip_line_start_with))
-    lut = LookupTable()
 
     for row in reader:
         source = lut_config.source
         # If there are multiple source only convert requested.
         if source is None or row[source.column] == source.value:
-            process_row(row=row
-
+            process_row(row=row)
     # Check if our LookupTable is empty after processing, raise error if it is.
-    if not
+    if not temp_mode_entries:
         raise RuntimeError(
             "LookupTable content is empty, failed to convert the file contents to "
             "a LookupTable!"
         )
-
+
+    final_lut_root: dict[Pol, EnergyCoverage] = {}
+    for pol, entries in temp_mode_entries.items():
+        final_lut_root[pol] = EnergyCoverage.model_validate({"energy_entries": entries})
+
+    return LookupTable(root=final_lut_root)
 
 
 def read_file_and_skip(file: str, skip_line_start_with: str = "#") -> Generator[str]:
```
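convert_csv_to_lookup now follows an accumulate-then-freeze flow: rows are collected into a plain dict of per-polarisation lists while the CSV is parsed, and the frozen EnergyCoverage and LookupTable models are only constructed once parsing has finished. The general shape of that flow, as a standalone sketch with illustrative names and data:

```python
import csv
import io

rows = csv.DictReader(io.StringIO(
    "mode,min_energy,max_energy\n"
    "lh,400,700\n"
    "lh,700,1000\n"
    "lv,400,900\n"
))

temp_entries: dict[str, list[dict[str, float]]] = {}
for row in rows:
    mode = row["mode"]
    if mode not in temp_entries:  # same guard the new process_row uses
        temp_entries[mode] = []
    temp_entries[mode].append(
        {"min_energy": float(row["min_energy"]), "max_energy": float(row["max_energy"])}
    )

if not temp_entries:
    raise RuntimeError("no usable rows found")

# Final phase: in dodal each list becomes
# EnergyCoverage.model_validate({"energy_entries": entries}) keyed by Pol,
# wrapped in LookupTable(root=...). Here we just show the grouped result.
assert {m: len(e) for m, e in temp_entries.items()} == {"lh": 2, "lv": 1}
```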
**dodal/testing/electron_analyser/device_factory.py**

```diff
@@ -1,16 +1,16 @@
 from typing import Any, get_args, get_origin
 
-from dodal.devices.electron_analyser.
+from dodal.devices.electron_analyser.base.base_detector import TElectronAnalyserDetector
+from dodal.devices.electron_analyser.base.base_driver_io import (
     TAbstractAnalyserDriverIO,
 )
-from dodal.devices.electron_analyser.detector import TElectronAnalyserDetector
 from dodal.devices.electron_analyser.vgscienta import (
     VGScientaAnalyserDriverIO,
     VGScientaDetector,
 )
 
 
-
+def create_driver(
     driver_class: type[TAbstractAnalyserDriverIO],
     **kwargs: Any,
 ) -> TAbstractAnalyserDriverIO:
@@ -34,7 +34,7 @@ async def create_driver(
     return driver_class(**(parameters | kwargs))
 
 
-
+def create_detector(
     detector_class: type[TElectronAnalyserDetector],
     **kwargs: Any,
 ) -> TElectronAnalyserDetector:
```
**dodal/testing/fixtures/run_engine.py**

```diff
@@ -3,6 +3,7 @@ Allow external repos to reuse these fixtures so defined in single place.
 """
 
 import asyncio
+import copy
 import os
 import threading
 import time
@@ -34,10 +35,13 @@ async def _ensure_running_bluesky_event_loop(_global_run_engine):
 
 @pytest.fixture()
 async def run_engine(_global_run_engine: RunEngine) -> AsyncGenerator[RunEngine, None]:
+    initial_md = copy.deepcopy(_global_run_engine.md)
     try:
         yield _global_run_engine
     finally:
+        # Clear subscriptions, cache, and reset metadata
         _global_run_engine.reset()
+        _global_run_engine.md = initial_md
 
 
 @pytest_asyncio.fixture(scope="session", loop_scope="session")
```
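The run_engine fixture now snapshots the session-scoped RunEngine's metadata before each test and restores it afterwards, so md mutations cannot leak between tests. The general shape of that snapshot-and-restore pattern, as a sketch against a stand-in object rather than a real bluesky RunEngine:

```python
import copy

import pytest


class FakeEngine:
    """Stand-in with a mutable md dict, mimicking RunEngine.md for this sketch."""

    def __init__(self) -> None:
        self.md = {"beamline": "example"}

    def reset(self) -> None:
        # The real RunEngine.reset() also clears subscriptions and cached state.
        pass


@pytest.fixture(scope="session")
def shared_engine() -> FakeEngine:
    return FakeEngine()


@pytest.fixture()
def engine(shared_engine: FakeEngine):
    initial_md = copy.deepcopy(shared_engine.md)  # snapshot before the test runs
    try:
        yield shared_engine
    finally:
        shared_engine.reset()
        shared_engine.md = initial_md  # undo any md changes made during the test
```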
**dodal/devices/electron_analyser/abstract/__init__.py** (deleted)

```diff
@@ -1,25 +0,0 @@
-from .base_detector import (
-    BaseElectronAnalyserDetector,
-)
-from .base_driver_io import AbstractAnalyserDriverIO, TAbstractAnalyserDriverIO
-from .base_region import (
-    AbstractBaseRegion,
-    AbstractBaseSequence,
-    TAbstractBaseRegion,
-    TAbstractBaseSequence,
-    TAcquisitionMode,
-    TLensMode,
-)
-
-__all__ = [
-    "AbstractBaseRegion",
-    "AbstractBaseSequence",
-    "TAbstractBaseRegion",
-    "TAbstractBaseSequence",
-    "TAcquisitionMode",
-    "TLensMode",
-    "AbstractAnalyserDriverIO",
-    "BaseElectronAnalyserDetector",
-    "AbstractAnalyserDriverIO",
-    "TAbstractAnalyserDriverIO",
-]
```
**dodal/devices/electron_analyser/abstract/base_detector.py** (deleted)

```diff
@@ -1,63 +0,0 @@
-from typing import Generic
-
-from bluesky.protocols import Reading, Triggerable
-from event_model import DataKey
-from ophyd_async.core import (
-    AsyncConfigurable,
-    AsyncReadable,
-    AsyncStatus,
-    Device,
-)
-from ophyd_async.epics.adcore import ADBaseController
-
-from dodal.devices.electron_analyser.abstract.base_driver_io import (
-    TAbstractAnalyserDriverIO,
-)
-
-
-class BaseElectronAnalyserDetector(
-    Device,
-    Triggerable,
-    AsyncReadable,
-    AsyncConfigurable,
-    Generic[TAbstractAnalyserDriverIO],
-):
-    """
-    Detector for data acquisition of electron analyser. Can only acquire using settings
-    already configured for the device.
-
-    If possible, this should be changed to inherit from a StandardDetector. Currently,
-    StandardDetector forces you to use a file writer which doesn't apply here.
-    See issue https://github.com/bluesky/ophyd-async/issues/888
-    """
-
-    def __init__(
-        self,
-        controller: ADBaseController[TAbstractAnalyserDriverIO],
-        name: str = "",
-    ):
-        self._controller = controller
-        super().__init__(name)
-
-    @AsyncStatus.wrap
-    async def trigger(self) -> None:
-        await self._controller.arm()
-        await self._controller.wait_for_idle()
-
-    async def read(self) -> dict[str, Reading]:
-        return await self._controller.driver.read()
-
-    async def describe(self) -> dict[str, DataKey]:
-        data = await self._controller.driver.describe()
-        # Correct the shape for image
-        prefix = self._controller.driver.name + "-"
-        energy_size = len(await self._controller.driver.energy_axis.get_value())
-        angle_size = len(await self._controller.driver.angle_axis.get_value())
-        data[prefix + "image"]["shape"] = [angle_size, energy_size]
-        return data
-
-    async def read_configuration(self) -> dict[str, Reading]:
-        return await self._controller.driver.read_configuration()
-
-    async def describe_configuration(self) -> dict[str, DataKey]:
-        return await self._controller.driver.describe_configuration()
```
**dodal/devices/electron_analyser/abstract/types.py** (deleted)

```diff
@@ -1,12 +0,0 @@
-from typing import TypeVar
-
-from ophyd_async.core import StrictEnum, SupersetEnum
-
-TAcquisitionMode = TypeVar("TAcquisitionMode", bound=StrictEnum)
-# Allow SupersetEnum. Specs analysers can connect to Lens and Psu mode separately to the
-# analyser which leaves the enum to either be "Not connected" OR the available enums
-# when connected.
-TLensMode = TypeVar("TLensMode", bound=SupersetEnum | StrictEnum)
-TPsuMode = TypeVar("TPsuMode", bound=SupersetEnum | StrictEnum)
-TPassEnergy = TypeVar("TPassEnergy", bound=StrictEnum | float)
-TPassEnergyEnum = TypeVar("TPassEnergyEnum", bound=StrictEnum)
```
**dodal/devices/electron_analyser/detector.py** (deleted)

```diff
@@ -1,143 +0,0 @@
-from typing import Generic, TypeVar
-
-from bluesky.protocols import Stageable
-from ophyd_async.core import AsyncStatus
-from ophyd_async.epics.adcore import ADBaseController
-
-from dodal.common.data_util import load_json_file_to_class
-from dodal.devices.controllers import ConstantDeadTimeController
-from dodal.devices.electron_analyser.abstract.base_detector import (
-    BaseElectronAnalyserDetector,
-)
-from dodal.devices.electron_analyser.abstract.base_driver_io import (
-    TAbstractAnalyserDriverIO,
-)
-from dodal.devices.electron_analyser.abstract.base_region import (
-    TAbstractBaseRegion,
-    TAbstractBaseSequence,
-)
-
-
-class ElectronAnalyserRegionDetector(
-    BaseElectronAnalyserDetector[TAbstractAnalyserDriverIO],
-    Generic[TAbstractAnalyserDriverIO, TAbstractBaseRegion],
-):
-    """
-    Extends electron analyser detector to configure specific region settings before data
-    acquisition. It is designed to only exist inside a plan.
-    """
-
-    def __init__(
-        self,
-        controller: ADBaseController[TAbstractAnalyserDriverIO],
-        region: TAbstractBaseRegion,
-        name: str = "",
-    ):
-        self.region = region
-        super().__init__(controller, name)
-
-    @AsyncStatus.wrap
-    async def trigger(self) -> None:
-        # Configure region parameters on the driver first before data collection.
-        await self._controller.driver.set(self.region)
-        await super().trigger()
-
-
-TElectronAnalyserRegionDetector = TypeVar(
-    "TElectronAnalyserRegionDetector",
-    bound=ElectronAnalyserRegionDetector,
-)
-
-
-class ElectronAnalyserDetector(
-    BaseElectronAnalyserDetector[TAbstractAnalyserDriverIO],
-    Stageable,
-    Generic[
-        TAbstractAnalyserDriverIO,
-        TAbstractBaseSequence,
-        TAbstractBaseRegion,
-    ],
-):
-    """
-    Electron analyser detector with the additional functionality to load a sequence file
-    and create a list of temporary ElectronAnalyserRegionDetector objects. These will
-    setup configured region settings before data acquisition.
-    """
-
-    def __init__(
-        self,
-        sequence_class: type[TAbstractBaseSequence],
-        driver: TAbstractAnalyserDriverIO,
-        name: str = "",
-    ):
-        # Save driver as direct child so participates with connect()
-        self.driver = driver
-        self._sequence_class = sequence_class
-        controller = ConstantDeadTimeController[TAbstractAnalyserDriverIO](driver, 0)
-        super().__init__(controller, name)
-
-    @AsyncStatus.wrap
-    async def stage(self) -> None:
-        """
-        Prepare the detector for use by ensuring it is idle and ready.
-
-        This method asynchronously stages the detector by first disarming the controller
-        to ensure the detector is not actively acquiring data, then invokes the driver's
-        stage procedure. This ensures the detector is in a known, ready state
-        before use.
-
-        Raises:
-            Any exceptions raised by the driver's stage or controller's disarm methods.
-        """
-        await self._controller.disarm()
-        await self.driver.stage()
-
-    @AsyncStatus.wrap
-    async def unstage(self) -> None:
-        """Disarm the detector."""
-        await self._controller.disarm()
-        await self.driver.unstage()
-
-    def load_sequence(self, filename: str) -> TAbstractBaseSequence:
-        """
-        Load the sequence data from a provided json file into a sequence class.
-
-        Args:
-            filename: Path to the sequence file containing the region data.
-
-        Returns:
-            Pydantic model representing the sequence file.
-        """
-        return load_json_file_to_class(self._sequence_class, filename)
-
-    def create_region_detector_list(
-        self, filename: str, enabled_only=True
-    ) -> list[
-        ElectronAnalyserRegionDetector[TAbstractAnalyserDriverIO, TAbstractBaseRegion]
-    ]:
-        """
-        Create a list of detectors equal to the number of regions in a sequence file.
-        Each detector is responsible for setting up a specific region.
-
-        Args:
-            filename: Path to the sequence file containing the region data.
-            enabled_only: If true, only include the region if enabled is True.
-
-        Returns:
-            List of ElectronAnalyserRegionDetector, equal to the number of regions in
-            the sequence file.
-        """
-        seq = self.load_sequence(filename)
-        regions = seq.get_enabled_regions() if enabled_only else seq.regions
-        return [
-            ElectronAnalyserRegionDetector(
-                self._controller, r, self.name + "_" + r.name
-            )
-            for r in regions
-        ]
-
-
-TElectronAnalyserDetector = TypeVar(
-    "TElectronAnalyserDetector",
-    bound=ElectronAnalyserDetector,
-)
```
**dodal/devices/electron_analyser/specs/detector.py** (deleted)

```diff
@@ -1,34 +0,0 @@
-from typing import Generic
-
-from dodal.devices.electron_analyser.abstract.types import TLensMode, TPsuMode
-from dodal.devices.electron_analyser.detector import (
-    ElectronAnalyserDetector,
-)
-from dodal.devices.electron_analyser.energy_sources import (
-    DualEnergySource,
-    EnergySource,
-)
-from dodal.devices.electron_analyser.specs.driver_io import SpecsAnalyserDriverIO
-from dodal.devices.electron_analyser.specs.region import SpecsRegion, SpecsSequence
-
-
-class SpecsDetector(
-    ElectronAnalyserDetector[
-        SpecsAnalyserDriverIO[TLensMode, TPsuMode],
-        SpecsSequence[TLensMode, TPsuMode],
-        SpecsRegion[TLensMode, TPsuMode],
-    ],
-    Generic[TLensMode, TPsuMode],
-):
-    def __init__(
-        self,
-        prefix: str,
-        lens_mode_type: type[TLensMode],
-        psu_mode_type: type[TPsuMode],
-        energy_source: DualEnergySource | EnergySource,
-        name: str = "",
-    ):
-        driver = SpecsAnalyserDriverIO[TLensMode, TPsuMode](
-            prefix, lens_mode_type, psu_mode_type, energy_source
-        )
-        super().__init__(SpecsSequence[lens_mode_type, psu_mode_type], driver, name)
```
**dodal/devices/electron_analyser/types.py** (deleted)

```diff
@@ -1,57 +0,0 @@
-from ophyd_async.core import StrictEnum, SupersetEnum
-
-from dodal.devices.electron_analyser.abstract.base_driver_io import (
-    AbstractAnalyserDriverIO,
-)
-from dodal.devices.electron_analyser.abstract.base_region import (
-    AbstractBaseRegion,
-    AbstractBaseSequence,
-)
-from dodal.devices.electron_analyser.detector import (
-    ElectronAnalyserDetector,
-    ElectronAnalyserRegionDetector,
-)
-from dodal.devices.electron_analyser.specs.detector import (
-    SpecsAnalyserDriverIO,
-    SpecsDetector,
-)
-from dodal.devices.electron_analyser.vgscienta.detector import (
-    VGScientaAnalyserDriverIO,
-    VGScientaDetector,
-)
-
-AnyAcqMode = StrictEnum
-AnyLensMode = SupersetEnum | StrictEnum
-AnyPsuMode = SupersetEnum | StrictEnum
-AnyPassEnergy = StrictEnum | float
-AnyPassEnergyEnum = StrictEnum
-
-# Electron analyser types that encompasses all implementations, useful for tests and
-# plans
-ElectronAnalyserDetectorImpl = (
-    VGScientaDetector[AnyLensMode, AnyPsuMode, AnyPassEnergyEnum]
-    | SpecsDetector[AnyLensMode, AnyPsuMode]
-)
-ElectronAnalyserDriverImpl = (
-    VGScientaAnalyserDriverIO[AnyLensMode, AnyPsuMode, AnyPassEnergyEnum]
-    | SpecsAnalyserDriverIO[AnyLensMode, AnyPsuMode]
-)
-
-# Short hand the type so less verbose
-AbstractBaseRegion = AbstractBaseRegion[AnyAcqMode, AnyLensMode, AnyPassEnergy]
-
-# Generic electron analyser types that supports full typing with the abstract classes.
-GenericElectronAnalyserDetector = ElectronAnalyserDetector[
-    AbstractAnalyserDriverIO[
-        AbstractBaseRegion, AnyAcqMode, AnyLensMode, AnyPsuMode, AnyPassEnergy
-    ],
-    AbstractBaseSequence[AbstractBaseRegion],
-    AbstractBaseRegion,
-]
-
-GenericElectronAnalyserRegionDetector = ElectronAnalyserRegionDetector[
-    AbstractAnalyserDriverIO[
-        AbstractBaseRegion, AnyAcqMode, AnyLensMode, AnyPsuMode, AnyPassEnergy
-    ],
-    AbstractBaseRegion,
-]
```
**dodal/devices/electron_analyser/vgscienta/detector.py** (deleted)

```diff
@@ -1,48 +0,0 @@
-from typing import Generic
-
-from dodal.devices.electron_analyser.abstract.types import (
-    TLensMode,
-    TPassEnergyEnum,
-    TPsuMode,
-)
-from dodal.devices.electron_analyser.detector import (
-    ElectronAnalyserDetector,
-)
-from dodal.devices.electron_analyser.energy_sources import (
-    DualEnergySource,
-    EnergySource,
-)
-from dodal.devices.electron_analyser.vgscienta.driver_io import (
-    VGScientaAnalyserDriverIO,
-)
-from dodal.devices.electron_analyser.vgscienta.region import (
-    VGScientaRegion,
-    VGScientaSequence,
-)
-
-
-class VGScientaDetector(
-    ElectronAnalyserDetector[
-        VGScientaAnalyserDriverIO[TLensMode, TPsuMode, TPassEnergyEnum],
-        VGScientaSequence[TLensMode, TPsuMode, TPassEnergyEnum],
-        VGScientaRegion[TLensMode, TPassEnergyEnum],
-    ],
-    Generic[TLensMode, TPsuMode, TPassEnergyEnum],
-):
-    def __init__(
-        self,
-        prefix: str,
-        lens_mode_type: type[TLensMode],
-        psu_mode_type: type[TPsuMode],
-        pass_energy_type: type[TPassEnergyEnum],
-        energy_source: DualEnergySource | EnergySource,
-        name: str = "",
-    ):
-        driver = VGScientaAnalyserDriverIO[TLensMode, TPsuMode, TPassEnergyEnum](
-            prefix, lens_mode_type, psu_mode_type, pass_energy_type, energy_source
-        )
-        super().__init__(
-            VGScientaSequence[lens_mode_type, psu_mode_type, pass_energy_type],
-            driver,
-            name,
-        )
```