ophyd-async 0.5.0__py3-none-any.whl → 0.5.2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +6 -3
- ophyd_async/core/_detector.py +38 -28
- ophyd_async/core/_hdf_dataset.py +1 -5
- ophyd_async/core/_mock_signal_utils.py +4 -3
- ophyd_async/core/_providers.py +30 -39
- ophyd_async/core/_signal.py +73 -28
- ophyd_async/core/_status.py +17 -1
- ophyd_async/epics/adaravis/_aravis.py +1 -1
- ophyd_async/epics/adcore/__init__.py +16 -5
- ophyd_async/epics/adcore/_core_io.py +29 -5
- ophyd_async/epics/adcore/_core_logic.py +7 -4
- ophyd_async/epics/adcore/_hdf_writer.py +51 -33
- ophyd_async/epics/adcore/_utils.py +69 -70
- ophyd_async/epics/adkinetix/_kinetix.py +1 -1
- ophyd_async/epics/adkinetix/_kinetix_io.py +4 -1
- ophyd_async/epics/adpilatus/_pilatus.py +1 -1
- ophyd_async/epics/adpilatus/_pilatus_controller.py +1 -1
- ophyd_async/epics/adpilatus/_pilatus_io.py +1 -1
- ophyd_async/epics/adsimdetector/_sim.py +1 -1
- ophyd_async/epics/advimba/_vimba.py +1 -1
- ophyd_async/epics/advimba/_vimba_controller.py +3 -3
- ophyd_async/epics/advimba/_vimba_io.py +6 -4
- ophyd_async/epics/eiger/__init__.py +5 -0
- ophyd_async/epics/eiger/_eiger.py +43 -0
- ophyd_async/epics/eiger/_eiger_controller.py +66 -0
- ophyd_async/epics/eiger/_eiger_io.py +42 -0
- ophyd_async/epics/eiger/_odin_io.py +125 -0
- ophyd_async/epics/motor.py +16 -3
- ophyd_async/epics/signal/_aioca.py +12 -5
- ophyd_async/epics/signal/_common.py +1 -1
- ophyd_async/epics/signal/_p4p.py +14 -11
- ophyd_async/fastcs/panda/__init__.py +3 -3
- ophyd_async/fastcs/panda/{_common_blocks.py → _block.py} +2 -0
- ophyd_async/fastcs/panda/{_panda_controller.py → _control.py} +1 -1
- ophyd_async/fastcs/panda/_hdf_panda.py +4 -4
- ophyd_async/fastcs/panda/_trigger.py +1 -1
- ophyd_async/fastcs/panda/{_hdf_writer.py → _writer.py} +29 -22
- ophyd_async/plan_stubs/__init__.py +3 -0
- ophyd_async/plan_stubs/_nd_attributes.py +63 -0
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +5 -2
- ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +1 -3
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/METADATA +46 -44
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/RECORD +48 -42
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/WHEEL +1 -1
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/LICENSE +0 -0
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.5.0.dist-info → ophyd_async-0.5.2.dist-info}/top_level.txt +0 -0
ophyd_async/epics/adcore/_core_io.py

@@ -22,18 +22,19 @@ class NDArrayBaseIO(Device):
         self.acquire = epics_signal_rw_rbv(bool, prefix + "Acquire")
         self.array_size_x = epics_signal_r(int, prefix + "ArraySizeX_RBV")
         self.array_size_y = epics_signal_r(int, prefix + "ArraySizeY_RBV")
-        self.data_type = epics_signal_r(ADBaseDataType, prefix + "
+        self.data_type = epics_signal_r(ADBaseDataType, prefix + "DataType_RBV")
         self.array_counter = epics_signal_rw_rbv(int, prefix + "ArrayCounter")
         # There is no _RBV for this one
         self.wait_for_plugins = epics_signal_rw(bool, prefix + "WaitForPlugins")
-
         super().__init__(name=name)


 class NDPluginBaseIO(NDArrayBaseIO):
     def __init__(self, prefix: str, name: str = "") -> None:
         self.nd_array_port = epics_signal_rw_rbv(str, prefix + "NDArrayPort")
-        self.
+        self.enable_callbacks = epics_signal_rw_rbv(
+            Callback, prefix + "EnableCallbacks"
+        )
         self.nd_array_address = epics_signal_rw_rbv(int, prefix + "NDArrayAddress")
         self.array_size0 = epics_signal_r(int, prefix + "ArraySize0_RBV")
         self.array_size1 = epics_signal_r(int, prefix + "ArraySize1_RBV")

@@ -41,7 +42,30 @@ class NDPluginBaseIO(NDArrayBaseIO):


 class NDPluginStatsIO(NDPluginBaseIO):
-
+    """
+    Plugin for computing statistics from an image or region of interest within an image.
+    """
+
+    def __init__(self, prefix: str, name: str = "") -> None:
+        # Basic statistics
+        self.compute_statistics = epics_signal_rw(bool, prefix + "ComputeStatistics")
+        self.bgd_width = epics_signal_rw(int, prefix + "BgdWidth")
+        self.total_array = epics_signal_rw(float, prefix + "TotalArray")
+        # Centroid statistics
+        self.compute_centroid = epics_signal_rw(bool, prefix + "ComputeCentroid")
+        self.centroid_threshold = epics_signal_rw(float, prefix + "CentroidThreshold")
+        # X and Y Profiles
+        self.compute_profiles = epics_signal_rw(bool, prefix + "ComputeProfiles")
+        self.profile_size_x = epics_signal_rw(int, prefix + "ProfileSizeX")
+        self.profile_size_y = epics_signal_rw(int, prefix + "ProfileSizeY")
+        self.cursor_x = epics_signal_rw(int, prefix + "CursorX")
+        self.cursor_y = epics_signal_rw(int, prefix + "CursorY")
+        # Array Histogram
+        self.compute_histogram = epics_signal_rw(bool, prefix + "ComputeHistogram")
+        self.hist_size = epics_signal_rw(int, prefix + "HistSize")
+        self.hist_min = epics_signal_rw(float, prefix + "HistMin")
+        self.hist_max = epics_signal_rw(float, prefix + "HistMax")
+        super().__init__(prefix, name)


 class DetectorState(str, Enum):

@@ -110,5 +134,5 @@ class NDFileHDFIO(NDPluginBaseIO):
         self.xml_file_name = epics_signal_rw_rbv(str, prefix + "XMLFileName")
         self.array_size0 = epics_signal_r(int, prefix + "ArraySize0")
         self.array_size1 = epics_signal_r(int, prefix + "ArraySize1")
-        self.
+        self.create_directory = epics_signal_rw(int, prefix + "CreateDirectory")
         super().__init__(prefix, name)
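
The NDPluginStatsIO block added above is a plain container of EPICS signals. Below is a minimal usage sketch; the PV prefix, device name, and the use of DeviceCollector are illustrative assumptions, not part of this diff.

    # Hypothetical example: connect a stats plugin and read one of its new signals.
    from ophyd_async.core import DeviceCollector
    from ophyd_async.epics.adcore._core_io import NDPluginStatsIO


    async def read_total_counts() -> float:
        async with DeviceCollector():
            stats = NDPluginStatsIO("XF:31ID-ES{Det:1}Stats1:", name="stats")
        # ComputeStatistics must be enabled before TotalArray updates
        await stats.compute_statistics.set(True)
        return await stats.total_array.get_value()

Running this requires a live areaDetector IOC serving the corresponding PVs.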
ophyd_async/epics/adcore/_core_logic.py

@@ -4,10 +4,11 @@ from typing import FrozenSet, Set
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
+    DatasetDescriber,
     DetectorControl,
-    ShapeProvider,
     set_and_wait_for_value,
 )
+from ophyd_async.epics.adcore._utils import convert_ad_dtype_to_np

 from ._core_io import ADBaseIO, DetectorState


@@ -18,15 +19,17 @@ DEFAULT_GOOD_STATES: FrozenSet[DetectorState] = frozenset(
 )


-class
+class ADBaseDatasetDescriber(DatasetDescriber):
     def __init__(self, driver: ADBaseIO) -> None:
         self._driver = driver

-    async def
+    async def np_datatype(self) -> str:
+        return convert_ad_dtype_to_np(await self._driver.data_type.get_value())
+
+    async def shape(self) -> tuple[int, int]:
         shape = await asyncio.gather(
             self._driver.array_size_y.get_value(),
             self._driver.array_size_x.get_value(),
-            self._driver.data_type.get_value(),
         )
         return shape

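
ADBaseDatasetDescriber above replaces the old ShapeProvider: shape() still gathers ArraySizeY/X, while the data type moves to a separate np_datatype() call. A hedged sketch of querying it follows; the PV prefix and the connect call are illustrative only, and the exact dtype string depends on the driver's DataType_RBV.

    # Illustrative sketch; not taken from the package's own tests or docs.
    from ophyd_async.epics.adcore._core_io import ADBaseIO
    from ophyd_async.epics.adcore._core_logic import ADBaseDatasetDescriber


    async def describe_detector() -> None:
        drv = ADBaseIO("XF:31ID-ES{Det:1}cam1:", name="drv")
        await drv.connect()
        describer = ADBaseDatasetDescriber(drv)
        height, width = await describer.shape()   # ArraySizeY_RBV, ArraySizeX_RBV
        dtype = await describer.np_datatype()     # numpy dtype string from DataType_RBV
        print(height, width, dtype)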
ophyd_async/epics/adcore/_hdf_writer.py

@@ -1,25 +1,30 @@
 import asyncio
 from pathlib import Path
 from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional
+from xml.etree import ElementTree as ET

 from bluesky.protocols import DataKey, Hints, StreamAsset

 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
+    DatasetDescriber,
     DetectorWriter,
     HDFDataset,
     HDFFile,
     NameProvider,
     PathProvider,
-    ShapeProvider,
     observe_value,
     set_and_wait_for_value,
     wait_for_value,
 )

-from ._core_io import NDFileHDFIO
-from ._utils import
+from ._core_io import NDArrayBaseIO, NDFileHDFIO
+from ._utils import (
+    FileWriteMode,
+    convert_param_dtype_to_np,
+    convert_pv_dtype_to_np,
+)


 class ADHDFWriter(DetectorWriter):

@@ -28,14 +33,15 @@ class ADHDFWriter(DetectorWriter):
         hdf: NDFileHDFIO,
         path_provider: PathProvider,
         name_provider: NameProvider,
-
-
+        dataset_describer: DatasetDescriber,
+        *plugins: NDArrayBaseIO,
     ) -> None:
         self.hdf = hdf
         self._path_provider = path_provider
         self._name_provider = name_provider
-        self.
-
+        self._dataset_describer = dataset_describer
+
+        self._plugins = plugins
         self._capture_status: Optional[AsyncStatus] = None
         self._datasets: List[HDFDataset] = []
         self._file: Optional[HDFFile] = None

@@ -44,16 +50,19 @@ class ADHDFWriter(DetectorWriter):
     async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
         self._file = None
         info = self._path_provider(device_name=self.hdf.name)
-
+
+        # Set the directory creation depth first, since dir creation callback happens
+        # when directory path PV is processed.
+        await self.hdf.create_directory.set(info.create_dir_depth)
+
         await asyncio.gather(
             self.hdf.num_extra_dims.set(0),
             self.hdf.lazy_open.set(True),
             self.hdf.swmr_mode.set(True),
             # See https://github.com/bluesky/ophyd-async/issues/122
-            self.hdf.file_path.set(
+            self.hdf.file_path.set(str(info.directory_path)),
             self.hdf.file_name.set(info.filename),
             self.hdf.file_template.set("%s/%s.h5"),
-            self.hdf.create_dir_depth.set(info.create_dir_depth),
             self.hdf.file_write_mode.set(FileWriteMode.stream),
             # Never use custom xml layout file but use the one defined
             # in the source code file NDFileHDF5LayoutXML.cpp

@@ -62,44 +71,54 @@ class ADHDFWriter(DetectorWriter):

         assert (
             await self.hdf.file_path_exists.get_value()
-        ), f"File path {
+        ), f"File path {info.directory_path} for hdf plugin does not exist"

         # Overwrite num_capture to go forever
         await self.hdf.num_capture.set(0)
         # Wait for it to start, stashing the status that tells us when it finishes
         self._capture_status = await set_and_wait_for_value(self.hdf.capture, True)
         name = self._name_provider()
-        detector_shape =
+        detector_shape = await self._dataset_describer.shape()
+        np_dtype = await self._dataset_describer.np_datatype()
         self._multiplier = multiplier
         outer_shape = (multiplier,) if multiplier > 1 else ()
-        frame_shape = detector_shape[:-1] if len(detector_shape) > 0 else []
-        dtype_numpy = (
-            convert_ad_dtype_to_np(detector_shape[-1])
-            if len(detector_shape) > 0
-            else ""
-        )

         # Add the main data
         self._datasets = [
             HDFDataset(
                 data_key=name,
                 dataset="/entry/data/data",
-                shape=
-                dtype_numpy=
+                shape=detector_shape,
+                dtype_numpy=np_dtype,
                 multiplier=multiplier,
             )
         ]
         # And all the scalar datasets
-        for
-
-
-
-
-
-
-
-
-
+        for plugin in self._plugins:
+            maybe_xml = await plugin.nd_attributes_file.get_value()
+            # This is the check that ADCore does to see if it is an XML string
+            # rather than a filename to parse
+            if "<Attributes>" in maybe_xml:
+                root = ET.fromstring(maybe_xml)
+                for child in root:
+                    datakey = child.attrib["name"]
+                    if child.attrib.get("type", "EPICS_PV") == "EPICS_PV":
+                        np_datatype = convert_pv_dtype_to_np(
+                            child.attrib.get("dbrtype", "DBR_NATIVE")
+                        )
+                    else:
+                        np_datatype = convert_param_dtype_to_np(
+                            child.attrib.get("datatype", "INT")
+                        )
+                    self._datasets.append(
+                        HDFDataset(
+                            datakey,
+                            f"/entry/instrument/NDAttributes/{datakey}",
+                            (),
+                            np_datatype,
+                            multiplier,
+                        )
+                    )

         describe = {
             ds.data_key: DataKey(

@@ -133,7 +152,6 @@ class ADHDFWriter(DetectorWriter):
         if not self._file:
             path = Path(await self.hdf.full_file_name.get_value())
             self._file = HDFFile(
-                self._path_provider(),
                 # See https://github.com/bluesky/ophyd-async/issues/122
                 path,
                 self._datasets,

@@ -149,8 +167,8 @@ class ADHDFWriter(DetectorWriter):

     async def close(self):
         # Already done a caput callback in _capture_status, so can't do one here
-        await self.hdf.capture.set(
-        await wait_for_value(self.hdf.capture,
+        await self.hdf.capture.set(False, wait=False)
+        await wait_for_value(self.hdf.capture, False, DEFAULT_TIMEOUT)
         if self._capture_status:
             # We kicked off an open, so wait for it to return
             await self._capture_status
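
ADHDFWriter now takes a DatasetDescriber plus any number of plugin blocks whose NDAttributesFile XML is mirrored into extra HDF datasets in open(). A construction sketch follows; the StaticPathProvider/StaticFilenameProvider helpers are assumed to be available from ophyd_async.core, and the prefixes, names, and paths are placeholders.

    # Hypothetical wiring of the new constructor signature.
    from pathlib import Path

    from ophyd_async.core import StaticFilenameProvider, StaticPathProvider
    from ophyd_async.epics.adcore._core_io import ADBaseIO, NDFileHDFIO, NDPluginStatsIO
    from ophyd_async.epics.adcore._core_logic import ADBaseDatasetDescriber
    from ophyd_async.epics.adcore._hdf_writer import ADHDFWriter


    def make_writer(drv: ADBaseIO, stats: NDPluginStatsIO) -> ADHDFWriter:
        hdf = NDFileHDFIO("XF:31ID-ES{Det:1}HDF1:", name="hdf")
        path_provider = StaticPathProvider(
            StaticFilenameProvider("scan"), Path("/tmp/detector-data")
        )
        return ADHDFWriter(
            hdf,
            path_provider,
            lambda: "det",                # NameProvider for the main data_key
            ADBaseDatasetDescriber(drv),
            stats,                        # *plugins: NDAttributes XML parsed in open()
        )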
ophyd_async/epics/adcore/_utils.py

@@ -1,8 +1,9 @@
+from dataclasses import dataclass
 from enum import Enum
 from typing import Optional
-from xml.etree import cElementTree as ET

 from ophyd_async.core import DEFAULT_TIMEOUT, SignalRW, T, wait_for_value
+from ophyd_async.core._signal import SignalR


 class ADBaseDataType(str, Enum):

@@ -34,6 +35,45 @@ def convert_ad_dtype_to_np(ad_dtype: ADBaseDataType) -> str:
     return ad_dtype_to_np_dtype[ad_dtype]


+def convert_pv_dtype_to_np(datatype: str) -> str:
+    _pvattribute_to_ad_datatype = {
+        "DBR_SHORT": ADBaseDataType.Int16,
+        "DBR_ENUM": ADBaseDataType.Int16,
+        "DBR_INT": ADBaseDataType.Int32,
+        "DBR_LONG": ADBaseDataType.Int32,
+        "DBR_FLOAT": ADBaseDataType.Float32,
+        "DBR_DOUBLE": ADBaseDataType.Float64,
+    }
+    if datatype in ["DBR_STRING", "DBR_CHAR"]:
+        np_datatype = "s40"
+    elif datatype == "DBR_NATIVE":
+        raise ValueError("Don't support DBR_NATIVE yet")
+    else:
+        try:
+            np_datatype = convert_ad_dtype_to_np(_pvattribute_to_ad_datatype[datatype])
+        except KeyError:
+            raise ValueError(f"Invalid dbr type {datatype}")
+    return np_datatype
+
+
+def convert_param_dtype_to_np(datatype: str) -> str:
+    _paramattribute_to_ad_datatype = {
+        "INT": ADBaseDataType.Int32,
+        "INT64": ADBaseDataType.Int64,
+        "DOUBLE": ADBaseDataType.Float64,
+    }
+    if datatype in ["STRING"]:
+        np_datatype = "s40"
+    else:
+        try:
+            np_datatype = convert_ad_dtype_to_np(
+                _paramattribute_to_ad_datatype[datatype]
+            )
+        except KeyError:
+            raise ValueError(f"Invalid datatype {datatype}")
+    return np_datatype
+
+
 class FileWriteMode(str, Enum):
     single = "Single"
     capture = "Capture"

@@ -52,75 +92,34 @@ class NDAttributeDataType(str, Enum):
     STRING = "STRING"


-class
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-        ET.SubElement(
-            self._root,
-            "Attribute",
-            name=name,
-            type="EPICS_PV",
-            source=pv,
-            datatype=self._dbr_types[datatype],
-            description=description,
-        )
-
-    def add_param(
-        self,
-        name: str,
-        param: str,
-        datatype: NDAttributeDataType,
-        addr: int = 0,
-        description: str = "",
-    ):
-        """Add a driver or plugin parameter to the attribute list
-
-        Args:
-            name: The attribute name
-            param: The parameter string as seen in the INP link of the record
-            datatype: The datatype of the parameter
-            description: A description that appears in the HDF file as an attribute
-        """
-        ET.SubElement(
-            self._root,
-            "Attribute",
-            name=name,
-            type="PARAM",
-            source=param,
-            addr=str(addr),
-            datatype=datatype.value,
-            description=description,
-        )
-
-    def __str__(self) -> str:
-        """Output the XML pretty printed"""
-        ET.indent(self._root, space=" ", level=0)
-        return ET.tostring(self._root, xml_declaration=True, encoding="utf-8").decode()
+class NDAttributePvDbrType(str, Enum):
+    DBR_SHORT = "DBR_SHORT"
+    DBR_ENUM = "DBR_ENUM"
+    DBR_INT = "DBR_INT"
+    DBR_LONG = "DBR_LONG"
+    DBR_FLOAT = "DBR_FLOAT"
+    DBR_DOUBLE = "DBR_DOUBLE"
+    DBR_STRING = "DBR_STRING"
+    DBR_CHAR = "DBR_CHAR"
+
+
+@dataclass
+class NDAttributePv:
+    name: str  # name of attribute stamped on array, also scientifically useful name
+    # when appended to device.name
+    signal: SignalR  # caget the pv given by signal.source and attach to each frame
+    dbrtype: NDAttributePvDbrType
+    description: str = ""  # A description that appears in the HDF file as an attribute
+
+
+@dataclass
+class NDAttributeParam:
+    name: str  # name of attribute stamped on array, also scientifically useful name
+    # when appended to device.name
+    param: str  # The parameter string as seen in the INP link of the record
+    datatype: NDAttributeDataType  # The datatype of the parameter
+    addr: int = 0  # The address as seen in the INP link of the record
+    description: str = ""  # A description that appears in the HDF file as an attribute


 async def stop_busy_record(
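
The removed XML-building helper class gives way to plain declarations: the NDAttributePv and NDAttributeParam dataclasses above, with XML assembly handled elsewhere (see the new ophyd_async/plan_stubs/_nd_attributes.py in the file list). A small sketch of declaring a PARAM attribute and exercising the new dtype converters; the attribute name, parameter string, a DOUBLE member on NDAttributeDataType, and the exact numpy strings printed are all assumptions consistent with the code above.

    # Illustrative declarations only.
    from ophyd_async.epics.adcore._utils import (
        NDAttributeDataType,
        NDAttributeParam,
        convert_param_dtype_to_np,
        convert_pv_dtype_to_np,
    )

    temperature = NDAttributeParam(
        name="ChipTemperature",          # dataset name stamped on each frame
        param="TEMPERATURE_ACTUAL",      # as seen in the INP link of the record
        datatype=NDAttributeDataType.DOUBLE,
        description="Camera chip temperature",
    )

    print(convert_param_dtype_to_np("DOUBLE"))   # a numpy dtype string, e.g. "<f8"
    print(convert_pv_dtype_to_np("DBR_LONG"))    # a numpy dtype string, e.g. "<i4"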
ophyd_async/epics/adkinetix/_kinetix_io.py

@@ -14,6 +14,7 @@ class KinetixReadoutMode(str, Enum):
     sensitivity = 1
     speed = 2
     dynamic_range = 3
+    sub_electron = 4


 class KinetixDriverIO(adcore.ADBaseIO):

@@ -24,5 +25,7 @@ class KinetixDriverIO(adcore.ADBaseIO):
         self.trigger_mode = epics_signal_rw_rbv(
             KinetixTriggerMode, prefix + "TriggerMode"
         )
-        self.
+        self.readout_port_idx = epics_signal_rw_rbv(
+            KinetixReadoutMode, prefix + "ReadoutPortIdx"
+        )
         super().__init__(prefix, name)
ophyd_async/epics/adpilatus/_pilatus_io.py

@@ -19,5 +19,5 @@ class PilatusDriverIO(adcore.ADBaseIO):
         self.trigger_mode = epics_signal_rw_rbv(
             PilatusTriggerMode, prefix + "TriggerMode"
         )
-        self.
+        self.armed = epics_signal_r(bool, prefix + "Armed")
         super().__init__(prefix, name)
ophyd_async/epics/advimba/_vimba_controller.py

@@ -39,7 +39,7 @@ class VimbaController(DetectorControl):
     ) -> AsyncStatus:
         await asyncio.gather(
             self._drv.trigger_mode.set(TRIGGER_MODE[trigger]),
-            self._drv.
+            self._drv.exposure_mode.set(EXPOSE_OUT_MODE[trigger]),
             self._drv.num_images.set(num),
             self._drv.image_mode.set(adcore.ImageMode.multiple),
         )

@@ -49,9 +49,9 @@ class VimbaController(DetectorControl):
     ]:
         await self._drv.acquire_time.set(exposure)
         if trigger != DetectorTrigger.internal:
-            self._drv.
+            self._drv.trigger_source.set(VimbaTriggerSource.line1)
         else:
-            self._drv.
+            self._drv.trigger_source.set(VimbaTriggerSource.freerun)
         return await adcore.start_acquiring_driver_and_ensure_status(self._drv)

     async def disarm(self):
|
@@ -50,15 +50,17 @@ class VimbaDriverIO(adcore.ADBaseIO):
|
|
|
50
50
|
|
|
51
51
|
def __init__(self, prefix: str, name: str = "") -> None:
|
|
52
52
|
# self.pixel_format = epics_signal_rw_rbv(PixelFormat, prefix + "PixelFormat")
|
|
53
|
-
self.
|
|
53
|
+
self.convert_pixel_format = epics_signal_rw_rbv(
|
|
54
54
|
VimbaConvertFormat, prefix + "ConvertPixelFormat"
|
|
55
55
|
) # Pixel format of data outputted to AD
|
|
56
|
-
self.
|
|
56
|
+
self.trigger_source = epics_signal_rw_rbv(
|
|
57
57
|
VimbaTriggerSource, prefix + "TriggerSource"
|
|
58
58
|
)
|
|
59
59
|
self.trigger_mode = epics_signal_rw_rbv(VimbaOnOff, prefix + "TriggerMode")
|
|
60
|
-
self.
|
|
61
|
-
|
|
60
|
+
self.trigger_overlap = epics_signal_rw_rbv(
|
|
61
|
+
VimbaOverlap, prefix + "TriggerOverlap"
|
|
62
|
+
)
|
|
63
|
+
self.exposure_mode = epics_signal_rw_rbv(
|
|
62
64
|
VimbaExposeOutMode, prefix + "ExposureMode"
|
|
63
65
|
)
|
|
64
66
|
super().__init__(prefix, name)
|
|
ophyd_async/epics/eiger/_eiger.py (new file)

@@ -0,0 +1,43 @@
+from pydantic import Field
+
+from ophyd_async.core import AsyncStatus, PathProvider, StandardDetector
+from ophyd_async.core._detector import TriggerInfo
+
+from ._eiger_controller import EigerController
+from ._eiger_io import EigerDriverIO
+from ._odin_io import Odin, OdinWriter
+
+
+class EigerTriggerInfo(TriggerInfo):
+    energy_ev: float = Field(gt=0)
+
+
+class EigerDetector(StandardDetector):
+    """
+    Ophyd-async implementation of an Eiger Detector.
+    """
+
+    _controller: EigerController
+    _writer: Odin
+
+    def __init__(
+        self,
+        prefix: str,
+        path_provider: PathProvider,
+        drv_suffix="-EA-EIGER-01:",
+        hdf_suffix="-EA-ODIN-01:",
+        name="",
+    ):
+        self.drv = EigerDriverIO(prefix + drv_suffix)
+        self.odin = Odin(prefix + hdf_suffix + "FP:")
+
+        super().__init__(
+            EigerController(self.drv),
+            OdinWriter(path_provider, lambda: self.name, self.odin),
+            name=name,
+        )
+
+    @AsyncStatus.wrap
+    async def prepare(self, value: EigerTriggerInfo) -> None:
+        await self._controller.set_energy(value.energy_ev)
+        await super().prepare(value)
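
A hedged construction sketch for the new EigerDetector follows. The beamline prefix is a placeholder, and it assumes EigerDetector is re-exported from ophyd_async.epics.eiger (its new __init__.py adds 5 lines, per the file list).

    # Hypothetical instantiation; requires a live Eiger/Odin IOC to actually connect.
    from ophyd_async.core import DeviceCollector, PathProvider
    from ophyd_async.epics.eiger import EigerDetector


    async def make_eiger(path_provider: PathProvider) -> EigerDetector:
        async with DeviceCollector():
            eiger = EigerDetector("BL03I", path_provider)
        # prepare() takes an EigerTriggerInfo; its energy_ev field (> 0) is applied
        # via EigerController.set_energy() before the standard prepare sequence.
        return eiger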
ophyd_async/epics/eiger/_eiger_controller.py (new file)

@@ -0,0 +1,66 @@
+import asyncio
+from typing import Optional
+
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+    set_and_wait_for_other_value,
+)
+
+from ._eiger_io import EigerDriverIO, EigerTriggerMode
+
+EIGER_TRIGGER_MODE_MAP = {
+    DetectorTrigger.internal: EigerTriggerMode.internal,
+    DetectorTrigger.constant_gate: EigerTriggerMode.gate,
+    DetectorTrigger.variable_gate: EigerTriggerMode.gate,
+    DetectorTrigger.edge_trigger: EigerTriggerMode.edge,
+}
+
+
+class EigerController(DetectorControl):
+    def __init__(
+        self,
+        driver: EigerDriverIO,
+    ) -> None:
+        self._drv = driver
+
+    def get_deadtime(self, exposure: float) -> float:
+        # See https://media.dectris.com/filer_public/30/14/3014704e-5f3b-43ba-8ccf-8ef720e60d2a/240202_usermanual_eiger2.pdf
+        return 0.0001
+
+    async def set_energy(self, energy: float, tolerance: float = 0.1):
+        """Changing photon energy takes some time so only do so if the current energy is
+        outside the tolerance."""
+        current_energy = await self._drv.photon_energy.get_value()
+        if abs(current_energy - energy) > tolerance:
+            await self._drv.photon_energy.set(energy)
+
+    @AsyncStatus.wrap
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ):
+        coros = [
+            self._drv.trigger_mode.set(EIGER_TRIGGER_MODE_MAP[trigger].value),
+            self._drv.num_images.set(num),
+        ]
+        if exposure is not None:
+            coros.extend(
+                [
+                    self._drv.acquire_time.set(exposure),
+                    self._drv.acquire_period.set(exposure),
+                ]
+            )
+        await asyncio.gather(*coros)
+
+        # TODO: Detector state should be an enum see https://github.com/DiamondLightSource/eiger-fastcs/issues/43
+        await set_and_wait_for_other_value(
+            self._drv.arm, 1, self._drv.state, "ready", timeout=DEFAULT_TIMEOUT
+        )
+
+    async def disarm(self):
+        await self._drv.disarm.set(1)
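
The arm() method above relies on the new set_and_wait_for_other_value helper exported from ophyd_async.core in this release: set one signal, then wait for a different signal to report an expected value. Below is a simplified conceptual sketch of that pattern, not the library's actual implementation (which also guards against missing the transition between the set and the wait).

    # Conceptual outline only; assumes ophyd-async SignalRW/SignalR objects.
    from ophyd_async.core import DEFAULT_TIMEOUT, SignalR, SignalRW, wait_for_value


    async def set_and_wait_for_other_value_sketch(
        set_signal: SignalRW,
        set_value,
        read_signal: SignalR,
        match_value,
        timeout: float = DEFAULT_TIMEOUT,
    ) -> None:
        # Kick off the set without waiting for its completion callback...
        await set_signal.set(set_value, wait=False)
        # ...then block until the other signal reads back the expected value.
        await wait_for_value(read_signal, match_value, timeout)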