ophyd-async 0.3.4__py3-none-any.whl → 0.4.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +20 -8
- ophyd_async/core/_providers.py +186 -24
- ophyd_async/core/detector.py +14 -15
- ophyd_async/core/device.py +18 -6
- ophyd_async/core/signal.py +32 -8
- ophyd_async/core/soft_signal_backend.py +20 -2
- ophyd_async/epics/_backend/_aioca.py +3 -0
- ophyd_async/epics/_backend/_p4p.py +50 -2
- ophyd_async/epics/_backend/common.py +3 -1
- ophyd_async/epics/areadetector/aravis.py +3 -3
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +1 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +3 -2
- ophyd_async/epics/areadetector/kinetix.py +3 -3
- ophyd_async/epics/areadetector/pilatus.py +3 -3
- ophyd_async/epics/areadetector/vimba.py +3 -3
- ophyd_async/epics/areadetector/writers/__init__.py +2 -2
- ophyd_async/epics/areadetector/writers/general_hdffile.py +97 -0
- ophyd_async/epics/areadetector/writers/hdf_writer.py +27 -10
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +3 -0
- ophyd_async/epics/areadetector/writers/nd_plugin.py +30 -0
- ophyd_async/epics/demo/demo_ad_sim_detector.py +3 -3
- ophyd_async/epics/motion/motor.py +132 -2
- ophyd_async/panda/__init__.py +15 -1
- ophyd_async/panda/_common_blocks.py +22 -1
- ophyd_async/panda/_hdf_panda.py +5 -3
- ophyd_async/panda/_table.py +20 -18
- ophyd_async/panda/_trigger.py +62 -7
- ophyd_async/panda/writers/_hdf_writer.py +17 -8
- ophyd_async/plan_stubs/ensure_connected.py +7 -2
- ophyd_async/plan_stubs/fly.py +58 -7
- ophyd_async/sim/pattern_generator.py +71 -182
- ophyd_async/sim/sim_pattern_detector_control.py +3 -3
- ophyd_async/sim/sim_pattern_detector_writer.py +9 -5
- ophyd_async/sim/sim_pattern_generator.py +12 -5
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/METADATA +7 -2
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/RECORD +41 -43
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/WHEEL +1 -1
- ophyd_async/epics/areadetector/writers/_hdfdataset.py +0 -10
- ophyd_async/epics/areadetector/writers/_hdffile.py +0 -54
- ophyd_async/panda/writers/_panda_hdf_file.py +0 -54
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.3.4.dist-info → ophyd_async-0.4.0.dist-info}/top_level.txt +0 -0

ophyd_async/epics/areadetector/aravis.py CHANGED

@@ -2,7 +2,7 @@ from typing import get_args
 
 from bluesky.protocols import HasHints, Hints
 
-from ophyd_async.core import
+from ophyd_async.core import PathProvider, StandardDetector
 from ophyd_async.epics.areadetector.controllers.aravis_controller import (
     AravisController,
 )
@@ -24,7 +24,7 @@ class AravisDetector(StandardDetector, HasHints):
     def __init__(
         self,
         prefix: str,
-
+        path_provider: PathProvider,
         drv_suffix="cam1:",
         hdf_suffix="HDF1:",
         name="",
@@ -37,7 +37,7 @@ class AravisDetector(StandardDetector, HasHints):
             AravisController(self.drv, gpio_number=gpio_number),
             HDFWriter(
                 self.hdf,
-
+                path_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
             ),
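
The change above (repeated for kinetix.py, pilatus.py, vimba.py and the demo detector further down) swaps the 0.3.4 DirectoryProvider argument for a PathProvider. Below is a standalone sketch, not part of the diff, of the new constructor call; FixedPathProvider is a toy stand-in written for this example, and the PathInfo keyword arguments are inferred from how HDFWriter.open() uses the returned object elsewhere in this diff rather than copied from _providers.py.

from pathlib import Path

from ophyd_async.core import PathInfo
from ophyd_async.epics.areadetector.aravis import AravisDetector


class FixedPathProvider:
    # Toy PathProvider: every call returns the same PathInfo (illustration only).
    # Field names follow the attributes HDFWriter reads: root, resource_dir,
    # filename and create_dir_depth (assumed keyword arguments).
    def __call__(self, device_name: str = "") -> PathInfo:
        return PathInfo(
            root=Path("/data"),
            resource_dir=Path("visit-001"),
            filename="scan_0001",
            create_dir_depth=0,
        )


det = AravisDetector(
    "BL01T-EA-DET-01:",   # example PV prefix
    FixedPathProvider(),  # replaces the 0.3.4 DirectoryProvider argument
    drv_suffix="cam1:",
    hdf_suffix="HDF1:",
    name="det",
)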

ophyd_async/epics/areadetector/drivers/ad_base.py CHANGED

@@ -1,6 +1,6 @@
 import asyncio
 from enum import Enum
-from typing import FrozenSet,
+from typing import FrozenSet, Set
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
@@ -131,9 +131,10 @@ class ADBaseShapeProvider(ShapeProvider):
     def __init__(self, driver: ADBase) -> None:
         self._driver = driver
 
-    async def __call__(self) ->
+    async def __call__(self) -> tuple:
         shape = await asyncio.gather(
             self._driver.array_size_y.get_value(),
             self._driver.array_size_x.get_value(),
+            self._driver.data_type.get_value(),
         )
         return shape

ophyd_async/epics/areadetector/kinetix.py CHANGED

@@ -1,6 +1,6 @@
 from bluesky.protocols import HasHints, Hints
 
-from ophyd_async.core import
+from ophyd_async.core import PathProvider, StandardDetector
 from ophyd_async.epics.areadetector.controllers.kinetix_controller import (
     KinetixController,
 )
@@ -21,7 +21,7 @@ class KinetixDetector(StandardDetector, HasHints):
     def __init__(
         self,
         prefix: str,
-
+        path_provider: PathProvider,
         drv_suffix="cam1:",
         hdf_suffix="HDF1:",
         name="",
@@ -33,7 +33,7 @@ class KinetixDetector(StandardDetector, HasHints):
             KinetixController(self.drv),
             HDFWriter(
                 self.hdf,
-
+                path_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
             ),

ophyd_async/epics/areadetector/pilatus.py CHANGED

@@ -2,7 +2,7 @@ from enum import Enum
 
 from bluesky.protocols import Hints
 
-from ophyd_async.core import
+from ophyd_async.core import PathProvider
 from ophyd_async.core.detector import StandardDetector
 from ophyd_async.epics.areadetector.controllers.pilatus_controller import (
     PilatusController,
@@ -36,7 +36,7 @@ class PilatusDetector(StandardDetector):
     def __init__(
         self,
         prefix: str,
-
+        path_provider: PathProvider,
         readout_time: PilatusReadoutTime = PilatusReadoutTime.pilatus3,
         drv_suffix: str = "cam1:",
         hdf_suffix: str = "HDF1:",
@@ -49,7 +49,7 @@ class PilatusDetector(StandardDetector):
             PilatusController(self.drv, readout_time=readout_time.value),
             HDFWriter(
                 self.hdf,
-
+                path_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
             ),

ophyd_async/epics/areadetector/vimba.py CHANGED

@@ -1,6 +1,6 @@
 from bluesky.protocols import HasHints, Hints
 
-from ophyd_async.core import
+from ophyd_async.core import PathProvider, StandardDetector
 from ophyd_async.epics.areadetector.controllers.vimba_controller import VimbaController
 from ophyd_async.epics.areadetector.drivers import ADBaseShapeProvider
 from ophyd_async.epics.areadetector.drivers.vimba_driver import VimbaDriver
@@ -18,7 +18,7 @@ class VimbaDetector(StandardDetector, HasHints):
     def __init__(
         self,
         prefix: str,
-
+        path_provider: PathProvider,
         drv_suffix="cam1:",
         hdf_suffix="HDF1:",
         name="",
@@ -30,7 +30,7 @@ class VimbaDetector(StandardDetector, HasHints):
             VimbaController(self.drv),
             HDFWriter(
                 self.hdf,
-
+                path_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
             ),

ophyd_async/epics/areadetector/writers/__init__.py CHANGED

@@ -1,5 +1,5 @@
 from .hdf_writer import HDFWriter
 from .nd_file_hdf import NDFileHDF
-from .nd_plugin import NDPluginBase, NDPluginStats
+from .nd_plugin import ADBaseDataType, NDPluginBase, NDPluginStats
 
-__all__ = ["HDFWriter", "NDFileHDF", "NDPluginBase", "NDPluginStats"]
+__all__ = ["HDFWriter", "NDFileHDF", "NDPluginBase", "NDPluginStats", "ADBaseDataType"]

ophyd_async/epics/areadetector/writers/general_hdffile.py ADDED

@@ -0,0 +1,97 @@
+from dataclasses import dataclass, field
+from pathlib import Path
+from typing import Iterator, List, Sequence
+from urllib.parse import urlunparse
+
+from event_model import (
+    ComposeStreamResource,
+    ComposeStreamResourceBundle,
+    StreamDatum,
+    StreamResource,
+)
+
+from ophyd_async.core import PathInfo
+
+
+@dataclass
+class _HDFDataset:
+    data_key: str
+    dataset: str
+    shape: Sequence[int] = field(default_factory=tuple)
+    dtype_numpy: str = ""
+    multiplier: int = 1
+    swmr: bool = False
+
+
+SLICE_NAME = "AD_HDF5_SWMR_SLICE"
+
+
+class _HDFFile:
+    """
+    :param directory_info: Contains information about how to construct a StreamResource
+    :param full_file_name: Absolute path to the file to be written
+    :param datasets: Datasets to write into the file
+    """
+
+    def __init__(
+        self,
+        path_info: PathInfo,
+        full_file_name: Path,
+        datasets: List[_HDFDataset],
+        hostname: str = "localhost",
+    ) -> None:
+        self._last_emitted = 0
+        self._hostname = hostname
+
+        if len(datasets) == 0:
+            self._bundles = []
+            return None
+
+        bundler_composer = ComposeStreamResource()
+
+        uri = urlunparse(
+            (
+                "file",
+                self._hostname,
+                str((path_info.root / full_file_name).absolute()),
+                "",
+                "",
+                None,
+            )
+        )
+
+        self._bundles: List[ComposeStreamResourceBundle] = [
+            bundler_composer(
+                mimetype="application/x-hdf5",
+                uri=uri,
+                data_key=ds.data_key,
+                parameters={
+                    "dataset": ds.dataset,
+                    "swmr": ds.swmr,
+                    "multiplier": ds.multiplier,
+                },
+                uid=None,
+                validate=True,
+            )
+            for ds in datasets
+        ]
+
+    def stream_resources(self) -> Iterator[StreamResource]:
+        for bundle in self._bundles:
+            yield bundle.stream_resource_doc
+
+    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
+        # Indices are relative to resource
+        if indices_written > self._last_emitted:
+            indices = {
+                "start": self._last_emitted,
+                "stop": indices_written,
+            }
+            self._last_emitted = indices_written
+            for bundle in self._bundles:
+                yield bundle.compose_stream_datum(indices)
+        return None
+
+    def close(self) -> None:
+        for bundle in self._bundles:
+            bundle.close()
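
general_hdffile.py consolidates the per-writer _hdfdataset.py and _hdffile.py helpers removed in this release (see the file list above). The standalone sketch below, not part of the diff, shows the flow it implements: _HDFFile composes one StreamResource document per _HDFDataset and emits StreamDatum documents as frames are written. The PathInfo keyword arguments are an assumption inferred from usage elsewhere in this diff.

from pathlib import Path

from ophyd_async.core import PathInfo
from ophyd_async.epics.areadetector.writers.general_hdffile import (
    _HDFDataset,
    _HDFFile,
)

# PathInfo fields assumed from how HDFWriter uses the provider's return value
path_info = PathInfo(
    root=Path("/data"),
    resource_dir=Path("visit-001"),
    filename="scan_0001",
    create_dir_depth=0,
)
datasets = [
    # Main detector frames plus one scalar NDAttribute dataset
    _HDFDataset(data_key="det", dataset="/entry/data/data",
                shape=(1216, 1936), dtype_numpy="<u2"),
    _HDFDataset(data_key="det-sum",
                dataset="/entry/instrument/NDAttributes/StatsTotal"),
]
hdf_file = _HDFFile(path_info, Path("scan_0001.h5"), datasets)

resources = list(hdf_file.stream_resources())  # one StreamResource per dataset
first_batch = list(hdf_file.stream_data(5))    # StreamDatum covering frames 0..4
hdf_file.close()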

ophyd_async/epics/areadetector/writers/hdf_writer.py CHANGED

@@ -8,30 +8,30 @@ from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
     DetectorWriter,
-    DirectoryProvider,
     NameProvider,
+    PathProvider,
     ShapeProvider,
     set_and_wait_for_value,
     wait_for_value,
 )
 from ophyd_async.core.signal import observe_value
 
-from .
-from ._hdffile import _HDFFile
+from .general_hdffile import _HDFDataset, _HDFFile
 from .nd_file_hdf import FileWriteMode, NDFileHDF
+from .nd_plugin import convert_ad_dtype_to_np
 
 
 class HDFWriter(DetectorWriter):
     def __init__(
         self,
         hdf: NDFileHDF,
-
+        path_provider: PathProvider,
         name_provider: NameProvider,
         shape_provider: ShapeProvider,
         **scalar_datasets_paths: str,
     ) -> None:
         self.hdf = hdf
-        self.
+        self._path_provider = path_provider
         self._name_provider = name_provider
         self._shape_provider = shape_provider
         self._scalar_datasets_paths = scalar_datasets_paths
@@ -42,7 +42,7 @@ class HDFWriter(DetectorWriter):
 
     async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
         self._file = None
-        info = self.
+        info = self._path_provider(device_name=self.hdf.name)
         file_path = str(info.root / info.resource_dir)
         await asyncio.gather(
             self.hdf.num_extra_dims.set(0),
@@ -50,8 +50,9 @@ class HDFWriter(DetectorWriter):
             self.hdf.swmr_mode.set(True),
             # See https://github.com/bluesky/ophyd-async/issues/122
             self.hdf.file_path.set(file_path),
-            self.hdf.file_name.set(
+            self.hdf.file_name.set(info.filename),
             self.hdf.file_template.set("%s/%s.h5"),
+            self.hdf.create_dir_depth.set(info.create_dir_depth),
             self.hdf.file_write_mode.set(FileWriteMode.stream),
             # Never use custom xml layout file but use the one defined
             # in the source code file NDFileHDF5LayoutXML.cpp
@@ -70,9 +71,22 @@ class HDFWriter(DetectorWriter):
         detector_shape = tuple(await self._shape_provider())
         self._multiplier = multiplier
         outer_shape = (multiplier,) if multiplier > 1 else ()
+        frame_shape = detector_shape[:-1] if len(detector_shape) > 0 else []
+        dtype_numpy = (
+            convert_ad_dtype_to_np(detector_shape[-1])
+            if len(detector_shape) > 0
+            else ""
+        )
+
         # Add the main data
         self._datasets = [
-            _HDFDataset(
+            _HDFDataset(
+                data_key=name,
+                dataset="/entry/data/data",
+                shape=frame_shape,
+                dtype_numpy=dtype_numpy,
+                multiplier=multiplier,
+            )
         ]
         # And all the scalar datasets
         for ds_name, ds_path in self._scalar_datasets_paths.items():
@@ -81,14 +95,17 @@ class HDFWriter(DetectorWriter):
                     f"{name}-{ds_name}",
                     f"/entry/instrument/NDAttributes/{ds_path}",
                     (),
+                    "",
                     multiplier,
                 )
             )
+
         describe = {
-            ds.
+            ds.data_key: DataKey(
                 source=self.hdf.full_file_name.source,
                 shape=outer_shape + tuple(ds.shape),
                 dtype="array" if ds.shape else "number",
+                dtype_numpy=ds.dtype_numpy,
                 external="STREAM:",
             )
             for ds in self._datasets
@@ -115,7 +132,7 @@ class HDFWriter(DetectorWriter):
         if not self._file:
             path = Path(await self.hdf.full_file_name.get_value())
             self._file = _HDFFile(
-                self.
+                self._path_provider(),
                 # See https://github.com/bluesky/ophyd-async/issues/122
                 path,
                 self._datasets,

ophyd_async/epics/areadetector/writers/nd_file_hdf.py CHANGED

@@ -37,4 +37,7 @@ class NDFileHDF(NDPluginBase):
         self.capture = epics_signal_rw_rbv(bool, prefix + "Capture")
         self.flush_now = epics_signal_rw(bool, prefix + "FlushNow")
         self.xml_file_name = epics_signal_rw_rbv(str, prefix + "XMLFileName")
+        self.array_size0 = epics_signal_r(int, prefix + "ArraySize0")
+        self.array_size1 = epics_signal_r(int, prefix + "ArraySize1")
+        self.create_dir_depth = epics_signal_rw(int, prefix + "CreateDirectory")
         super().__init__(prefix, name)

ophyd_async/epics/areadetector/writers/nd_plugin.py CHANGED

@@ -10,6 +10,35 @@ class Callback(str, Enum):
     Disable = "Disable"
 
 
+class ADBaseDataType(str, Enum):
+    Int8 = "Int8"
+    UInt8 = "UInt8"
+    Int16 = "Int16"
+    UInt16 = "UInt16"
+    Int32 = "Int32"
+    UInt32 = "UInt32"
+    Int64 = "Int64"
+    UInt64 = "UInt64"
+    Float32 = "Float32"
+    Float64 = "Float64"
+
+
+def convert_ad_dtype_to_np(ad_dtype: ADBaseDataType) -> str:
+    ad_dtype_to_np_dtype = {
+        ADBaseDataType.Int8: "|i1",
+        ADBaseDataType.UInt8: "|u1",
+        ADBaseDataType.Int16: "<i2",
+        ADBaseDataType.UInt16: "<u2",
+        ADBaseDataType.Int32: "<i4",
+        ADBaseDataType.UInt32: "<u4",
+        ADBaseDataType.Int64: "<i8",
+        ADBaseDataType.UInt64: "<u8",
+        ADBaseDataType.Float32: "<f4",
+        ADBaseDataType.Float64: "<f8",
+    }
+    return ad_dtype_to_np_dtype[ad_dtype]
+
+
 class NDArrayBase(Device):
     def __init__(self, prefix: str, name: str = "") -> None:
         self.unique_id = epics_signal_r(int, prefix + "UniqueId_RBV")
@@ -17,6 +46,7 @@ class NDArrayBase(Device):
         self.acquire = epics_signal_rw_rbv(bool, prefix + "Acquire")
         self.array_size_x = epics_signal_r(int, prefix + "ArraySizeX_RBV")
         self.array_size_y = epics_signal_r(int, prefix + "ArraySizeY_RBV")
+        self.data_type = epics_signal_r(ADBaseDataType, prefix + "NDDataType_RBV")
         self.array_counter = epics_signal_rw_rbv(int, prefix + "ArrayCounter")
         # There is no _RBV for this one
         self.wait_for_plugins = epics_signal_rw(bool, prefix + "WaitForPlugins")
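
The new ADBaseDataType enum mirrors the driver's NDDataType_RBV record, and convert_ad_dtype_to_np maps each value to the numpy dtype string that HDFWriter now stores as dtype_numpy in every DataKey. A quick illustration, not part of the diff:

from ophyd_async.epics.areadetector.writers.nd_plugin import (
    ADBaseDataType,
    convert_ad_dtype_to_np,
)

# "<" means little-endian; "|" means byte order is not applicable (1-byte types)
assert convert_ad_dtype_to_np(ADBaseDataType.UInt16) == "<u2"
assert convert_ad_dtype_to_np(ADBaseDataType.Int8) == "|i1"
assert convert_ad_dtype_to_np(ADBaseDataType.Float64) == "<f8"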

ophyd_async/epics/demo/demo_ad_sim_detector.py CHANGED

@@ -1,6 +1,6 @@
 from typing import Sequence
 
-from ophyd_async.core import
+from ophyd_async.core import PathProvider, SignalR, StandardDetector
 
 from ..areadetector.controllers import ADSimController
 from ..areadetector.drivers import ADBase, ADBaseShapeProvider
@@ -15,7 +15,7 @@ class DemoADSimDetector(StandardDetector):
         self,
         drv: ADBase,
         hdf: NDFileHDF,
-
+        path_provider: PathProvider,
         name: str = "",
         config_sigs: Sequence[SignalR] = (),
     ):
@@ -26,7 +26,7 @@ class DemoADSimDetector(StandardDetector):
             ADSimController(self.drv),
             HDFWriter(
                 self.hdf,
-
+                path_provider,
                 lambda: self.name,
                 ADBaseShapeProvider(self.drv),
             ),

ophyd_async/epics/motion/motor.py CHANGED

@@ -1,6 +1,8 @@
 import asyncio
+from typing import Optional
 
-from bluesky.protocols import Movable, Stoppable
+from bluesky.protocols import Flyable, Movable, Preparable, Stoppable
+from pydantic import BaseModel, Field
 
 from ophyd_async.core import (
     ConfigSignal,
@@ -8,6 +10,7 @@ from ophyd_async.core import (
     StandardReadable,
     WatchableAsyncStatus,
 )
+from ophyd_async.core.async_status import AsyncStatus
 from ophyd_async.core.signal import observe_value
 from ophyd_async.core.utils import (
     DEFAULT_TIMEOUT,
@@ -19,7 +22,39 @@ from ophyd_async.core.utils import (
 from ..signal.signal import epics_signal_r, epics_signal_rw, epics_signal_x
 
 
-class
+class MotorLimitsException(Exception):
+    pass
+
+
+class InvalidFlyMotorException(Exception):
+    pass
+
+
+DEFAULT_MOTOR_FLY_TIMEOUT = 60
+DEFAULT_WATCHER_UPDATE_FREQUENCY = 0.2
+
+
+class FlyMotorInfo(BaseModel):
+    """Minimal set of information required to fly a motor:"""
+
+    #: Absolute position of the motor once it finishes accelerating to desired
+    #: velocity, in motor EGUs
+    start_position: float = Field(frozen=True)
+
+    #: Absolute position of the motor once it begins decelerating from desired
+    #: velocity, in EGUs
+    end_position: float = Field(frozen=True)
+
+    #: Time taken for the motor to get from start_position to end_position, excluding
+    #: run-up and run-down, in seconds.
+    time_for_move: float = Field(frozen=True, gt=0)
+
+    #: Maximum time for the complete motor move, including run up and run down.
+    #: Defaults to `time_for_move` + run up and run down times + 10s.
+    timeout: CalculatableTimeout = Field(frozen=True, default=CalculateTimeout)
+
+
+class Motor(StandardReadable, Movable, Stoppable, Flyable, Preparable):
     """Device that moves a motor record"""
 
     def __init__(self, prefix: str, name="") -> None:
@@ -43,6 +78,16 @@ class Motor(StandardReadable, Movable, Stoppable):
         self.motor_stop = epics_signal_x(prefix + ".STOP")
         # Whether set() should complete successfully or not
         self._set_success = True
+
+        # end_position of a fly move, with run_up_distance added on.
+        self._fly_completed_position: Optional[float] = None
+
+        # Set on kickoff(), complete when motor reaches self._fly_completed_position
+        self._fly_status: Optional[WatchableAsyncStatus] = None
+
+        # Set during prepare
+        self._fly_timeout: Optional[CalculatableTimeout] = CalculateTimeout
+
         super().__init__(name=name)
 
     def set_name(self, name: str):
@@ -50,6 +95,44 @@ class Motor(StandardReadable, Movable, Stoppable):
         # Readback should be named the same as its parent in read()
         self.user_readback.set_name(name)
 
+    @AsyncStatus.wrap
+    async def prepare(self, value: FlyMotorInfo):
+        """Calculate required velocity and run-up distance, then if motor limits aren't
+        breached, move to start position minus run-up distance"""
+
+        self._fly_timeout = value.timeout
+
+        # Velocity, at which motor travels from start_position to end_position, in motor
+        # egu/s.
+        fly_velocity = await self._prepare_velocity(
+            value.start_position,
+            value.end_position,
+            value.time_for_move,
+        )
+
+        # start_position with run_up_distance added on.
+        fly_prepared_position = await self._prepare_motor_path(
+            abs(fly_velocity), value.start_position, value.end_position
+        )
+
+        await self.set(fly_prepared_position)
+
+    @AsyncStatus.wrap
+    async def kickoff(self):
+        """Begin moving motor from prepared position to final position."""
+        assert (
+            self._fly_completed_position
+        ), "Motor must be prepared before attempting to kickoff"
+
+        self._fly_status = self.set(
+            self._fly_completed_position, timeout=self._fly_timeout
+        )
+
+    def complete(self) -> WatchableAsyncStatus:
+        """Mark as complete once motor reaches completed position."""
+        assert self._fly_status, "kickoff not called"
+        return self._fly_status
+
     @WatchableAsyncStatus.wrap
     async def set(
         self, new_position: float, timeout: CalculatableTimeout = CalculateTimeout
@@ -95,3 +178,50 @@ class Motor(StandardReadable, Movable, Stoppable):
         # Put with completion will never complete as we are waiting for completion on
         # the move above, so need to pass wait=False
         await self.motor_stop.trigger(wait=False)
+
+    async def _prepare_velocity(
+        self, start_position: float, end_position: float, time_for_move: float
+    ) -> float:
+        fly_velocity = (start_position - end_position) / time_for_move
+        max_speed, egu = await asyncio.gather(
+            self.max_velocity.get_value(), self.motor_egu.get_value()
+        )
+        if abs(fly_velocity) > max_speed:
+            raise MotorLimitsException(
+                f"Motor speed of {abs(fly_velocity)} {egu}/s was requested for a motor "
+                f" with max speed of {max_speed} {egu}/s"
+            )
+        await self.velocity.set(abs(fly_velocity))
+        return fly_velocity
+
+    async def _prepare_motor_path(
+        self, fly_velocity: float, start_position: float, end_position: float
+    ) -> float:
+        # Distance required for motor to accelerate from stationary to fly_velocity, and
+        # distance required for motor to decelerate from fly_velocity to stationary
+        run_up_distance = (await self.acceleration_time.get_value()) * fly_velocity
+
+        self._fly_completed_position = end_position + run_up_distance
+
+        # Prepared position not used after prepare, so no need to store in self
+        fly_prepared_position = start_position - run_up_distance
+
+        motor_lower_limit, motor_upper_limit, egu = await asyncio.gather(
+            self.low_limit_travel.get_value(),
+            self.high_limit_travel.get_value(),
+            self.motor_egu.get_value(),
+        )
+
+        if (
+            not motor_upper_limit >= fly_prepared_position >= motor_lower_limit
+            or not motor_upper_limit
+            >= self._fly_completed_position
+            >= motor_lower_limit
+        ):
+            raise MotorLimitsException(
+                f"Motor trajectory for requested fly is from "
+                f"{fly_prepared_position}{egu} to "
+                f"{self._fly_completed_position}{egu} but motor limits are "
+                f"{motor_lower_limit}{egu} <= x <= {motor_upper_limit}{egu} "
+            )
+        return fly_prepared_position
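
Motor now implements Preparable and Flyable: prepare() takes a FlyMotorInfo, checks limits, sets the velocity and moves to the start position minus the run-up distance; kickoff() starts the move to the end position plus the run-down distance; complete() returns the status of that move. A standalone sketch, not part of the diff, that uses only the interface shown above and assumes an already-connected Motor:

from ophyd_async.epics.motion.motor import FlyMotorInfo, Motor


async def fly_motor(motor: Motor) -> None:
    # Fly from 10 to 20 EGU in 5 s at constant velocity (example values)
    info = FlyMotorInfo(
        start_position=10.0,  # constant-velocity region starts here
        end_position=20.0,    # deceleration starts after this point
        time_for_move=5.0,    # seconds between start_position and end_position
    )
    await motor.prepare(info)   # sets velocity, moves to start minus run-up
    await motor.kickoff()       # starts the move towards end plus run-down
    await motor.complete()      # waits for the fly move to finish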

ophyd_async/panda/__init__.py CHANGED

@@ -1,7 +1,10 @@
 from ._common_blocks import (
     CommonPandaBlocks,
     DataBlock,
+    EnableDisableOptions,
     PcapBlock,
+    PcompBlock,
+    PcompDirectionOptions,
     PulseBlock,
     SeqBlock,
     TimeUnits,
@@ -15,17 +18,27 @@ from ._table import (
     seq_table_from_arrays,
     seq_table_from_rows,
 )
-from ._trigger import
+from ._trigger import (
+    PcompInfo,
+    SeqTableInfo,
+    StaticPcompTriggerLogic,
+    StaticSeqTableTriggerLogic,
+)
 from ._utils import phase_sorter
 
 __all__ = [
     "CommonPandaBlocks",
     "HDFPanda",
+    "PcompBlock",
+    "PcompInfo",
+    "PcompDirectionOptions",
+    "EnableDisableOptions",
     "PcapBlock",
     "PulseBlock",
     "seq_table_from_arrays",
     "seq_table_from_rows",
     "SeqBlock",
+    "SeqTableInfo",
     "SeqTable",
     "SeqTableRow",
     "SeqTrigger",
@@ -35,4 +48,5 @@ __all__ = [
     "DataBlock",
     "CommonPandABlocks",
     "StaticSeqTableTriggerLogic",
+    "StaticPcompTriggerLogic",
 ]