ophyd-async 0.3a1__py3-none-any.whl → 0.3a3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +1 -1
- ophyd_async/core/__init__.py +23 -3
- ophyd_async/core/_providers.py +3 -1
- ophyd_async/core/detector.py +72 -46
- ophyd_async/core/device.py +8 -0
- ophyd_async/core/flyer.py +12 -21
- ophyd_async/core/signal.py +134 -20
- ophyd_async/core/signal_backend.py +6 -3
- ophyd_async/core/sim_signal_backend.py +32 -20
- ophyd_async/core/standard_readable.py +212 -23
- ophyd_async/core/utils.py +18 -1
- ophyd_async/epics/_backend/_aioca.py +17 -15
- ophyd_async/epics/_backend/_p4p.py +34 -25
- ophyd_async/epics/_backend/common.py +16 -11
- ophyd_async/epics/areadetector/__init__.py +8 -0
- ophyd_async/epics/areadetector/aravis.py +67 -0
- ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +73 -0
- ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +36 -24
- ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +154 -0
- ophyd_async/epics/areadetector/drivers/kinetix_driver.py +24 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +4 -4
- ophyd_async/epics/areadetector/drivers/vimba_driver.py +58 -0
- ophyd_async/epics/areadetector/kinetix.py +46 -0
- ophyd_async/epics/areadetector/pilatus.py +45 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
- ophyd_async/epics/areadetector/vimba.py +43 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +4 -4
- ophyd_async/epics/areadetector/writers/hdf_writer.py +12 -4
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +1 -0
- ophyd_async/epics/demo/__init__.py +45 -18
- ophyd_async/epics/motion/motor.py +24 -19
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/signal.py +26 -9
- ophyd_async/log.py +130 -0
- ophyd_async/panda/__init__.py +17 -6
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/{panda_controller.py → _panda_controller.py} +3 -7
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/planstubs/__init__.py +5 -0
- ophyd_async/planstubs/prepare_trigger_and_dets.py +57 -0
- ophyd_async/protocols.py +96 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +118 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/METADATA +30 -69
- ophyd_async-0.3a3.dist-info/RECORD +83 -0
- ophyd_async/epics/pvi.py +0 -70
- ophyd_async/panda/panda.py +0 -241
- ophyd_async-0.3a1.dist-info/RECORD +0 -56
- /ophyd_async/panda/{table.py → _table.py} +0 -0
- /ophyd_async/panda/{utils.py → _utils.py} +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/LICENSE +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/WHEEL +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a3.dist-info}/top_level.txt +0 -0
ophyd_async/panda/_trigger.py
ADDED
@@ -0,0 +1,39 @@
+import asyncio
+from dataclasses import dataclass
+
+from ophyd_async.core import TriggerLogic, wait_for_value
+from ophyd_async.panda import SeqBlock, SeqTable, TimeUnits
+
+
+@dataclass
+class SeqTableInfo:
+    sequence_table: SeqTable
+    repeats: int
+    prescale_as_us: float = 1  # microseconds
+
+
+class StaticSeqTableTriggerLogic(TriggerLogic[SeqTableInfo]):
+    def __init__(self, seq: SeqBlock) -> None:
+        self.seq = seq
+
+    async def prepare(self, value: SeqTableInfo):
+        await asyncio.gather(
+            self.seq.prescale_units.set(TimeUnits.us),
+            self.seq.enable.set("ZERO"),
+        )
+        await asyncio.gather(
+            self.seq.prescale.set(value.prescale_as_us),
+            self.seq.repeats.set(value.repeats),
+            self.seq.table.set(value.sequence_table),
+        )
+
+    async def kickoff(self) -> None:
+        await self.seq.enable.set("ONE")
+        await wait_for_value(self.seq.active, True, timeout=1)
+
+    async def complete(self) -> None:
+        await wait_for_value(self.seq.active, False, timeout=None)
+
+    async def stop(self):
+        await self.seq.enable.set("ZERO")
+        await wait_for_value(self.seq.active, False, timeout=1)
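Illustrative and not part of the diff: a minimal sketch of driving the new trigger logic by hand, outside a plan. `panda.seq[1]` stands in for a SeqBlock on a connected PandA (a hypothetical device), and the table helpers are the same ones the new planstub below imports:

from ophyd_async.panda._table import SeqTableRow, seq_table_from_rows
from ophyd_async.panda._trigger import SeqTableInfo, StaticSeqTableTriggerLogic

async def run_one_sequence(panda):
    # panda.seq[1] is assumed: a SeqBlock entry on a connected PandA device
    logic = StaticSeqTableTriggerLogic(panda.seq[1])
    # One row: 10 pulses of 1000 prescale units (1 ms at the default 1 us prescale)
    table = seq_table_from_rows(SeqTableRow(repeats=10, time1=1000, outa1=True))
    await logic.prepare(SeqTableInfo(table, repeats=1))
    await logic.kickoff()     # enables the block and waits for it to go active
    await logic.complete()    # blocks until the sequence table has run through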
ophyd_async/panda/writers/_hdf_writer.py
ADDED
@@ -0,0 +1,220 @@
+import asyncio
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional
+
+from bluesky.protocols import DataKey, StreamAsset
+from p4p.client.thread import Context
+
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    DetectorWriter,
+    Device,
+    DirectoryProvider,
+    NameProvider,
+    SignalR,
+    wait_for_value,
+)
+from ophyd_async.core.signal import observe_value
+from ophyd_async.panda import CommonPandaBlocks
+
+from ._panda_hdf_file import _HDFDataset, _HDFFile
+
+
+class Capture(str, Enum):
+    # Capture signals for the HDF Panda
+    No = "No"
+    Value = "Value"
+    Diff = "Diff"
+    Sum = "Sum"
+    Mean = "Mean"
+    Min = "Min"
+    Max = "Max"
+    MinMax = "Min Max"
+    MinMaxMean = "Min Max Mean"
+
+
+def get_capture_signals(
+    block: Device, path_prefix: Optional[str] = ""
+) -> Dict[str, SignalR]:
+    """Get dict mapping a capture signal's name to the signal itself"""
+    if not path_prefix:
+        path_prefix = ""
+    signals: Dict[str, SignalR[Any]] = {}
+    for attr_name, attr in block.children():
+        # Capture signals end in _capture, but num_capture is a red herring
+        if attr_name == "num_capture":
+            continue
+        dot_path = f"{path_prefix}{attr_name}"
+        if isinstance(attr, SignalR) and attr_name.endswith("_capture"):
+            signals[dot_path] = attr
+        attr_signals = get_capture_signals(attr, path_prefix=dot_path + ".")
+        signals.update(attr_signals)
+    return signals
+
+
+@dataclass
+class CaptureSignalWrapper:
+    signal: SignalR
+    capture_type: Capture
+
+
+# This should return a dictionary which contains a dict, containing the Capture
+# signal object, and the value of that signal
+async def get_signals_marked_for_capture(
+    capture_signals: Dict[str, SignalR],
+) -> Dict[str, CaptureSignalWrapper]:
+    # Read signals to see if they should be captured
+    do_read = [signal.get_value() for signal in capture_signals.values()]
+
+    signal_values = await asyncio.gather(*do_read)
+
+    assert len(signal_values) == len(
+        capture_signals
+    ), "Length of read signals are different to length of signals"
+
+    signals_to_capture: Dict[str, CaptureSignalWrapper] = {}
+    for signal_path, signal_object, signal_value in zip(
+        capture_signals.keys(), capture_signals.values(), signal_values
+    ):
+        signal_path = signal_path.replace("_capture", "")
+        if (signal_value.value in iter(Capture)) and (signal_value.value != Capture.No):
+            signals_to_capture[signal_path] = CaptureSignalWrapper(
+                signal_object,
+                signal_value.value,
+            )
+
+    return signals_to_capture
+
+
+class PandaHDFWriter(DetectorWriter):
+    _ctxt: Optional[Context] = None
+
+    def __init__(
+        self,
+        prefix: str,
+        directory_provider: DirectoryProvider,
+        name_provider: NameProvider,
+        panda_device: CommonPandaBlocks,
+    ) -> None:
+        self.panda_device = panda_device
+        self._prefix = prefix
+        self._directory_provider = directory_provider
+        self._name_provider = name_provider
+        self._datasets: List[_HDFDataset] = []
+        self._file: Optional[_HDFFile] = None
+        self._multiplier = 1
+
+    # Triggered on PCAP arm
+    async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
+        """Retrieve and get descriptor of all PandA signals marked for capture"""
+
+        # Get capture PVs by looking at panda. Gives mapping of dotted attribute path
+        # to Signal object
+        self.capture_signals = get_capture_signals(self.panda_device)
+
+        # Ensure flushes are immediate
+        await self.panda_device.data.flush_period.set(0)
+
+        to_capture = await get_signals_marked_for_capture(self.capture_signals)
+        self._file = None
+        info = self._directory_provider()
+        # Set the initial values
+        await asyncio.gather(
+            self.panda_device.data.hdf_directory.set(
+                str(info.root / info.resource_dir)
+            ),
+            self.panda_device.data.hdf_file_name.set(
+                f"{info.prefix}{self.panda_device.name}{info.suffix}",
+            ),
+            self.panda_device.data.num_capture.set(0),
+        )
+
+        # Wait for it to start, stashing the status that tells us when it finishes
+        await self.panda_device.data.capture.set(True)
+        name = self._name_provider()
+        if multiplier > 1:
+            raise ValueError(
+                "All PandA datasets should be scalar, multiplier should be 1"
+            )
+        self._datasets = []
+        for attribute_path, capture_signal in to_capture.items():
+            split_path = attribute_path.split(".")
+            signal_name = split_path[-1]
+            # Get block names from numbered blocks, eg INENC[1]
+            block_name = (
+                f"{split_path[-3]}{split_path[-2]}"
+                if split_path[-2].isnumeric()
+                else split_path[-2]
+            )
+
+            for suffix in str(capture_signal.capture_type).split(" "):
+                self._datasets.append(
+                    _HDFDataset(
+                        name,
+                        block_name,
+                        f"{name}-{block_name}-{signal_name}-{suffix}",
+                        f"{block_name}-{signal_name}".upper() + f"-{suffix}",
+                        [1],
+                        multiplier=1,
+                    )
+                )
+
+        describe = {
+            ds.name: DataKey(
+                source=self.panda_device.data.hdf_directory.source,
+                shape=ds.shape,
+                dtype="array" if ds.shape != [1] else "number",
+                external="STREAM:",
+            )
+            for ds in self._datasets
+        }
+        return describe
+
+    # Next few functions are exactly the same as AD writer. Could move as default
+    # StandardDetector behavior
+    async def wait_for_index(
+        self, index: int, timeout: Optional[float] = DEFAULT_TIMEOUT
+    ):
+        def matcher(value: int) -> bool:
+            return value >= index
+
+        matcher.__name__ = f"index_at_least_{index}"
+        await wait_for_value(
+            self.panda_device.data.num_captured, matcher, timeout=timeout
+        )
+
+    async def get_indices_written(self) -> int:
+        return await self.panda_device.data.num_captured.get_value()
+
+    async def observe_indices_written(
+        self, timeout=DEFAULT_TIMEOUT
+    ) -> AsyncGenerator[int, None]:
+        """Wait until a specific index is ready to be collected"""
+        async for num_captured in observe_value(
+            self.panda_device.data.num_captured, timeout
+        ):
+            yield num_captured // self._multiplier
+
+    async def collect_stream_docs(
+        self, indices_written: int
+    ) -> AsyncIterator[StreamAsset]:
+        # TODO: fail if we get dropped frames
+        if indices_written:
+            if not self._file:
+                self._file = _HDFFile(
+                    self._directory_provider(),
+                    Path(await self.panda_device.data.hdf_file_name.get_value()),
+                    self._datasets,
+                )
+                for doc in self._file.stream_resources():
+                    yield "stream_resource", doc
+            for doc in self._file.stream_data(indices_written):
+                yield "stream_datum", doc
+
+    # Could put this function as default for StandardDetector
+    async def close(self):
+        await self.panda_device.data.capture.set(
+            False, wait=True, timeout=DEFAULT_TIMEOUT
+        )
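An illustrative walk-through (not part of the diff) of the dataset naming in `open()` above: a capture path from a numbered block, whose capture signal reads back as "Min Max", fans out into one dataset per suffix. The values below are hypothetical:

device_name = "panda"                      # stand-in for self._name_provider()
path = "inenc.1.val"                       # dotted path with "_capture" stripped
split_path = path.split(".")               # ["inenc", "1", "val"]
block_name = (
    f"{split_path[-3]}{split_path[-2]}"    # numbered block folds in the index: "inenc1"
    if split_path[-2].isnumeric()
    else split_path[-2]
)
for suffix in "Min Max".split(" "):
    print(f"{device_name}-{block_name}-{split_path[-1]}-{suffix}")
# panda-inenc1-val-Min
# panda-inenc1-val-Max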
ophyd_async/panda/writers/_panda_hdf_file.py
ADDED
@@ -0,0 +1,58 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Iterator, List
+
+from event_model import StreamDatum, StreamResource, compose_stream_resource
+
+from ophyd_async.core import DirectoryInfo
+
+
+@dataclass
+class _HDFDataset:
+    device_name: str
+    block: str
+    name: str
+    path: str
+    shape: List[int]
+    multiplier: int
+
+
+class _HDFFile:
+    def __init__(
+        self,
+        directory_info: DirectoryInfo,
+        full_file_name: Path,
+        datasets: List[_HDFDataset],
+    ) -> None:
+        self._last_emitted = 0
+        self._bundles = [
+            compose_stream_resource(
+                spec="AD_HDF5_SWMR_SLICE",
+                root=str(directory_info.root),
+                data_key=ds.name,
+                resource_path=(f"{str(directory_info.root)}/{full_file_name}"),
+                resource_kwargs={
+                    "name": ds.name,
+                    "block": ds.block,
+                    "path": ds.path,
+                    "multiplier": ds.multiplier,
+                    "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
+                },
+            )
+            for ds in datasets
+        ]
+
+    def stream_resources(self) -> Iterator[StreamResource]:
+        for bundle in self._bundles:
+            yield bundle.stream_resource_doc
+
+    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
+        # Indices are relative to resource
+        if indices_written > self._last_emitted:
+            indices = {
+                "start": self._last_emitted,
+                "stop": indices_written,
+            }
+            self._last_emitted = indices_written
+            for bundle in self._bundles:
+                yield bundle.compose_stream_datum(indices)
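The contract of `stream_data` above is incremental: each call emits only the index range written since the last call. A sketch of the behaviour, assuming a constructed `_HDFFile` instance `f`:

docs = list(f.stream_data(5))   # StreamDatum docs with indices {"start": 0, "stop": 5}
docs = list(f.stream_data(8))   # only the new slice: {"start": 5, "stop": 8}
docs = list(f.stream_data(8))   # no new indices written, so nothing is yielded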
ophyd_async/planstubs/prepare_trigger_and_dets.py
ADDED
@@ -0,0 +1,57 @@
+from typing import List
+
+import bluesky.plan_stubs as bps
+
+from ophyd_async.core.detector import DetectorTrigger, StandardDetector, TriggerInfo
+from ophyd_async.core.flyer import HardwareTriggeredFlyable
+from ophyd_async.core.utils import in_micros
+from ophyd_async.panda._table import SeqTable, SeqTableRow, seq_table_from_rows
+from ophyd_async.panda._trigger import SeqTableInfo
+
+
+def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
+    flyer: HardwareTriggeredFlyable[SeqTableInfo],
+    detectors: List[StandardDetector],
+    num: int,
+    width: float,
+    deadtime: float,
+    shutter_time: float,
+    repeats: int = 1,
+    period: float = 0.0,
+):
+    trigger_info = TriggerInfo(
+        num=num * repeats,
+        trigger=DetectorTrigger.constant_gate,
+        deadtime=deadtime,
+        livetime=width,
+    )
+
+    trigger_time = num * (width + deadtime)
+    pre_delay = max(period - 2 * shutter_time - trigger_time, 0)
+
+    table: SeqTable = seq_table_from_rows(
+        # Wait for pre-delay then open shutter
+        SeqTableRow(
+            time1=in_micros(pre_delay),
+            time2=in_micros(shutter_time),
+            outa2=True,
+        ),
+        # Keeping shutter open, do N triggers
+        SeqTableRow(
+            repeats=num,
+            time1=in_micros(width),
+            outa1=True,
+            outb1=True,
+            time2=in_micros(deadtime),
+            outa2=True,
+        ),
+        # Add the shutter close
+        SeqTableRow(time2=in_micros(shutter_time)),
+    )
+
+    table_info = SeqTableInfo(table, repeats)
+
+    for det in detectors:
+        yield from bps.prepare(det, trigger_info, wait=False, group="prep")
+    yield from bps.prepare(flyer, table_info, wait=False, group="prep")
+    yield from bps.wait(group="prep")
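A hedged sketch of composing the new stub into a fly plan; `flyer` and `det` are assumed to be an already-connected HardwareTriggeredFlyable and StandardDetector, and kickoff/complete are the standard bluesky plan stubs:

import bluesky.plan_stubs as bps

def fly_once(flyer, det):
    # Load the sequence table and arm the detector together, then run to completion
    yield from prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
        flyer, [det], num=10, width=0.1, deadtime=0.05, shutter_time=0.02
    )
    yield from bps.kickoff(flyer, wait=True)
    yield from bps.complete(flyer, wait=True)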
ophyd_async/protocols.py
ADDED
@@ -0,0 +1,96 @@
+from abc import abstractmethod
+from typing import Dict, Protocol, runtime_checkable
+
+from bluesky.protocols import DataKey, HasName, Reading
+
+from ophyd_async.core.async_status import AsyncStatus
+
+
+@runtime_checkable
+class AsyncReadable(HasName, Protocol):
+    @abstractmethod
+    async def read(self) -> Dict[str, Reading]:
+        """Return an OrderedDict mapping string field name(s) to dictionaries
+        of values and timestamps and optional per-point metadata.
+
+        Example return value:
+
+        .. code-block:: python
+
+            OrderedDict(('channel1',
+                         {'value': 5, 'timestamp': 1472493713.271991}),
+                        ('channel2',
+                         {'value': 16, 'timestamp': 1472493713.539238}))
+        """
+        ...
+
+    @abstractmethod
+    async def describe(self) -> Dict[str, DataKey]:
+        """Return an OrderedDict with exactly the same keys as the ``read``
+        method, here mapped to per-scan metadata about each field.
+
+        Example return value:
+
+        .. code-block:: python
+
+            OrderedDict(('channel1',
+                         {'source': 'XF23-ID:SOME_PV_NAME',
+                          'dtype': 'number',
+                          'shape': []}),
+                        ('channel2',
+                         {'source': 'XF23-ID:SOME_PV_NAME',
+                          'dtype': 'number',
+                          'shape': []}))
+        """
+        ...
+
+
+@runtime_checkable
+class AsyncConfigurable(Protocol):
+    @abstractmethod
+    async def read_configuration(self) -> Dict[str, Reading]:
+        """Same API as ``read`` but for slow-changing fields related to configuration.
+        e.g., exposure time. These will typically be read only once per run.
+        """
+        ...
+
+    @abstractmethod
+    async def describe_configuration(self) -> Dict[str, DataKey]:
+        """Same API as ``describe``, but corresponding to the keys in
+        ``read_configuration``.
+        """
+        ...
+
+
+@runtime_checkable
+class AsyncPausable(Protocol):
+    @abstractmethod
+    async def pause(self) -> None:
+        """Perform device-specific work when the RunEngine pauses."""
+        ...
+
+    @abstractmethod
+    async def resume(self) -> None:
+        """Perform device-specific work when the RunEngine resumes after a pause."""
+        ...
+
+
+@runtime_checkable
+class AsyncStageable(Protocol):
+    @abstractmethod
+    def stage(self) -> AsyncStatus:
+        """An optional hook for "setting up" the device for acquisition.
+
+        It should return a ``Status`` that is marked done when the device is
+        done staging.
+        """
+        ...
+
+    @abstractmethod
+    def unstage(self) -> AsyncStatus:
+        """A hook for "cleaning up" the device after acquisition.
+
+        It should return a ``Status`` that is marked done when the device is finished
+        unstaging.
+        """
+        ...
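Because each protocol is decorated with `@runtime_checkable`, these can be used in structural `isinstance` checks (which verify method presence only, not that the methods are actually coroutines). A minimal sketch:

from ophyd_async.protocols import AsyncReadable

async def read_all(devices) -> dict:
    # Collect readings from whichever devices satisfy AsyncReadable
    readings = {}
    for device in devices:
        if isinstance(device, AsyncReadable):
            readings.update(await device.read())
    return readings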
ophyd_async/sim/__init__.py
ADDED
@@ -0,0 +1,11 @@
+from .pattern_generator import PatternGenerator
+from .sim_pattern_detector_control import SimPatternDetectorControl
+from .sim_pattern_detector_writer import SimPatternDetectorWriter
+from .sim_pattern_generator import SimPatternDetector
+
+__all__ = [
+    "PatternGenerator",
+    "SimPatternDetectorControl",
+    "SimPatternDetectorWriter",
+    "SimPatternDetector",
+]
ophyd_async/sim/demo/sim_motor.py
ADDED
@@ -0,0 +1,118 @@
+import asyncio
+import time
+from typing import Callable, List, Optional
+
+from bluesky.protocols import Movable, Stoppable
+
+from ophyd_async.core import StandardReadable
+from ophyd_async.core.async_status import AsyncStatus
+from ophyd_async.core.signal import soft_signal_r_and_backend, soft_signal_rw
+from ophyd_async.core.standard_readable import ConfigSignal, HintedSignal
+
+
+class SimMotor(StandardReadable, Movable, Stoppable):
+    def __init__(self, name="", instant=True) -> None:
+        """
+        Simulated motor device
+
+        args:
+        - prefix: str: Signal names prefix
+        - name: str: name of device
+        - instant: bool: whether to move instantly, or with a delay
+        """
+        with self.add_children_as_readables(HintedSignal):
+            self.user_readback, self._user_readback = soft_signal_r_and_backend(
+                float, 0
+            )
+
+        with self.add_children_as_readables(ConfigSignal):
+            self.velocity = soft_signal_rw(float, 1.0)
+            self.egu = soft_signal_rw(float, "mm")
+
+        self._instant = instant
+        self._move_task: Optional[asyncio.Task] = None
+
+        # Define some signals
+        self.user_setpoint = soft_signal_rw(float, 0)
+
+        super().__init__(name=name)
+
+        # Whether set() should complete successfully or not
+        self._set_success = True
+
+    def stop(self, success=False):
+        """
+        Stop the motor if it is moving
+        """
+        if self._move_task:
+            self._move_task.cancel()
+            self._move_task = None
+
+        self._set_success = success
+
+    def set(self, new_position: float, timeout: Optional[float] = None) -> AsyncStatus:  # noqa: F821
+        """
+        Asynchronously move the motor to a new position.
+        """
+        watchers: List[Callable] = []
+        coro = asyncio.wait_for(self._move(new_position, watchers), timeout=timeout)
+        return AsyncStatus(coro, watchers)
+
+    async def _move(self, new_position: float, watchers: List[Callable] = []):
+        """
+        Start the motor moving to a new position.
+
+        If the motor is already moving, it will stop first.
+        If this is an instant motor the move will be instantaneous.
+        """
+        self.stop()
+        start = time.monotonic()
+
+        current_position = await self.user_readback.get_value()
+        distance = abs(new_position - current_position)
+        travel_time = 0 if self._instant else distance / await self.velocity.get_value()
+
+        old_position, units = await asyncio.gather(
+            self.user_setpoint.get_value(),
+            self.egu.get_value(),
+        )
+
+        async def update_position():
+            while True:
+                time_elapsed = round(time.monotonic() - start, 2)
+
+                # update position based on time elapsed
+                if time_elapsed >= travel_time:
+                    # successfully reached our target position
+                    await self._user_readback.put(new_position)
+                    self._set_success = True
+                    break
+                else:
+                    current_position = (
+                        old_position + distance * time_elapsed / travel_time
+                    )
+
+                    await self._user_readback.put(current_position)
+
+                # notify watchers of the new position
+                for watcher in watchers:
+                    watcher(
+                        name=self.name,
+                        current=current_position,
+                        initial=old_position,
+                        target=new_position,
+                        unit=units,
+                        time_elapsed=time.monotonic() - start,
+                    )
+
+                # 10hz update loop
+                await asyncio.sleep(0.1)
+
+        # set up a task that updates the motor position at 10hz
+        self._move_task = asyncio.create_task(update_position())
+
+        try:
+            await self._move_task
+        finally:
+            if not self._set_success:
+                raise RuntimeError("Motor was stopped")
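A usage sketch, assuming the motor has been instantiated and connected under a running event loop (for example via `ophyd_async.core.DeviceCollector`). With the default `instant=True` the move completes immediately; constructed with `instant=False` it ramps at the configured `velocity`:

async def demo(motor: SimMotor):
    await motor.velocity.set(2.0)                  # configured units per second
    status = motor.set(5.0)                        # returns an AsyncStatus immediately
    await status                                   # a 5 "mm" move from 0 at 2.0 takes ~2.5 s
    print(await motor.user_readback.get_value())   # 5.0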