ophyd-async 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79)
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +2 -2
  3. ophyd_async/core/__init__.py +52 -19
  4. ophyd_async/core/_providers.py +38 -5
  5. ophyd_async/core/async_status.py +86 -40
  6. ophyd_async/core/detector.py +214 -72
  7. ophyd_async/core/device.py +91 -50
  8. ophyd_async/core/device_save_loader.py +96 -23
  9. ophyd_async/core/flyer.py +32 -246
  10. ophyd_async/core/mock_signal_backend.py +82 -0
  11. ophyd_async/core/mock_signal_utils.py +145 -0
  12. ophyd_async/core/signal.py +225 -58
  13. ophyd_async/core/signal_backend.py +8 -5
  14. ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py} +51 -49
  15. ophyd_async/core/standard_readable.py +212 -23
  16. ophyd_async/core/utils.py +123 -30
  17. ophyd_async/epics/_backend/_aioca.py +42 -44
  18. ophyd_async/epics/_backend/_p4p.py +96 -52
  19. ophyd_async/epics/_backend/common.py +25 -0
  20. ophyd_async/epics/areadetector/__init__.py +8 -4
  21. ophyd_async/epics/areadetector/aravis.py +63 -0
  22. ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
  23. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
  24. ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
  25. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  26. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +37 -25
  27. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  28. ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
  29. ophyd_async/epics/areadetector/drivers/ad_base.py +8 -12
  30. ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
  31. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  32. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +8 -5
  33. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  34. ophyd_async/epics/areadetector/kinetix.py +46 -0
  35. ophyd_async/epics/areadetector/pilatus.py +45 -0
  36. ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
  37. ophyd_async/epics/areadetector/utils.py +2 -12
  38. ophyd_async/epics/areadetector/vimba.py +43 -0
  39. ophyd_async/epics/areadetector/writers/_hdffile.py +21 -7
  40. ophyd_async/epics/areadetector/writers/hdf_writer.py +32 -17
  41. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +19 -18
  42. ophyd_async/epics/areadetector/writers/nd_plugin.py +15 -7
  43. ophyd_async/epics/demo/__init__.py +75 -49
  44. ophyd_async/epics/motion/motor.py +67 -53
  45. ophyd_async/epics/pvi/__init__.py +3 -0
  46. ophyd_async/epics/pvi/pvi.py +318 -0
  47. ophyd_async/epics/signal/__init__.py +8 -3
  48. ophyd_async/epics/signal/signal.py +26 -9
  49. ophyd_async/log.py +130 -0
  50. ophyd_async/panda/__init__.py +21 -5
  51. ophyd_async/panda/_common_blocks.py +49 -0
  52. ophyd_async/panda/_hdf_panda.py +48 -0
  53. ophyd_async/panda/_panda_controller.py +37 -0
  54. ophyd_async/panda/_trigger.py +39 -0
  55. ophyd_async/panda/_utils.py +15 -0
  56. ophyd_async/panda/writers/__init__.py +3 -0
  57. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  58. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  59. ophyd_async/plan_stubs/__init__.py +13 -0
  60. ophyd_async/plan_stubs/ensure_connected.py +22 -0
  61. ophyd_async/plan_stubs/fly.py +149 -0
  62. ophyd_async/protocols.py +126 -0
  63. ophyd_async/sim/__init__.py +11 -0
  64. ophyd_async/sim/demo/__init__.py +3 -0
  65. ophyd_async/sim/demo/sim_motor.py +103 -0
  66. ophyd_async/sim/pattern_generator.py +318 -0
  67. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  68. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  69. ophyd_async/sim/sim_pattern_generator.py +37 -0
  70. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +31 -70
  71. ophyd_async-0.3.0.dist-info/RECORD +86 -0
  72. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
  73. ophyd_async/epics/signal/pvi_get.py +0 -22
  74. ophyd_async/panda/panda.py +0 -294
  75. ophyd_async-0.2.0.dist-info/RECORD +0 -53
  76. /ophyd_async/panda/{table.py → _table.py} +0 -0
  77. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
  78. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
  79. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,37 @@
1
+ import asyncio
2
+ from typing import Optional
3
+
4
+ from ophyd_async.core import (
5
+ AsyncStatus,
6
+ DetectorControl,
7
+ DetectorTrigger,
8
+ wait_for_value,
9
+ )
10
+ from ophyd_async.panda import PcapBlock
11
+
12
+
13
class PandaPcapController(DetectorControl):
    """Arm/disarm control for the PandA PCAP (position capture) block."""

    def __init__(self, pcap: PcapBlock) -> None:
        self.pcap = pcap

    def get_deadtime(self, exposure: float) -> float:
        # PandA deadtime is essentially negligible: 8 ns.
        return 0.000000008

    async def arm(
        self,
        num: int,
        trigger: DetectorTrigger = DetectorTrigger.constant_gate,
        exposure: Optional[float] = None,
    ) -> AsyncStatus:
        """Arm PCAP and return a status that completes when capture stops.

        BUG FIX: the original membership test was
        ``trigger in (DetectorTrigger.constant_gate,
        trigger == DetectorTrigger.variable_gate)`` — the second tuple
        element is a boolean, so ``variable_gate`` was never accepted
        even though the message says it is supported.
        """
        assert trigger in (
            DetectorTrigger.constant_gate,
            DetectorTrigger.variable_gate,
        ), "Only constant_gate and variable_gate triggering is supported on the PandA"
        # Single awaitable, so no need for asyncio.gather here.
        await self.pcap.arm.set(True)
        # Confirm the PandA reports active before handing back a status
        # that resolves when capture finishes (no timeout: scans vary).
        await wait_for_value(self.pcap.active, True, timeout=1)
        return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))

    async def disarm(self) -> AsyncStatus:
        """Disarm PCAP and return a status that completes when inactive."""
        await self.pcap.arm.set(False)
        await wait_for_value(self.pcap.active, False, timeout=1)
        return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))
@@ -0,0 +1,39 @@
1
+ import asyncio
2
+ from dataclasses import dataclass
3
+
4
+ from ophyd_async.core import TriggerLogic, wait_for_value
5
+ from ophyd_async.panda import SeqBlock, SeqTable, TimeUnits
6
+
7
+
8
@dataclass
class SeqTableInfo:
    """Parameters used to prepare a PandA SEQ block for a static-table scan."""

    # Rows to load into the sequencer table
    sequence_table: SeqTable
    # Number of times the whole table is repeated
    repeats: int
    # Sequencer prescale, in microseconds per tick
    prescale_as_us: float = 1  # microseconds
13
+
14
+
15
class StaticSeqTableTriggerLogic(TriggerLogic[SeqTableInfo]):
    """Trigger logic that drives a PandA SEQ block with a pre-computed table."""

    def __init__(self, seq: SeqBlock) -> None:
        self.seq = seq

    async def prepare(self, value: SeqTableInfo):
        """Load the sequence table and its settings into the SEQ block."""
        # Disable the sequencer and fix the prescale units first, so the
        # table is not executed while it is being written.
        await asyncio.gather(
            self.seq.prescale_units.set(TimeUnits.us),
            self.seq.enable.set("ZERO"),
        )
        # With the block disabled, load prescale, repeats and the table
        # itself concurrently.
        await asyncio.gather(
            self.seq.prescale.set(value.prescale_as_us),
            self.seq.repeats.set(value.repeats),
            self.seq.table.set(value.sequence_table),
        )

    async def kickoff(self) -> None:
        """Enable the sequencer and wait (up to 1 s) for it to go active."""
        await self.seq.enable.set("ONE")
        await wait_for_value(self.seq.active, True, timeout=1)

    async def complete(self) -> None:
        """Block until the sequencer finishes; no timeout as scans can be long."""
        await wait_for_value(self.seq.active, False, timeout=None)

    async def stop(self):
        """Disable the sequencer and confirm it has gone inactive."""
        await self.seq.enable.set("ZERO")
        await wait_for_value(self.seq.active, False, timeout=1)
@@ -0,0 +1,15 @@
1
+ from typing import Any, Dict, Sequence
2
+
3
+
4
def phase_sorter(panda_signal_values: Dict[str, Any]) -> Sequence[Dict[str, Any]]:
    """Split PandA signal values into the two load phases.

    The PandA loads saved values in two phases: any signal whose name ends
    in "units" must be applied first, so it goes in phase 1; every other
    signal goes in phase 2.
    """
    units_phase = {
        name: value
        for name, value in panda_signal_values.items()
        if name.endswith("units")
    }
    other_phase = {
        name: value
        for name, value in panda_signal_values.items()
        if not name.endswith("units")
    }
    return [units_phase, other_phase]
@@ -0,0 +1,3 @@
1
+ from ._hdf_writer import PandaHDFWriter
2
+
3
+ __all__ = ["PandaHDFWriter"]
@@ -0,0 +1,220 @@
1
+ import asyncio
2
+ from dataclasses import dataclass
3
+ from enum import Enum
4
+ from pathlib import Path
5
+ from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional
6
+
7
+ from bluesky.protocols import DataKey, StreamAsset
8
+ from p4p.client.thread import Context
9
+
10
+ from ophyd_async.core import (
11
+ DEFAULT_TIMEOUT,
12
+ DetectorWriter,
13
+ Device,
14
+ DirectoryProvider,
15
+ NameProvider,
16
+ SignalR,
17
+ wait_for_value,
18
+ )
19
+ from ophyd_async.core.signal import observe_value
20
+ from ophyd_async.panda import CommonPandaBlocks
21
+
22
+ from ._panda_hdf_file import _HDFDataset, _HDFFile
23
+
24
+
25
class Capture(str, Enum):
    """Capture-mode choices for the HDF PandA's ``*_capture`` signals.

    Multi-word values (e.g. "Min Max Mean") are split on spaces by the
    writer, producing one HDF dataset per word.
    """

    No = "No"
    Value = "Value"
    Diff = "Diff"
    Sum = "Sum"
    Mean = "Mean"
    Min = "Min"
    Max = "Max"
    MinMax = "Min Max"
    MinMaxMean = "Min Max Mean"
36
+
37
+
38
def get_capture_signals(
    block: Device, path_prefix: Optional[str] = ""
) -> Dict[str, SignalR]:
    """Get dict mapping a capture signal's name to the signal itself"""
    prefix = path_prefix or ""
    signals: Dict[str, SignalR[Any]] = {}
    for child_name, child in block.children():
        # "num_capture" ends in "_capture" but is not a capture-mode signal
        if child_name == "num_capture":
            continue
        dotted = f"{prefix}{child_name}"
        if child_name.endswith("_capture") and isinstance(child, SignalR):
            signals[dotted] = child
        # Recurse into child devices to pick up nested capture signals
        signals.update(get_capture_signals(child, path_prefix=dotted + "."))
    return signals
55
+
56
+
57
@dataclass
class CaptureSignalWrapper:
    """Pairs a capture signal with the capture mode it is currently set to."""

    # The *_capture signal on the PandA block
    signal: SignalR
    # The Capture enum value read from that signal
    capture_type: Capture
61
+
62
+
63
+ # This should return a dictionary which contains a dict, containing the Capture
64
+ # signal object, and the value of that signal
65
# This should return a dictionary which contains a dict, containing the Capture
# signal object, and the value of that signal
async def get_signals_marked_for_capture(
    capture_signals: Dict[str, SignalR],
) -> Dict[str, CaptureSignalWrapper]:
    """Read every capture signal and keep those not set to ``Capture.No``."""
    # Read all capture-mode signals concurrently
    signal_values = await asyncio.gather(
        *(signal.get_value() for signal in capture_signals.values())
    )

    assert len(signal_values) == len(
        capture_signals
    ), "Length of read signals are different to length of signals"

    marked: Dict[str, CaptureSignalWrapper] = {}
    for (signal_path, signal_object), signal_value in zip(
        capture_signals.items(), signal_values
    ):
        # Dataset paths are named without the trailing "_capture"
        trimmed_path = signal_path.replace("_capture", "")
        if (signal_value in iter(Capture)) and (signal_value != Capture.No):
            marked[trimmed_path] = CaptureSignalWrapper(signal_object, signal_value)

    return marked
89
+
90
+
91
class PandaHDFWriter(DetectorWriter):
    """DetectorWriter that configures a PandA to capture signals to HDF5.

    The PandA's DATA block does the actual file writing; this class
    configures it, discovers which signals are marked for capture, and
    emits stream resource/datum documents for the resulting datasets.
    """

    # Shared p4p context, lazily populated elsewhere
    _ctxt: Optional[Context] = None

    def __init__(
        self,
        prefix: str,
        directory_provider: DirectoryProvider,
        name_provider: NameProvider,
        panda_device: CommonPandaBlocks,
    ) -> None:
        self.panda_device = panda_device
        self._prefix = prefix
        self._directory_provider = directory_provider
        self._name_provider = name_provider
        self._datasets: List[_HDFDataset] = []
        self._file: Optional[_HDFFile] = None
        self._multiplier = 1

    # Triggered on PCAP arm
    async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
        """Retrieve and get descriptor of all PandA signals marked for capture"""

        # Get capture PVs by looking at panda. Gives mapping of dotted attribute path
        # to Signal object
        self.capture_signals = get_capture_signals(self.panda_device)

        # Ensure flushes are immediate
        await self.panda_device.data.flush_period.set(0)

        to_capture = await get_signals_marked_for_capture(self.capture_signals)
        self._file = None
        info = self._directory_provider()
        # Set the initial values
        await asyncio.gather(
            self.panda_device.data.hdf_directory.set(
                str(info.root / info.resource_dir)
            ),
            self.panda_device.data.hdf_file_name.set(
                f"{info.prefix}{self.panda_device.name}{info.suffix}.h5",
            ),
            # 0 means capture forever; close() stops it explicitly
            self.panda_device.data.num_capture.set(0),
        )

        # Wait for it to start, stashing the status that tells us when it finishes
        await self.panda_device.data.capture.set(True)
        name = self._name_provider()
        if multiplier > 1:
            raise ValueError(
                "All PandA datasets should be scalar, multiplier should be 1"
            )
        self._datasets = []
        for attribute_path, capture_signal in to_capture.items():
            split_path = attribute_path.split(".")
            signal_name = split_path[-1]
            # Get block names from numbered blocks, eg INENC[1]
            block_name = (
                f"{split_path[-3]}{split_path[-2]}"
                if split_path[-2].isnumeric()
                else split_path[-2]
            )

            # Multi-word capture modes (e.g. "Min Max") yield one dataset per word
            for suffix in capture_signal.capture_type.split(" "):
                self._datasets.append(
                    _HDFDataset(
                        name,
                        block_name,
                        f"{name}-{block_name}-{signal_name}-{suffix}",
                        f"{block_name}-{signal_name}".upper() + f"-{suffix}",
                        [1],
                        multiplier=1,
                    )
                )

        describe = {
            ds.name: DataKey(
                source=self.panda_device.data.hdf_directory.source,
                shape=ds.shape,
                dtype="array" if ds.shape != [1] else "number",
                external="STREAM:",
            )
            for ds in self._datasets
        }
        return describe

    # Next few functions are exactly the same as AD writer. Could move as default
    # StandardDetector behavior
    async def wait_for_index(
        self, index: int, timeout: Optional[float] = DEFAULT_TIMEOUT
    ):
        """Wait until at least *index* frames have been captured."""

        def matcher(value: int) -> bool:
            return value >= index

        # Give the matcher a readable name for timeout error messages
        matcher.__name__ = f"index_at_least_{index}"
        await wait_for_value(
            self.panda_device.data.num_captured, matcher, timeout=timeout
        )

    async def get_indices_written(self) -> int:
        """Return the number of frames captured so far."""
        return await self.panda_device.data.num_captured.get_value()

    async def observe_indices_written(
        self, timeout=DEFAULT_TIMEOUT
    ) -> AsyncGenerator[int, None]:
        """Wait until a specific index is ready to be collected"""
        async for num_captured in observe_value(
            self.panda_device.data.num_captured, timeout
        ):
            # _multiplier is always 1 for PandA (see open()), kept for symmetry
            yield num_captured // self._multiplier

    async def collect_stream_docs(
        self, indices_written: int
    ) -> AsyncIterator[StreamAsset]:
        """Yield stream_resource then stream_datum documents for new frames."""
        # TODO: fail if we get dropped frames
        if indices_written:
            # Lazily create the file wrapper (and emit resources once) on
            # first collection, when the file name is known
            if not self._file:
                self._file = _HDFFile(
                    self._directory_provider(),
                    Path(await self.panda_device.data.hdf_file_name.get_value()),
                    self._datasets,
                )
                for doc in self._file.stream_resources():
                    yield "stream_resource", doc
            for doc in self._file.stream_data(indices_written):
                yield "stream_datum", doc

    # Could put this function as default for StandardDetector
    async def close(self):
        """Stop the PandA capturing and wait for confirmation."""
        await self.panda_device.data.capture.set(
            False, wait=True, timeout=DEFAULT_TIMEOUT
        )
@@ -0,0 +1,58 @@
1
+ from dataclasses import dataclass
2
+ from pathlib import Path
3
+ from typing import Iterator, List
4
+
5
+ from event_model import StreamDatum, StreamResource, compose_stream_resource
6
+
7
+ from ophyd_async.core import DirectoryInfo
8
+
9
+
10
@dataclass
class _HDFDataset:
    """Description of one dataset within the PandA-written HDF5 file."""

    # Name of the detector device that owns this dataset
    device_name: str
    # PandA block the captured signal belongs to (e.g. "INENC1")
    block: str
    # Data key used in the stream resource document
    name: str
    # Passed as the "path" resource kwarg — presumably the dataset name
    # inside the HDF5 file; confirm against the filler/consumer
    path: str
    # Dataset shape; [1] for scalar PandA captures
    shape: List[int]
    multiplier: int
18
+
19
+
20
class _HDFFile:
    """Composes stream resource/datum documents for one PandA HDF5 file."""

    def __init__(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[_HDFDataset],
    ) -> None:
        # Index up to which stream_datum docs have already been emitted
        self._last_emitted = 0
        # One stream resource bundle per dataset in the file
        self._bundles = [
            compose_stream_resource(
                spec="AD_HDF5_SWMR_SLICE",
                root=str(directory_info.root),
                data_key=ds.name,
                # NOTE(review): root is prepended here even though root= is
                # also passed above, so it may appear twice in the resolved
                # URI — confirm downstream consumers expect this
                resource_path=(f"{str(directory_info.root)}/{full_file_name}"),
                resource_kwargs={
                    "name": ds.name,
                    "block": ds.block,
                    "path": ds.path,
                    "multiplier": ds.multiplier,
                    "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
                },
            )
            for ds in datasets
        ]

    def stream_resources(self) -> Iterator[StreamResource]:
        """Yield one StreamResource document per dataset."""
        for bundle in self._bundles:
            yield bundle.stream_resource_doc

    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
        """Yield StreamDatum docs covering frames written since the last call."""
        # Indices are relative to resource
        if indices_written > self._last_emitted:
            indices = {
                "start": self._last_emitted,
                "stop": indices_written,
            }
            self._last_emitted = indices_written
            for bundle in self._bundles:
                yield bundle.compose_stream_datum(indices)
@@ -0,0 +1,13 @@
1
+ from .ensure_connected import ensure_connected
2
+ from .fly import (
3
+ fly_and_collect,
4
+ prepare_static_seq_table_flyer_and_detectors_with_same_trigger,
5
+ time_resolved_fly_and_collect_with_static_seq_table,
6
+ )
7
+
8
+ __all__ = [
9
+ "fly_and_collect",
10
+ "prepare_static_seq_table_flyer_and_detectors_with_same_trigger",
11
+ "time_resolved_fly_and_collect_with_static_seq_table",
12
+ "ensure_connected",
13
+ ]
@@ -0,0 +1,22 @@
1
+ import bluesky.plan_stubs as bps
2
+
3
+ from ophyd_async.core.device import Device
4
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, wait_for_connection
5
+
6
+
7
def ensure_connected(
    *devices: Device,
    mock: bool = False,
    timeout: float = DEFAULT_TIMEOUT,
    force_reconnect: bool = False,
):
    """Plan stub that connects one or more Devices from the RunEngine.

    All *devices* are connected concurrently, each waiting up to *timeout*.
    If *mock* is True the devices connect with mock signal backends.
    """
    yield from bps.wait_for(
        [
            # One callable wrapping all connects, so wait_for awaits them
            # as a single task
            lambda: wait_for_connection(
                **{
                    device.name: device.connect(mock, timeout, force_reconnect)
                    for device in devices
                }
            )
        ]
    )
@@ -0,0 +1,149 @@
1
+ from typing import List
2
+
3
+ import bluesky.plan_stubs as bps
4
+ from bluesky.utils import short_uid
5
+
6
+ from ophyd_async.core.detector import DetectorTrigger, StandardDetector, TriggerInfo
7
+ from ophyd_async.core.flyer import HardwareTriggeredFlyable
8
+ from ophyd_async.core.utils import in_micros
9
+ from ophyd_async.panda._table import SeqTable, SeqTableRow, seq_table_from_rows
10
+ from ophyd_async.panda._trigger import SeqTableInfo
11
+
12
+
13
def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
    flyer: HardwareTriggeredFlyable[SeqTableInfo],
    detectors: List[StandardDetector],
    number_of_frames: int,
    exposure: float,
    shutter_time: float,
    repeats: int = 1,
    period: float = 0.0,
):
    """Prepare a hardware triggered flyable and one or more detectors.

    Prepare a hardware triggered flyable and one or more detectors with the
    same trigger. This method constructs TriggerInfo and a static sequence
    table from required parameters. The table is required to prepare the flyer,
    and the TriggerInfo is required to prepare the detector(s).

    This prepares all supplied detectors with the same trigger.

    Args:
        flyer: flyable wrapping the SEQ block trigger logic
        detectors: detectors to trigger; must be non-empty
        number_of_frames: frames per repeat of the sequence table
        exposure: per-frame exposure (livetime) in seconds
        shutter_time: time allowed for the shutter to open/close, in seconds
        repeats: how many times the whole table is repeated
        period: minimum duration of one repeat, in seconds

    Raises:
        ValueError: if no detectors are supplied
    """
    if not detectors:
        raise ValueError("No detectors provided. There must be at least one.")

    # The slowest detector's deadtime governs the trigger spacing
    deadtime = max(det.controller.get_deadtime(exposure) for det in detectors)

    trigger_info = TriggerInfo(
        num=number_of_frames * repeats,
        trigger=DetectorTrigger.constant_gate,
        deadtime=deadtime,
        livetime=exposure,
    )
    # Total time for the N triggered frames in one repeat
    trigger_time = number_of_frames * (exposure + deadtime)
    # Pad the start so each repeat takes at least `period` seconds
    pre_delay = max(period - 2 * shutter_time - trigger_time, 0)

    table: SeqTable = seq_table_from_rows(
        # Wait for pre-delay then open shutter
        SeqTableRow(
            time1=in_micros(pre_delay),
            time2=in_micros(shutter_time),
            outa2=True,
        ),
        # Keeping shutter open, do N triggers
        SeqTableRow(
            repeats=number_of_frames,
            time1=in_micros(exposure),
            outa1=True,
            outb1=True,
            time2=in_micros(deadtime),
            outa2=True,
        ),
        # Add the shutter close
        SeqTableRow(time2=in_micros(shutter_time)),
    )

    table_info = SeqTableInfo(table, repeats)

    # Prepare detectors and flyer concurrently, then wait for all of them
    for det in detectors:
        yield from bps.prepare(det, trigger_info, wait=False, group="prep")
    yield from bps.prepare(flyer, table_info, wait=False, group="prep")
    yield from bps.wait(group="prep")
72
+
73
+
74
def fly_and_collect(
    stream_name: str,
    flyer: HardwareTriggeredFlyable[SeqTableInfo],
    detectors: List[StandardDetector],
):
    """Kickoff, complete and collect with a flyer and multiple detectors.

    This stub takes a flyer and one or more detectors that have been prepared. It
    declares a stream for the detectors, then kicks off the detectors and the flyer.
    The detectors are collected until the flyer and detectors have completed.

    """
    yield from bps.declare_stream(*detectors, name=stream_name, collect=True)
    # Flyer first (and waited on) so triggers are running before detectors start
    yield from bps.kickoff(flyer, wait=True)
    for detector in detectors:
        yield from bps.kickoff(detector)

    # collect_while_completing
    group = short_uid(label="complete")

    yield from bps.complete(flyer, wait=False, group=group)
    for detector in detectors:
        yield from bps.complete(detector, wait=False, group=group)

    done = False
    while not done:
        try:
            # Poll completion in 0.5 s slices so we can collect in between
            yield from bps.wait(group=group, timeout=0.5)
        except TimeoutError:
            pass
        else:
            done = True
        # Emit whatever frames have arrived since the last collect; runs one
        # final time after completion so nothing is left uncollected
        yield from bps.collect(
            *detectors,
            return_payload=False,
            name=stream_name,
        )
    yield from bps.wait(group=group)
112
+
113
+
114
def time_resolved_fly_and_collect_with_static_seq_table(
    stream_name: str,
    flyer: HardwareTriggeredFlyable[SeqTableInfo],
    detectors: List[StandardDetector],
    number_of_frames: int,
    exposure: float,
    shutter_time: float,
    repeats: int = 1,
    period: float = 0.0,
):
    """Run a scan with a flyer and multiple detectors.

    The stub demonstrates the standard basic flow for a flyscan:

    - Prepare the flyer and detectors with a trigger
    - Fly and collect:
      - Declare the stream and kickoff the scan
      - Collect while completing

    This needs to be used in a plan that instantiates detectors and a flyer,
    stages/unstages the devices, and opens and closes the run.

    """

    # Set up scan and prepare trigger
    yield from prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
        flyer,
        detectors,
        number_of_frames=number_of_frames,
        exposure=exposure,
        shutter_time=shutter_time,
        repeats=repeats,
        period=period,
    )
    # Run the fly scan
    yield from fly_and_collect(stream_name, flyer, detectors)
@@ -0,0 +1,126 @@
1
+ from __future__ import annotations
2
+
3
+ from abc import abstractmethod
4
+ from typing import (
5
+ TYPE_CHECKING,
6
+ Any,
7
+ Dict,
8
+ Generic,
9
+ Protocol,
10
+ TypeVar,
11
+ runtime_checkable,
12
+ )
13
+
14
+ from bluesky.protocols import DataKey, HasName, Reading
15
+
16
+ if TYPE_CHECKING:
17
+ from ophyd_async.core.async_status import AsyncStatus
18
+
19
+
20
@runtime_checkable
class AsyncReadable(HasName, Protocol):
    """Async, structurally-checked protocol for readable devices."""

    @abstractmethod
    async def read(self) -> Dict[str, Reading]:
        """Return an OrderedDict mapping string field name(s) to dictionaries
        of values and timestamps and optional per-point metadata.

        Example return value:

        .. code-block:: python

            OrderedDict(('channel1',
                         {'value': 5, 'timestamp': 1472493713.271991}),
                        ('channel2',
                         {'value': 16, 'timestamp': 1472493713.539238}))
        """
        ...

    @abstractmethod
    async def describe(self) -> Dict[str, DataKey]:
        """Return an OrderedDict with exactly the same keys as the ``read``
        method, here mapped to per-scan metadata about each field.

        Example return value:

        .. code-block:: python

            OrderedDict(('channel1',
                         {'source': 'XF23-ID:SOME_PV_NAME',
                          'dtype': 'number',
                          'shape': []}),
                        ('channel2',
                         {'source': 'XF23-ID:SOME_PV_NAME',
                          'dtype': 'number',
                          'shape': []}))
        """
        ...
57
+
58
+
59
@runtime_checkable
class AsyncConfigurable(Protocol):
    """Async protocol for devices with slow-changing configuration fields."""

    @abstractmethod
    async def read_configuration(self) -> Dict[str, Reading]:
        """Same API as ``read`` but for slow-changing fields related to configuration.
        e.g., exposure time. These will typically be read only once per run.
        """
        ...

    @abstractmethod
    async def describe_configuration(self) -> Dict[str, DataKey]:
        """Same API as ``describe``, but corresponding to the keys in
        ``read_configuration``.
        """
        ...
74
+
75
+
76
@runtime_checkable
class AsyncPausable(Protocol):
    """Async protocol for devices that react to RunEngine pause/resume."""

    @abstractmethod
    async def pause(self) -> None:
        """Perform device-specific work when the RunEngine pauses."""
        ...

    @abstractmethod
    async def resume(self) -> None:
        """Perform device-specific work when the RunEngine resumes after a pause."""
        ...
87
+
88
+
89
@runtime_checkable
class AsyncStageable(Protocol):
    """Protocol for devices whose stage/unstage return an AsyncStatus."""

    @abstractmethod
    def stage(self) -> AsyncStatus:
        """An optional hook for "setting up" the device for acquisition.

        It should return a ``Status`` that is marked done when the device is
        done staging.
        """
        ...

    @abstractmethod
    def unstage(self) -> AsyncStatus:
        """A hook for "cleaning up" the device after acquisition.

        It should return a ``Status`` that is marked done when the device is finished
        unstaging.
        """
        ...
108
+
109
+
110
+ C = TypeVar("C", contravariant=True)
111
+
112
+
113
class Watcher(Protocol, Generic[C]):
    """Structural type for progress-watcher callbacks.

    Describes a callable taking keyword-only progress fields (current,
    initial and target values of type ``C`` plus timing metadata).
    """

    # staticmethod so the protocol matches plain callables, not bound methods
    @staticmethod
    def __call__(
        *,
        current: C,
        initial: C,
        target: C,
        name: str | None,
        unit: str | None,
        precision: float | None,
        fraction: float | None,
        time_elapsed: float | None,
        time_remaining: float | None,
    ) -> Any: ...