ophyd-async 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +91 -19
- ophyd_async/core/_providers.py +68 -0
- ophyd_async/core/async_status.py +90 -42
- ophyd_async/core/detector.py +341 -0
- ophyd_async/core/device.py +226 -0
- ophyd_async/core/device_save_loader.py +286 -0
- ophyd_async/core/flyer.py +85 -0
- ophyd_async/core/mock_signal_backend.py +82 -0
- ophyd_async/core/mock_signal_utils.py +145 -0
- ophyd_async/core/{_device/_signal/signal.py → signal.py} +249 -61
- ophyd_async/core/{_device/_backend/signal_backend.py → signal_backend.py} +12 -5
- ophyd_async/core/{_device/_backend/sim_signal_backend.py → soft_signal_backend.py} +54 -48
- ophyd_async/core/standard_readable.py +261 -0
- ophyd_async/core/utils.py +127 -30
- ophyd_async/epics/_backend/_aioca.py +62 -43
- ophyd_async/epics/_backend/_p4p.py +100 -52
- ophyd_async/epics/_backend/common.py +25 -0
- ophyd_async/epics/areadetector/__init__.py +16 -15
- ophyd_async/epics/areadetector/aravis.py +63 -0
- ophyd_async/epics/areadetector/controllers/__init__.py +5 -0
- ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +52 -0
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
- ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +61 -0
- ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +21 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +107 -0
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
- ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +21 -0
- ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
- ophyd_async/epics/areadetector/kinetix.py +46 -0
- ophyd_async/epics/areadetector/pilatus.py +45 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +18 -10
- ophyd_async/epics/areadetector/utils.py +91 -13
- ophyd_async/epics/areadetector/vimba.py +43 -0
- ophyd_async/epics/areadetector/writers/__init__.py +5 -0
- ophyd_async/epics/areadetector/writers/_hdfdataset.py +10 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +54 -0
- ophyd_async/epics/areadetector/writers/hdf_writer.py +142 -0
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +40 -0
- ophyd_async/epics/areadetector/writers/nd_plugin.py +38 -0
- ophyd_async/epics/demo/__init__.py +78 -51
- ophyd_async/epics/demo/demo_ad_sim_detector.py +35 -0
- ophyd_async/epics/motion/motor.py +67 -52
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/__init__.py +8 -3
- ophyd_async/epics/signal/signal.py +27 -10
- ophyd_async/log.py +130 -0
- ophyd_async/panda/__init__.py +24 -7
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/_panda_controller.py +37 -0
- ophyd_async/panda/_table.py +158 -0
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/_utils.py +15 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/plan_stubs/__init__.py +13 -0
- ophyd_async/plan_stubs/ensure_connected.py +22 -0
- ophyd_async/plan_stubs/fly.py +149 -0
- ophyd_async/protocols.py +126 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +103 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +35 -67
- ophyd_async-0.3.0.dist-info/RECORD +86 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device/__init__.py +0 -0
- ophyd_async/core/_device/_backend/__init__.py +0 -0
- ophyd_async/core/_device/_signal/__init__.py +0 -0
- ophyd_async/core/_device/device.py +0 -60
- ophyd_async/core/_device/device_collector.py +0 -121
- ophyd_async/core/_device/device_vector.py +0 -14
- ophyd_async/core/_device/standard_readable.py +0 -72
- ophyd_async/epics/areadetector/ad_driver.py +0 -18
- ophyd_async/epics/areadetector/directory_provider.py +0 -18
- ophyd_async/epics/areadetector/hdf_streamer_det.py +0 -167
- ophyd_async/epics/areadetector/nd_file_hdf.py +0 -22
- ophyd_async/epics/areadetector/nd_plugin.py +0 -13
- ophyd_async/epics/signal/pvi_get.py +0 -22
- ophyd_async/panda/panda.py +0 -332
- ophyd_async-0.1.0.dist-info/RECORD +0 -45
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
ophyd_async/panda/_panda_controller.py
@@ -0,0 +1,37 @@
+import asyncio
+from typing import Optional
+
+from ophyd_async.core import (
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+    wait_for_value,
+)
+from ophyd_async.panda import PcapBlock
+
+
+class PandaPcapController(DetectorControl):
+    def __init__(self, pcap: PcapBlock) -> None:
+        self.pcap = pcap
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.000000008
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.constant_gate,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        assert trigger in (
+            DetectorTrigger.constant_gate,
+            DetectorTrigger.variable_gate,
+        ), "Only constant_gate and variable_gate triggering is supported on the PandA"
+        await asyncio.gather(self.pcap.arm.set(True))
+        await wait_for_value(self.pcap.active, True, timeout=1)
+        return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))
+
+    async def disarm(self) -> AsyncStatus:
+        await asyncio.gather(self.pcap.arm.set(False))
+        await wait_for_value(self.pcap.active, False, timeout=1)
+        return AsyncStatus(wait_for_value(self.pcap.active, False, timeout=None))
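A minimal usage sketch for this controller, assuming a connected PandA whose `pcap` block is passed in; the function name and the surrounding setup are illustrative, not part of this release:

```python
from ophyd_async.panda import PcapBlock


async def capture_once(pcap: PcapBlock) -> None:
    controller = PandaPcapController(pcap)
    # arm() returns once PCAP reports active; the returned status
    # completes when PCAP goes inactive again
    status = await controller.arm(num=1)
    await status
    await controller.disarm()
```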
ophyd_async/panda/_table.py
@@ -0,0 +1,158 @@
+from dataclasses import dataclass
+from enum import Enum
+from typing import Optional, Sequence, Type, TypedDict, TypeVar
+
+import numpy as np
+import numpy.typing as npt
+
+
+class SeqTrigger(str, Enum):
+    IMMEDIATE = "Immediate"
+    BITA_0 = "BITA=0"
+    BITA_1 = "BITA=1"
+    BITB_0 = "BITB=0"
+    BITB_1 = "BITB=1"
+    BITC_0 = "BITC=0"
+    BITC_1 = "BITC=1"
+    POSA_GT = "POSA>=POSITION"
+    POSA_LT = "POSA<=POSITION"
+    POSB_GT = "POSB>=POSITION"
+    POSB_LT = "POSB<=POSITION"
+    POSC_GT = "POSC>=POSITION"
+    POSC_LT = "POSC<=POSITION"
+
+
+@dataclass
+class SeqTableRow:
+    repeats: int = 1
+    trigger: SeqTrigger = SeqTrigger.IMMEDIATE
+    position: int = 0
+    time1: int = 0
+    outa1: bool = False
+    outb1: bool = False
+    outc1: bool = False
+    outd1: bool = False
+    oute1: bool = False
+    outf1: bool = False
+    time2: int = 0
+    outa2: bool = False
+    outb2: bool = False
+    outc2: bool = False
+    outd2: bool = False
+    oute2: bool = False
+    outf2: bool = False
+
+
+class SeqTable(TypedDict):
+    repeats: npt.NDArray[np.uint16]
+    trigger: Sequence[SeqTrigger]
+    position: npt.NDArray[np.int32]
+    time1: npt.NDArray[np.uint32]
+    outa1: npt.NDArray[np.bool_]
+    outb1: npt.NDArray[np.bool_]
+    outc1: npt.NDArray[np.bool_]
+    outd1: npt.NDArray[np.bool_]
+    oute1: npt.NDArray[np.bool_]
+    outf1: npt.NDArray[np.bool_]
+    time2: npt.NDArray[np.uint32]
+    outa2: npt.NDArray[np.bool_]
+    outb2: npt.NDArray[np.bool_]
+    outc2: npt.NDArray[np.bool_]
+    outd2: npt.NDArray[np.bool_]
+    oute2: npt.NDArray[np.bool_]
+    outf2: npt.NDArray[np.bool_]
+
+
+def seq_table_from_rows(*rows: SeqTableRow):
+    """
+    Constructs a sequence table from a series of rows.
+    """
+    return seq_table_from_arrays(
+        repeats=np.array([row.repeats for row in rows], dtype=np.uint16),
+        trigger=[row.trigger for row in rows],
+        position=np.array([row.position for row in rows], dtype=np.int32),
+        time1=np.array([row.time1 for row in rows], dtype=np.uint32),
+        outa1=np.array([row.outa1 for row in rows], dtype=np.bool_),
+        outb1=np.array([row.outb1 for row in rows], dtype=np.bool_),
+        outc1=np.array([row.outc1 for row in rows], dtype=np.bool_),
+        outd1=np.array([row.outd1 for row in rows], dtype=np.bool_),
+        oute1=np.array([row.oute1 for row in rows], dtype=np.bool_),
+        outf1=np.array([row.outf1 for row in rows], dtype=np.bool_),
+        time2=np.array([row.time2 for row in rows], dtype=np.uint32),
+        outa2=np.array([row.outa2 for row in rows], dtype=np.bool_),
+        outb2=np.array([row.outb2 for row in rows], dtype=np.bool_),
+        outc2=np.array([row.outc2 for row in rows], dtype=np.bool_),
+        outd2=np.array([row.outd2 for row in rows], dtype=np.bool_),
+        oute2=np.array([row.oute2 for row in rows], dtype=np.bool_),
+        outf2=np.array([row.outf2 for row in rows], dtype=np.bool_),
+    )
+
+
+T = TypeVar("T", bound=np.generic)
+
+
+def seq_table_from_arrays(
+    *,
+    repeats: Optional[npt.NDArray[np.uint16]] = None,
+    trigger: Optional[Sequence[SeqTrigger]] = None,
+    position: Optional[npt.NDArray[np.int32]] = None,
+    time1: Optional[npt.NDArray[np.uint32]] = None,
+    outa1: Optional[npt.NDArray[np.bool_]] = None,
+    outb1: Optional[npt.NDArray[np.bool_]] = None,
+    outc1: Optional[npt.NDArray[np.bool_]] = None,
+    outd1: Optional[npt.NDArray[np.bool_]] = None,
+    oute1: Optional[npt.NDArray[np.bool_]] = None,
+    outf1: Optional[npt.NDArray[np.bool_]] = None,
+    time2: npt.NDArray[np.uint32],
+    outa2: Optional[npt.NDArray[np.bool_]] = None,
+    outb2: Optional[npt.NDArray[np.bool_]] = None,
+    outc2: Optional[npt.NDArray[np.bool_]] = None,
+    outd2: Optional[npt.NDArray[np.bool_]] = None,
+    oute2: Optional[npt.NDArray[np.bool_]] = None,
+    outf2: Optional[npt.NDArray[np.bool_]] = None,
+) -> SeqTable:
+    """
+    Constructs a sequence table from a series of columns as arrays.
+    time2 is the only required argument and must not be None.
+    All other provided arguments must be of equal length to time2.
+    If any other argument is omitted, None, or empty, it defaults to an
+    array of length len(time2) filled with the following:
+        repeats: 1
+        trigger: SeqTrigger.IMMEDIATE
+        all others: 0/False as appropriate
+    """
+    assert time2 is not None, "time2 must be provided"
+    length = len(time2)
+    assert 0 < length < 4096, f"Length {length} not in range"
+
+    def or_default(
+        value: Optional[npt.NDArray[T]], dtype: Type[T], default_value: int = 0
+    ) -> npt.NDArray[T]:
+        if value is None or len(value) == 0:
+            return np.full(length, default_value, dtype=dtype)
+        return value
+
+    table = SeqTable(
+        repeats=or_default(repeats, np.uint16, 1),
+        trigger=trigger or [SeqTrigger.IMMEDIATE] * length,
+        position=or_default(position, np.int32),
+        time1=or_default(time1, np.uint32),
+        outa1=or_default(outa1, np.bool_),
+        outb1=or_default(outb1, np.bool_),
+        outc1=or_default(outc1, np.bool_),
+        outd1=or_default(outd1, np.bool_),
+        oute1=or_default(oute1, np.bool_),
+        outf1=or_default(outf1, np.bool_),
+        time2=time2,
+        outa2=or_default(outa2, np.bool_),
+        outb2=or_default(outb2, np.bool_),
+        outc2=or_default(outc2, np.bool_),
+        outd2=or_default(outd2, np.bool_),
+        oute2=or_default(oute2, np.bool_),
+        outf2=or_default(outf2, np.bool_),
+    )
+    for k, v in table.items():
+        size = len(v)  # type: ignore
+        if size != length:
+            raise ValueError(f"{k}: has length {size} not {length}")
+    return table
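A quick sketch of the row-based constructor above; the row values are illustrative:

```python
import numpy as np

# Build a two-row table: wait for BITA=1 with outa1 high, then fire immediately.
table = seq_table_from_rows(
    SeqTableRow(trigger=SeqTrigger.BITA_1, time1=100, outa1=True),
    SeqTableRow(time2=50, outa2=True),
)
assert len(table["time2"]) == 2
assert table["repeats"].dtype == np.uint16  # defaults filled per column
```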
ophyd_async/panda/_trigger.py
@@ -0,0 +1,39 @@
+import asyncio
+from dataclasses import dataclass
+
+from ophyd_async.core import TriggerLogic, wait_for_value
+from ophyd_async.panda import SeqBlock, SeqTable, TimeUnits
+
+
+@dataclass
+class SeqTableInfo:
+    sequence_table: SeqTable
+    repeats: int
+    prescale_as_us: float = 1  # microseconds
+
+
+class StaticSeqTableTriggerLogic(TriggerLogic[SeqTableInfo]):
+    def __init__(self, seq: SeqBlock) -> None:
+        self.seq = seq
+
+    async def prepare(self, value: SeqTableInfo):
+        await asyncio.gather(
+            self.seq.prescale_units.set(TimeUnits.us),
+            self.seq.enable.set("ZERO"),
+        )
+        await asyncio.gather(
+            self.seq.prescale.set(value.prescale_as_us),
+            self.seq.repeats.set(value.repeats),
+            self.seq.table.set(value.sequence_table),
+        )
+
+    async def kickoff(self) -> None:
+        await self.seq.enable.set("ONE")
+        await wait_for_value(self.seq.active, True, timeout=1)
+
+    async def complete(self) -> None:
+        await wait_for_value(self.seq.active, False, timeout=None)
+
+    async def stop(self):
+        await self.seq.enable.set("ZERO")
+        await wait_for_value(self.seq.active, False, timeout=1)
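A hedged sketch of driving this trigger logic end to end; `seq` is assumed to be a `SeqBlock` on a connected PandA, and the table helpers are imported from `_table.py` in this same diff:

```python
from ophyd_async.panda._table import SeqTableRow, seq_table_from_rows


async def run_one_sequence(seq) -> None:
    logic = StaticSeqTableTriggerLogic(seq)
    info = SeqTableInfo(
        sequence_table=seq_table_from_rows(SeqTableRow(time2=1000)),
        repeats=1,
    )
    await logic.prepare(info)   # load table with the sequencer disabled
    await logic.kickoff()       # enable and wait for active
    await logic.complete()      # block until the sequence finishes
```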
ophyd_async/panda/_utils.py
@@ -0,0 +1,15 @@
+from typing import Any, Dict, Sequence
+
+
+def phase_sorter(panda_signal_values: Dict[str, Any]) -> Sequence[Dict[str, Any]]:
+    # The PandA has two load phases. If a signal name ends in "units",
+    # it needs to be loaded first, so it goes in the first phase.
+    phase_1, phase_2 = {}, {}
+
+    for key, value in panda_signal_values.items():
+        if key.endswith("units"):
+            phase_1[key] = value
+        else:
+            phase_2[key] = value
+
+    return [phase_1, phase_2]
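For example, the split behaves like this (the signal names are illustrative):

```python
phases = phase_sorter({"pulse.1.delay_units": "s", "pulse.1.delay": 1.0})
# "units" keys land in phase 1, everything else in phase 2
assert phases == [{"pulse.1.delay_units": "s"}, {"pulse.1.delay": 1.0}]
```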
ophyd_async/panda/writers/_hdf_writer.py
@@ -0,0 +1,220 @@
+import asyncio
+from dataclasses import dataclass
+from enum import Enum
+from pathlib import Path
+from typing import Any, AsyncGenerator, AsyncIterator, Dict, List, Optional
+
+from bluesky.protocols import DataKey, StreamAsset
+from p4p.client.thread import Context
+
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    DetectorWriter,
+    Device,
+    DirectoryProvider,
+    NameProvider,
+    SignalR,
+    wait_for_value,
+)
+from ophyd_async.core.signal import observe_value
+from ophyd_async.panda import CommonPandaBlocks
+
+from ._panda_hdf_file import _HDFDataset, _HDFFile
+
+
+class Capture(str, Enum):
+    # Capture signals for the HDF Panda
+    No = "No"
+    Value = "Value"
+    Diff = "Diff"
+    Sum = "Sum"
+    Mean = "Mean"
+    Min = "Min"
+    Max = "Max"
+    MinMax = "Min Max"
+    MinMaxMean = "Min Max Mean"
+
+
+def get_capture_signals(
+    block: Device, path_prefix: Optional[str] = ""
+) -> Dict[str, SignalR]:
+    """Get dict mapping a capture signal's name to the signal itself"""
+    if not path_prefix:
+        path_prefix = ""
+    signals: Dict[str, SignalR[Any]] = {}
+    for attr_name, attr in block.children():
+        # Capture signals end in _capture, but num_capture is a red herring
+        if attr_name == "num_capture":
+            continue
+        dot_path = f"{path_prefix}{attr_name}"
+        if isinstance(attr, SignalR) and attr_name.endswith("_capture"):
+            signals[dot_path] = attr
+        attr_signals = get_capture_signals(attr, path_prefix=dot_path + ".")
+        signals.update(attr_signals)
+    return signals
+
+
+@dataclass
+class CaptureSignalWrapper:
+    signal: SignalR
+    capture_type: Capture
+
+
+# Returns a dict mapping each captured signal's attribute path to a wrapper
+# holding the signal object and its Capture value
+async def get_signals_marked_for_capture(
+    capture_signals: Dict[str, SignalR],
+) -> Dict[str, CaptureSignalWrapper]:
+    # Read signals to see if they should be captured
+    do_read = [signal.get_value() for signal in capture_signals.values()]
+
+    signal_values = await asyncio.gather(*do_read)
+
+    assert len(signal_values) == len(
+        capture_signals
+    ), "Length of read signals is different to length of signals"
+
+    signals_to_capture: Dict[str, CaptureSignalWrapper] = {}
+    for signal_path, signal_object, signal_value in zip(
+        capture_signals.keys(), capture_signals.values(), signal_values
+    ):
+        signal_path = signal_path.replace("_capture", "")
+        if (signal_value in iter(Capture)) and (signal_value != Capture.No):
+            signals_to_capture[signal_path] = CaptureSignalWrapper(
+                signal_object,
+                signal_value,
+            )
+
+    return signals_to_capture
+
+
+class PandaHDFWriter(DetectorWriter):
+    _ctxt: Optional[Context] = None
+
+    def __init__(
+        self,
+        prefix: str,
+        directory_provider: DirectoryProvider,
+        name_provider: NameProvider,
+        panda_device: CommonPandaBlocks,
+    ) -> None:
+        self.panda_device = panda_device
+        self._prefix = prefix
+        self._directory_provider = directory_provider
+        self._name_provider = name_provider
+        self._datasets: List[_HDFDataset] = []
+        self._file: Optional[_HDFFile] = None
+        self._multiplier = 1
+
+    # Triggered on PCAP arm
+    async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
+        """Retrieve and get descriptor of all PandA signals marked for capture"""
+
+        # Get capture PVs by looking at panda. Gives mapping of dotted attribute path
+        # to Signal object
+        self.capture_signals = get_capture_signals(self.panda_device)
+
+        # Ensure flushes are immediate
+        await self.panda_device.data.flush_period.set(0)
+
+        to_capture = await get_signals_marked_for_capture(self.capture_signals)
+        self._file = None
+        info = self._directory_provider()
+        # Set the initial values
+        await asyncio.gather(
+            self.panda_device.data.hdf_directory.set(
+                str(info.root / info.resource_dir)
+            ),
+            self.panda_device.data.hdf_file_name.set(
+                f"{info.prefix}{self.panda_device.name}{info.suffix}.h5",
+            ),
+            self.panda_device.data.num_capture.set(0),
+        )
+
+        # Wait for capture to start
+        await self.panda_device.data.capture.set(True)
+        name = self._name_provider()
+        if multiplier > 1:
+            raise ValueError(
+                "All PandA datasets should be scalar, multiplier should be 1"
+            )
+        self._datasets = []
+        for attribute_path, capture_signal in to_capture.items():
+            split_path = attribute_path.split(".")
+            signal_name = split_path[-1]
+            # Get block names from numbered blocks, eg INENC[1]
+            block_name = (
+                f"{split_path[-3]}{split_path[-2]}"
+                if split_path[-2].isnumeric()
+                else split_path[-2]
+            )
+
+            for suffix in capture_signal.capture_type.split(" "):
+                self._datasets.append(
+                    _HDFDataset(
+                        name,
+                        block_name,
+                        f"{name}-{block_name}-{signal_name}-{suffix}",
+                        f"{block_name}-{signal_name}".upper() + f"-{suffix}",
+                        [1],
+                        multiplier=1,
+                    )
+                )
+
+        describe = {
+            ds.name: DataKey(
+                source=self.panda_device.data.hdf_directory.source,
+                shape=ds.shape,
+                dtype="array" if ds.shape != [1] else "number",
+                external="STREAM:",
+            )
+            for ds in self._datasets
+        }
+        return describe
+
+    # Next few functions are exactly the same as AD writer. Could move as default
+    # StandardDetector behavior
+    async def wait_for_index(
+        self, index: int, timeout: Optional[float] = DEFAULT_TIMEOUT
+    ):
+        def matcher(value: int) -> bool:
+            return value >= index
+
+        matcher.__name__ = f"index_at_least_{index}"
+        await wait_for_value(
+            self.panda_device.data.num_captured, matcher, timeout=timeout
+        )
+
+    async def get_indices_written(self) -> int:
+        return await self.panda_device.data.num_captured.get_value()
+
+    async def observe_indices_written(
+        self, timeout=DEFAULT_TIMEOUT
+    ) -> AsyncGenerator[int, None]:
+        """Wait until a specific index is ready to be collected"""
+        async for num_captured in observe_value(
+            self.panda_device.data.num_captured, timeout
+        ):
+            yield num_captured // self._multiplier
+
+    async def collect_stream_docs(
+        self, indices_written: int
+    ) -> AsyncIterator[StreamAsset]:
+        # TODO: fail if we get dropped frames
+        if indices_written:
+            if not self._file:
+                self._file = _HDFFile(
+                    self._directory_provider(),
+                    Path(await self.panda_device.data.hdf_file_name.get_value()),
+                    self._datasets,
+                )
+                for doc in self._file.stream_resources():
+                    yield "stream_resource", doc
+            for doc in self._file.stream_data(indices_written):
+                yield "stream_datum", doc
+
+    # Could put this function as default for StandardDetector
+    async def close(self):
+        await self.panda_device.data.capture.set(
+            False, wait=True, timeout=DEFAULT_TIMEOUT
+        )
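The dataset-naming logic in `open()` above can be checked in isolation; this standalone sketch mirrors it for a numbered block:

```python
split_path = "inenc.1.val_capture".replace("_capture", "").split(".")
signal_name = split_path[-1]  # "val"
# Numbered blocks such as INENC[1] collapse to "inenc1"
block_name = (
    f"{split_path[-3]}{split_path[-2]}"
    if split_path[-2].isnumeric()
    else split_path[-2]
)
assert f"{block_name}-{signal_name}".upper() == "INENC1-VAL"
```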
ophyd_async/panda/writers/_panda_hdf_file.py
@@ -0,0 +1,58 @@
+from dataclasses import dataclass
+from pathlib import Path
+from typing import Iterator, List
+
+from event_model import StreamDatum, StreamResource, compose_stream_resource
+
+from ophyd_async.core import DirectoryInfo
+
+
+@dataclass
+class _HDFDataset:
+    device_name: str
+    block: str
+    name: str
+    path: str
+    shape: List[int]
+    multiplier: int
+
+
+class _HDFFile:
+    def __init__(
+        self,
+        directory_info: DirectoryInfo,
+        full_file_name: Path,
+        datasets: List[_HDFDataset],
+    ) -> None:
+        self._last_emitted = 0
+        self._bundles = [
+            compose_stream_resource(
+                spec="AD_HDF5_SWMR_SLICE",
+                root=str(directory_info.root),
+                data_key=ds.name,
+                resource_path=(f"{str(directory_info.root)}/{full_file_name}"),
+                resource_kwargs={
+                    "name": ds.name,
+                    "block": ds.block,
+                    "path": ds.path,
+                    "multiplier": ds.multiplier,
+                    "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
+                },
+            )
+            for ds in datasets
+        ]
+
+    def stream_resources(self) -> Iterator[StreamResource]:
+        for bundle in self._bundles:
+            yield bundle.stream_resource_doc
+
+    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
+        # Indices are relative to resource
+        if indices_written > self._last_emitted:
+            indices = {
+                "start": self._last_emitted,
+                "stop": indices_written,
+            }
+            self._last_emitted = indices_written
+            for bundle in self._bundles:
+                yield bundle.compose_stream_datum(indices)
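A minimal sketch of the windowing in `stream_data`: each call emits documents covering only the indices written since the previous call. The `DirectoryInfo` field names are inferred from their use in `_hdf_writer.py` above (an assumption), and an empty dataset list keeps the example self-contained:

```python
from pathlib import Path

from ophyd_async.core import DirectoryInfo

f = _HDFFile(
    DirectoryInfo(root=Path("/tmp"), resource_dir=Path(".")),
    Path("panda.h5"),
    [],  # no datasets, so nothing is yielded, but the window still advances
)
list(f.stream_data(5))       # would cover indices 0..5
assert f._last_emitted == 5  # the next call emits indices >= 5 only
```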
ophyd_async/plan_stubs/__init__.py
@@ -0,0 +1,13 @@
+from .ensure_connected import ensure_connected
+from .fly import (
+    fly_and_collect,
+    prepare_static_seq_table_flyer_and_detectors_with_same_trigger,
+    time_resolved_fly_and_collect_with_static_seq_table,
+)
+
+__all__ = [
+    "fly_and_collect",
+    "prepare_static_seq_table_flyer_and_detectors_with_same_trigger",
+    "time_resolved_fly_and_collect_with_static_seq_table",
+    "ensure_connected",
+]
ophyd_async/plan_stubs/ensure_connected.py
@@ -0,0 +1,22 @@
+import bluesky.plan_stubs as bps
+
+from ophyd_async.core.device import Device
+from ophyd_async.core.utils import DEFAULT_TIMEOUT, wait_for_connection
+
+
+def ensure_connected(
+    *devices: Device,
+    mock: bool = False,
+    timeout: float = DEFAULT_TIMEOUT,
+    force_reconnect=False,
+):
+    yield from bps.wait_for(
+        [
+            lambda: wait_for_connection(
+                **{
+                    device.name: device.connect(mock, timeout, force_reconnect)
+                    for device in devices
+                }
+            )
+        ]
+    )
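A hedged example of using this stub from a plan; `Motor` and its module path come from this release's file list, the PV prefix is made up, and mock mode avoids needing a live IOC:

```python
from bluesky.run_engine import RunEngine

from ophyd_async.epics.motion.motor import Motor
from ophyd_async.plan_stubs import ensure_connected

motor = Motor("SIM:MTR1:", name="motor")
RE = RunEngine()
RE(ensure_connected(motor, mock=True))  # connects the device in mock mode
```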