ophyd-async 0.9.0a1__py3-none-any.whl → 0.10.0a1__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- ophyd_async/__init__.py +5 -8
- ophyd_async/_docs_parser.py +12 -0
- ophyd_async/_version.py +9 -4
- ophyd_async/core/__init__.py +102 -74
- ophyd_async/core/_derived_signal.py +271 -0
- ophyd_async/core/_derived_signal_backend.py +300 -0
- ophyd_async/core/_detector.py +158 -153
- ophyd_async/core/_device.py +143 -115
- ophyd_async/core/_device_filler.py +82 -9
- ophyd_async/core/_flyer.py +16 -7
- ophyd_async/core/_hdf_dataset.py +29 -22
- ophyd_async/core/_log.py +14 -23
- ophyd_async/core/_mock_signal_backend.py +11 -3
- ophyd_async/core/_protocol.py +65 -45
- ophyd_async/core/_providers.py +28 -9
- ophyd_async/core/_readable.py +74 -58
- ophyd_async/core/_settings.py +113 -0
- ophyd_async/core/_signal.py +304 -174
- ophyd_async/core/_signal_backend.py +60 -14
- ophyd_async/core/_soft_signal_backend.py +18 -12
- ophyd_async/core/_status.py +72 -24
- ophyd_async/core/_table.py +54 -17
- ophyd_async/core/_utils.py +101 -52
- ophyd_async/core/_yaml_settings.py +66 -0
- ophyd_async/epics/__init__.py +1 -0
- ophyd_async/epics/adandor/__init__.py +9 -0
- ophyd_async/epics/adandor/_andor.py +45 -0
- ophyd_async/epics/adandor/_andor_controller.py +51 -0
- ophyd_async/epics/adandor/_andor_io.py +34 -0
- ophyd_async/epics/adaravis/__init__.py +8 -1
- ophyd_async/epics/adaravis/_aravis.py +23 -41
- ophyd_async/epics/adaravis/_aravis_controller.py +23 -55
- ophyd_async/epics/adaravis/_aravis_io.py +13 -28
- ophyd_async/epics/adcore/__init__.py +36 -14
- ophyd_async/epics/adcore/_core_detector.py +81 -0
- ophyd_async/epics/adcore/_core_io.py +145 -95
- ophyd_async/epics/adcore/_core_logic.py +179 -88
- ophyd_async/epics/adcore/_core_writer.py +223 -0
- ophyd_async/epics/adcore/_hdf_writer.py +51 -92
- ophyd_async/epics/adcore/_jpeg_writer.py +26 -0
- ophyd_async/epics/adcore/_single_trigger.py +6 -5
- ophyd_async/epics/adcore/_tiff_writer.py +26 -0
- ophyd_async/epics/adcore/_utils.py +3 -2
- ophyd_async/epics/adkinetix/__init__.py +2 -1
- ophyd_async/epics/adkinetix/_kinetix.py +32 -27
- ophyd_async/epics/adkinetix/_kinetix_controller.py +11 -21
- ophyd_async/epics/adkinetix/_kinetix_io.py +12 -13
- ophyd_async/epics/adpilatus/__init__.py +7 -2
- ophyd_async/epics/adpilatus/_pilatus.py +28 -40
- ophyd_async/epics/adpilatus/_pilatus_controller.py +25 -22
- ophyd_async/epics/adpilatus/_pilatus_io.py +11 -9
- ophyd_async/epics/adsimdetector/__init__.py +8 -1
- ophyd_async/epics/adsimdetector/_sim.py +22 -16
- ophyd_async/epics/adsimdetector/_sim_controller.py +9 -43
- ophyd_async/epics/adsimdetector/_sim_io.py +10 -0
- ophyd_async/epics/advimba/__init__.py +10 -1
- ophyd_async/epics/advimba/_vimba.py +26 -25
- ophyd_async/epics/advimba/_vimba_controller.py +12 -24
- ophyd_async/epics/advimba/_vimba_io.py +23 -28
- ophyd_async/epics/core/_aioca.py +66 -30
- ophyd_async/epics/core/_epics_connector.py +4 -0
- ophyd_async/epics/core/_epics_device.py +2 -0
- ophyd_async/epics/core/_p4p.py +50 -18
- ophyd_async/epics/core/_pvi_connector.py +65 -8
- ophyd_async/epics/core/_signal.py +51 -51
- ophyd_async/epics/core/_util.py +5 -5
- ophyd_async/epics/demo/__init__.py +11 -49
- ophyd_async/epics/demo/__main__.py +31 -0
- ophyd_async/epics/demo/_ioc.py +32 -0
- ophyd_async/epics/demo/_motor.py +82 -0
- ophyd_async/epics/demo/_point_detector.py +42 -0
- ophyd_async/epics/demo/_point_detector_channel.py +22 -0
- ophyd_async/epics/demo/_stage.py +15 -0
- ophyd_async/epics/demo/{mover.db → motor.db} +2 -1
- ophyd_async/epics/demo/point_detector.db +59 -0
- ophyd_async/epics/demo/point_detector_channel.db +21 -0
- ophyd_async/epics/eiger/_eiger.py +1 -3
- ophyd_async/epics/eiger/_eiger_controller.py +11 -4
- ophyd_async/epics/eiger/_eiger_io.py +2 -0
- ophyd_async/epics/eiger/_odin_io.py +1 -2
- ophyd_async/epics/motor.py +83 -38
- ophyd_async/epics/signal.py +4 -1
- ophyd_async/epics/testing/__init__.py +14 -14
- ophyd_async/epics/testing/_example_ioc.py +68 -73
- ophyd_async/epics/testing/_utils.py +19 -44
- ophyd_async/epics/testing/test_records.db +16 -0
- ophyd_async/epics/testing/test_records_pva.db +17 -16
- ophyd_async/fastcs/__init__.py +1 -0
- ophyd_async/fastcs/core.py +6 -0
- ophyd_async/fastcs/odin/__init__.py +1 -0
- ophyd_async/fastcs/panda/__init__.py +8 -8
- ophyd_async/fastcs/panda/_block.py +29 -9
- ophyd_async/fastcs/panda/_control.py +12 -2
- ophyd_async/fastcs/panda/_hdf_panda.py +5 -1
- ophyd_async/fastcs/panda/_table.py +13 -7
- ophyd_async/fastcs/panda/_trigger.py +23 -9
- ophyd_async/fastcs/panda/_writer.py +27 -30
- ophyd_async/plan_stubs/__init__.py +16 -0
- ophyd_async/plan_stubs/_ensure_connected.py +12 -17
- ophyd_async/plan_stubs/_fly.py +3 -5
- ophyd_async/plan_stubs/_nd_attributes.py +9 -5
- ophyd_async/plan_stubs/_panda.py +14 -0
- ophyd_async/plan_stubs/_settings.py +152 -0
- ophyd_async/plan_stubs/_utils.py +3 -0
- ophyd_async/plan_stubs/_wait_for_awaitable.py +13 -0
- ophyd_async/sim/__init__.py +29 -0
- ophyd_async/sim/__main__.py +43 -0
- ophyd_async/sim/_blob_detector.py +33 -0
- ophyd_async/sim/_blob_detector_controller.py +48 -0
- ophyd_async/sim/_blob_detector_writer.py +105 -0
- ophyd_async/sim/_mirror_horizontal.py +46 -0
- ophyd_async/sim/_mirror_vertical.py +74 -0
- ophyd_async/sim/_motor.py +233 -0
- ophyd_async/sim/_pattern_generator.py +124 -0
- ophyd_async/sim/_point_detector.py +86 -0
- ophyd_async/sim/_stage.py +19 -0
- ophyd_async/tango/__init__.py +1 -0
- ophyd_async/tango/core/__init__.py +6 -1
- ophyd_async/tango/core/_base_device.py +41 -33
- ophyd_async/tango/core/_converters.py +81 -0
- ophyd_async/tango/core/_signal.py +21 -33
- ophyd_async/tango/core/_tango_readable.py +2 -19
- ophyd_async/tango/core/_tango_transport.py +148 -74
- ophyd_async/tango/core/_utils.py +47 -0
- ophyd_async/tango/demo/_counter.py +2 -0
- ophyd_async/tango/demo/_detector.py +2 -0
- ophyd_async/tango/demo/_mover.py +10 -6
- ophyd_async/tango/demo/_tango/_servers.py +4 -0
- ophyd_async/tango/testing/__init__.py +6 -0
- ophyd_async/tango/testing/_one_of_everything.py +200 -0
- ophyd_async/testing/__init__.py +48 -7
- ophyd_async/testing/__pytest_assert_rewrite.py +4 -0
- ophyd_async/testing/_assert.py +200 -96
- ophyd_async/testing/_mock_signal_utils.py +59 -73
- ophyd_async/testing/_one_of_everything.py +146 -0
- ophyd_async/testing/_single_derived.py +87 -0
- ophyd_async/testing/_utils.py +3 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/METADATA +25 -26
- ophyd_async-0.10.0a1.dist-info/RECORD +149 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device_save_loader.py +0 -274
- ophyd_async/epics/demo/_mover.py +0 -95
- ophyd_async/epics/demo/_sensor.py +0 -37
- ophyd_async/epics/demo/sensor.db +0 -19
- ophyd_async/fastcs/panda/_utils.py +0 -16
- ophyd_async/sim/demo/__init__.py +0 -19
- ophyd_async/sim/demo/_pattern_detector/__init__.py +0 -13
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector.py +0 -42
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +0 -62
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_writer.py +0 -41
- ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +0 -207
- ophyd_async/sim/demo/_sim_motor.py +0 -107
- ophyd_async/sim/testing/__init__.py +0 -0
- ophyd_async-0.9.0a1.dist-info/RECORD +0 -119
- ophyd_async-0.9.0a1.dist-info/entry_points.txt +0 -2
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info/licenses}/LICENSE +0 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.10.0a1.dist-info}/top_level.txt +0 -0
ophyd_async/fastcs/core.py
CHANGED
@@ -1,9 +1,15 @@
+"""FastCS core module for ophyd-async."""
+
 from ophyd_async.core import Device, DeviceConnector
 from ophyd_async.epics.core import PviDeviceConnector


 def fastcs_connector(device: Device, uri: str, error_hint: str = "") -> DeviceConnector:
+    """Create devices and connections on pvi device `Device`."""
     # TODO: add Tango support based on uri scheme
     connector = PviDeviceConnector(uri, error_hint)
     connector.create_children_from_annotations(device)
     return connector
+
+
+__all__ = ["fastcs_connector"]
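The new module docstring and `__all__` do not change behaviour; `fastcs_connector` remains the hook a FastCS-backed device hands to its base class. A minimal sketch of that pattern, assuming `Device.__init__` accepts a `connector` argument (the device class and PVI URI below are hypothetical):

```python
from ophyd_async.core import Device
from ophyd_async.fastcs.core import fastcs_connector


class MyFastCSDevice(Device):
    """Hypothetical device whose children are introspected from a PVI structure."""

    def __init__(self, uri: str, name: str = ""):
        # fastcs_connector creates child devices from annotations and connects
        # them to the PVI structure served at `uri`
        super().__init__(name=name, connector=fastcs_connector(self, uri))
```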
@@ -0,0 +1 @@
+__all__ = []
ophyd_async/fastcs/panda/__init__.py
CHANGED
@@ -1,13 +1,14 @@
 from ._block import (
-    BitMux,
     CommonPandaBlocks,
     DataBlock,
+    PandaBitMux,
+    PandaCaptureMode,
+    PandaPcompDirection,
+    PandaTimeUnits,
     PcapBlock,
     PcompBlock,
-    PcompDirection,
     PulseBlock,
     SeqBlock,
-    TimeUnits,
 )
 from ._control import PandaPcapController
 from ._hdf_panda import HDFPanda
@@ -23,19 +24,19 @@ from ._trigger import (
     StaticPcompTriggerLogic,
     StaticSeqTableTriggerLogic,
 )
-from ._utils import phase_sorter
 from ._writer import PandaHDFWriter

 __all__ = [
     "CommonPandaBlocks",
     "DataBlock",
-    "BitMux",
+    "PandaBitMux",
+    "PandaCaptureMode",
     "PcapBlock",
     "PcompBlock",
-    "PcompDirection",
+    "PandaPcompDirection",
     "PulseBlock",
     "SeqBlock",
-    "TimeUnits",
+    "PandaTimeUnits",
     "HDFPanda",
     "PandaHDFWriter",
     "PandaPcapController",
@@ -47,5 +48,4 @@ __all__ = [
     "SeqTableInfo",
     "StaticPcompTriggerLogic",
     "StaticSeqTableTriggerLogic",
-    "phase_sorter",
 ]
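The user-visible effect is the rename of the PandA enums, which now carry a `Panda` prefix, plus the removal of `phase_sorter`. A sketch of the updated imports; the "was ..." comments refer to the old names removed above:

```python
from ophyd_async.fastcs.panda import (
    PandaBitMux,          # was BitMux
    PandaCaptureMode,     # newly exported
    PandaPcompDirection,  # was PcompDirection
    PandaTimeUnits,       # was TimeUnits
)

# e.g. enabling a sequencer block now reads:
#     await panda.seq[1].enable.set(PandaBitMux.ONE)
```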
ophyd_async/fastcs/panda/_block.py
CHANGED
@@ -10,13 +10,17 @@ from ophyd_async.core import (
 from ._table import DatasetTable, SeqTable


-class CaptureMode(StrictEnum):
+class PandaCaptureMode(StrictEnum):
+    """Capture mode for the `DataBlock` on the PandA."""
+
     FIRST_N = "FIRST_N"
     LAST_N = "LAST_N"
     FOREVER = "FOREVER"


 class DataBlock(Device):
+    """Data block for the PandA. Used for writing data through the IOC."""
+
     # In future we may decide to make hdf_* optional
     hdf_directory: SignalRW[str]
     hdf_file_name: SignalRW[str]
@@ -24,39 +28,49 @@ class DataBlock(Device):
     num_captured: SignalR[int]
     create_directory: SignalRW[int]
     directory_exists: SignalR[bool]
-    capture_mode: SignalRW[CaptureMode]
+    capture_mode: SignalRW[PandaCaptureMode]
     capture: SignalRW[bool]
     flush_period: SignalRW[float]
     datasets: SignalR[DatasetTable]


 class PulseBlock(Device):
+    """Used for configuring pulses in the PandA."""
+
     delay: SignalRW[float]
     width: SignalRW[float]


-class PcompDirection(StrictEnum):
+class PandaPcompDirection(StrictEnum):
+    """Direction options for position compare in the PandA."""
+
     POSITIVE = "Positive"
     NEGATIVE = "Negative"
     EITHER = "Either"


-class BitMux(SubsetEnum):
+class PandaBitMux(SubsetEnum):
+    """Bit input with configurable delay in the PandA."""
+
     ZERO = "ZERO"
     ONE = "ONE"


 class PcompBlock(Device):
+    """Position compare block in the PandA."""
+
     active: SignalR[bool]
-    dir: SignalRW[PcompDirection]
-    enable: SignalRW[BitMux]
+    dir: SignalRW[PandaPcompDirection]
+    enable: SignalRW[PandaBitMux]
     pulses: SignalRW[int]
     start: SignalRW[int]
     step: SignalRW[int]
     width: SignalRW[int]


-class TimeUnits(StrictEnum):
+class PandaTimeUnits(StrictEnum):
+    """Options for units of time in the PandA."""
+
     MIN = "min"
     S = "s"
     MS = "ms"
@@ -64,20 +78,26 @@ class TimeUnits(StrictEnum):


 class SeqBlock(Device):
+    """Sequencer block in the PandA."""
+
     table: SignalRW[SeqTable]
     active: SignalR[bool]
     repeats: SignalRW[int]
     prescale: SignalRW[float]
-    prescale_units: SignalRW[TimeUnits]
-    enable: SignalRW[BitMux]
+    prescale_units: SignalRW[PandaTimeUnits]
+    enable: SignalRW[PandaBitMux]


 class PcapBlock(Device):
+    """Position capture block in the PandA."""
+
     active: SignalR[bool]
     arm: SignalRW[bool]


 class CommonPandaBlocks(Device):
+    """Pandablocks device with blocks which are common and required on introspection."""
+
     pulse: DeviceVector[PulseBlock]
     seq: DeviceVector[SeqBlock]
     pcomp: DeviceVector[PcompBlock]
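Because these blocks are plain `Device` declarations, driving them from user code is just signal sets with the renamed enums. A short sketch, assuming a connected PandA that exposes its `DataBlock` as `panda.data` and its sequencers as the `panda.seq` device vector:

```python
from ophyd_async.fastcs.panda import PandaBitMux, PandaCaptureMode


async def configure_capture(panda) -> None:
    # Keep every frame rather than a rolling buffer, then start capturing
    await panda.data.capture_mode.set(PandaCaptureMode.FOREVER)
    await panda.data.capture.set(True)
    # Route a constant 1 into the first sequencer block's enable input
    await panda.seq[1].enable.set(PandaBitMux.ONE)
```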
ophyd_async/fastcs/panda/_control.py
CHANGED
@@ -10,6 +10,8 @@ from ._block import PcapBlock


 class PandaPcapController(DetectorController):
+    """For controlling a PCAP capture on the PandA."""
+
     def __init__(self, pcap: PcapBlock) -> None:
         self.pcap = pcap
         self._arm_status: AsyncStatus | None = None
@@ -18,10 +20,15 @@ class PandaPcapController(DetectorController):
         return 0.000000008

     async def prepare(self, trigger_info: TriggerInfo):
-
+        if trigger_info.trigger not in (
             DetectorTrigger.CONSTANT_GATE,
             DetectorTrigger.VARIABLE_GATE,
-        )
+        ):
+            msg = (
+                "Only constant_gate and variable_gate triggering is supported on "
+                "the PandA",
+            )
+            raise TypeError(msg)

     async def arm(self):
         self._arm_status = self.pcap.arm.set(True)
@@ -33,3 +40,6 @@ class PandaPcapController(DetectorController):
     async def disarm(self):
         await self.pcap.arm.set(False)
         await wait_for_value(self.pcap.active, False, timeout=1)
+        if self._arm_status and not self._arm_status.done:
+            await self._arm_status
+        self._arm_status = None
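The behavioural changes are that an unsupported trigger mode now raises `TypeError` in `prepare`, and `disarm` waits for and clears any outstanding arm status. A sketch of driving the controller directly; `TriggerInfo(number_of_triggers=..., trigger=...)` is taken from the `_fly.py` usage below, and `wait_for_idle` is assumed from the `DetectorController` interface:

```python
from ophyd_async.core import DetectorTrigger, TriggerInfo
from ophyd_async.fastcs.panda import PandaPcapController


async def gated_capture(panda) -> None:
    controller = PandaPcapController(panda.pcap)
    # Anything other than CONSTANT_GATE / VARIABLE_GATE now raises TypeError
    await controller.prepare(
        TriggerInfo(number_of_triggers=10, trigger=DetectorTrigger.CONSTANT_GATE)
    )
    await controller.arm()
    await controller.wait_for_idle()
    await controller.disarm()
```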
ophyd_async/fastcs/panda/_hdf_panda.py
CHANGED
@@ -12,7 +12,11 @@ from ._writer import PandaHDFWriter
 MINIMUM_PANDA_IOC = "0.11.4"


-class HDFPanda(CommonPandaBlocks, StandardDetector):
+class HDFPanda(
+    CommonPandaBlocks, StandardDetector[PandaPcapController, PandaHDFWriter]
+):
+    """PandA with common blocks for standard HDF writing."""
+
     def __init__(
         self,
         prefix: str,
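`HDFPanda` is now generic over its controller and writer types, but construction is unchanged. A minimal sketch, assuming the static path/filename providers from `ophyd_async.core` and a hypothetical `PANDA:` PVI prefix and output directory:

```python
from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider
from ophyd_async.fastcs.panda import HDFPanda

# Write HDF files named panda*.h5 into /tmp/panda (illustrative values only)
path_provider = StaticPathProvider(StaticFilenameProvider("panda"), Path("/tmp/panda"))
panda = HDFPanda("PANDA:", path_provider, name="panda")
```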
ophyd_async/fastcs/panda/_table.py
CHANGED
@@ -7,6 +7,8 @@ from ophyd_async.core import Array1D, StrictEnum, Table


 class PandaHdf5DatasetType(StrictEnum):
+    """Dataset options for HDF capture."""
+
     FLOAT_64 = "float64"
     UINT_32 = "uint32"

@@ -17,6 +19,8 @@ class DatasetTable(Table):


 class SeqTrigger(StrictEnum):
+    """Trigger options for the SeqTable."""
+
     IMMEDIATE = "Immediate"
     BITA_0 = "BITA=0"
     BITA_1 = "BITA=1"
@@ -33,6 +37,8 @@ class SeqTrigger(StrictEnum):


 class SeqTable(Table):
+    """Data type for the panda seq table."""
+
     repeats: Array1D[np.uint16]
     trigger: Sequence[SeqTrigger]
     position: Array1D[np.int32]
@@ -76,12 +82,12 @@ class SeqTable(Table):
         return SeqTable(**{k: [v] for k, v in locals().items()})  # type: ignore

     @model_validator(mode="after")
-    def
-
-
-        the pydantic field doesn't work
-        """
-
+    def _validate_max_length(self) -> "SeqTable":
+        # Used to check max_length. Unfortunately trying the ``max_length`` arg in
+        # the pydantic field doesn't work.
         first_length = len(self)
-
+        max_length = 4096
+        if first_length > max_length:
+            msg = f"Length {first_length} is too long"
+            raise ValueError(msg)
         return self
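The new `_validate_max_length` validator rejects tables longer than 4096 rows with a `ValueError`. A sketch of building a small table; the `row` factory name is assumed from the classmethod body shown above:

```python
from ophyd_async.fastcs.panda import SeqTable, SeqTrigger

# One row that waits for BITA to go high; tables past 4096 rows now fail validation
table = SeqTable.row(trigger=SeqTrigger.BITA_1, repeats=1)
```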
ophyd_async/fastcs/panda/_trigger.py
CHANGED
@@ -4,24 +4,34 @@ from pydantic import BaseModel, Field

 from ophyd_async.core import FlyerController, wait_for_value

-from ._block import
+from ._block import (
+    PandaBitMux,
+    PandaPcompDirection,
+    PandaTimeUnits,
+    PcompBlock,
+    SeqBlock,
+)
 from ._table import SeqTable


 class SeqTableInfo(BaseModel):
+    """Info for the PandA `SeqTable` for flyscanning."""
+
     sequence_table: SeqTable = Field(strict=True)
     repeats: int = Field(ge=0)
     prescale_as_us: float = Field(default=1, ge=0)  # microseconds


 class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
+    """For controlling the PandA `SeqTable` when flyscanning."""
+
     def __init__(self, seq: SeqBlock) -> None:
         self.seq = seq

     async def prepare(self, value: SeqTableInfo):
         await asyncio.gather(
-            self.seq.prescale_units.set(TimeUnits.US),
-            self.seq.enable.set(BitMux.ZERO),
+            self.seq.prescale_units.set(PandaTimeUnits.US),
+            self.seq.enable.set(PandaBitMux.ZERO),
         )
         await asyncio.gather(
             self.seq.prescale.set(value.prescale_as_us),
@@ -30,18 +40,20 @@ class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
         )

     async def kickoff(self) -> None:
-        await self.seq.enable.set(BitMux.ONE)
+        await self.seq.enable.set(PandaBitMux.ONE)
         await wait_for_value(self.seq.active, True, timeout=1)

     async def complete(self) -> None:
         await wait_for_value(self.seq.active, False, timeout=None)

     async def stop(self):
-        await self.seq.enable.set(BitMux.ZERO)
+        await self.seq.enable.set(PandaBitMux.ZERO)
         await wait_for_value(self.seq.active, False, timeout=1)


 class PcompInfo(BaseModel):
+    """Info for the PandA `PcompBlock` for flyscanning."""
+
     start_postion: int = Field(description="start position in counts")
     pulse_width: int = Field(description="width of a single pulse in counts", gt=0)
     rising_edge_step: int = Field(
@@ -54,7 +66,7 @@ class PcompInfo(BaseModel):
         ),
         ge=0,
     )
-    direction: PcompDirection = Field(
+    direction: PandaPcompDirection = Field(
         description=(
             "Specifies which direction the motor counts should be "
             "moving. Pulses won't be sent unless the values are moving in "
@@ -64,11 +76,13 @@ class PcompInfo(BaseModel):


 class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
+    """For controlling the PandA `PcompBlock` when flyscanning."""
+
     def __init__(self, pcomp: PcompBlock) -> None:
         self.pcomp = pcomp

     async def prepare(self, value: PcompInfo):
-        await self.pcomp.enable.set(BitMux.ZERO)
+        await self.pcomp.enable.set(PandaBitMux.ZERO)
         await asyncio.gather(
             self.pcomp.start.set(value.start_postion),
             self.pcomp.width.set(value.pulse_width),
@@ -78,12 +92,12 @@ class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
         )

     async def kickoff(self) -> None:
-        await self.pcomp.enable.set(BitMux.ONE)
+        await self.pcomp.enable.set(PandaBitMux.ONE)
         await wait_for_value(self.pcomp.active, True, timeout=1)

     async def complete(self, timeout: float | None = None) -> None:
         await wait_for_value(self.pcomp.active, False, timeout=timeout)

     async def stop(self):
-        await self.pcomp.enable.set(BitMux.ZERO)
+        await self.pcomp.enable.set(PandaBitMux.ZERO)
         await wait_for_value(self.pcomp.active, False, timeout=1)
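Both trigger-logic classes plug into a flyer and are parameterised by their `*Info` models. A sketch of the sequence-table case, assuming `StandardFlyer` from `ophyd_async.core` and an already-built `SeqTable` passed in as `table`:

```python
from ophyd_async.core import StandardFlyer
from ophyd_async.fastcs.panda import SeqTableInfo, StaticSeqTableTriggerLogic


async def fly_once(panda, table) -> None:
    # Wrap the first sequencer block in the static seq-table trigger logic
    flyer = StandardFlyer(StaticSeqTableTriggerLogic(panda.seq[1]), name="flyer")
    await flyer.prepare(SeqTableInfo(sequence_table=table, repeats=1))
    await flyer.kickoff()
    await flyer.complete()
```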
ophyd_async/fastcs/panda/_writer.py
CHANGED
@@ -4,24 +4,23 @@ from pathlib import Path

 from bluesky.protocols import StreamAsset
 from event_model import DataKey
-from p4p.client.thread import Context

 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     DetectorWriter,
-
-
+    HDFDatasetDescription,
+    HDFDocumentComposer,
     NameProvider,
     PathProvider,
     observe_value,
     wait_for_value,
 )

-from ._block import
+from ._block import DataBlock, PandaCaptureMode


 class PandaHDFWriter(DetectorWriter):
-
+    """For writing for PandA data from the `DataBlock`."""

     def __init__(
         self,
@@ -32,18 +31,17 @@ class PandaHDFWriter(DetectorWriter):
         self.panda_data_block = panda_data_block
         self._path_provider = path_provider
         self._name_provider = name_provider
-        self._datasets: list[
-        self.
+        self._datasets: list[HDFDatasetDescription] = []
+        self._composer: HDFDocumentComposer | None = None
         self._multiplier = 1

     # Triggered on PCAP arm
     async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
-        """Retrieve and get descriptor of all PandA signals marked for capture"""
-
+        """Retrieve and get descriptor of all PandA signals marked for capture."""
         # Ensure flushes are immediate
         await self.panda_data_block.flush_period.set(0)

-        self.
+        self._composer = None
         info = self._path_provider(device_name=self._name_provider())

         # Set create dir depth first to guarantee that callback when setting
@@ -56,7 +54,7 @@ class PandaHDFWriter(DetectorWriter):
             self.panda_data_block.hdf_file_name.set(
                 f"{info.filename}.h5",
             ),
-            self.panda_data_block.capture_mode.set(CaptureMode.FOREVER),
+            self.panda_data_block.capture_mode.set(PandaCaptureMode.FOREVER),
         )

         # Make sure that directory exists or has been created.
@@ -76,18 +74,15 @@ class PandaHDFWriter(DetectorWriter):
         return await self._describe()

     async def _describe(self) -> dict[str, DataKey]:
-        """
-        Return a describe based on the datasets PV
-        """
-
+        """Return a describe based on the datasets PV."""
         await self._update_datasets()
         describe = {
             ds.data_key: DataKey(
                 source=self.panda_data_block.hdf_directory.source,
                 shape=list(ds.shape),
-                dtype="
+                dtype="number",
                 # PandA data should always be written as Float64
-                dtype_numpy=
+                dtype_numpy=ds.dtype_numpy,
                 external="STREAM:",
             )
             for ds in self._datasets
@@ -95,17 +90,19 @@ class PandaHDFWriter(DetectorWriter):
         return describe

     async def _update_datasets(self) -> None:
-
-
-        representation of datasets that the panda will write.
-        """
-
+        # Load data from the datasets PV on the panda, update internal
+        # representation of datasets that the panda will write.
         capture_table = await self.panda_data_block.datasets.get_value()
         self._datasets = [
             # TODO: Update chunk size to read signal once available in IOC
             # Currently PandA IOC sets chunk size to 1024 points per chunk
-
-                dataset_name,
+            HDFDatasetDescription(
+                data_key=dataset_name,
+                dataset="/" + dataset_name,
+                shape=(),
+                dtype_numpy="<f8",
+                multiplier=1,
+                chunk_shape=(1024,),
             )
             for dataset_name in capture_table.name
         ]
@@ -135,9 +132,9 @@ class PandaHDFWriter(DetectorWriter):
         return await self.panda_data_block.num_captured.get_value()

     async def observe_indices_written(
-        self, timeout
+        self, timeout: float
     ) -> AsyncGenerator[int, None]:
-        """Wait until a specific index is ready to be collected"""
+        """Wait until a specific index is ready to be collected."""
         async for num_captured in observe_value(
             self.panda_data_block.num_captured, timeout
         ):
@@ -148,15 +145,15 @@ class PandaHDFWriter(DetectorWriter):
     ) -> AsyncIterator[StreamAsset]:
         # TODO: fail if we get dropped frames
         if indices_written:
-            if not self.
-                self.
+            if not self._composer:
+                self._composer = HDFDocumentComposer(
                     Path(await self.panda_data_block.hdf_directory.get_value())
                     / Path(await self.panda_data_block.hdf_file_name.get_value()),
                     self._datasets,
                 )
-            for doc in self.
+            for doc in self._composer.stream_resources():
                 yield "stream_resource", doc
-            for doc in self.
+            for doc in self._composer.stream_data(indices_written):
                 yield "stream_datum", doc

     # Could put this function as default for StandardDetector
ophyd_async/plan_stubs/__init__.py
CHANGED
@@ -1,3 +1,5 @@
+"""Plan stubs for connecting, setting up and flying devices."""
+
 from ._ensure_connected import ensure_connected
 from ._fly import (
     fly_and_collect,
@@ -5,6 +7,14 @@ from ._fly import (
     time_resolved_fly_and_collect_with_static_seq_table,
 )
 from ._nd_attributes import setup_ndattributes, setup_ndstats_sum
+from ._panda import apply_panda_settings
+from ._settings import (
+    apply_settings,
+    apply_settings_if_different,
+    get_current_settings,
+    retrieve_settings,
+    store_settings,
+)

 __all__ = [
     "fly_and_collect",
@@ -13,4 +23,10 @@ __all__ = [
     "ensure_connected",
     "setup_ndattributes",
     "setup_ndstats_sum",
+    "apply_panda_settings",
+    "apply_settings",
+    "apply_settings_if_different",
+    "get_current_settings",
+    "retrieve_settings",
+    "store_settings",
 ]
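The new exports come from `_settings.py` and `_panda.py` and give plans a save/restore workflow for device settings, replacing the removed `_device_save_loader.py`. A sketch of a round trip, assuming the YAML-backed provider added in `ophyd_async/core/_yaml_settings.py` is called `YamlSettingsProvider` and that `store_settings`/`retrieve_settings` take a provider, a settings name and a device (all assumptions not confirmed by this diff):

```python
from ophyd_async.core import YamlSettingsProvider
from ophyd_async.plan_stubs import apply_settings, retrieve_settings, store_settings


def snapshot_and_restore(panda):
    provider = YamlSettingsProvider("/path/to/settings")  # hypothetical directory
    # Capture the live configuration to a named YAML file ...
    yield from store_settings(provider, "panda_baseline", panda)
    # ... and later push it back to the hardware
    settings = yield from retrieve_settings(provider, "panda_baseline", panda)
    yield from apply_settings(settings)
```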
ophyd_async/plan_stubs/_ensure_connected.py
CHANGED
@@ -1,33 +1,28 @@
-from
-
-import bluesky.plan_stubs as bps
+from bluesky.utils import plan

 from ophyd_async.core import DEFAULT_TIMEOUT, Device, LazyMock, wait_for_connection

+from ._wait_for_awaitable import wait_for_awaitable
+

+@plan
 def ensure_connected(
     *devices: Device,
     mock: bool | LazyMock = False,
     timeout: float = DEFAULT_TIMEOUT,
     force_reconnect=False,
 ):
+    """Plan stub to ensure devices are connected with a given timeout."""
     device_names = [device.name for device in devices]
     non_unique = {
         device: device.name for device in devices if device_names.count(device.name) > 1
     }
     if non_unique:
         raise ValueError(f"Devices do not have unique names {non_unique}")
-
-
-
-
-
-
-
-        }
-        return wait_for_connection(**coros)
-
-    (connect_task,) = yield from bps.wait_for([connect_devices])
-
-    if connect_task and connect_task.exception() is not None:
-        raise connect_task.exception()
+    coros = {
+        device.name: device.connect(
+            mock=mock, timeout=timeout, force_reconnect=force_reconnect
+        )
+        for device in devices
+    }
+    yield from wait_for_awaitable(wait_for_connection(**coros))
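`ensure_connected` is now a `@plan`-decorated stub that hands the gathered connect coroutines to the new `wait_for_awaitable` helper instead of driving `bps.wait_for` itself. Usage from a plan is unchanged; a minimal sketch with a hypothetical detector device:

```python
import bluesky.plan_stubs as bps

from ophyd_async.plan_stubs import ensure_connected


def read_after_connect(detector):
    # Connect (or reconnect) the device inside the plan, then read it
    yield from ensure_connected(detector, timeout=10)
    yield from bps.rd(detector)
```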
ophyd_async/plan_stubs/_fly.py
CHANGED
@@ -9,7 +9,7 @@ from ophyd_async.core import (
     in_micros,
 )
 from ophyd_async.fastcs.panda import (
-    PcompDirection,
+    PandaPcompDirection,
     PcompInfo,
     SeqTable,
     SeqTableInfo,
@@ -28,7 +28,6 @@ def prepare_static_pcomp_flyer_and_detectors(
     same trigger.

     """
-
     for det in detectors:
         yield from bps.prepare(det, trigger_info, wait=False, group="prep")
     yield from bps.prepare(flyer, pcomp_info, wait=False, group="prep")
@@ -58,7 +57,7 @@ def prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
     if not detectors:
         raise ValueError("No detectors provided. There must be at least one.")

-    deadtime = max(det.
+    deadtime = max(det._controller.get_deadtime(exposure) for det in detectors)  # noqa: SLF001

     trigger_info = TriggerInfo(
         number_of_triggers=number_of_frames * repeats,
@@ -147,7 +146,7 @@ def fly_and_collect_with_static_pcomp(
     number_of_pulses: int,
     pulse_width: int,
     rising_edge_step: int,
-    direction: PcompDirection,
+    direction: PandaPcompDirection,
     trigger_info: TriggerInfo,
 ):
     # Set up scan and prepare trigger
@@ -190,7 +189,6 @@ def time_resolved_fly_and_collect_with_static_seq_table(
     stages/unstages the devices, and opens and closes the run.

     """
-
     # Set up scan and prepare trigger
     yield from prepare_static_seq_table_flyer_and_detectors_with_same_trigger(
         flyer,
ophyd_async/plan_stubs/_nd_attributes.py
CHANGED
@@ -16,6 +16,7 @@ from ophyd_async.epics.adcore import (
 def setup_ndattributes(
     device: NDArrayBaseIO, ndattributes: Sequence[NDAttributePv | NDAttributeParam]
 ):
+    """Set up attributes on NdArray devices."""
     root = ET.Element("Attributes")

     for ndattribute in ndattributes:
@@ -50,11 +51,14 @@


 def setup_ndstats_sum(detector: Device):
-
-
-
-
-
+    """Set up nd stats for a detector."""
+    hdf = getattr(detector, "fileio", None)
+    if not isinstance(hdf, NDFileHDFIO):
+        msg = (
+            f"Expected {detector.name} to have 'fileio' attribute that is an "
+            f"NDFileHDFIO, got {hdf}"
+        )
+        raise TypeError(msg)
     yield from (
         setup_ndattributes(
             hdf,
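`setup_ndstats_sum` now fails fast with a `TypeError` unless the detector exposes its HDF plugin as `fileio`. A short sketch of calling it from a plan, with `my_detector` standing in for any areaDetector-based detector that has such a plugin:

```python
from ophyd_async.plan_stubs import setup_ndstats_sum


def configure_sum(my_detector):
    # Configures NDAttributes so the stats sum is recorded with each frame;
    # raises TypeError unless my_detector.fileio is an NDFileHDFIO
    yield from setup_ndstats_sum(my_detector)
```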
ophyd_async/plan_stubs/_panda.py
ADDED
@@ -0,0 +1,14 @@
+from bluesky.utils import MsgGenerator, plan
+
+from ophyd_async.core import Settings
+from ophyd_async.fastcs import panda
+
+from ._settings import apply_settings
+
+
+@plan
+def apply_panda_settings(settings: Settings[panda.HDFPanda]) -> MsgGenerator[None]:
+    """Apply given settings to a panda device."""
+    units, others = settings.partition(lambda signal: signal.name.endswith("_units"))
+    yield from apply_settings(units)
+    yield from apply_settings(others)