ophyd-async 0.7.0__py3-none-any.whl → 0.8.0a2__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package as they appear in their public registry, and is provided for informational purposes only.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +23 -8
- ophyd_async/core/_detector.py +5 -10
- ophyd_async/core/_device.py +139 -66
- ophyd_async/core/_device_filler.py +191 -0
- ophyd_async/core/_device_save_loader.py +6 -7
- ophyd_async/core/_mock_signal_backend.py +32 -40
- ophyd_async/core/_mock_signal_utils.py +22 -16
- ophyd_async/core/_protocol.py +28 -8
- ophyd_async/core/_readable.py +5 -5
- ophyd_async/core/_signal.py +140 -152
- ophyd_async/core/_signal_backend.py +131 -64
- ophyd_async/core/_soft_signal_backend.py +125 -194
- ophyd_async/core/_status.py +22 -6
- ophyd_async/core/_table.py +97 -100
- ophyd_async/core/_utils.py +71 -18
- ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
- ophyd_async/epics/adaravis/_aravis_io.py +7 -5
- ophyd_async/epics/adcore/_core_io.py +4 -6
- ophyd_async/epics/adcore/_hdf_writer.py +2 -2
- ophyd_async/epics/adcore/_utils.py +15 -10
- ophyd_async/epics/adkinetix/__init__.py +2 -1
- ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
- ophyd_async/epics/adkinetix/_kinetix_io.py +3 -4
- ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
- ophyd_async/epics/adpilatus/_pilatus_io.py +2 -3
- ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
- ophyd_async/epics/advimba/__init__.py +4 -1
- ophyd_async/epics/advimba/_vimba_controller.py +6 -3
- ophyd_async/epics/advimba/_vimba_io.py +7 -8
- ophyd_async/epics/demo/_sensor.py +8 -4
- ophyd_async/epics/eiger/_eiger.py +1 -2
- ophyd_async/epics/eiger/_eiger_controller.py +1 -1
- ophyd_async/epics/eiger/_eiger_io.py +2 -4
- ophyd_async/epics/eiger/_odin_io.py +4 -4
- ophyd_async/epics/pvi/__init__.py +2 -2
- ophyd_async/epics/pvi/_pvi.py +56 -321
- ophyd_async/epics/signal/__init__.py +3 -4
- ophyd_async/epics/signal/_aioca.py +184 -236
- ophyd_async/epics/signal/_common.py +35 -49
- ophyd_async/epics/signal/_p4p.py +254 -387
- ophyd_async/epics/signal/_signal.py +63 -21
- ophyd_async/fastcs/core.py +9 -0
- ophyd_async/fastcs/panda/__init__.py +4 -4
- ophyd_async/fastcs/panda/_block.py +18 -13
- ophyd_async/fastcs/panda/_control.py +3 -5
- ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
- ophyd_async/fastcs/panda/_table.py +29 -51
- ophyd_async/fastcs/panda/_trigger.py +8 -8
- ophyd_async/fastcs/panda/_writer.py +2 -5
- ophyd_async/plan_stubs/_ensure_connected.py +3 -1
- ophyd_async/plan_stubs/_fly.py +2 -2
- ophyd_async/plan_stubs/_nd_attributes.py +5 -4
- ophyd_async/py.typed +0 -0
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
- ophyd_async/tango/__init__.py +2 -4
- ophyd_async/tango/base_devices/_base_device.py +76 -143
- ophyd_async/tango/demo/_counter.py +2 -2
- ophyd_async/tango/demo/_mover.py +2 -2
- ophyd_async/tango/signal/__init__.py +2 -4
- ophyd_async/tango/signal/_signal.py +29 -50
- ophyd_async/tango/signal/_tango_transport.py +38 -40
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/METADATA +8 -12
- ophyd_async-0.8.0a2.dist-info/RECORD +110 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/WHEEL +1 -1
- ophyd_async/epics/signal/_epics_transport.py +0 -34
- ophyd_async-0.7.0.dist-info/RECORD +0 -108
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/LICENSE +0 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0a2.dist-info}/top_level.txt +0 -0

ophyd_async/epics/signal/_signal.py
CHANGED

@@ -2,46 +2,81 @@
 
 from __future__ import annotations
 
+from enum import Enum
+
 from ophyd_async.core import (
     SignalBackend,
+    SignalDatatypeT,
     SignalR,
     SignalRW,
     SignalW,
     SignalX,
-    T,
     get_unique,
 )
 
-from ._epics_transport import _EpicsTransport
 
-
+def _make_unavailable_class(error: Exception) -> type:
+    class TransportNotAvailable:
+        def __init__(*args, **kwargs):
+            raise NotImplementedError("Transport not available") from error
+
+    return TransportNotAvailable
+
+
+class EpicsProtocol(Enum):
+    CA = "ca"
+    PVA = "pva"
+
+
+_default_epics_protocol = EpicsProtocol.CA
+
+try:
+    from ._p4p import PvaSignalBackend
+except ImportError as pva_error:
+    PvaSignalBackend = _make_unavailable_class(pva_error)
+else:
+    _default_epics_protocol = EpicsProtocol.PVA
+
+try:
+    from ._aioca import CaSignalBackend
+except ImportError as ca_error:
+    CaSignalBackend = _make_unavailable_class(ca_error)
+else:
+    _default_epics_protocol = EpicsProtocol.CA
 
 
-def
+def _protocol_pv(pv: str) -> tuple[EpicsProtocol, str]:
     split = pv.split("://", 1)
     if len(split) > 1:
         # We got something like pva://mydevice, so use specified comms mode
-
-
+        scheme, pv = split
+        protocol = EpicsProtocol(scheme)
     else:
         # No comms mode specified, use the default
-
-    return
+        protocol = _default_epics_protocol
+    return protocol, pv
 
 
 def _epics_signal_backend(
-    datatype: type[
-) -> SignalBackend[
+    datatype: type[SignalDatatypeT] | None, read_pv: str, write_pv: str
+) -> SignalBackend[SignalDatatypeT]:
     """Create an epics signal backend."""
-
-
-
-
+    r_protocol, r_pv = _protocol_pv(read_pv)
+    w_protocol, w_pv = _protocol_pv(write_pv)
+    protocol = get_unique({read_pv: r_protocol, write_pv: w_protocol}, "protocols")
+    match protocol:
+        case EpicsProtocol.CA:
+            return CaSignalBackend(datatype, r_pv, w_pv)
+        case EpicsProtocol.PVA:
+            return PvaSignalBackend(datatype, r_pv, w_pv)
 
 
 def epics_signal_rw(
-    datatype: type[
-
+    datatype: type[SignalDatatypeT],
+    read_pv: str,
+    write_pv: str | None = None,
+    name: str = "",
+) -> SignalRW[SignalDatatypeT]:
     """Create a `SignalRW` backed by 1 or 2 EPICS PVs
 
     Parameters
@@ -58,8 +93,11 @@ def epics_signal_rw(
 
 
 def epics_signal_rw_rbv(
-    datatype: type[
-
+    datatype: type[SignalDatatypeT],
+    write_pv: str,
+    read_suffix: str = "_RBV",
+    name: str = "",
+) -> SignalRW[SignalDatatypeT]:
     """Create a `SignalRW` backed by 1 or 2 EPICS PVs, with a suffix on the readback pv
 
     Parameters
@@ -74,7 +112,9 @@ def epics_signal_rw_rbv(
     return epics_signal_rw(datatype, f"{write_pv}{read_suffix}", write_pv, name)
 
 
-def epics_signal_r(
+def epics_signal_r(
+    datatype: type[SignalDatatypeT], read_pv: str, name: str = ""
+) -> SignalR[SignalDatatypeT]:
     """Create a `SignalR` backed by 1 EPICS PV
 
     Parameters
@@ -88,7 +128,9 @@ def epics_signal_r(datatype: type[T], read_pv: str, name: str = "") -> SignalR[T]:
     return SignalR(backend, name=name)
 
 
-def epics_signal_w(
+def epics_signal_w(
+    datatype: type[SignalDatatypeT], write_pv: str, name: str = ""
+) -> SignalW[SignalDatatypeT]:
     """Create a `SignalW` backed by 1 EPICS PVs
 
     Parameters
@@ -110,5 +152,5 @@ def epics_signal_x(write_pv: str, name: str = "") -> SignalX:
     write_pv:
         The PV to write its initial value to on trigger
     """
-    backend
+    backend = _epics_signal_backend(None, write_pv, write_pv)
     return SignalX(backend, name=name)

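For orientation (not part of the diff): the signal factories are now generic over SignalDatatypeT rather than the old T, and the transport is chosen per PV from an optional ca:// or pva:// prefix, falling back to whichever backend imported successfully. A minimal sketch, assuming the factories are still re-exported from ophyd_async.epics.signal as in 0.7.0; the PV names are made up:

```python
from ophyd_async.epics.signal import epics_signal_r, epics_signal_rw

# Unprefixed PVs use the module default: PVA if p4p imported, otherwise CA.
temperature = epics_signal_r(float, "ca://DEMO:TEMPERATURE", name="temperature")
setpoint = epics_signal_rw(float, "pva://DEMO:SETPOINT", name="setpoint")
```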
ophyd_async/fastcs/core.py
ADDED

@@ -0,0 +1,9 @@
+from ophyd_async.core import Device, DeviceConnector
+from ophyd_async.epics.pvi import PviDeviceConnector
+
+
+def fastcs_connector(device: Device, uri: str) -> DeviceConnector:
+    # TODO: add Tango support based on uri scheme
+    connector = PviDeviceConnector(uri + "PVI")
+    connector.create_children_from_annotations(device)
+    return connector

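A sketch of how a FastCS-backed device might use this helper, modelled on the HDFPanda change further down. The connector keyword on Device.__init__ is an assumption based on that diff, and MyFastCSDevice is hypothetical:

```python
from ophyd_async.core import Device
from ophyd_async.fastcs.core import fastcs_connector


class MyFastCSDevice(Device):  # hypothetical device
    def __init__(self, uri: str, name: str = ""):
        # Build the connector first so child blocks declared as annotations
        # exist before anything tries to use them.
        super().__init__(name=name, connector=fastcs_connector(self, uri))
```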
ophyd_async/fastcs/panda/__init__.py
CHANGED

@@ -1,10 +1,10 @@
 from ._block import (
+    BitMux,
     CommonPandaBlocks,
     DataBlock,
-    EnableDisableOptions,
     PcapBlock,
     PcompBlock,
-
+    PcompDirection,
     PulseBlock,
     SeqBlock,
     TimeUnits,
@@ -29,10 +29,10 @@ from ._writer import PandaHDFWriter
 __all__ = [
     "CommonPandaBlocks",
     "DataBlock",
-    "
+    "BitMux",
     "PcapBlock",
     "PcompBlock",
-    "
+    "PcompDirection",
     "PulseBlock",
     "SeqBlock",
     "TimeUnits",

ophyd_async/fastcs/panda/_block.py
CHANGED

@@ -1,13 +1,16 @@
-from
-
-
-
-
+from ophyd_async.core import (
+    Device,
+    DeviceVector,
+    SignalR,
+    SignalRW,
+    StrictEnum,
+    SubsetEnum,
+)
 
 from ._table import DatasetTable, SeqTable
 
 
-class CaptureMode(
+class CaptureMode(StrictEnum):
     FIRST_N = "FIRST_N"
     LAST_N = "LAST_N"
     FOREVER = "FOREVER"
@@ -32,26 +35,28 @@ class PulseBlock(Device):
     width: SignalRW[float]
 
 
-class
+class PcompDirection(StrictEnum):
     positive = "Positive"
     negative = "Negative"
     either = "Either"
 
 
-
+class BitMux(SubsetEnum):
+    zero = "ZERO"
+    one = "ONE"
 
 
 class PcompBlock(Device):
     active: SignalR[bool]
-    dir: SignalRW[
-    enable: SignalRW[
+    dir: SignalRW[PcompDirection]
+    enable: SignalRW[BitMux]
     pulses: SignalRW[int]
     start: SignalRW[int]
     step: SignalRW[int]
     width: SignalRW[int]
 
 
-class TimeUnits(
+class TimeUnits(StrictEnum):
     min = "min"
     s = "s"
     ms = "ms"
@@ -60,11 +65,11 @@ class TimeUnits(str, Enum):
 
 class SeqBlock(Device):
     table: SignalRW[SeqTable]
-    active:
+    active: SignalR[bool]
     repeats: SignalRW[int]
     prescale: SignalRW[float]
     prescale_units: SignalRW[TimeUnits]
-    enable: SignalRW[
+    enable: SignalRW[BitMux]
 
 
 class PcapBlock(Device):

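The enums above illustrate the new core types: a hedged reading of the diff is that StrictEnum members are expected to match the PV's choices exactly, while SubsetEnum (used for BitMux) only needs to list a subset of them. A minimal sketch of declaring a block with these types; Polarity, Enable and MyBlock are made up:

```python
from ophyd_async.core import Device, SignalRW, StrictEnum, SubsetEnum


class Polarity(StrictEnum):
    # Expected to match the underlying choices exactly
    POSITIVE = "Positive"
    NEGATIVE = "Negative"


class Enable(SubsetEnum):
    # Only needs to be a subset of the underlying choices
    ZERO = "ZERO"
    ONE = "ONE"


class MyBlock(Device):
    polarity: SignalRW[Polarity]
    enable: SignalRW[Enable]
```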
ophyd_async/fastcs/panda/_control.py
CHANGED

@@ -1,12 +1,10 @@
-import asyncio
-
 from ophyd_async.core import (
+    AsyncStatus,
     DetectorController,
     DetectorTrigger,
+    TriggerInfo,
     wait_for_value,
 )
-from ophyd_async.core._detector import TriggerInfo
-from ophyd_async.core._status import AsyncStatus
 
 from ._block import PcapBlock
 
@@ -33,5 +31,5 @@ class PandaPcapController(DetectorController):
         pass
 
     async def disarm(self):
-        await
+        await self.pcap.arm.set(False)
         await wait_for_value(self.pcap.active, False, timeout=1)

ophyd_async/fastcs/panda/_hdf_panda.py
CHANGED

@@ -2,8 +2,8 @@ from __future__ import annotations
 
 from collections.abc import Sequence
 
-from ophyd_async.core import
-from ophyd_async.
+from ophyd_async.core import PathProvider, SignalR, StandardDetector
+from ophyd_async.fastcs.core import fastcs_connector
 
 from ._block import CommonPandaBlocks
 from ._control import PandaPcapController
@@ -18,12 +18,10 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
         config_sigs: Sequence[SignalR] = (),
         name: str = "",
     ):
-        self.
-
-        create_children_from_annotations(self)
+        # This has to be first so we make self.pcap
+        connector = fastcs_connector(self, prefix)
         controller = PandaPcapController(pcap=self.pcap)
         writer = PandaHDFWriter(
-            prefix=prefix,
             path_provider=path_provider,
             name_provider=lambda: name,
             panda_data_block=self.data,
@@ -33,17 +31,5 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
             writer=writer,
             config_sigs=config_sigs,
             name=name,
-
-
-    async def connect(
-        self,
-        mock: bool = False,
-        timeout: float = DEFAULT_TIMEOUT,
-        force_reconnect: bool = False,
-    ):
-        # TODO: this doesn't support caching
-        # https://github.com/bluesky/ophyd-async/issues/472
-        await fill_pvi_entries(self, self._prefix + "PVI", timeout=timeout, mock=mock)
-        await super().connect(
-            mock=mock, timeout=timeout, force_reconnect=force_reconnect
+            connector=connector,
         )

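A sketch of constructing the device with the new signature; the custom connect override is gone, so connection happens through the connector passed to super().__init__. StaticPathProvider and StaticFilenameProvider are taken from 0.7.0 and their exact location may have changed, and the prefix and output directory are made up:

```python
from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider
from ophyd_async.fastcs.panda import HDFPanda

# Hypothetical prefix and output directory
path_provider = StaticPathProvider(StaticFilenameProvider("panda"), Path("/tmp/panda"))
panda = HDFPanda("PANDA1:", path_provider, name="panda")
# Connect later, e.g. with the ensure_connected plan stub shown further down.
```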
ophyd_async/fastcs/panda/_table.py
CHANGED

@@ -1,27 +1,22 @@
 from collections.abc import Sequence
-from enum import Enum
-from typing import Annotated
 
 import numpy as np
-
-from pydantic import Field, model_validator
-from pydantic_numpy.helper.annotation import NpArrayPydanticAnnotation
-from typing_extensions import TypedDict
+from pydantic import model_validator
 
-from ophyd_async.core import Table
+from ophyd_async.core import Array1D, StrictEnum, Table
 
 
-class PandaHdf5DatasetType(
+class PandaHdf5DatasetType(StrictEnum):
     FLOAT_64 = "float64"
     UINT_32 = "uint32"
 
 
-class DatasetTable(
-    name:
+class DatasetTable(Table):
+    name: Sequence[str]
     hdf5_type: Sequence[PandaHdf5DatasetType]
 
 
-class SeqTrigger(
+class SeqTrigger(StrictEnum):
     IMMEDIATE = "Immediate"
     BITA_0 = "BITA=0"
     BITA_1 = "BITA=1"
@@ -37,45 +32,27 @@ class SeqTrigger(str, Enum):
     POSC_LT = "POSC<=POSITION"
 
 
-PydanticNp1DArrayInt32 = Annotated[
-    np.ndarray[tuple[int], np.dtype[np.int32]],
-    NpArrayPydanticAnnotation.factory(
-        data_type=np.int32, dimensions=1, strict_data_typing=False
-    ),
-    Field(default_factory=lambda: np.array([], np.int32)),
-]
-PydanticNp1DArrayBool = Annotated[
-    np.ndarray[tuple[int], np.dtype[np.bool_]],
-    NpArrayPydanticAnnotation.factory(
-        data_type=np.bool_, dimensions=1, strict_data_typing=False
-    ),
-    Field(default_factory=lambda: np.array([], dtype=np.bool_)),
-]
-TriggerStr = Annotated[Sequence[SeqTrigger], Field(default_factory=list)]
-
-
 class SeqTable(Table):
-    repeats:
-    trigger:
-    position:
-    time1:
-    outa1:
-    outb1:
-    outc1:
-    outd1:
-    oute1:
-    outf1:
-    time2:
-    outa2:
-    outb2:
-    outc2:
-    outd2:
-    oute2:
-    outf2:
+    repeats: Array1D[np.uint16]
+    trigger: Sequence[SeqTrigger]
+    position: Array1D[np.int32]
+    time1: Array1D[np.uint32]
+    outa1: Array1D[np.bool_]
+    outb1: Array1D[np.bool_]
+    outc1: Array1D[np.bool_]
+    outd1: Array1D[np.bool_]
+    oute1: Array1D[np.bool_]
+    outf1: Array1D[np.bool_]
+    time2: Array1D[np.uint32]
+    outa2: Array1D[np.bool_]
+    outb2: Array1D[np.bool_]
+    outc2: Array1D[np.bool_]
+    outd2: Array1D[np.bool_]
+    oute2: Array1D[np.bool_]
+    outf2: Array1D[np.bool_]
 
-    @
-    def row(
-        cls,
+    @staticmethod
+    def row(
         *,
         repeats: int = 1,
         trigger: str = SeqTrigger.IMMEDIATE,
@@ -95,7 +72,8 @@ class SeqTable(Table):
         oute2: bool = False,
         outf2: bool = False,
     ) -> "SeqTable":
-
+        # Let pydantic do the conversions for us
+        return SeqTable(**{k: [v] for k, v in locals().items()})  # type: ignore
 
     @model_validator(mode="after")
     def validate_max_length(self) -> "SeqTable":
@@ -104,6 +82,6 @@ class SeqTable(Table):
         the pydantic field doesn't work
         """
 
-        first_length = len(
-        assert
+        first_length = len(self)
+        assert first_length <= 4096, f"Length {first_length} is too long"
         return self

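A sketch of building a sequencer table with the new model: row() wraps each scalar in a one-element column and pydantic coerces it to the Array1D dtypes declared above. Concatenating rows with + assumes the Table base class supports it (the len(self) in the validator suggests tables are row-counted):

```python
from ophyd_async.fastcs.panda import SeqTable, SeqTrigger

table = SeqTable.row(
    repeats=1, trigger=SeqTrigger.BITA_1, time1=100, outa1=True
) + SeqTable.row(repeats=5, trigger=SeqTrigger.IMMEDIATE, time2=200, outa2=True)
assert len(table) == 2  # validate_max_length caps a table at 4096 rows
```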
ophyd_async/fastcs/panda/_trigger.py
CHANGED

@@ -4,7 +4,7 @@ from pydantic import BaseModel, Field
 
 from ophyd_async.core import FlyerController, wait_for_value
 
-from ._block import PcompBlock,
+from ._block import BitMux, PcompBlock, PcompDirection, SeqBlock, TimeUnits
 from ._table import SeqTable
 
 
@@ -21,7 +21,7 @@ class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
     async def prepare(self, value: SeqTableInfo):
         await asyncio.gather(
             self.seq.prescale_units.set(TimeUnits.us),
-            self.seq.enable.set(
+            self.seq.enable.set(BitMux.zero),
         )
         await asyncio.gather(
             self.seq.prescale.set(value.prescale_as_us),
@@ -30,14 +30,14 @@ class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
         )
 
     async def kickoff(self) -> None:
-        await self.seq.enable.set(
+        await self.seq.enable.set(BitMux.one)
         await wait_for_value(self.seq.active, True, timeout=1)
 
     async def complete(self) -> None:
         await wait_for_value(self.seq.active, False, timeout=None)
 
     async def stop(self):
-        await self.seq.enable.set(
+        await self.seq.enable.set(BitMux.zero)
         await wait_for_value(self.seq.active, False, timeout=1)
 
 
@@ -54,7 +54,7 @@ class PcompInfo(BaseModel):
         ),
         ge=0,
     )
-    direction:
+    direction: PcompDirection = Field(
         description=(
             "Specifies which direction the motor counts should be "
             "moving. Pulses won't be sent unless the values are moving in "
@@ -68,7 +68,7 @@ class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
         self.pcomp = pcomp
 
     async def prepare(self, value: PcompInfo):
-        await self.pcomp.enable.set(
+        await self.pcomp.enable.set(BitMux.zero)
         await asyncio.gather(
             self.pcomp.start.set(value.start_postion),
             self.pcomp.width.set(value.pulse_width),
@@ -78,12 +78,12 @@ class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
         )
 
     async def kickoff(self) -> None:
-        await self.pcomp.enable.set(
+        await self.pcomp.enable.set(BitMux.one)
         await wait_for_value(self.pcomp.active, True, timeout=1)
 
     async def complete(self, timeout: float | None = None) -> None:
         await wait_for_value(self.pcomp.active, False, timeout=timeout)
 
     async def stop(self):
-        await self.pcomp.enable.set(
+        await self.pcomp.enable.set(BitMux.zero)
         await wait_for_value(self.pcomp.active, False, timeout=1)

ophyd_async/fastcs/panda/_writer.py
CHANGED

@@ -25,13 +25,11 @@ class PandaHDFWriter(DetectorWriter):
 
     def __init__(
         self,
-        prefix: str,
         path_provider: PathProvider,
         name_provider: NameProvider,
         panda_data_block: DataBlock,
     ) -> None:
        self.panda_data_block = panda_data_block
-        self._prefix = prefix
        self._path_provider = path_provider
        self._name_provider = name_provider
        self._datasets: list[HDFDataset] = []
@@ -89,8 +87,7 @@ class PandaHDFWriter(DetectorWriter):
                shape=list(ds.shape),
                dtype="array" if ds.shape != [1] else "number",
                # PandA data should always be written as Float64
-
-                dtype_numpy="<f8",  # type: ignore
+                dtype_numpy="<f8",
                external="STREAM:",
            )
            for ds in self._datasets
@@ -110,7 +107,7 @@ class PandaHDFWriter(DetectorWriter):
            HDFDataset(
                dataset_name, "/" + dataset_name, [1], multiplier=1, chunk_shape=(1024,)
            )
-            for dataset_name in capture_table
+            for dataset_name in capture_table.name
        ]
 
        # Warn user if dataset table is empty in PandA

ophyd_async/plan_stubs/_ensure_connected.py
CHANGED

@@ -1,3 +1,5 @@
+from unittest.mock import Mock
+
 import bluesky.plan_stubs as bps
 
 from ophyd_async.core import DEFAULT_TIMEOUT, Device, wait_for_connection
@@ -5,7 +7,7 @@ from ophyd_async.core import DEFAULT_TIMEOUT, Device, wait_for_connection
 
 def ensure_connected(
     *devices: Device,
-    mock: bool = False,
+    mock: bool | Mock = False,
     timeout: float = DEFAULT_TIMEOUT,
     force_reconnect=False,
 ):

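The widened mock parameter means a plan can now pass in its own Mock, presumably so the same parent mock can be shared between devices and inspected afterwards; that reading is an interpretation, not stated in the diff. A sketch with a made-up signal and PV:

```python
from unittest.mock import Mock

from bluesky.run_engine import RunEngine

from ophyd_async.epics.signal import epics_signal_rw
from ophyd_async.plan_stubs import ensure_connected

RE = RunEngine()
setpoint = epics_signal_rw(float, "DEMO:SETPOINT", name="setpoint")

shared_mock = Mock()
RE(ensure_connected(setpoint, mock=shared_mock))  # connect in mock mode
```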
ophyd_async/plan_stubs/_fly.py
CHANGED

@@ -9,7 +9,7 @@ from ophyd_async.core import (
     in_micros,
 )
 from ophyd_async.fastcs.panda import (
-
+    PcompDirection,
     PcompInfo,
     SeqTable,
     SeqTableInfo,
@@ -147,7 +147,7 @@ def fly_and_collect_with_static_pcomp(
     number_of_pulses: int,
     pulse_width: int,
     rising_edge_step: int,
-    direction:
+    direction: PcompDirection,
     trigger_info: TriggerInfo,
 ):
     # Set up scan and prepare trigger

ophyd_async/plan_stubs/_nd_attributes.py
CHANGED

@@ -16,12 +16,12 @@ from ophyd_async.epics.adcore import (
 def setup_ndattributes(
     device: NDArrayBaseIO, ndattributes: Sequence[NDAttributePv | NDAttributeParam]
 ):
-
+    root = ET.Element("Attributes")
 
     for ndattribute in ndattributes:
         if isinstance(ndattribute, NDAttributeParam):
             ET.SubElement(
-
+                root,
                 "Attribute",
                 name=ndattribute.name,
                 type="PARAM",
@@ -32,7 +32,7 @@ def setup_ndattributes(
             )
         elif isinstance(ndattribute, NDAttributePv):
             ET.SubElement(
-
+                root,
                 "Attribute",
                 name=ndattribute.name,
                 type="EPICS_PV",
@@ -45,7 +45,8 @@ def setup_ndattributes(
                 f"Invalid type for ndattributes: {type(ndattribute)}. "
                 "Expected NDAttributePv or NDAttributeParam."
             )
-
+    xml_text = ET.tostring(root, encoding="unicode")
+    yield from bps.abs_set(device.nd_attributes_file, xml_text, wait=True)
 
 
 def setup_ndstats_sum(detector: Device):

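For reference, the rewritten stub now accumulates every attribute under a single Attributes root and serialises it once at the end. A standalone sketch of the same ElementTree pattern; the attribute names here are made up:

```python
import xml.etree.ElementTree as ET

root = ET.Element("Attributes")
ET.SubElement(root, "Attribute", name="sum", type="PARAM")
ET.SubElement(root, "Attribute", name="ring_current", type="EPICS_PV")
xml_text = ET.tostring(root, encoding="unicode")
print(xml_text)  # one <Attributes> document, written to nd_attributes_file
```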
ophyd_async/py.typed
ADDED

(empty file: the PEP 561 marker that tells type checkers the package ships inline type hints)

ophyd_async/tango/__init__.py
CHANGED

@@ -7,14 +7,13 @@ from .signal import (
     AttributeProxy,
     CommandProxy,
     TangoSignalBackend,
-    __tango_signal_auto,
     ensure_proper_executor,
     get_dtype_extended,
     get_python_type,
     get_tango_trl,
     get_trl_descriptor,
     infer_python_type,
-
+    infer_signal_type,
     make_backend,
     tango_signal_r,
     tango_signal_rw,
@@ -32,12 +31,11 @@ __all__ = [
     "get_trl_descriptor",
     "get_tango_trl",
     "infer_python_type",
-    "
+    "infer_signal_type",
     "make_backend",
     "AttributeProxy",
     "CommandProxy",
     "ensure_proper_executor",
-    "__tango_signal_auto",
     "tango_signal_r",
     "tango_signal_rw",
     "tango_signal_w",