ophyd-async 0.1.0__py3-none-any.whl → 0.3a1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +47 -12
- ophyd_async/core/_providers.py +66 -0
- ophyd_async/core/async_status.py +7 -5
- ophyd_async/core/detector.py +321 -0
- ophyd_async/core/device.py +184 -0
- ophyd_async/core/device_save_loader.py +286 -0
- ophyd_async/core/flyer.py +94 -0
- ophyd_async/core/{_device/_signal/signal.py → signal.py} +46 -18
- ophyd_async/core/{_device/_backend/signal_backend.py → signal_backend.py} +6 -2
- ophyd_async/core/{_device/_backend/sim_signal_backend.py → sim_signal_backend.py} +6 -2
- ophyd_async/core/{_device/standard_readable.py → standard_readable.py} +3 -3
- ophyd_async/core/utils.py +79 -29
- ophyd_async/epics/_backend/_aioca.py +38 -25
- ophyd_async/epics/_backend/_p4p.py +62 -27
- ophyd_async/epics/_backend/common.py +20 -0
- ophyd_async/epics/areadetector/__init__.py +10 -13
- ophyd_async/epics/areadetector/controllers/__init__.py +4 -0
- ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +52 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +49 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +15 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +111 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +18 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +4 -4
- ophyd_async/epics/areadetector/utils.py +91 -3
- ophyd_async/epics/areadetector/writers/__init__.py +5 -0
- ophyd_async/epics/areadetector/writers/_hdfdataset.py +10 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +54 -0
- ophyd_async/epics/areadetector/writers/hdf_writer.py +133 -0
- ophyd_async/epics/areadetector/{nd_file_hdf.py → writers/nd_file_hdf.py} +22 -5
- ophyd_async/epics/areadetector/writers/nd_plugin.py +30 -0
- ophyd_async/epics/demo/__init__.py +3 -2
- ophyd_async/epics/demo/demo_ad_sim_detector.py +35 -0
- ophyd_async/epics/motion/motor.py +2 -1
- ophyd_async/epics/pvi.py +70 -0
- ophyd_async/epics/signal/__init__.py +0 -2
- ophyd_async/epics/signal/signal.py +1 -1
- ophyd_async/panda/__init__.py +12 -8
- ophyd_async/panda/panda.py +43 -134
- ophyd_async/panda/panda_controller.py +41 -0
- ophyd_async/panda/table.py +158 -0
- ophyd_async/panda/utils.py +15 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3a1.dist-info}/METADATA +49 -42
- ophyd_async-0.3a1.dist-info/RECORD +56 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3a1.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device/__init__.py +0 -0
- ophyd_async/core/_device/_backend/__init__.py +0 -0
- ophyd_async/core/_device/_signal/__init__.py +0 -0
- ophyd_async/core/_device/device.py +0 -60
- ophyd_async/core/_device/device_collector.py +0 -121
- ophyd_async/core/_device/device_vector.py +0 -14
- ophyd_async/epics/areadetector/ad_driver.py +0 -18
- ophyd_async/epics/areadetector/directory_provider.py +0 -18
- ophyd_async/epics/areadetector/hdf_streamer_det.py +0 -167
- ophyd_async/epics/areadetector/nd_plugin.py +0 -13
- ophyd_async/epics/signal/pvi_get.py +0 -22
- ophyd_async-0.1.0.dist-info/RECORD +0 -45
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3a1.dist-info}/LICENSE +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3a1.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3a1.dist-info}/top_level.txt +0 -0
ophyd_async/core/utils.py
CHANGED
@@ -1,5 +1,18 @@
+from __future__ import annotations
+
 import asyncio
-
+import logging
+from typing import (
+    Awaitable,
+    Callable,
+    Dict,
+    Iterable,
+    List,
+    Optional,
+    Type,
+    TypeVar,
+    Union,
+)

 import numpy as np
 from bluesky.protocols import Reading
@@ -11,46 +24,79 @@ Callback = Callable[[T], None]
 #: monitor updates
 ReadingValueCallback = Callable[[Reading, T], None]
 DEFAULT_TIMEOUT = 10.0
+ErrorText = Union[str, Dict[str, Exception]]


 class NotConnected(Exception):
     """Exception to be raised if a `Device.connect` is cancelled"""

-
-
+    _indent_width = "    "
+
+    def __init__(self, errors: ErrorText):
+        """
+        NotConnected holds a mapping of device/signal names to
+        errors.
+
+        Parameters
+        ----------
+        errors: ErrorText
+            Mapping of device name to Exception or another NotConnected.
+            Alternatively a string with the signal error text.
+        """
+
+        self._errors = errors
+
+    def _format_sub_errors(self, name: str, error: Exception, indent="") -> str:
+        if isinstance(error, NotConnected):
+            error_txt = ":" + error.format_error_string(indent + self._indent_width)
+        elif isinstance(error, Exception):
+            error_txt = ": " + err_str + "\n" if (err_str := str(error)) else "\n"
+        else:
+            raise RuntimeError(
+                f"Unexpected type `{type(error)}`, expected an Exception"
+            )
+
+        string = f"{indent}{name}: {type(error).__name__}" + error_txt
+        return string
+
+    def format_error_string(self, indent="") -> str:
+        if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
+            raise RuntimeError(
+                f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
+            )
+
+        if isinstance(self._errors, str):
+            return " " + self._errors + "\n"
+
+        string = "\n"
+        for name, error in self._errors.items():
+            string += self._format_sub_errors(name, error, indent=indent)
+        return string

     def __str__(self) -> str:
-        return "
+        return self.format_error_string(indent="")


 async def wait_for_connection(**coros: Awaitable[None]):
-    """Call many underlying signals, accumulating
+    """Call many underlying signals, accumulating exceptions and returning them

-
-    ------
-    `NotConnected` if cancelled
+    Expected kwargs should be a mapping of names to coroutine tasks to execute.
     """
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-            lines += [f"  {line}" for line in e.lines]
-            raise NotConnected(*lines)
-    else:
-        # Wait for everything to foreground the exceptions
-        for f in list(done) + list(pending):
-            await f
+    results = await asyncio.gather(*coros.values(), return_exceptions=True)
+    exceptions = {}
+
+    for name, result in zip(coros, results):
+        if isinstance(result, Exception):
+            exceptions[name] = result
+            if not isinstance(result, NotConnected):
+                logging.exception(
+                    f"device `{name}` raised unexpected exception "
+                    f"{type(result).__name__}",
+                    exc_info=result,
+                )
+
+    if exceptions:
+        raise NotConnected(exceptions)


 def get_dtype(typ: Type) -> Optional[np.dtype]:
@@ -98,3 +144,7 @@ async def merge_gathered_dicts(
     for result in await asyncio.gather(*coros):
         ret.update(result)
     return ret
+
+
+async def gather_list(coros: Iterable[Awaitable[T]]) -> List[T]:
+    return await asyncio.gather(*coros)
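wait_for_connection now gathers every coroutine with return_exceptions=True and raises a single NotConnected carrying a name-to-exception mapping, rather than a flat list of message lines. A minimal sketch of the resulting calling pattern (the coroutines, names and error text below are invented for illustration; only wait_for_connection and NotConnected come from this file):

import asyncio

from ophyd_async.core.utils import NotConnected, wait_for_connection


async def connect_ok() -> None:
    return None


async def connect_broken() -> None:
    raise TimeoutError("no response from IOC")


async def main() -> None:
    try:
        # The keyword names become the keys of the NotConnected error mapping
        await wait_for_connection(good=connect_ok(), bad=connect_broken())
    except NotConnected as e:
        # __str__ now renders the nested name -> exception tree
        print(e)


asyncio.run(main())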
ophyd_async/epics/_backend/_aioca.py
CHANGED
@@ -1,5 +1,5 @@
+import logging
 import sys
-from asyncio import CancelledError
 from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, Optional, Sequence, Type, Union
@@ -8,6 +8,7 @@ from aioca import (
     FORMAT_CTRL,
     FORMAT_RAW,
     FORMAT_TIME,
+    CANothing,
     Subscription,
     caget,
     camonitor,
@@ -18,7 +19,6 @@ from bluesky.protocols import Descriptor, Dtype, Reading
 from epicscorelibs.ca import dbr

 from ophyd_async.core import (
-    NotConnected,
     ReadingValueCallback,
     SignalBackend,
     T,
@@ -26,6 +26,9 @@ from ophyd_async.core import (
     get_unique,
     wait_for_connection,
 )
+from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+from .common import get_supported_enum_class

 dbr_to_dtype: Dict[Dbr, Dtype] = {
     dbr.DBR_STRING: "string",
@@ -59,6 +62,16 @@ class CaConverter:
         return dict(source=source, dtype=dbr_to_dtype[value.datatype], shape=[])


+class CaLongStrConverter(CaConverter):
+    def __init__(self):
+        return super().__init__(dbr.DBR_CHAR_STR, dbr.DBR_CHAR_STR)
+
+    def write_value(self, value: str):
+        # Add a null in here as this is what the commandline caput does
+        # TODO: this should be in the server so check if it can be pushed to asyn
+        return value + "\0"
+
+
 class CaArrayConverter(CaConverter):
     def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
         return dict(source=source, dtype="array", shape=[len(value)])
@@ -79,9 +92,7 @@ class CaEnumConverter(CaConverter):

     def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
         choices = [e.value for e in self.enum_class]
-        return dict(
-            source=source, dtype="string", shape=[], choices=choices
-        )  # type: ignore
+        return dict(source=source, dtype="string", shape=[], choices=choices)


 class DisconnectedCaConverter(CaConverter):
@@ -97,7 +108,7 @@ def make_converter(
     is_array = bool([v for v in values.values() if v.element_count > 1])
     if is_array and datatype is str and pv_dbr == dbr.DBR_CHAR:
         # Override waveform of chars to be treated as string
-        return
+        return CaLongStrConverter()
     elif is_array and pv_dbr == dbr.DBR_STRING:
         # Waveform of strings, check we wanted this
         if datatype and datatype != Sequence[str]:
@@ -127,17 +138,7 @@ def make_converter(
         pv_choices = get_unique(
             {k: tuple(v.enums) for k, v in values.items()}, "choices"
         )
-
-            if not issubclass(datatype, Enum):
-                raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
-            choices = tuple(v.value for v in datatype)
-            if set(choices) != set(pv_choices):
-                raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
-            enum_class = datatype
-        else:
-            enum_class = Enum(  # type: ignore
-                "GeneratedChoices", {x: x for x in pv_choices}, type=str
-            )
+        enum_class = get_supported_enum_class(pv, datatype, pv_choices)
         return CaEnumConverter(dbr.DBR_STRING, None, enum_class)
     else:
         value = list(values.values())[0]
@@ -172,23 +173,26 @@ class CaSignalBackend(SignalBackend[T]):
         self.source = f"ca://{self.read_pv}"
         self.subscription: Optional[Subscription] = None

-    async def _store_initial_value(self, pv):
+    async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
         try:
-            self.initial_values[pv] = await caget(
-
-
+            self.initial_values[pv] = await caget(
+                pv, format=FORMAT_CTRL, timeout=timeout
+            )
+        except CANothing as exc:
+            logging.debug(f"signal ca://{pv} timed out")
+            raise NotConnected(f"ca://{pv}") from exc

-    async def connect(self):
+    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
         _use_pyepics_context_if_imported()
         if self.read_pv != self.write_pv:
             # Different, need to connect both
             await wait_for_connection(
-                read_pv=self._store_initial_value(self.read_pv),
-                write_pv=self._store_initial_value(self.write_pv),
+                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
             )
         else:
             # The same, so only need to connect one
-            await self._store_initial_value(self.read_pv)
+            await self._store_initial_value(self.read_pv, timeout=timeout)
         self.converter = make_converter(self.datatype, self.initial_values)

     async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -224,6 +228,15 @@ class CaSignalBackend(SignalBackend[T]):
         value = await self._caget(FORMAT_RAW)
         return self.converter.value(value)

+    async def get_setpoint(self) -> T:
+        value = await caget(
+            self.write_pv,
+            datatype=self.converter.read_dbr,
+            format=FORMAT_RAW,
+            timeout=None,
+        )
+        return self.converter.value(value)
+
     def set_callback(self, callback: Optional[ReadingValueCallback[T]]) -> None:
         if callback:
             assert (
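Connection timeouts are now threaded through to caget, connection failures raise NotConnected instead of leaking aioca's CANothing, and a get_setpoint() read-back of the write PV is added. A sketch of exercising these from user code; the CaSignalBackend(datatype, read_pv, write_pv) constructor arguments and the PV names are assumptions, while connect(timeout=...), put() and get_setpoint() come from this diff:

import asyncio

from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
from ophyd_async.epics._backend._aioca import CaSignalBackend


async def main() -> None:
    # Assumed constructor: (datatype, read_pv, write_pv)
    backend = CaSignalBackend(float, "SIM:VALUE_RBV", "SIM:VALUE")
    try:
        # The timeout is forwarded to the underlying caget of each PV
        await backend.connect(timeout=DEFAULT_TIMEOUT)
    except NotConnected as e:
        print(f"could not connect: {e}")
        return
    await backend.put(5.0, wait=True)
    # New in this release: read back the last requested value from the write PV
    print(await backend.get_setpoint())


asyncio.run(main())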
ophyd_async/epics/_backend/_p4p.py
CHANGED
@@ -1,6 +1,7 @@
 import asyncio
 import atexit
-
+import logging
+import time
 from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, List, Optional, Sequence, Type, Union
@@ -10,7 +11,6 @@ from p4p import Value
 from p4p.client.asyncio import Context, Subscription

 from ophyd_async.core import (
-    NotConnected,
     ReadingValueCallback,
     SignalBackend,
     T,
@@ -18,6 +18,9 @@ from ophyd_async.core import (
     get_unique,
     wait_for_connection,
 )
+from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+from .common import get_supported_enum_class

 # https://mdavidsaver.github.io/p4p/values.html
 specifier_to_dtype: Dict[str, Dtype] = {
@@ -119,9 +122,7 @@ class PvaEnumConverter(PvaConverter):

     def descriptor(self, source: str, value) -> Descriptor:
         choices = [e.value for e in self.enum_class]
-        return dict(
-            source=source, dtype="string", shape=[], choices=choices
-        )  # type: ignore
+        return dict(source=source, dtype="string", shape=[], choices=choices)


 class PvaEnumBoolConverter(PvaConverter):
@@ -141,6 +142,32 @@ class PvaTableConverter(PvaConverter):
         return dict(source=source, dtype="object", shape=[])  # type: ignore


+class PvaDictConverter(PvaConverter):
+    def reading(self, value):
+        ts = time.time()
+        value = value.todict()
+        # Alarm severity is vacuously 0 for a table
+        return dict(value=value, timestamp=ts, alarm_severity=0)
+
+    def value(self, value: Value):
+        return value.todict()
+
+    def descriptor(self, source: str, value) -> Descriptor:
+        raise NotImplementedError("Describing Dict signals not currently supported")
+
+    def metadata_fields(self) -> List[str]:
+        """
+        Fields to request from PVA for metadata.
+        """
+        return []
+
+    def value_fields(self) -> List[str]:
+        """
+        Fields to request from PVA for the value.
+        """
+        return []
+
+
 class DisconnectedPvaConverter(PvaConverter):
     def __getattribute__(self, __name: str) -> Any:
         raise NotImplementedError("No PV has been set as connect() has not been called")
@@ -149,7 +176,9 @@ class DisconnectedPvaConverter(PvaConverter):
 def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
     pv = list(values)[0]
     typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
-    typ = get_unique(
+    typ = get_unique(
+        {k: type(v.get("value")) for k, v in values.items()}, "value types"
+    )
     if "NTScalarArray" in typeid and typ == list:
         # Waveform of strings, check we wanted this
         if datatype and datatype != Sequence[str]:
@@ -185,24 +214,15 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
         pv_choices = get_unique(
             {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
         )
-
-            if not issubclass(datatype, Enum):
-                raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
-            choices = tuple(v.value for v in datatype)
-            if set(choices) != set(pv_choices):
-                raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
-            enum_class = datatype
-        else:
-            enum_class = Enum(  # type: ignore
-                "GeneratedChoices", {x: x for x in pv_choices}, type=str
-            )
-        return PvaEnumConverter(enum_class)
+        return PvaEnumConverter(get_supported_enum_class(pv, datatype, pv_choices))
     elif "NTScalar" in typeid:
         if datatype and not issubclass(typ, datatype):
             raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
         return PvaConverter()
     elif "NTTable" in typeid:
         return PvaTableConverter()
+    elif "structure" in typeid:
+        return PvaDictConverter()
     else:
         raise TypeError(f"{pv}: Unsupported typeid {typeid}")

@@ -233,22 +253,25 @@ class PvaSignalBackend(SignalBackend[T]):

         return PvaSignalBackend._ctxt

-    async def _store_initial_value(self, pv):
+    async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
         try:
-            self.initial_values[pv] = await
-
-
+            self.initial_values[pv] = await asyncio.wait_for(
+                self.ctxt.get(pv), timeout=timeout
+            )
+        except asyncio.TimeoutError as exc:
+            logging.debug(f"signal pva://{pv} timed out", exc_info=True)
+            raise NotConnected(f"pva://{pv}") from exc

-    async def connect(self):
+    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
         if self.read_pv != self.write_pv:
             # Different, need to connect both
             await wait_for_connection(
-                read_pv=self._store_initial_value(self.read_pv),
-                write_pv=self._store_initial_value(self.write_pv),
+                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
             )
         else:
             # The same, so only need to connect one
-            await self._store_initial_value(self.read_pv)
+            await self._store_initial_value(self.read_pv, timeout=timeout)
         self.converter = make_converter(self.datatype, self.initial_values)

     async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -257,7 +280,15 @@ class PvaSignalBackend(SignalBackend[T]):
         else:
             write_value = self.converter.write_value(value)
         coro = self.ctxt.put(self.write_pv, dict(value=write_value), wait=wait)
-
+        try:
+            await asyncio.wait_for(coro, timeout)
+        except asyncio.TimeoutError as exc:
+            logging.debug(
+                f"signal pva://{self.write_pv} timed out \
+put value: {write_value}",
+                exc_info=True,
+            )
+            raise NotConnected(f"pva://{self.write_pv}") from exc

     async def get_descriptor(self) -> Descriptor:
         value = await self.ctxt.get(self.read_pv)
@@ -282,6 +313,10 @@ class PvaSignalBackend(SignalBackend[T]):
         value = await self.ctxt.get(self.read_pv, request=request)
         return self.converter.value(value)

+    async def get_setpoint(self) -> T:
+        value = await self.ctxt.get(self.write_pv, "field(value)")
+        return self.converter.value(value)
+
     def set_callback(self, callback: Optional[ReadingValueCallback[T]]) -> None:
         if callback:
             assert (
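Besides the same timeout handling (implemented here with asyncio.wait_for around ctxt.get and ctxt.put), the PVA backend gains a PvaDictConverter, so a PV whose typeid is a plain structure is delivered as a nested dict via Value.todict(). A sketch of the calling pattern; the PvaSignalBackend(datatype, read_pv, write_pv) constructor arguments, the get_value() method name and the PV name are assumptions:

import asyncio

from ophyd_async.core.utils import NotConnected
from ophyd_async.epics._backend._p4p import PvaSignalBackend


async def main() -> None:
    # Assumed constructor: (datatype, read_pv, write_pv)
    backend = PvaSignalBackend(None, "DEVICE:PVI", "DEVICE:PVI")
    try:
        await backend.connect(timeout=5.0)
    except NotConnected as e:
        print(f"could not connect: {e}")
        return
    # A "structure" typeid selects PvaDictConverter, whose value() returns
    # the whole structure as a dict
    structure = await backend.get_value()
    print(list(structure))


asyncio.run(main())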
ophyd_async/epics/_backend/common.py
ADDED
@@ -0,0 +1,20 @@
+from enum import Enum
+from typing import Any, Optional, Tuple, Type
+
+
+def get_supported_enum_class(
+    pv: str,
+    datatype: Optional[Type[Enum]],
+    pv_choices: Tuple[Any, ...],
+) -> Type[Enum]:
+    if datatype:
+        if not issubclass(datatype, Enum):
+            raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
+        if not issubclass(datatype, str):
+            raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
+        choices = tuple(v.value for v in datatype)
+        if set(choices).difference(pv_choices):
+            raise TypeError(f"{pv} has choices {pv_choices}: not all in {choices}")
+    return Enum(
+        "GeneratedChoices", {x or "_": x for x in pv_choices}, type=str
+    )  # type: ignore
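get_supported_enum_class centralises the enum validation that both backends previously duplicated: a supplied datatype must be a str-based Enum whose values all appear in the PV's reported choices, and the returned class is always regenerated from those choices. A small self-contained example (the PV name and choice strings are invented):

from enum import Enum

from ophyd_async.epics._backend.common import get_supported_enum_class


class Colour(str, Enum):
    RED = "Red"
    BLUE = "Blue"


# Validates Colour against the choices reported by the PV, then returns a
# str Enum generated from those choices
generated = get_supported_enum_class("SIM:COLOUR", Colour, ("Red", "Blue", "Green"))
print([e.value for e in generated])  # ['Red', 'Blue', 'Green']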
ophyd_async/epics/areadetector/__init__.py
CHANGED
@@ -1,22 +1,19 @@
-from .ad_driver import ADDriver
-from .directory_provider import DirectoryProvider, TmpDirectoryProvider
-from .hdf_streamer_det import HDFStreamerDet
-from .nd_file_hdf import NDFileHDF
-from .nd_plugin import NDPlugin, NDPluginStats
 from .single_trigger_det import SingleTriggerDet
-from .utils import
+from .utils import (
+    FileWriteMode,
+    ImageMode,
+    NDAttributeDataType,
+    NDAttributesXML,
+    ad_r,
+    ad_rw,
+)

 __all__ = [
-    "ADDriver",
-    "DirectoryProvider",
-    "TmpDirectoryProvider",
-    "HDFStreamerDet",
-    "NDFileHDF",
-    "NDPlugin",
-    "NDPluginStats",
     "SingleTriggerDet",
     "FileWriteMode",
     "ImageMode",
     "ad_r",
     "ad_rw",
+    "NDAttributeDataType",
+    "NDAttributesXML",
 ]
ophyd_async/epics/areadetector/controllers/ad_sim_controller.py
ADDED
@@ -0,0 +1,52 @@
+import asyncio
+from typing import Optional, Set
+
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+)
+
+from ..drivers.ad_base import (
+    DEFAULT_GOOD_STATES,
+    ADBase,
+    DetectorState,
+    ImageMode,
+    start_acquiring_driver_and_ensure_status,
+)
+from ..utils import stop_busy_record
+
+
+class ADSimController(DetectorControl):
+    def __init__(
+        self, driver: ADBase, good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES)
+    ) -> None:
+        self.driver = driver
+        self.good_states = good_states
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.002
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        assert (
+            trigger == DetectorTrigger.internal
+        ), "fly scanning (i.e. external triggering) is not supported for this device"
+        frame_timeout = DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value()
+        await asyncio.gather(
+            self.driver.num_images.set(num),
+            self.driver.image_mode.set(ImageMode.multiple),
+        )
+        return await start_acquiring_driver_and_ensure_status(
+            self.driver, good_states=self.good_states, timeout=frame_timeout
+        )
+
+    async def disarm(self):
+        # We can't use caput callback as we already used it in arm() and we can't have
+        # 2 or they will deadlock
+        await stop_busy_record(self.driver.acquire, False, timeout=1)
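ADSimController adapts an ADBase driver to the new DetectorControl interface: arm() pushes num_images and image_mode, then returns an AsyncStatus that completes once the driver is acquiring in one of the good states. A sketch of driving it directly; the ADBase("SIM:CAM:") construction and the no-argument driver.connect() call are assumptions, while the controller calls come from this file:

import asyncio

from ophyd_async.epics.areadetector.controllers.ad_sim_controller import ADSimController
from ophyd_async.epics.areadetector.drivers import ADBase


async def main() -> None:
    driver = ADBase("SIM:CAM:")  # assumed constructor taking a PV prefix
    await driver.connect()  # assumed Device.connect() with default arguments
    controller = ADSimController(driver)
    # Internal triggering only; the returned AsyncStatus completes once the
    # driver has started acquiring and reached a good DetectorState
    status = await controller.arm(num=10)
    await status
    await controller.disarm()


asyncio.run(main())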
ophyd_async/epics/areadetector/controllers/pilatus_controller.py
ADDED
@@ -0,0 +1,49 @@
+import asyncio
+from typing import Optional, Set
+
+from ophyd_async.core import AsyncStatus, DetectorControl, DetectorTrigger
+from ophyd_async.epics.areadetector.drivers.ad_base import (
+    DEFAULT_GOOD_STATES,
+    DetectorState,
+    start_acquiring_driver_and_ensure_status,
+)
+
+from ..drivers.pilatus_driver import PilatusDriver, TriggerMode
+from ..utils import ImageMode, stop_busy_record
+
+TRIGGER_MODE = {
+    DetectorTrigger.internal: TriggerMode.internal,
+    DetectorTrigger.constant_gate: TriggerMode.ext_enable,
+    DetectorTrigger.variable_gate: TriggerMode.ext_enable,
+}
+
+
+class PilatusController(DetectorControl):
+    def __init__(
+        self,
+        driver: PilatusDriver,
+        good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES),
+    ) -> None:
+        self.driver = driver
+        self.good_states = good_states
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.001
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        await asyncio.gather(
+            self.driver.trigger_mode.set(TRIGGER_MODE[trigger]),
+            self.driver.num_images.set(999_999 if num == 0 else num),
+            self.driver.image_mode.set(ImageMode.multiple),
+        )
+        return await start_acquiring_driver_and_ensure_status(
+            self.driver, good_states=self.good_states
+        )
+
+    async def disarm(self):
+        await stop_busy_record(self.driver.acquire, False, timeout=1)
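Unlike the sim controller, PilatusController maps the generic DetectorTrigger onto the Pilatus TriggerMode, and treats num=0 as "acquire until disarmed" by writing a 999_999 frame count. A short sketch of a hardware-triggered arm, assuming a driver that has already been constructed and connected as in the previous example:

from ophyd_async.core import DetectorTrigger
from ophyd_async.epics.areadetector.controllers.pilatus_controller import (
    PilatusController,
)
from ophyd_async.epics.areadetector.drivers import PilatusDriver


async def fly(driver: PilatusDriver) -> None:
    controller = PilatusController(driver)
    # constant_gate maps to TriggerMode.ext_enable; num=0 arms for an
    # effectively unlimited number of externally gated frames
    status = await controller.arm(num=0, trigger=DetectorTrigger.constant_gate)
    await status
    await controller.disarm()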
ophyd_async/epics/areadetector/drivers/__init__.py
ADDED
@@ -0,0 +1,15 @@
+from .ad_base import (
+    ADBase,
+    ADBaseShapeProvider,
+    DetectorState,
+    start_acquiring_driver_and_ensure_status,
+)
+from .pilatus_driver import PilatusDriver
+
+__all__ = [
+    "ADBase",
+    "ADBaseShapeProvider",
+    "PilatusDriver",
+    "start_acquiring_driver_and_ensure_status",
+    "DetectorState",
+]