ophyd-async 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +91 -19
- ophyd_async/core/_providers.py +68 -0
- ophyd_async/core/async_status.py +90 -42
- ophyd_async/core/detector.py +341 -0
- ophyd_async/core/device.py +226 -0
- ophyd_async/core/device_save_loader.py +286 -0
- ophyd_async/core/flyer.py +85 -0
- ophyd_async/core/mock_signal_backend.py +82 -0
- ophyd_async/core/mock_signal_utils.py +145 -0
- ophyd_async/core/{_device/_signal/signal.py → signal.py} +249 -61
- ophyd_async/core/{_device/_backend/signal_backend.py → signal_backend.py} +12 -5
- ophyd_async/core/{_device/_backend/sim_signal_backend.py → soft_signal_backend.py} +54 -48
- ophyd_async/core/standard_readable.py +261 -0
- ophyd_async/core/utils.py +127 -30
- ophyd_async/epics/_backend/_aioca.py +62 -43
- ophyd_async/epics/_backend/_p4p.py +100 -52
- ophyd_async/epics/_backend/common.py +25 -0
- ophyd_async/epics/areadetector/__init__.py +16 -15
- ophyd_async/epics/areadetector/aravis.py +63 -0
- ophyd_async/epics/areadetector/controllers/__init__.py +5 -0
- ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +52 -0
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
- ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +61 -0
- ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
- ophyd_async/epics/areadetector/drivers/__init__.py +21 -0
- ophyd_async/epics/areadetector/drivers/ad_base.py +107 -0
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
- ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +21 -0
- ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
- ophyd_async/epics/areadetector/kinetix.py +46 -0
- ophyd_async/epics/areadetector/pilatus.py +45 -0
- ophyd_async/epics/areadetector/single_trigger_det.py +18 -10
- ophyd_async/epics/areadetector/utils.py +91 -13
- ophyd_async/epics/areadetector/vimba.py +43 -0
- ophyd_async/epics/areadetector/writers/__init__.py +5 -0
- ophyd_async/epics/areadetector/writers/_hdfdataset.py +10 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +54 -0
- ophyd_async/epics/areadetector/writers/hdf_writer.py +142 -0
- ophyd_async/epics/areadetector/writers/nd_file_hdf.py +40 -0
- ophyd_async/epics/areadetector/writers/nd_plugin.py +38 -0
- ophyd_async/epics/demo/__init__.py +78 -51
- ophyd_async/epics/demo/demo_ad_sim_detector.py +35 -0
- ophyd_async/epics/motion/motor.py +67 -52
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/__init__.py +8 -3
- ophyd_async/epics/signal/signal.py +27 -10
- ophyd_async/log.py +130 -0
- ophyd_async/panda/__init__.py +24 -7
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/_panda_controller.py +37 -0
- ophyd_async/panda/_table.py +158 -0
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/_utils.py +15 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/plan_stubs/__init__.py +13 -0
- ophyd_async/plan_stubs/ensure_connected.py +22 -0
- ophyd_async/plan_stubs/fly.py +149 -0
- ophyd_async/protocols.py +126 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +103 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +35 -67
- ophyd_async-0.3.0.dist-info/RECORD +86 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device/__init__.py +0 -0
- ophyd_async/core/_device/_backend/__init__.py +0 -0
- ophyd_async/core/_device/_signal/__init__.py +0 -0
- ophyd_async/core/_device/device.py +0 -60
- ophyd_async/core/_device/device_collector.py +0 -121
- ophyd_async/core/_device/device_vector.py +0 -14
- ophyd_async/core/_device/standard_readable.py +0 -72
- ophyd_async/epics/areadetector/ad_driver.py +0 -18
- ophyd_async/epics/areadetector/directory_provider.py +0 -18
- ophyd_async/epics/areadetector/hdf_streamer_det.py +0 -167
- ophyd_async/epics/areadetector/nd_file_hdf.py +0 -22
- ophyd_async/epics/areadetector/nd_plugin.py +0 -13
- ophyd_async/epics/signal/pvi_get.py +0 -22
- ophyd_async/panda/panda.py +0 -332
- ophyd_async-0.1.0.dist-info/RECORD +0 -45
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
ophyd_async/epics/_backend/_p4p.py

@@ -1,16 +1,16 @@
 import asyncio
 import atexit
-
+import logging
+import time
 from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, List, Optional, Sequence, Type, Union

-from bluesky.protocols import
+from bluesky.protocols import DataKey, Dtype, Reading
 from p4p import Value
 from p4p.client.asyncio import Context, Subscription

 from ophyd_async.core import (
-    NotConnected,
     ReadingValueCallback,
     SignalBackend,
     T,
@@ -18,6 +18,9 @@ from ophyd_async.core import (
     get_unique,
     wait_for_connection,
 )
+from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
+
+from .common import get_supported_values

 # https://mdavidsaver.github.io/p4p/values.html
 specifier_to_dtype: Dict[str, Dtype] = {
@@ -46,15 +49,15 @@ class PvaConverter:
     def reading(self, value):
         ts = value["timeStamp"]
         sv = value["alarm"]["severity"]
-        return
-            value
-            timestamp
-            alarm_severity
-
+        return {
+            "value": self.value(value),
+            "timestamp": ts["secondsPastEpoch"] + ts["nanoseconds"] * 1e-9,
+            "alarm_severity": -1 if sv > 2 else sv,
+        }

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         dtype = specifier_to_dtype[value.type().aspy("value")]
-        return
+        return {"source": source, "dtype": dtype, "shape": []}

     def metadata_fields(self) -> List[str]:
         """
@@ -70,8 +73,8 @@ class PvaConverter:


 class PvaArrayConverter(PvaConverter):
-    def
-        return
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value["value"])]}


 class PvaNDArrayConverter(PvaConverter):
@@ -93,9 +96,9 @@ class PvaNDArrayConverter(PvaConverter):
         dims = self._get_dimensions(value)
         return value["value"].reshape(dims)

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         dims = self._get_dimensions(value)
-        return
+        return {"source": source, "dtype": "array", "shape": dims}

     def write_value(self, value):
         # No clear use-case for writing directly to an NDArray, and some
@@ -106,7 +109,8 @@ class PvaNDArrayConverter(PvaConverter):

 @dataclass
 class PvaEnumConverter(PvaConverter):
-
+    def __init__(self, choices: dict[str, str]):
+        self.choices = tuple(choices.values())

     def write_value(self, value: Union[Enum, str]):
         if isinstance(value, Enum):
@@ -115,30 +119,58 @@ class PvaEnumConverter(PvaConverter):
         return value

     def value(self, value):
-        return
+        return self.choices[value["value"]["index"]]

-    def
-
-
-
-
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {
+            "source": source,
+            "dtype": "string",
+            "shape": [],
+            "choices": list(self.choices),
+        }


 class PvaEnumBoolConverter(PvaConverter):
     def value(self, value):
         return value["value"]["index"]

-    def
-        return
+    def get_datakey(self, source: str, value) -> DataKey:
+        return {"source": source, "dtype": "integer", "shape": []}


 class PvaTableConverter(PvaConverter):
     def value(self, value):
         return value["value"].todict()

-    def
+    def get_datakey(self, source: str, value) -> DataKey:
         # This is wrong, but defer until we know how to actually describe a table
-        return
+        return {"source": source, "dtype": "object", "shape": []}  # type: ignore
+
+
+class PvaDictConverter(PvaConverter):
+    def reading(self, value):
+        ts = time.time()
+        value = value.todict()
+        # Alarm severity is vacuously 0 for a table
+        return {"value": value, "timestamp": ts, "alarm_severity": 0}
+
+    def value(self, value: Value):
+        return value.todict()
+
+    def get_datakey(self, source: str, value) -> DataKey:
+        raise NotImplementedError("Describing Dict signals not currently supported")
+
+    def metadata_fields(self) -> List[str]:
+        """
+        Fields to request from PVA for metadata.
+        """
+        return []
+
+    def value_fields(self) -> List[str]:
+        """
+        Fields to request from PVA for the value.
+        """
+        return []


 class DisconnectedPvaConverter(PvaConverter):
@@ -149,7 +181,9 @@ class DisconnectedPvaConverter(PvaConverter):
 def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
     pv = list(values)[0]
     typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
-    typ = get_unique(
+    typ = get_unique(
+        {k: type(v.get("value")) for k, v in values.items()}, "value types"
+    )
     if "NTScalarArray" in typeid and typ == list:
         # Waveform of strings, check we wanted this
         if datatype and datatype != Sequence[str]:
@@ -185,24 +219,21 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
         pv_choices = get_unique(
             {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
         )
-
-            if not issubclass(datatype, Enum):
-                raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
-            choices = tuple(v.value for v in datatype)
-            if set(choices) != set(pv_choices):
-                raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
-            enum_class = datatype
-        else:
-            enum_class = Enum(  # type: ignore
-                "GeneratedChoices", {x: x for x in pv_choices}, type=str
-            )
-        return PvaEnumConverter(enum_class)
+        return PvaEnumConverter(get_supported_values(pv, datatype, pv_choices))
     elif "NTScalar" in typeid:
-        if
+        if (
+            datatype
+            and not issubclass(typ, datatype)
+            and not (
+                typ is float and datatype is int
+            )  # Allow float -> int since prec can be 0
+        ):
             raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
         return PvaConverter()
     elif "NTTable" in typeid:
         return PvaTableConverter()
+    elif "structure" in typeid:
+        return PvaDictConverter()
     else:
         raise TypeError(f"{pv}: Unsupported typeid {typeid}")
@@ -216,9 +247,11 @@ class PvaSignalBackend(SignalBackend[T]):
         self.write_pv = write_pv
         self.initial_values: Dict[str, Any] = {}
         self.converter: PvaConverter = DisconnectedPvaConverter()
-        self.source = f"pva://{self.read_pv}"
         self.subscription: Optional[Subscription] = None

+    def source(self, name: str):
+        return f"pva://{self.read_pv}"
+
     @property
     def ctxt(self) -> Context:
         if PvaSignalBackend._ctxt is None:
@@ -233,22 +266,25 @@ class PvaSignalBackend(SignalBackend[T]):

         return PvaSignalBackend._ctxt

-    async def _store_initial_value(self, pv):
+    async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
         try:
-            self.initial_values[pv] = await
-
-
+            self.initial_values[pv] = await asyncio.wait_for(
+                self.ctxt.get(pv), timeout=timeout
+            )
+        except asyncio.TimeoutError as exc:
+            logging.debug(f"signal pva://{pv} timed out", exc_info=True)
+            raise NotConnected(f"pva://{pv}") from exc

-    async def connect(self):
+    async def connect(self, timeout: float = DEFAULT_TIMEOUT):
         if self.read_pv != self.write_pv:
             # Different, need to connect both
             await wait_for_connection(
-                read_pv=self._store_initial_value(self.read_pv),
-                write_pv=self._store_initial_value(self.write_pv),
+                read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
+                write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
             )
         else:
             # The same, so only need to connect one
-            await self._store_initial_value(self.read_pv)
+            await self._store_initial_value(self.read_pv, timeout=timeout)
         self.converter = make_converter(self.datatype, self.initial_values)

     async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -256,12 +292,20 @@ class PvaSignalBackend(SignalBackend[T]):
             write_value = self.initial_values[self.write_pv]
         else:
             write_value = self.converter.write_value(value)
-        coro = self.ctxt.put(self.write_pv,
-
+        coro = self.ctxt.put(self.write_pv, {"value": write_value}, wait=wait)
+        try:
+            await asyncio.wait_for(coro, timeout)
+        except asyncio.TimeoutError as exc:
+            logging.debug(
+                f"signal pva://{self.write_pv} timed out \
+put value: {write_value}",
+                exc_info=True,
+            )
+            raise NotConnected(f"pva://{self.write_pv}") from exc

-    async def
+    async def get_datakey(self, source: str) -> DataKey:
         value = await self.ctxt.get(self.read_pv)
-        return self.converter.
+        return self.converter.get_datakey(source, value)

     def _pva_request_string(self, fields: List[str]) -> str:
         """
@@ -282,6 +326,10 @@ class PvaSignalBackend(SignalBackend[T]):
         value = await self.ctxt.get(self.read_pv, request=request)
         return self.converter.value(value)

+    async def get_setpoint(self) -> T:
+        value = await self.ctxt.get(self.write_pv, "field(value)")
+        return self.converter.value(value)
+
     def set_callback(self, callback: Optional[ReadingValueCallback[T]]) -> None:
         if callback:
             assert (
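The main API shifts in this file are that each converter's describe hook becomes get_datakey(source, value) returning a DataKey dict, and that connect() and put() now time out and raise NotConnected instead of hanging. A minimal sketch of the failure path, assuming the private module path and a PvaSignalBackend(datatype, read_pv, write_pv) argument order (neither is shown in this hunk):

    import asyncio

    from ophyd_async.core.utils import NotConnected
    from ophyd_async.epics._backend._p4p import PvaSignalBackend

    async def check_connection():
        # Hypothetical PV name that nothing is serving, to exercise the timeout path
        backend = PvaSignalBackend(float, "DEMO:UNSERVED", "DEMO:UNSERVED")
        try:
            await backend.connect(timeout=1.0)  # defaults to DEFAULT_TIMEOUT if omitted
        except NotConnected as exc:
            print(f"could not connect: {exc}")  # reports pva://DEMO:UNSERVED

    asyncio.run(check_connection())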
ophyd_async/epics/_backend/common.py (new file)

@@ -0,0 +1,25 @@
+from enum import Enum
+from typing import Dict, Optional, Tuple, Type
+
+
+def get_supported_values(
+    pv: str,
+    datatype: Optional[Type[str]],
+    pv_choices: Tuple[str, ...],
+) -> Dict[str, str]:
+    if not datatype:
+        return {x: x or "_" for x in pv_choices}
+
+    if not issubclass(datatype, str):
+        raise TypeError(f"{pv} is type Enum but doesn't inherit from String")
+    if issubclass(datatype, Enum):
+        choices = tuple(v.value for v in datatype)
+        if set(choices) != set(pv_choices):
+            raise TypeError(
+                (
+                    f"{pv} has choices {pv_choices}, "
+                    f"which do not match {datatype}, which has {choices}"
+                )
+            )
+        return {x: datatype(x) for x in pv_choices}
+    return {x: x for x in pv_choices}
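A short usage sketch for the helper above (the PV name and the MyStrings enum are made up for illustration; the function and its signature are exactly as added):

    from enum import Enum

    from ophyd_async.epics._backend.common import get_supported_values

    class MyStrings(str, Enum):
        A = "A"
        B = "B"

    # No datatype requested: record choices pass through, empty strings become "_"
    get_supported_values("DEMO:ENUM", None, ("A", "B", ""))   # {'A': 'A', 'B': 'B', '': '_'}

    # A str-based Enum whose values match the record's choices maps each choice to its member
    get_supported_values("DEMO:ENUM", MyStrings, ("A", "B"))  # {'A': MyStrings.A, 'B': MyStrings.B}

    # Mismatched choices, or an Enum that does not inherit from str, raise TypeError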
ophyd_async/epics/areadetector/__init__.py

@@ -1,22 +1,23 @@
-from .
-from .
-from .
-from .nd_file_hdf import NDFileHDF
-from .nd_plugin import NDPlugin, NDPluginStats
+from .aravis import AravisDetector
+from .kinetix import KinetixDetector
+from .pilatus import PilatusDetector
 from .single_trigger_det import SingleTriggerDet
-from .utils import
+from .utils import (
+    FileWriteMode,
+    ImageMode,
+    NDAttributeDataType,
+    NDAttributesXML,
+)
+from .vimba import VimbaDetector

 __all__ = [
-    "
-    "
-    "
-    "HDFStreamerDet",
-    "NDFileHDF",
-    "NDPlugin",
-    "NDPluginStats",
+    "AravisDetector",
+    "KinetixDetector",
+    "VimbaDetector",
     "SingleTriggerDet",
     "FileWriteMode",
     "ImageMode",
-    "
-    "
+    "NDAttributeDataType",
+    "NDAttributesXML",
+    "PilatusDetector",
 ]
ophyd_async/epics/areadetector/aravis.py (new file)

@@ -0,0 +1,63 @@
+from typing import get_args
+
+from bluesky.protocols import HasHints, Hints
+
+from ophyd_async.core import DirectoryProvider, StandardDetector
+from ophyd_async.epics.areadetector.controllers.aravis_controller import (
+    AravisController,
+)
+from ophyd_async.epics.areadetector.drivers import ADBaseShapeProvider
+from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver
+from ophyd_async.epics.areadetector.writers import HDFWriter, NDFileHDF
+
+
+class AravisDetector(StandardDetector, HasHints):
+    """
+    Ophyd-async implementation of an ADAravis Detector.
+    The detector may be configured for an external trigger on a GPIO port,
+    which must be done prior to preparing the detector
+    """
+
+    _controller: AravisController
+    _writer: HDFWriter
+
+    def __init__(
+        self,
+        prefix: str,
+        directory_provider: DirectoryProvider,
+        drv_suffix="cam1:",
+        hdf_suffix="HDF1:",
+        name="",
+        gpio_number: AravisController.GPIO_NUMBER = 1,
+    ):
+        self.drv = AravisDriver(prefix + drv_suffix)
+        self.hdf = NDFileHDF(prefix + hdf_suffix)
+
+        super().__init__(
+            AravisController(self.drv, gpio_number=gpio_number),
+            HDFWriter(
+                self.hdf,
+                directory_provider,
+                lambda: self.name,
+                ADBaseShapeProvider(self.drv),
+            ),
+            config_sigs=(self.drv.acquire_time,),
+            name=name,
+        )
+
+    def get_external_trigger_gpio(self):
+        return self._controller.gpio_number
+
+    def set_external_trigger_gpio(self, gpio_number: AravisController.GPIO_NUMBER):
+        supported_gpio_numbers = get_args(AravisController.GPIO_NUMBER)
+        if gpio_number not in supported_gpio_numbers:
+            raise ValueError(
+                f"{self.__class__.__name__} only supports the following GPIO "
+                f"indices: {supported_gpio_numbers} but was asked to "
+                f"use {gpio_number}"
+            )
+        self._controller.gpio_number = gpio_number
+
+    @property
+    def hints(self) -> Hints:
+        return self._writer.hints
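A construction sketch for the new detector class (the PV prefix is an assumption; StaticDirectoryProvider is the concrete DirectoryProvider added in the new ophyd_async/core/_providers.py listed above, and its exact arguments may differ):

    from ophyd_async.core import StaticDirectoryProvider
    from ophyd_async.epics.areadetector import AravisDetector

    async def make_detector():
        directory_provider = StaticDirectoryProvider("/tmp/data", "aravis")  # assumed arguments
        det = AravisDetector("BL01T-EA-DET-01:", directory_provider, name="det")
        await det.connect()
        # Route hardware triggering to GPIO line 2 before the detector is prepared
        det.set_external_trigger_gpio(2)
        return det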
ophyd_async/epics/areadetector/controllers/ad_sim_controller.py (new file)

@@ -0,0 +1,52 @@
+import asyncio
+from typing import Optional, Set
+
+from ophyd_async.core import (
+    DEFAULT_TIMEOUT,
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+)
+
+from ..drivers.ad_base import (
+    DEFAULT_GOOD_STATES,
+    ADBase,
+    DetectorState,
+    ImageMode,
+    start_acquiring_driver_and_ensure_status,
+)
+from ..utils import stop_busy_record
+
+
+class ADSimController(DetectorControl):
+    def __init__(
+        self, driver: ADBase, good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES)
+    ) -> None:
+        self.driver = driver
+        self.good_states = good_states
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.002
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        assert (
+            trigger == DetectorTrigger.internal
+        ), "fly scanning (i.e. external triggering) is not supported for this device"
+        frame_timeout = DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value()
+        await asyncio.gather(
+            self.driver.num_images.set(num),
+            self.driver.image_mode.set(ImageMode.multiple),
+        )
+        return await start_acquiring_driver_and_ensure_status(
+            self.driver, good_states=self.good_states, timeout=frame_timeout
+        )
+
+    async def disarm(self):
+        # We can't use caput callback as we already used it in arm() and we can't have
+        # 2 or they will deadlock
+        await stop_busy_record(self.driver.acquire, False, timeout=1)
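For orientation, the controller's arm/disarm cycle looks roughly like the following (the driver prefix is an assumption; only internal triggering is accepted, as the assert above enforces):

    from ophyd_async.core import DetectorTrigger
    from ophyd_async.epics.areadetector.controllers.ad_sim_controller import ADSimController
    from ophyd_async.epics.areadetector.drivers.ad_base import ADBase

    async def take_frames():
        driver = ADBase("DEMO-SIM-01:cam1:")
        await driver.connect()
        controller = ADSimController(driver)
        status = await controller.arm(num=10)  # sets num_images/image_mode, starts acquire
        await status                           # completes when the driver reports a good state
        await controller.disarm()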
ophyd_async/epics/areadetector/controllers/aravis_controller.py (new file)

@@ -0,0 +1,78 @@
+import asyncio
+from typing import Literal, Optional, Tuple
+
+from ophyd_async.core import (
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+    set_and_wait_for_value,
+)
+from ophyd_async.epics.areadetector.drivers.aravis_driver import (
+    AravisDriver,
+    AravisTriggerMode,
+    AravisTriggerSource,
+)
+from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record
+
+# The deadtime of an ADaravis controller varies depending on the exact model of camera.
+# Ideally we would maximize performance by dynamically retrieving the deadtime at
+# runtime. See https://github.com/bluesky/ophyd-async/issues/308
+_HIGHEST_POSSIBLE_DEADTIME = 1961e-6
+
+
+class AravisController(DetectorControl):
+    GPIO_NUMBER = Literal[1, 2, 3, 4]
+
+    def __init__(self, driver: AravisDriver, gpio_number: GPIO_NUMBER) -> None:
+        self._drv = driver
+        self.gpio_number = gpio_number
+
+    def get_deadtime(self, exposure: float) -> float:
+        return _HIGHEST_POSSIBLE_DEADTIME
+
+    async def arm(
+        self,
+        num: int = 0,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        if num == 0:
+            image_mode = ImageMode.continuous
+        else:
+            image_mode = ImageMode.multiple
+        if exposure is not None:
+            await self._drv.acquire_time.set(exposure)
+
+        trigger_mode, trigger_source = self._get_trigger_info(trigger)
+        # trigger mode must be set first and on it's own!
+        await self._drv.trigger_mode.set(trigger_mode)
+
+        await asyncio.gather(
+            self._drv.trigger_source.set(trigger_source),
+            self._drv.num_images.set(num),
+            self._drv.image_mode.set(image_mode),
+        )
+
+        status = await set_and_wait_for_value(self._drv.acquire, True)
+        return status
+
+    def _get_trigger_info(
+        self, trigger: DetectorTrigger
+    ) -> Tuple[AravisTriggerMode, AravisTriggerSource]:
+        supported_trigger_types = (
+            DetectorTrigger.constant_gate,
+            DetectorTrigger.edge_trigger,
+        )
+        if trigger not in supported_trigger_types:
+            raise ValueError(
+                f"{self.__class__.__name__} only supports the following trigger "
+                f"types: {supported_trigger_types} but was asked to "
+                f"use {trigger}"
+            )
+        if trigger == DetectorTrigger.internal:
+            return AravisTriggerMode.off, "Freerun"
+        else:
+            return (AravisTriggerMode.on, f"Line{self.gpio_number}")
+
+    async def disarm(self):
+        await stop_busy_record(self._drv.acquire, False, timeout=1)
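A sketch of arming this controller for hardware triggering on a chosen GPIO line (the driver prefix is an assumption; per _get_trigger_info above, edge_trigger maps to AravisTriggerMode.on with trigger source "Line3"):

    from ophyd_async.core import DetectorTrigger
    from ophyd_async.epics.areadetector.controllers.aravis_controller import AravisController
    from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver

    async def arm_on_line_3():
        driver = AravisDriver("DEMO-ARAVIS-01:cam1:")
        await driver.connect()
        controller = AravisController(driver, gpio_number=3)
        # num=0 selects ImageMode.continuous; a non-zero num selects ImageMode.multiple
        status = await controller.arm(num=0, trigger=DetectorTrigger.edge_trigger)
        await status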
ophyd_async/epics/areadetector/controllers/kinetix_controller.py (new file)

@@ -0,0 +1,49 @@
+import asyncio
+from typing import Optional
+
+from ophyd_async.core import AsyncStatus, DetectorControl, DetectorTrigger
+from ophyd_async.epics.areadetector.drivers.ad_base import (
+    start_acquiring_driver_and_ensure_status,
+)
+
+from ..drivers.kinetix_driver import KinetixDriver, KinetixTriggerMode
+from ..utils import ImageMode, stop_busy_record
+
+KINETIX_TRIGGER_MODE_MAP = {
+    DetectorTrigger.internal: KinetixTriggerMode.internal,
+    DetectorTrigger.constant_gate: KinetixTriggerMode.gate,
+    DetectorTrigger.variable_gate: KinetixTriggerMode.gate,
+    DetectorTrigger.edge_trigger: KinetixTriggerMode.edge,
+}
+
+
+class KinetixController(DetectorControl):
+    def __init__(
+        self,
+        driver: KinetixDriver,
+    ) -> None:
+        self._drv = driver
+
+    def get_deadtime(self, exposure: float) -> float:
+        return 0.001
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        await asyncio.gather(
+            self._drv.trigger_mode.set(KINETIX_TRIGGER_MODE_MAP[trigger]),
+            self._drv.num_images.set(num),
+            self._drv.image_mode.set(ImageMode.multiple),
+        )
+        if exposure is not None and trigger not in [
+            DetectorTrigger.variable_gate,
+            DetectorTrigger.constant_gate,
+        ]:
+            await self._drv.acquire_time.set(exposure)
+        return await start_acquiring_driver_and_ensure_status(self._drv)
+
+    async def disarm(self):
+        await stop_busy_record(self._drv.acquire, False, timeout=1)
ophyd_async/epics/areadetector/controllers/pilatus_controller.py (new file)

@@ -0,0 +1,61 @@
+import asyncio
+from typing import Optional
+
+from ophyd_async.core.async_status import AsyncStatus
+from ophyd_async.core.detector import DetectorControl, DetectorTrigger
+from ophyd_async.epics.areadetector.drivers.ad_base import (
+    start_acquiring_driver_and_ensure_status,
+)
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import (
+    PilatusDriver,
+    PilatusTriggerMode,
+)
+from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record
+
+
+class PilatusController(DetectorControl):
+    _supported_trigger_types = {
+        DetectorTrigger.internal: PilatusTriggerMode.internal,
+        DetectorTrigger.constant_gate: PilatusTriggerMode.ext_enable,
+        DetectorTrigger.variable_gate: PilatusTriggerMode.ext_enable,
+    }
+
+    def __init__(
+        self,
+        driver: PilatusDriver,
+    ) -> None:
+        self._drv = driver
+
+    def get_deadtime(self, exposure: float) -> float:
+        # Cite: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf
+        """The required minimum time difference between ExpPeriod and ExpTime
+        (readout time) is 2.28 ms"""
+        return 2.28e-3
+
+    async def arm(
+        self,
+        num: int,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        if exposure is not None:
+            await self._drv.acquire_time.set(exposure)
+        await asyncio.gather(
+            self._drv.trigger_mode.set(self._get_trigger_mode(trigger)),
+            self._drv.num_images.set(999_999 if num == 0 else num),
+            self._drv.image_mode.set(ImageMode.multiple),
+        )
+        return await start_acquiring_driver_and_ensure_status(self._drv)
+
+    @classmethod
+    def _get_trigger_mode(cls, trigger: DetectorTrigger) -> PilatusTriggerMode:
+        if trigger not in cls._supported_trigger_types.keys():
+            raise ValueError(
+                f"{cls.__name__} only supports the following trigger "
+                f"types: {cls._supported_trigger_types.keys()} but was asked to "
+                f"use {trigger}"
+            )
+        return cls._supported_trigger_types[trigger]
+
+    async def disarm(self):
+        await stop_busy_record(self._drv.acquire, False, timeout=1)
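As a worked example of what the fixed 2.28 ms readout implies (arithmetic only, not code from the package), an externally gated acquisition with a 10 ms exposure must leave at least exposure plus deadtime between frame starts:

    exposure = 0.010                    # requested exposure in seconds
    deadtime = 2.28e-3                  # PilatusController.get_deadtime(exposure)
    min_period = exposure + deadtime    # 0.01228 s minimum spacing between frames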