ophyd-async 0.5.2__py3-none-any.whl → 0.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +10 -1
- ophyd_async/__main__.py +12 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +15 -7
- ophyd_async/core/_detector.py +133 -87
- ophyd_async/core/_device.py +19 -16
- ophyd_async/core/_device_save_loader.py +30 -19
- ophyd_async/core/_flyer.py +6 -19
- ophyd_async/core/_hdf_dataset.py +8 -9
- ophyd_async/core/_log.py +3 -1
- ophyd_async/core/_mock_signal_backend.py +11 -9
- ophyd_async/core/_mock_signal_utils.py +8 -5
- ophyd_async/core/_protocol.py +7 -7
- ophyd_async/core/_providers.py +11 -11
- ophyd_async/core/_readable.py +30 -22
- ophyd_async/core/_signal.py +52 -51
- ophyd_async/core/_signal_backend.py +20 -7
- ophyd_async/core/_soft_signal_backend.py +62 -32
- ophyd_async/core/_status.py +7 -9
- ophyd_async/core/_table.py +146 -0
- ophyd_async/core/_utils.py +24 -28
- ophyd_async/epics/adaravis/_aravis_controller.py +20 -19
- ophyd_async/epics/adaravis/_aravis_io.py +2 -1
- ophyd_async/epics/adcore/_core_io.py +2 -0
- ophyd_async/epics/adcore/_core_logic.py +4 -5
- ophyd_async/epics/adcore/_hdf_writer.py +19 -8
- ophyd_async/epics/adcore/_single_trigger.py +1 -1
- ophyd_async/epics/adcore/_utils.py +5 -6
- ophyd_async/epics/adkinetix/_kinetix_controller.py +20 -15
- ophyd_async/epics/adpilatus/_pilatus_controller.py +22 -18
- ophyd_async/epics/adsimdetector/_sim.py +7 -6
- ophyd_async/epics/adsimdetector/_sim_controller.py +22 -17
- ophyd_async/epics/advimba/_vimba_controller.py +22 -17
- ophyd_async/epics/demo/_mover.py +4 -5
- ophyd_async/epics/demo/sensor.db +0 -1
- ophyd_async/epics/eiger/_eiger.py +1 -1
- ophyd_async/epics/eiger/_eiger_controller.py +18 -18
- ophyd_async/epics/eiger/_odin_io.py +6 -5
- ophyd_async/epics/motor.py +8 -10
- ophyd_async/epics/pvi/_pvi.py +30 -33
- ophyd_async/epics/signal/_aioca.py +55 -25
- ophyd_async/epics/signal/_common.py +3 -10
- ophyd_async/epics/signal/_epics_transport.py +11 -8
- ophyd_async/epics/signal/_p4p.py +79 -30
- ophyd_async/epics/signal/_signal.py +6 -8
- ophyd_async/fastcs/panda/__init__.py +0 -6
- ophyd_async/fastcs/panda/_block.py +7 -0
- ophyd_async/fastcs/panda/_control.py +16 -17
- ophyd_async/fastcs/panda/_hdf_panda.py +11 -4
- ophyd_async/fastcs/panda/_table.py +77 -138
- ophyd_async/fastcs/panda/_trigger.py +4 -5
- ophyd_async/fastcs/panda/_utils.py +3 -2
- ophyd_async/fastcs/panda/_writer.py +30 -15
- ophyd_async/plan_stubs/_fly.py +15 -17
- ophyd_async/plan_stubs/_nd_attributes.py +12 -6
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector.py +3 -3
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +27 -21
- ophyd_async/sim/demo/_pattern_detector/_pattern_detector_writer.py +9 -6
- ophyd_async/sim/demo/_pattern_detector/_pattern_generator.py +21 -23
- ophyd_async/sim/demo/_sim_motor.py +2 -1
- ophyd_async/tango/__init__.py +45 -0
- ophyd_async/tango/base_devices/__init__.py +4 -0
- ophyd_async/tango/base_devices/_base_device.py +225 -0
- ophyd_async/tango/base_devices/_tango_readable.py +33 -0
- ophyd_async/tango/demo/__init__.py +12 -0
- ophyd_async/tango/demo/_counter.py +37 -0
- ophyd_async/tango/demo/_detector.py +42 -0
- ophyd_async/tango/demo/_mover.py +77 -0
- ophyd_async/tango/demo/_tango/__init__.py +3 -0
- ophyd_async/tango/demo/_tango/_servers.py +108 -0
- ophyd_async/tango/signal/__init__.py +39 -0
- ophyd_async/tango/signal/_signal.py +223 -0
- ophyd_async/tango/signal/_tango_transport.py +764 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/METADATA +50 -45
- ophyd_async-0.7.0.dist-info/RECORD +108 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/WHEEL +1 -1
- ophyd_async-0.5.2.dist-info/RECORD +0 -95
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.5.2.dist-info → ophyd_async-0.7.0.dist-info}/top_level.txt +0 -0
ophyd_async/core/_table.py
ADDED
@@ -0,0 +1,146 @@
+from enum import Enum
+from typing import TypeVar, get_args, get_origin
+
+import numpy as np
+from pydantic import BaseModel, ConfigDict, model_validator
+
+TableSubclass = TypeVar("TableSubclass", bound="Table")
+
+
+def _concat(value1, value2):
+    if isinstance(value1, np.ndarray):
+        return np.concatenate((value1, value2))
+    else:
+        return value1 + value2
+
+
+class Table(BaseModel):
+    """An abstraction of a Table of str to numpy array."""
+
+    model_config = ConfigDict(validate_assignment=True, strict=False)
+
+    @staticmethod
+    def row(cls: type[TableSubclass], **kwargs) -> TableSubclass:  # type: ignore
+        arrayified_kwargs = {}
+        for field_name, field_value in cls.model_fields.items():
+            value = kwargs.pop(field_name)
+            if field_value.default_factory is None:
+                raise ValueError(
+                    "`Table` models should have default factories for their "
+                    "mutable empty columns."
+                )
+            default_array = field_value.default_factory()
+            if isinstance(default_array, np.ndarray):
+                arrayified_kwargs[field_name] = np.array(
+                    [value], dtype=default_array.dtype
+                )
+            elif issubclass(type(value), Enum) and isinstance(value, str):
+                arrayified_kwargs[field_name] = [value]
+            else:
+                raise TypeError(
+                    "Row column should be numpy arrays or sequence of string `Enum`."
+                )
+        if kwargs:
+            raise TypeError(
+                f"Unexpected keyword arguments {kwargs.keys()} for {cls.__name__}."
+            )
+        return cls(**arrayified_kwargs)
+
+    def __add__(self, right: TableSubclass) -> TableSubclass:
+        """Concatenate the arrays in field values."""
+
+        if type(right) is not type(self):
+            raise RuntimeError(
+                f"{right} is not a `Table`, or is not the same "
+                f"type of `Table` as {self}."
+            )
+
+        return type(right)(
+            **{
+                field_name: _concat(
+                    getattr(self, field_name), getattr(right, field_name)
+                )
+                for field_name in self.model_fields
+            }
+        )
+
+    def numpy_dtype(self) -> np.dtype:
+        dtype = []
+        for field_name, field_value in self.model_fields.items():
+            if np.ndarray in (
+                get_origin(field_value.annotation),
+                field_value.annotation,
+            ):
+                dtype.append((field_name, getattr(self, field_name).dtype))
+            else:
+                enum_type = get_args(field_value.annotation)[0]
+                assert issubclass(enum_type, Enum)
+                enum_values = [element.value for element in enum_type]
+                max_length_in_enum = max(len(value) for value in enum_values)
+                dtype.append((field_name, np.dtype(f"<U{max_length_in_enum}")))
+
+        return np.dtype(dtype)
+
+    def numpy_table(self):
+        # It would be nice to be able to use np.transpose for this,
+        # but it defaults to the largest dtype for everything.
+        dtype = self.numpy_dtype()
+        transposed_list = [
+            np.array(tuple(row), dtype=dtype)
+            for row in zip(*self.numpy_columns(), strict=False)
+        ]
+        transposed = np.array(transposed_list, dtype=dtype)
+        return transposed
+
+    def numpy_columns(self) -> list[np.ndarray]:
+        """Columns in the table can be lists of string enums or numpy arrays.
+
+        This method returns the columns, converting the string enums to numpy arrays.
+        """
+
+        columns = []
+        for field_name, field_value in self.model_fields.items():
+            if np.ndarray in (
+                get_origin(field_value.annotation),
+                field_value.annotation,
+            ):
+                columns.append(getattr(self, field_name))
+            else:
+                enum_type = get_args(field_value.annotation)[0]
+                assert issubclass(enum_type, Enum)
+                enum_values = [element.value for element in enum_type]
+                max_length_in_enum = max(len(value) for value in enum_values)
+                dtype = np.dtype(f"<U{max_length_in_enum}")
+
+                columns.append(
+                    np.array(
+                        [enum.value for enum in getattr(self, field_name)], dtype=dtype
+                    )
+                )
+
+        return columns
+
+    @model_validator(mode="after")
+    def validate_arrays(self) -> "Table":
+        first_length = len(next(iter(self))[1])
+        assert all(
+            len(field_value) == first_length for _, field_value in self
+        ), "Rows should all be of equal size."
+
+        if not all(
+            # Checks if the values are numpy subtypes if the array is a numpy array,
+            # or if the value is a string enum.
+            np.issubdtype(getattr(self, field_name).dtype, default_array.dtype)
+            if isinstance(
+                default_array := self.model_fields[field_name].default_factory(),  # type: ignore
+                np.ndarray,
+            )
+            else issubclass(get_args(field_value.annotation)[0], Enum)
+            for field_name, field_value in self.model_fields.items()
+        ):
+            raise ValueError(
+                f"Cannot construct a `{type(self).__name__}`, "
+                "some rows have incorrect types."
+            )
+
+        return self
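For context, a minimal sketch of how this new `Table` base class is meant to be subclassed and used. The `DemoTable` model, its columns, and the `Outcome` enum here are illustrative assumptions, not part of the diff; real subclasses such as the panda `SeqTable` live in `ophyd_async/fastcs/panda/_table.py` and use pydantic-aware numpy array annotations instead of the `arbitrary_types_allowed` shortcut below.

from enum import Enum

import numpy as np
from pydantic import ConfigDict, Field

from ophyd_async.core._table import Table


class Outcome(str, Enum):
    GOOD = "Good"
    BAD = "Bad"


class DemoTable(Table):
    # Simplification: plain np.ndarray annotations need arbitrary_types_allowed
    model_config = ConfigDict(
        validate_assignment=True, strict=False, arbitrary_types_allowed=True
    )
    # Every column needs a default factory for its mutable empty value
    position: np.ndarray = Field(default_factory=lambda: np.array([], dtype=np.int32))
    outcome: list[Outcome] = Field(default_factory=list)


# row() is a staticmethod that takes the subclass explicitly
table = DemoTable.row(DemoTable, position=1, outcome=Outcome.GOOD)
table = table + DemoTable.row(DemoTable, position=2, outcome=Outcome.BAD)
print(table.numpy_dtype())  # [('position', '<i4'), ('outcome', '<U4')]
print(table.numpy_table())  # one structured record per row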
ophyd_async/core/_utils.py
CHANGED
@@ -2,23 +2,13 @@ from __future__ import annotations
 
 import asyncio
 import logging
+from collections.abc import Awaitable, Callable, Iterable
 from dataclasses import dataclass
-from typing import (
-    Awaitable,
-    Callable,
-    Dict,
-    Generic,
-    Iterable,
-    List,
-    Optional,
-    ParamSpec,
-    Type,
-    TypeVar,
-    Union,
-)
+from typing import Generic, Literal, ParamSpec, TypeVar, get_origin
 
 import numpy as np
 from bluesky.protocols import Reading
+from pydantic import BaseModel
 
 T = TypeVar("T")
 P = ParamSpec("P")
@@ -28,18 +18,18 @@ Callback = Callable[[T], None]
 #: monitor updates
 ReadingValueCallback = Callable[[Reading, T], None]
 DEFAULT_TIMEOUT = 10.0
-ErrorText = Union[str, Dict[str, Exception]]
+ErrorText = str | dict[str, Exception]
 
 
-class CalculateTimeout:
-    """Sentinel class used to implement ``myfunc(timeout=CalculateTimeout)``
+CALCULATE_TIMEOUT = "CALCULATE_TIMEOUT"
+"""Sentinel used to implement ``myfunc(timeout=CalculateTimeout)``
 
-    This signifies that the function should calculate a suitable non-zero
-    timeout itself
-    """
+This signifies that the function should calculate a suitable non-zero
+timeout itself
+"""
 
 
-CalculatableTimeout = float | None | Type[CalculateTimeout]
+CalculatableTimeout = float | None | Literal["CALCULATE_TIMEOUT"]
 
 
 class NotConnected(Exception):
@@ -115,7 +105,7 @@ async def wait_for_connection(**coros: Awaitable[None]):
    results = await asyncio.gather(*coros.values(), return_exceptions=True)
    exceptions = {}
 
-    for name, result in zip(coros, results):
+    for name, result in zip(coros, results, strict=False):
        if isinstance(result, Exception):
            exceptions[name] = result
            if not isinstance(result, NotConnected):
@@ -129,7 +119,7 @@ async def wait_for_connection(**coros: Awaitable[None]):
        raise NotConnected(exceptions)
 
 
-def get_dtype(typ: Type) -> Optional[np.dtype]:
+def get_dtype(typ: type) -> np.dtype | None:
    """Get the runtime dtype from a numpy ndarray type annotation
 
    >>> import numpy.typing as npt
@@ -144,8 +134,8 @@ def get_dtype(typ: Type) -> Optional[np.dtype]:
    return None
 
 
-def get_unique(values: Dict[str, T], types: str) -> T:
-    """If all values are the same, return that value, otherwise raise TypeError
+def get_unique(values: dict[str, T], types: str) -> T:
+    """If all values are the same, return that value, otherwise raise TypeError
 
    >>> get_unique({"a": 1, "b": 1}, "integers")
    1
@@ -162,21 +152,21 @@ def get_unique(values: Dict[str, T], types: str) -> T:
 
 
 async def merge_gathered_dicts(
-    coros: Iterable[Awaitable[Dict[str, T]]],
-) -> Dict[str, T]:
+    coros: Iterable[Awaitable[dict[str, T]]],
+) -> dict[str, T]:
    """Merge dictionaries produced by a sequence of coroutines.
 
    Can be used for merging ``read()`` or ``describe``. For instance::
 
        combined_read = await merge_gathered_dicts(s.read() for s in signals)
    """
-    ret: Dict[str, T] = {}
+    ret: dict[str, T] = {}
    for result in await asyncio.gather(*coros):
        ret.update(result)
    return ret
 
 
-async def gather_list(coros: Iterable[Awaitable[T]]) -> List[T]:
+async def gather_list(coros: Iterable[Awaitable[T]]) -> list[T]:
    return await asyncio.gather(*coros)
 
 
@@ -195,3 +185,9 @@ def in_micros(t: float) -> int:
    if t < 0:
        raise ValueError(f"Expected a positive time in seconds, got {t!r}")
    return int(np.ceil(t * 1e6))
+
+
+def is_pydantic_model(datatype) -> bool:
+    while origin := get_origin(datatype):
+        datatype = origin
+    return datatype and issubclass(datatype, BaseModel)
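Two of the changes above merit a small illustration: the `CalculateTimeout` sentinel class becomes a plain string constant that still type-checks via `Literal`, and the new `is_pydantic_model` helper unwraps generic aliases with `get_origin()` before testing. A rough sketch of both (the `move` function is a hypothetical caller, not from the package):

from pydantic import BaseModel

from ophyd_async.core._utils import (
    CALCULATE_TIMEOUT,
    CalculatableTimeout,
    is_pydantic_model,
)


async def move(position: float, timeout: CalculatableTimeout = CALCULATE_TIMEOUT):
    if timeout == CALCULATE_TIMEOUT:
        timeout = 10.0  # e.g. derive from distance and velocity instead


class Point(BaseModel):
    x: int = 0


assert is_pydantic_model(Point)          # a plain BaseModel subclass
assert not is_pydantic_model(int)        # not a model
assert not is_pydantic_model(list[int])  # get_origin() unwraps the alias to list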
ophyd_async/epics/adaravis/_aravis_controller.py
CHANGED
@@ -1,12 +1,13 @@
 import asyncio
-from typing import Literal, Optional, Tuple
+from typing import Literal
 
 from ophyd_async.core import (
-    AsyncStatus,
-    DetectorControl,
+    DetectorController,
     DetectorTrigger,
+    TriggerInfo,
     set_and_wait_for_value,
 )
+from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 from ._aravis_io import AravisDriverIO, AravisTriggerMode, AravisTriggerSource
@@ -17,45 +18,45 @@ from ._aravis_io import AravisDriverIO, AravisTriggerMode, AravisTriggerSource
 _HIGHEST_POSSIBLE_DEADTIME = 1961e-6
 
 
-class AravisController(DetectorControl):
+class AravisController(DetectorController):
     GPIO_NUMBER = Literal[1, 2, 3, 4]
 
     def __init__(self, driver: AravisDriverIO, gpio_number: GPIO_NUMBER) -> None:
         self._drv = driver
         self.gpio_number = gpio_number
+        self._arm_status: AsyncStatus | None = None
 
-    def get_deadtime(self, exposure: float) -> float:
+    def get_deadtime(self, exposure: float | None) -> float:
         return _HIGHEST_POSSIBLE_DEADTIME
 
-    async def arm(
-        self,
-        num: int = 0,
-        trigger: DetectorTrigger = DetectorTrigger.internal,
-        exposure: Optional[float] = None,
-    ) -> AsyncStatus:
-        if num == 0:
+    async def prepare(self, trigger_info: TriggerInfo):
+        if trigger_info.total_number_of_triggers == 0:
             image_mode = adcore.ImageMode.continuous
         else:
             image_mode = adcore.ImageMode.multiple
-        if exposure is not None:
+        if (exposure := trigger_info.livetime) is not None:
             await self._drv.acquire_time.set(exposure)
 
-        trigger_mode, trigger_source = self._get_trigger_info(trigger)
+        trigger_mode, trigger_source = self._get_trigger_info(trigger_info.trigger)
         # trigger mode must be set first and on it's own!
         await self._drv.trigger_mode.set(trigger_mode)
 
         await asyncio.gather(
             self._drv.trigger_source.set(trigger_source),
-            self._drv.num_images.set(num),
+            self._drv.num_images.set(trigger_info.total_number_of_triggers),
             self._drv.image_mode.set(image_mode),
         )
 
-        status = await set_and_wait_for_value(self._drv.acquire, True)
-        return status
+    async def arm(self):
+        self._arm_status = await set_and_wait_for_value(self._drv.acquire, True)
+
+    async def wait_for_idle(self):
+        if self._arm_status:
+            await self._arm_status
 
     def _get_trigger_info(
         self, trigger: DetectorTrigger
-    ) -> Tuple[AravisTriggerMode, AravisTriggerSource]:
+    ) -> tuple[AravisTriggerMode, AravisTriggerSource]:
         supported_trigger_types = (
             DetectorTrigger.constant_gate,
             DetectorTrigger.edge_trigger,
@@ -70,7 +71,7 @@ class AravisController(DetectorControl):
         if trigger == DetectorTrigger.internal:
             return AravisTriggerMode.off, "Freerun"
         else:
-            return (AravisTriggerMode.on, f"Line{self.gpio_number}")
+            return (AravisTriggerMode.on, f"Line{self.gpio_number}")  # type: ignore
 
     async def disarm(self):
         await adcore.stop_busy_record(self._drv.acquire, False, timeout=1)
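The net effect of this hunk is that the single blocking `arm(num, trigger, exposure)` call is split into the `prepare`/`arm`/`wait_for_idle` lifecycle of the renamed `DetectorController` base class, with all trigger parameters carried by one `TriggerInfo` object. A rough sketch of how a caller would drive the new interface; the PV prefix is made up, and the `TriggerInfo` constructor field `number_of_triggers` is assumed from this release rather than shown in this diff:

import asyncio

from ophyd_async.core import DetectorTrigger, TriggerInfo
from ophyd_async.epics.adaravis._aravis_controller import AravisController
from ophyd_async.epics.adaravis._aravis_io import AravisDriverIO


async def take_frames() -> None:
    driver = AravisDriverIO("BL01T-DI-CAM-01:DRV:")  # hypothetical prefix
    await driver.connect(mock=True)  # mock so no IOC is needed for the sketch
    controller = AravisController(driver, gpio_number=1)

    # prepare() consumes a TriggerInfo instead of the old num/trigger/exposure args
    await controller.prepare(
        TriggerInfo(
            number_of_triggers=10,  # assumed field name
            trigger=DetectorTrigger.edge_trigger,
            livetime=0.01,
        )
    )
    await controller.arm()            # stores the acquire AsyncStatus internally
    await controller.wait_for_idle()  # awaits the stored status


asyncio.run(take_frames())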
ophyd_async/epics/adaravis/_aravis_io.py
CHANGED
@@ -38,6 +38,7 @@ class AravisDriverIO(adcore.ADBaseIO):
             AravisTriggerMode, prefix + "TriggerMode"
         )
         self.trigger_source = epics_signal_rw_rbv(
-            AravisTriggerSource, prefix + "TriggerSource"
+            AravisTriggerSource,  # type: ignore
+            prefix + "TriggerSource",
         )
         super().__init__(prefix, name=name)
ophyd_async/epics/adcore/_core_io.py
CHANGED
@@ -135,4 +135,6 @@ class NDFileHDFIO(NDPluginBaseIO):
         self.array_size0 = epics_signal_r(int, prefix + "ArraySize0")
         self.array_size1 = epics_signal_r(int, prefix + "ArraySize1")
         self.create_directory = epics_signal_rw(int, prefix + "CreateDirectory")
+        self.num_frames_chunks = epics_signal_r(int, prefix + "NumFramesChunks_RBV")
+        self.chunk_size_auto = epics_signal_rw_rbv(bool, prefix + "ChunkSizeAuto")
         super().__init__(prefix, name)
ophyd_async/epics/adcore/_core_logic.py
CHANGED
@@ -1,11 +1,10 @@
 import asyncio
-from typing import FrozenSet, Set
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
     DatasetDescriber,
-    DetectorControl,
+    DetectorController,
     set_and_wait_for_value,
 )
 from ophyd_async.epics.adcore._utils import convert_ad_dtype_to_np
@@ -14,7 +13,7 @@ from ._core_io import ADBaseIO, DetectorState
 
 # Default set of states that we should consider "good" i.e. the acquisition
 # is complete and went well
-DEFAULT_GOOD_STATES: FrozenSet[DetectorState] = frozenset(
+DEFAULT_GOOD_STATES: frozenset[DetectorState] = frozenset(
     [DetectorState.Idle, DetectorState.Aborted]
 )
 
@@ -35,7 +34,7 @@ class ADBaseDatasetDescriber(DatasetDescriber):
 
 
 async def set_exposure_time_and_acquire_period_if_supplied(
-    controller: DetectorControl,
+    controller: DetectorController,
     driver: ADBaseIO,
     exposure: float | None = None,
     timeout: float = DEFAULT_TIMEOUT,
@@ -66,7 +65,7 @@ async def set_exposure_time_and_acquire_period_if_supplied(
 
 async def start_acquiring_driver_and_ensure_status(
     driver: ADBaseIO,
-    good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES),
+    good_states: frozenset[DetectorState] = frozenset(DEFAULT_GOOD_STATES),
     timeout: float = DEFAULT_TIMEOUT,
 ) -> AsyncStatus:
     """
ophyd_async/epics/adcore/_hdf_writer.py
CHANGED
@@ -1,9 +1,10 @@
 import asyncio
+from collections.abc import AsyncGenerator, AsyncIterator
 from pathlib import Path
-from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional
 from xml.etree import ElementTree as ET
 
-from bluesky.protocols import DataKey, Hints, StreamAsset
+from bluesky.protocols import Hints, StreamAsset
+from event_model import DataKey
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
@@ -42,19 +43,22 @@ class ADHDFWriter(DetectorWriter):
         self._dataset_describer = dataset_describer
 
         self._plugins = plugins
-        self._capture_status: Optional[AsyncStatus] = None
-        self._datasets: List[HDFDataset] = []
-        self._file: Optional[HDFFile] = None
+        self._capture_status: AsyncStatus | None = None
+        self._datasets: list[HDFDataset] = []
+        self._file: HDFFile | None = None
         self._multiplier = 1
 
-    async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
+    async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
         self._file = None
-        info = self._path_provider(device_name=self.hdf.name)
+        info = self._path_provider(device_name=self._name_provider())
 
         # Set the directory creation depth first, since dir creation callback happens
         # when directory path PV is processed.
         await self.hdf.create_directory.set(info.create_dir_depth)
 
+        # Make sure we are using chunk auto-sizing
+        await asyncio.gather(self.hdf.chunk_size_auto.set(True))
+
         await asyncio.gather(
             self.hdf.num_extra_dims.set(0),
             self.hdf.lazy_open.set(True),
@@ -83,6 +87,9 @@ class ADHDFWriter(DetectorWriter):
         self._multiplier = multiplier
         outer_shape = (multiplier,) if multiplier > 1 else ()
 
+        # Determine number of frames that will be saved per HDF chunk
+        frames_per_chunk = await self.hdf.num_frames_chunks.get_value()
+
         # Add the main data
         self._datasets = [
             HDFDataset(
@@ -91,6 +98,7 @@ class ADHDFWriter(DetectorWriter):
                 shape=detector_shape,
                 dtype_numpy=np_dtype,
                 multiplier=multiplier,
+                chunk_shape=(frames_per_chunk, *detector_shape),
             )
         ]
         # And all the scalar datasets
@@ -117,6 +125,9 @@ class ADHDFWriter(DetectorWriter):
                     (),
                     np_datatype,
                     multiplier,
+                    # NDAttributes appear to always be configured with
+                    # this chunk size
+                    chunk_shape=(16384,),
                 )
             )
 
@@ -125,7 +136,7 @@ class ADHDFWriter(DetectorWriter):
                 source=self.hdf.full_file_name.source,
                 shape=outer_shape + tuple(ds.shape),
                 dtype="array" if ds.shape else "number",
-                dtype_numpy=ds.dtype_numpy,
+                dtype_numpy=ds.dtype_numpy,  # type: ignore
                 external="STREAM:",
             )
             for ds in self._datasets
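In short, the writer now asks the HDF plugin to auto-size its chunks, reads back how many frames fit in one chunk, and records that as `chunk_shape` on each dataset so downstream consumers can read whole chunks. The essence of the handshake, isolated into a sketch (assuming an `NDFileHDFIO` instance with the two signals added in `_core_io.py` above, and a `(height, width)` detector shape):

from ophyd_async.epics.adcore._core_io import NDFileHDFIO


async def chunk_shapes(hdf: NDFileHDFIO, detector_shape: tuple[int, ...]):
    # Let areaDetector pick the chunking, then read back frames per chunk
    await hdf.chunk_size_auto.set(True)
    frames_per_chunk = await hdf.num_frames_chunks.get_value()
    # Main dataset is chunked in whole frames; NDAttribute scalar datasets
    # appear to be fixed at 16384 elements per chunk
    return (frames_per_chunk, *detector_shape), (16384,)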
ophyd_async/epics/adcore/_utils.py
CHANGED
@@ -1,6 +1,5 @@
 from dataclasses import dataclass
 from enum import Enum
-from typing import Optional
 
 from ophyd_async.core import DEFAULT_TIMEOUT, SignalRW, T, wait_for_value
 from ophyd_async.core._signal import SignalR
@@ -51,8 +50,8 @@ def convert_pv_dtype_to_np(datatype: str) -> str:
     else:
         try:
             np_datatype = convert_ad_dtype_to_np(_pvattribute_to_ad_datatype[datatype])
-        except KeyError:
-            raise ValueError(f"Invalid dbr type {datatype}")
+        except KeyError as e:
+            raise ValueError(f"Invalid dbr type {datatype}") from e
     return np_datatype
 
 
@@ -69,8 +68,8 @@ def convert_param_dtype_to_np(datatype: str) -> str:
             np_datatype = convert_ad_dtype_to_np(
                 _paramattribute_to_ad_datatype[datatype]
             )
-        except KeyError:
-            raise ValueError(f"Invalid datatype {datatype}")
+        except KeyError as e:
+            raise ValueError(f"Invalid datatype {datatype}") from e
     return np_datatype
 
 
@@ -126,7 +125,7 @@ async def stop_busy_record(
     signal: SignalRW[T],
     value: T,
     timeout: float = DEFAULT_TIMEOUT,
-    status_timeout: Optional[float] = None,
+    status_timeout: float | None = None,
 ) -> None:
     await signal.set(value, wait=False, timeout=status_timeout)
     await wait_for_value(signal, value, timeout=timeout)
ophyd_async/epics/adkinetix/_kinetix_controller.py
CHANGED
@@ -1,7 +1,8 @@
 import asyncio
-from typing import Optional
 
-from ophyd_async.core import AsyncStatus, DetectorControl, DetectorTrigger
+from ophyd_async.core import DetectorController, DetectorTrigger
+from ophyd_async.core._detector import TriggerInfo
+from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 from ._kinetix_io import KinetixDriverIO, KinetixTriggerMode
@@ -14,33 +15,37 @@ KINETIX_TRIGGER_MODE_MAP = {
 }
 
 
-class KinetixController(DetectorControl):
+class KinetixController(DetectorController):
     def __init__(
         self,
         driver: KinetixDriverIO,
     ) -> None:
         self._drv = driver
+        self._arm_status: AsyncStatus | None = None
 
-    def get_deadtime(self, exposure: float) -> float:
+    def get_deadtime(self, exposure: float | None) -> float:
         return 0.001
 
-    async def arm(
-        self,
-        num: int,
-        trigger: DetectorTrigger = DetectorTrigger.internal,
-        exposure: Optional[float] = None,
-    ) -> AsyncStatus:
+    async def prepare(self, trigger_info: TriggerInfo):
        await asyncio.gather(
-            self._drv.trigger_mode.set(KINETIX_TRIGGER_MODE_MAP[trigger]),
-            self._drv.num_images.set(num),
+            self._drv.trigger_mode.set(KINETIX_TRIGGER_MODE_MAP[trigger_info.trigger]),
+            self._drv.num_images.set(trigger_info.total_number_of_triggers),
             self._drv.image_mode.set(adcore.ImageMode.multiple),
         )
-        if exposure is not None and trigger not in [
+        if trigger_info.livetime is not None and trigger_info.trigger not in [
             DetectorTrigger.variable_gate,
             DetectorTrigger.constant_gate,
         ]:
-            await self._drv.acquire_time.set(exposure)
-        return await adcore.start_acquiring_driver_and_ensure_status(self._drv)
+            await self._drv.acquire_time.set(trigger_info.livetime)
+
+    async def arm(self):
+        self._arm_status = await adcore.start_acquiring_driver_and_ensure_status(
+            self._drv
+        )
+
+    async def wait_for_idle(self):
+        if self._arm_status:
+            await self._arm_status
 
     async def disarm(self):
         await adcore.stop_busy_record(self._drv.acquire, False, timeout=1)
ophyd_async/epics/adpilatus/_pilatus_controller.py
CHANGED
@@ -1,19 +1,19 @@
 import asyncio
-from typing import Optional
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
-    AsyncStatus,
-    DetectorControl,
+    DetectorController,
     DetectorTrigger,
     wait_for_value,
 )
+from ophyd_async.core._detector import TriggerInfo
+from ophyd_async.core._status import AsyncStatus
 from ophyd_async.epics import adcore
 
 from ._pilatus_io import PilatusDriverIO, PilatusTriggerMode
 
 
-class PilatusController(DetectorControl):
+class PilatusController(DetectorController):
     _supported_trigger_types = {
         DetectorTrigger.internal: PilatusTriggerMode.internal,
         DetectorTrigger.constant_gate: PilatusTriggerMode.ext_enable,
@@ -27,29 +27,31 @@ class PilatusController(DetectorControl):
     ) -> None:
         self._drv = driver
         self._readout_time = readout_time
+        self._arm_status: AsyncStatus | None = None
 
-    def get_deadtime(self, exposure: float) -> float:
+    def get_deadtime(self, exposure: float | None) -> float:
         return self._readout_time
 
-    async def arm(
-        self,
-        num: int,
-        trigger: DetectorTrigger = DetectorTrigger.internal,
-        exposure: Optional[float] = None,
-    ) -> AsyncStatus:
-        if exposure is not None:
+    async def prepare(self, trigger_info: TriggerInfo):
+        if trigger_info.livetime is not None:
             await adcore.set_exposure_time_and_acquire_period_if_supplied(
-                self, self._drv, exposure
+                self, self._drv, trigger_info.livetime
             )
         await asyncio.gather(
-            self._drv.trigger_mode.set(self._get_trigger_mode(trigger)),
-            self._drv.num_images.set(999_999 if num == 0 else num),
+            self._drv.trigger_mode.set(self._get_trigger_mode(trigger_info.trigger)),
+            self._drv.num_images.set(
+                999_999
+                if trigger_info.total_number_of_triggers == 0
+                else trigger_info.total_number_of_triggers
+            ),
             self._drv.image_mode.set(adcore.ImageMode.multiple),
         )
 
+    async def arm(self):
         # Standard arm the detector and wait for the acquire PV to be True
-        idle_status = await adcore.start_acquiring_driver_and_ensure_status(self._drv)
-
+        self._arm_status = await adcore.start_acquiring_driver_and_ensure_status(
+            self._drv
+        )
         # The pilatus has an additional PV that goes True when the camserver
         # is actually ready. Should wait for that too or we risk dropping
         # a frame
@@ -59,7 +61,9 @@ class PilatusController(DetectorControl):
             timeout=DEFAULT_TIMEOUT,
         )
 
-        return idle_status
+    async def wait_for_idle(self):
+        if self._arm_status:
+            await self._arm_status
 
     @classmethod
     def _get_trigger_mode(cls, trigger: DetectorTrigger) -> PilatusTriggerMode: