ophyd-async 0.9.0a1__py3-none-any.whl → 0.9.0a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +1 -1
- ophyd_async/core/__init__.py +13 -20
- ophyd_async/core/_detector.py +61 -37
- ophyd_async/core/_device.py +102 -80
- ophyd_async/core/_device_filler.py +17 -8
- ophyd_async/core/_flyer.py +2 -2
- ophyd_async/core/_readable.py +30 -23
- ophyd_async/core/_settings.py +104 -0
- ophyd_async/core/_signal.py +55 -17
- ophyd_async/core/_signal_backend.py +4 -1
- ophyd_async/core/_soft_signal_backend.py +2 -1
- ophyd_async/core/_table.py +18 -10
- ophyd_async/core/_utils.py +5 -3
- ophyd_async/core/_yaml_settings.py +64 -0
- ophyd_async/epics/adandor/__init__.py +9 -0
- ophyd_async/epics/adandor/_andor.py +45 -0
- ophyd_async/epics/adandor/_andor_controller.py +49 -0
- ophyd_async/epics/adandor/_andor_io.py +36 -0
- ophyd_async/epics/adaravis/__init__.py +3 -1
- ophyd_async/epics/adaravis/_aravis.py +23 -37
- ophyd_async/epics/adaravis/_aravis_controller.py +13 -22
- ophyd_async/epics/adcore/__init__.py +15 -8
- ophyd_async/epics/adcore/_core_detector.py +41 -0
- ophyd_async/epics/adcore/_core_io.py +35 -10
- ophyd_async/epics/adcore/_core_logic.py +98 -86
- ophyd_async/epics/adcore/_core_writer.py +219 -0
- ophyd_async/epics/adcore/_hdf_writer.py +38 -62
- ophyd_async/epics/adcore/_jpeg_writer.py +26 -0
- ophyd_async/epics/adcore/_single_trigger.py +4 -3
- ophyd_async/epics/adcore/_tiff_writer.py +26 -0
- ophyd_async/epics/adcore/_utils.py +2 -1
- ophyd_async/epics/adkinetix/_kinetix.py +29 -24
- ophyd_async/epics/adkinetix/_kinetix_controller.py +9 -21
- ophyd_async/epics/adpilatus/__init__.py +2 -2
- ophyd_async/epics/adpilatus/_pilatus.py +27 -39
- ophyd_async/epics/adpilatus/_pilatus_controller.py +44 -22
- ophyd_async/epics/adsimdetector/__init__.py +3 -3
- ophyd_async/epics/adsimdetector/_sim.py +33 -17
- ophyd_async/epics/advimba/_vimba.py +23 -23
- ophyd_async/epics/advimba/_vimba_controller.py +10 -24
- ophyd_async/epics/core/_aioca.py +31 -14
- ophyd_async/epics/core/_p4p.py +40 -16
- ophyd_async/epics/core/_util.py +1 -1
- ophyd_async/epics/motor.py +18 -10
- ophyd_async/epics/sim/_ioc.py +29 -0
- ophyd_async/epics/{demo → sim}/_mover.py +10 -4
- ophyd_async/epics/testing/__init__.py +14 -14
- ophyd_async/epics/testing/_example_ioc.py +48 -65
- ophyd_async/epics/testing/_utils.py +17 -45
- ophyd_async/epics/testing/test_records.db +8 -0
- ophyd_async/fastcs/panda/__init__.py +0 -2
- ophyd_async/fastcs/panda/_control.py +7 -2
- ophyd_async/fastcs/panda/_hdf_panda.py +3 -1
- ophyd_async/fastcs/panda/_table.py +4 -1
- ophyd_async/plan_stubs/__init__.py +14 -0
- ophyd_async/plan_stubs/_ensure_connected.py +11 -17
- ophyd_async/plan_stubs/_fly.py +1 -1
- ophyd_async/plan_stubs/_nd_attributes.py +7 -5
- ophyd_async/plan_stubs/_panda.py +13 -0
- ophyd_async/plan_stubs/_settings.py +125 -0
- ophyd_async/plan_stubs/_wait_for_awaitable.py +13 -0
- ophyd_async/sim/__init__.py +19 -0
- ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_controller.py +9 -2
- ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_generator.py +13 -6
- ophyd_async/tango/core/_signal.py +3 -1
- ophyd_async/tango/core/_tango_transport.py +12 -14
- ophyd_async/tango/{demo → sim}/_mover.py +5 -2
- ophyd_async/testing/__init__.py +19 -0
- ophyd_async/testing/__pytest_assert_rewrite.py +4 -0
- ophyd_async/testing/_assert.py +88 -40
- ophyd_async/testing/_mock_signal_utils.py +3 -3
- ophyd_async/testing/_one_of_everything.py +126 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/METADATA +2 -2
- ophyd_async-0.9.0a2.dist-info/RECORD +129 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device_save_loader.py +0 -274
- ophyd_async/epics/adsimdetector/_sim_controller.py +0 -51
- ophyd_async/fastcs/panda/_utils.py +0 -16
- ophyd_async/sim/demo/__init__.py +0 -19
- ophyd_async/sim/testing/__init__.py +0 -0
- ophyd_async-0.9.0a1.dist-info/RECORD +0 -119
- ophyd_async-0.9.0a1.dist-info/entry_points.txt +0 -2
- /ophyd_async/epics/{demo → sim}/__init__.py +0 -0
- /ophyd_async/epics/{demo → sim}/_sensor.py +0 -0
- /ophyd_async/epics/{demo → sim}/mover.db +0 -0
- /ophyd_async/epics/{demo → sim}/sensor.db +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/__init__.py +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector.py +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_writer.py +0 -0
- /ophyd_async/sim/{demo/_sim_motor.py → _sim_motor.py} +0 -0
- /ophyd_async/tango/{demo → sim}/__init__.py +0 -0
- /ophyd_async/tango/{demo → sim}/_counter.py +0 -0
- /ophyd_async/tango/{demo → sim}/_detector.py +0 -0
- /ophyd_async/tango/{demo → sim}/_tango/__init__.py +0 -0
- /ophyd_async/tango/{demo → sim}/_tango/_servers.py +0 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/LICENSE +0 -0
- {ophyd_async-0.9.0a1.dist-info → ophyd_async-0.9.0a2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,219 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from collections.abc import AsyncGenerator, AsyncIterator
|
|
3
|
+
from pathlib import Path
|
|
4
|
+
from typing import Generic, TypeVar, get_args
|
|
5
|
+
from urllib.parse import urlunparse
|
|
6
|
+
|
|
7
|
+
from bluesky.protocols import Hints, StreamAsset
|
|
8
|
+
from event_model import (
|
|
9
|
+
ComposeStreamResource,
|
|
10
|
+
DataKey,
|
|
11
|
+
StreamRange,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
from ophyd_async.core._detector import DetectorWriter
|
|
15
|
+
from ophyd_async.core._providers import DatasetDescriber, NameProvider, PathProvider
|
|
16
|
+
from ophyd_async.core._signal import (
|
|
17
|
+
observe_value,
|
|
18
|
+
set_and_wait_for_value,
|
|
19
|
+
wait_for_value,
|
|
20
|
+
)
|
|
21
|
+
from ophyd_async.core._status import AsyncStatus
|
|
22
|
+
from ophyd_async.core._utils import DEFAULT_TIMEOUT
|
|
23
|
+
|
|
24
|
+
# from ophyd_async.epics.adcore._core_logic import ADBaseDatasetDescriber
|
|
25
|
+
from ._core_io import (
|
|
26
|
+
ADBaseDatasetDescriber,
|
|
27
|
+
Callback,
|
|
28
|
+
NDArrayBaseIO,
|
|
29
|
+
NDFileIO,
|
|
30
|
+
NDPluginBaseIO,
|
|
31
|
+
)
|
|
32
|
+
from ._utils import FileWriteMode
|
|
33
|
+
|
|
34
|
+
NDFileIOT = TypeVar("NDFileIOT", bound=NDFileIO)
|
|
35
|
+
ADWriterT = TypeVar("ADWriterT", bound="ADWriter")
|
|
36
|
+
|
|
37
|
+
|
|
38
|
+
class ADWriter(DetectorWriter, Generic[NDFileIOT]):
    """Detector writer driving an areaDetector file-writing plugin.

    Walks an ``NDFileIO``-derived plugin through the capture lifecycle:
    configure path/name/template PVs, start capturing indefinitely, report
    how many frames have landed, and emit stream-resource / stream-datum
    documents describing the files being written.
    """

    # PV suffix appended by with_io() when the caller does not supply one.
    default_suffix: str = "FILE1:"

    def __init__(
        self,
        fileio: NDFileIOT,
        path_provider: PathProvider,
        name_provider: NameProvider,
        dataset_describer: DatasetDescriber,
        file_extension: str = "",
        mimetype: str = "",
        plugins: dict[str, NDPluginBaseIO] | None = None,
    ) -> None:
        self._plugins = plugins or {}
        self.fileio = fileio
        self._path_provider = path_provider
        self._name_provider = name_provider
        self._dataset_describer = dataset_describer
        self._file_extension = file_extension
        self._mimetype = mimetype

        # Bookkeeping for collect_stream_docs(): resource doc emitted once,
        # datum docs emitted for indices beyond _last_emitted.
        self._last_emitted = 0
        self._emitted_resource = None

        # Status from kicking off capture; awaited again in close().
        self._capture_status: AsyncStatus | None = None
        self._multiplier = 1
        # AD file template: <path><name>_<6-digit file number>
        self._filename_template = "%s%s_%6.6d"

    @classmethod
    def with_io(
        cls: type[ADWriterT],
        prefix: str,
        path_provider: PathProvider,
        dataset_source: NDArrayBaseIO | None = None,
        fileio_suffix: str | None = None,
        plugins: dict[str, NDPluginBaseIO] | None = None,
    ) -> ADWriterT:
        """Alternate constructor that builds the fileio device itself.

        The concrete ``NDFileIO`` subclass is read from the writer class's
        generic parameter, and the name provider reports the name of the
        detector the fileio ends up attached to.

        Raises:
            RuntimeError: if ``cls`` was not parameterised with a fileio type.
        """
        try:
            io_cls = get_args(cls.__orig_bases__[0])[0]  # type: ignore
        except IndexError as err:
            raise RuntimeError("File IO class for writer not specified!") from err

        io = io_cls(prefix + (fileio_suffix or cls.default_suffix))

        def name_provider() -> str:
            # Until the fileio is attached to a detector, .parent holds a
            # sentinel string rather than a device.
            if io.parent == "Not attached to a detector":
                raise RuntimeError("Initializing writer without parent detector!")
            return io.parent.name

        return cls(
            io,
            path_provider,
            name_provider,
            ADBaseDatasetDescriber(dataset_source or io),
            plugins=plugins,
        )

    async def begin_capture(self) -> None:
        """Configure the file plugin PVs and start capturing forever."""
        path_info = self._path_provider(device_name=self._name_provider())

        await self.fileio.enable_callbacks.set(Callback.ENABLE)

        # Set the directory creation depth first, since dir creation callback
        # happens when directory path PV is processed.
        await self.fileio.create_directory.set(path_info.create_dir_depth)

        await asyncio.gather(
            # See https://github.com/bluesky/ophyd-async/issues/122
            self.fileio.file_path.set(str(path_info.directory_path)),
            self.fileio.file_name.set(path_info.filename),
            self.fileio.file_write_mode.set(FileWriteMode.STREAM),
            # For non-HDF file writers, use AD file templating mechanism
            # for generating multi-image datasets
            self.fileio.file_template.set(
                self._filename_template + self._file_extension
            ),
            self.fileio.auto_increment.set(True),
            self.fileio.file_number.set(0),
        )

        if not await self.fileio.file_path_exists.get_value():
            raise FileNotFoundError(
                f"File path {path_info.directory_path} for file plugin does not exist"
            )

        # Overwrite num_capture to go forever
        await self.fileio.num_capture.set(0)
        # Wait for it to start, stashing the status that tells us when it finishes
        self._capture_status = await set_and_wait_for_value(
            self.fileio.capture, True, wait_for_set_completion=False
        )

    async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
        """Begin capture and describe the externally-written dataset."""
        self._emitted_resource = None
        self._last_emitted = 0
        self._multiplier = multiplier
        shape = await self._dataset_describer.shape()
        dtype_numpy = await self._dataset_describer.np_datatype()

        await self.begin_capture()

        name = self._name_provider()
        return {
            name: DataKey(
                source=name,
                shape=list(shape),
                dtype="array",
                dtype_numpy=dtype_numpy,
                external="STREAM:",
            )  # type: ignore
        }

    async def observe_indices_written(
        self, timeout=DEFAULT_TIMEOUT
    ) -> AsyncGenerator[int, None]:
        """Wait until a specific index is ready to be collected"""
        async for num_captured in observe_value(self.fileio.num_captured, timeout):
            yield num_captured // self._multiplier

    async def get_indices_written(self) -> int:
        """Return how many complete (multiplier-sized) frames were captured."""
        return await self.fileio.num_captured.get_value() // self._multiplier

    async def collect_stream_docs(
        self, indices_written: int
    ) -> AsyncIterator[StreamAsset]:
        """Emit the stream resource (once) and any new stream datum docs."""
        if not indices_written:
            return

        if self._emitted_resource is None:
            file_path = Path(await self.fileio.file_path.get_value())
            file_name = await self.fileio.file_name.get_value()
            file_template = file_name + "_{:06d}" + self._file_extension

            frame_shape = await self._dataset_describer.shape()

            uri = urlunparse(
                (
                    "file",
                    "localhost",
                    str(file_path.absolute()) + "/",
                    "",
                    "",
                    None,
                )
            )

            bundler_composer = ComposeStreamResource()

            self._emitted_resource = bundler_composer(
                mimetype=self._mimetype,
                uri=uri,
                data_key=self._name_provider(),
                parameters={
                    # Assume that we always write 1 frame per file/chunk
                    "chunk_shape": (1, *frame_shape),
                    # Include file template for reconstruction in consolidator
                    "template": file_template,
                },
                uid=None,
                validate=True,
            )

            yield "stream_resource", self._emitted_resource.stream_resource_doc

        # Indices are relative to resource
        if indices_written > self._last_emitted:
            indices: StreamRange = {
                "start": self._last_emitted,
                "stop": indices_written,
            }
            self._last_emitted = indices_written
            yield (
                "stream_datum",
                self._emitted_resource.compose_stream_datum(indices),
            )

    async def close(self):
        """Stop capturing and wait for the in-flight capture to complete."""
        # Already done a caput callback in _capture_status, so can't do one here
        await self.fileio.capture.set(False, wait=False)
        await wait_for_value(self.fileio.capture, False, DEFAULT_TIMEOUT)
        if self._capture_status:
            # We kicked off an open, so wait for it to return
            await self._capture_status

    @property
    def hints(self) -> Hints:
        return {"fields": [self._name_provider()]}
|
|
@@ -1,5 +1,5 @@
|
|
|
1
1
|
import asyncio
|
|
2
|
-
from collections.abc import
|
|
2
|
+
from collections.abc import AsyncIterator
|
|
3
3
|
from pathlib import Path
|
|
4
4
|
from xml.etree import ElementTree as ET
|
|
5
5
|
|
|
@@ -8,82 +8,69 @@ from event_model import DataKey
|
|
|
8
8
|
|
|
9
9
|
from ophyd_async.core import (
|
|
10
10
|
DEFAULT_TIMEOUT,
|
|
11
|
-
AsyncStatus,
|
|
12
11
|
DatasetDescriber,
|
|
13
|
-
DetectorWriter,
|
|
14
12
|
HDFDataset,
|
|
15
13
|
HDFFile,
|
|
16
14
|
NameProvider,
|
|
17
15
|
PathProvider,
|
|
18
|
-
observe_value,
|
|
19
|
-
set_and_wait_for_value,
|
|
20
16
|
wait_for_value,
|
|
21
17
|
)
|
|
22
18
|
|
|
23
|
-
from ._core_io import
|
|
19
|
+
from ._core_io import NDFileHDFIO, NDPluginBaseIO
|
|
20
|
+
from ._core_writer import ADWriter
|
|
24
21
|
from ._utils import (
|
|
25
|
-
FileWriteMode,
|
|
26
22
|
convert_param_dtype_to_np,
|
|
27
23
|
convert_pv_dtype_to_np,
|
|
28
24
|
)
|
|
29
25
|
|
|
30
26
|
|
|
31
|
-
class ADHDFWriter(
|
|
27
|
+
class ADHDFWriter(ADWriter[NDFileHDFIO]):
|
|
28
|
+
default_suffix: str = "HDF1:"
|
|
29
|
+
|
|
32
30
|
def __init__(
|
|
33
31
|
self,
|
|
34
|
-
|
|
32
|
+
fileio: NDFileHDFIO,
|
|
35
33
|
path_provider: PathProvider,
|
|
36
34
|
name_provider: NameProvider,
|
|
37
35
|
dataset_describer: DatasetDescriber,
|
|
38
|
-
|
|
36
|
+
plugins: dict[str, NDPluginBaseIO] | None = None,
|
|
39
37
|
) -> None:
|
|
40
|
-
|
|
41
|
-
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
|
|
45
|
-
|
|
46
|
-
|
|
38
|
+
super().__init__(
|
|
39
|
+
fileio,
|
|
40
|
+
path_provider,
|
|
41
|
+
name_provider,
|
|
42
|
+
dataset_describer,
|
|
43
|
+
plugins=plugins,
|
|
44
|
+
file_extension=".h5",
|
|
45
|
+
mimetype="application/x-hdf5",
|
|
46
|
+
)
|
|
47
47
|
self._datasets: list[HDFDataset] = []
|
|
48
48
|
self._file: HDFFile | None = None
|
|
49
|
-
self.
|
|
49
|
+
self._include_file_number = False
|
|
50
50
|
|
|
51
51
|
async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
|
|
52
52
|
self._file = None
|
|
53
|
-
info = self._path_provider(device_name=self._name_provider())
|
|
54
53
|
|
|
55
|
-
#
|
|
56
|
-
# when directory path PV is processed.
|
|
57
|
-
await self.hdf.create_directory.set(info.create_dir_depth)
|
|
54
|
+
# Setting HDF writer specific signals
|
|
58
55
|
|
|
59
56
|
# Make sure we are using chunk auto-sizing
|
|
60
|
-
await asyncio.gather(self.
|
|
57
|
+
await asyncio.gather(self.fileio.chunk_size_auto.set(True))
|
|
61
58
|
|
|
62
59
|
await asyncio.gather(
|
|
63
|
-
self.
|
|
64
|
-
self.
|
|
65
|
-
self.
|
|
66
|
-
|
|
67
|
-
self.hdf.file_path.set(str(info.directory_path)),
|
|
68
|
-
self.hdf.file_name.set(info.filename),
|
|
69
|
-
self.hdf.file_template.set("%s/%s.h5"),
|
|
70
|
-
self.hdf.file_write_mode.set(FileWriteMode.STREAM),
|
|
71
|
-
# Never use custom xml layout file but use the one defined
|
|
72
|
-
# in the source code file NDFileHDF5LayoutXML.cpp
|
|
73
|
-
self.hdf.xml_file_name.set(""),
|
|
74
|
-
self.hdf.enable_callbacks.set(Callback.ENABLE),
|
|
60
|
+
self.fileio.num_extra_dims.set(0),
|
|
61
|
+
self.fileio.lazy_open.set(True),
|
|
62
|
+
self.fileio.swmr_mode.set(True),
|
|
63
|
+
self.fileio.xml_file_name.set(""),
|
|
75
64
|
)
|
|
76
65
|
|
|
77
|
-
|
|
78
|
-
|
|
79
|
-
|
|
66
|
+
# By default, don't add file number to filename
|
|
67
|
+
self._filename_template = "%s%s"
|
|
68
|
+
if self._include_file_number:
|
|
69
|
+
self._filename_template += "_%6.6d"
|
|
70
|
+
|
|
71
|
+
# Set common AD file plugin params, begin capturing
|
|
72
|
+
await self.begin_capture()
|
|
80
73
|
|
|
81
|
-
# Overwrite num_capture to go forever
|
|
82
|
-
await self.hdf.num_capture.set(0)
|
|
83
|
-
# Wait for it to start, stashing the status that tells us when it finishes
|
|
84
|
-
self._capture_status = await set_and_wait_for_value(
|
|
85
|
-
self.hdf.capture, True, wait_for_set_completion=False
|
|
86
|
-
)
|
|
87
74
|
name = self._name_provider()
|
|
88
75
|
detector_shape = await self._dataset_describer.shape()
|
|
89
76
|
np_dtype = await self._dataset_describer.np_datatype()
|
|
@@ -91,7 +78,7 @@ class ADHDFWriter(DetectorWriter):
|
|
|
91
78
|
outer_shape = (multiplier,) if multiplier > 1 else ()
|
|
92
79
|
|
|
93
80
|
# Determine number of frames that will be saved per HDF chunk
|
|
94
|
-
frames_per_chunk = await self.
|
|
81
|
+
frames_per_chunk = await self.fileio.num_frames_chunks.get_value()
|
|
95
82
|
|
|
96
83
|
# Add the main data
|
|
97
84
|
self._datasets = [
|
|
@@ -105,7 +92,7 @@ class ADHDFWriter(DetectorWriter):
|
|
|
105
92
|
)
|
|
106
93
|
]
|
|
107
94
|
# And all the scalar datasets
|
|
108
|
-
for plugin in self._plugins:
|
|
95
|
+
for plugin in self._plugins.values():
|
|
109
96
|
maybe_xml = await plugin.nd_attributes_file.get_value()
|
|
110
97
|
# This is the check that ADCore does to see if it is an XML string
|
|
111
98
|
# rather than a filename to parse
|
|
@@ -136,7 +123,7 @@ class ADHDFWriter(DetectorWriter):
|
|
|
136
123
|
|
|
137
124
|
describe = {
|
|
138
125
|
ds.data_key: DataKey(
|
|
139
|
-
source=self.
|
|
126
|
+
source=self.fileio.full_file_name.source,
|
|
140
127
|
shape=list(outer_shape + tuple(ds.shape)),
|
|
141
128
|
dtype="array" if ds.shape else "number",
|
|
142
129
|
dtype_numpy=ds.dtype_numpy,
|
|
@@ -146,25 +133,14 @@ class ADHDFWriter(DetectorWriter):
|
|
|
146
133
|
}
|
|
147
134
|
return describe
|
|
148
135
|
|
|
149
|
-
async def observe_indices_written(
|
|
150
|
-
self, timeout=DEFAULT_TIMEOUT
|
|
151
|
-
) -> AsyncGenerator[int, None]:
|
|
152
|
-
"""Wait until a specific index is ready to be collected"""
|
|
153
|
-
async for num_captured in observe_value(self.hdf.num_captured, timeout):
|
|
154
|
-
yield num_captured // self._multiplier
|
|
155
|
-
|
|
156
|
-
async def get_indices_written(self) -> int:
|
|
157
|
-
num_captured = await self.hdf.num_captured.get_value()
|
|
158
|
-
return num_captured // self._multiplier
|
|
159
|
-
|
|
160
136
|
async def collect_stream_docs(
|
|
161
137
|
self, indices_written: int
|
|
162
138
|
) -> AsyncIterator[StreamAsset]:
|
|
163
139
|
# TODO: fail if we get dropped frames
|
|
164
|
-
await self.
|
|
140
|
+
await self.fileio.flush_now.set(True)
|
|
165
141
|
if indices_written:
|
|
166
142
|
if not self._file:
|
|
167
|
-
path = Path(await self.
|
|
143
|
+
path = Path(await self.fileio.full_file_name.get_value())
|
|
168
144
|
self._file = HDFFile(
|
|
169
145
|
# See https://github.com/bluesky/ophyd-async/issues/122
|
|
170
146
|
path,
|
|
@@ -181,8 +157,8 @@ class ADHDFWriter(DetectorWriter):
|
|
|
181
157
|
|
|
182
158
|
async def close(self):
|
|
183
159
|
# Already done a caput callback in _capture_status, so can't do one here
|
|
184
|
-
await self.
|
|
185
|
-
await wait_for_value(self.
|
|
160
|
+
await self.fileio.capture.set(False, wait=False)
|
|
161
|
+
await wait_for_value(self.fileio.capture, False, DEFAULT_TIMEOUT)
|
|
186
162
|
if self._capture_status:
|
|
187
163
|
# We kicked off an open, so wait for it to return
|
|
188
164
|
await self._capture_status
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
|
|
2
|
+
|
|
3
|
+
from ._core_io import NDFileIO, NDPluginBaseIO
|
|
4
|
+
from ._core_writer import ADWriter
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ADJPEGWriter(ADWriter[NDFileIO]):
    """ADWriter specialised for the areaDetector JPEG file plugin."""

    default_suffix: str = "JPEG1:"

    def __init__(
        self,
        fileio: NDFileIO,
        path_provider: PathProvider,
        name_provider: NameProvider,
        dataset_describer: DatasetDescriber,
        plugins: dict[str, NDPluginBaseIO] | None = None,
    ) -> None:
        # Delegate to ADWriter, fixing the JPEG extension and mimetype.
        super().__init__(
            fileio,
            path_provider,
            name_provider,
            dataset_describer,
            file_extension=".jpg",
            mimetype="multipart/related;type=image/jpeg",
            plugins=plugins,
        )
|
|
@@ -16,11 +16,12 @@ class SingleTriggerDetector(StandardReadable, Triggerable):
|
|
|
16
16
|
drv: ADBaseIO,
|
|
17
17
|
read_uncached: Sequence[SignalR] = (),
|
|
18
18
|
name="",
|
|
19
|
-
|
|
19
|
+
plugins: dict[str, NDPluginBaseIO] | None = None,
|
|
20
20
|
) -> None:
|
|
21
21
|
self.drv = drv
|
|
22
|
-
|
|
23
|
-
|
|
22
|
+
if plugins is not None:
|
|
23
|
+
for k, v in plugins.items():
|
|
24
|
+
setattr(self, k, v)
|
|
24
25
|
|
|
25
26
|
self.add_readables(
|
|
26
27
|
[self.drv.array_counter, *read_uncached],
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
|
|
2
|
+
|
|
3
|
+
from ._core_io import NDFileIO, NDPluginBaseIO
|
|
4
|
+
from ._core_writer import ADWriter
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
class ADTIFFWriter(ADWriter[NDFileIO]):
    """ADWriter specialised for the areaDetector TIFF file plugin."""

    default_suffix: str = "TIFF1:"

    def __init__(
        self,
        fileio: NDFileIO,
        path_provider: PathProvider,
        name_provider: NameProvider,
        dataset_describer: DatasetDescriber,
        plugins: dict[str, NDPluginBaseIO] | None = None,
    ) -> None:
        # Delegate to ADWriter, fixing the TIFF extension and mimetype.
        super().__init__(
            fileio,
            path_provider,
            name_provider,
            dataset_describer,
            file_extension=".tiff",
            mimetype="multipart/related;type=image/tiff",
            plugins=plugins,
        )
|
|
@@ -6,6 +6,7 @@ from ophyd_async.core import (
|
|
|
6
6
|
SignalR,
|
|
7
7
|
SignalRW,
|
|
8
8
|
StrictEnum,
|
|
9
|
+
SubsetEnum,
|
|
9
10
|
wait_for_value,
|
|
10
11
|
)
|
|
11
12
|
|
|
@@ -84,7 +85,7 @@ class FileWriteMode(StrictEnum):
|
|
|
84
85
|
STREAM = "Stream"
|
|
85
86
|
|
|
86
87
|
|
|
87
|
-
class ImageMode(
|
|
88
|
+
class ImageMode(SubsetEnum):
|
|
88
89
|
SINGLE = "Single"
|
|
89
90
|
MULTIPLE = "Multiple"
|
|
90
91
|
CONTINUOUS = "Continuous"
|
|
@@ -1,44 +1,49 @@
|
|
|
1
|
-
from
|
|
1
|
+
from collections.abc import Sequence
|
|
2
2
|
|
|
3
|
-
from ophyd_async.core import PathProvider,
|
|
4
|
-
from ophyd_async.epics import
|
|
3
|
+
from ophyd_async.core import PathProvider, SignalR
|
|
4
|
+
from ophyd_async.epics.adcore import (
|
|
5
|
+
ADHDFWriter,
|
|
6
|
+
ADWriter,
|
|
7
|
+
AreaDetector,
|
|
8
|
+
NDPluginBaseIO,
|
|
9
|
+
)
|
|
5
10
|
|
|
6
11
|
from ._kinetix_controller import KinetixController
|
|
7
12
|
from ._kinetix_io import KinetixDriverIO
|
|
8
13
|
|
|
9
14
|
|
|
10
|
-
class KinetixDetector(
|
|
15
|
+
class KinetixDetector(AreaDetector[KinetixController]):
    """
    Ophyd-async implementation of an ADKinetix Detector.
    https://github.com/NSLS-II/ADKinetix
    """

    def __init__(
        self,
        prefix: str,
        path_provider: PathProvider,
        drv_suffix: str = "cam1:",
        writer_cls: type[ADWriter] = ADHDFWriter,
        fileio_suffix: str | None = None,
        name: str = "",
        plugins: dict[str, NDPluginBaseIO] | None = None,
        config_sigs: Sequence[SignalR] = (),
    ):
        # Camera driver IO and its controller.
        drv = KinetixDriverIO(prefix + drv_suffix)
        ctrl = KinetixController(drv)

        # File writer built from its own IO; the driver is the source of the
        # dataset shape/dtype description.
        super().__init__(
            controller=ctrl,
            writer=writer_cls.with_io(
                prefix,
                path_provider,
                dataset_source=drv,
                fileio_suffix=fileio_suffix,
                plugins=plugins,
            ),
            plugins=plugins,
            name=name,
            config_sigs=config_sigs,
        )
|
|
41
|
-
|
|
42
|
-
@property
|
|
43
|
-
def hints(self) -> Hints:
|
|
44
|
-
return self._writer.hints
|
|
@@ -1,8 +1,6 @@
|
|
|
1
1
|
import asyncio
|
|
2
2
|
|
|
3
3
|
from ophyd_async.core import (
|
|
4
|
-
AsyncStatus,
|
|
5
|
-
DetectorController,
|
|
6
4
|
DetectorTrigger,
|
|
7
5
|
TriggerInfo,
|
|
8
6
|
)
|
|
@@ -18,37 +16,27 @@ KINETIX_TRIGGER_MODE_MAP = {
|
|
|
18
16
|
}
|
|
19
17
|
|
|
20
18
|
|
|
21
|
-
class KinetixController(
|
|
19
|
+
class KinetixController(adcore.ADBaseController[KinetixDriverIO]):
    """Detector controller for the ADKinetix driver."""

    def __init__(
        self,
        driver: KinetixDriverIO,
        good_states: frozenset[adcore.DetectorState] = adcore.DEFAULT_GOOD_STATES,
    ) -> None:
        super().__init__(driver, good_states=good_states)

    def get_deadtime(self, exposure: float | None) -> float:
        # Fixed deadtime, independent of the requested exposure.
        return 0.001

    async def prepare(self, trigger_info: TriggerInfo):
        """Apply trigger mode, frame count and exposure from *trigger_info*."""
        mode = KINETIX_TRIGGER_MODE_MAP[trigger_info.trigger]
        await asyncio.gather(
            self.driver.trigger_mode.set(mode),
            self.driver.num_images.set(trigger_info.total_number_of_triggers),
            self.driver.image_mode.set(adcore.ImageMode.MULTIPLE),
        )
        # Exposure time is driven externally for gated triggering modes.
        gated = trigger_info.trigger in (
            DetectorTrigger.VARIABLE_GATE,
            DetectorTrigger.CONSTANT_GATE,
        )
        if trigger_info.livetime is not None and not gated:
            await self.driver.acquire_time.set(trigger_info.livetime)
|
|
@@ -1,5 +1,5 @@
|
|
|
1
|
-
from ._pilatus import PilatusDetector
|
|
2
|
-
from ._pilatus_controller import PilatusController
|
|
1
|
+
from ._pilatus import PilatusDetector
|
|
2
|
+
from ._pilatus_controller import PilatusController, PilatusReadoutTime
|
|
3
3
|
from ._pilatus_io import PilatusDriverIO, PilatusTriggerMode
|
|
4
4
|
|
|
5
5
|
__all__ = [
|