ophyd-async 0.8.0a6__py3-none-any.whl → 0.9.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +15 -46
- ophyd_async/core/_detector.py +68 -44
- ophyd_async/core/_device.py +120 -79
- ophyd_async/core/_device_filler.py +17 -8
- ophyd_async/core/_flyer.py +2 -2
- ophyd_async/core/_protocol.py +0 -28
- ophyd_async/core/_readable.py +30 -23
- ophyd_async/core/_settings.py +104 -0
- ophyd_async/core/_signal.py +91 -151
- ophyd_async/core/_signal_backend.py +4 -1
- ophyd_async/core/_soft_signal_backend.py +2 -1
- ophyd_async/core/_table.py +18 -10
- ophyd_async/core/_utils.py +30 -5
- ophyd_async/core/_yaml_settings.py +64 -0
- ophyd_async/epics/adandor/__init__.py +9 -0
- ophyd_async/epics/adandor/_andor.py +45 -0
- ophyd_async/epics/adandor/_andor_controller.py +49 -0
- ophyd_async/epics/adandor/_andor_io.py +36 -0
- ophyd_async/epics/adaravis/__init__.py +3 -1
- ophyd_async/epics/adaravis/_aravis.py +23 -37
- ophyd_async/epics/adaravis/_aravis_controller.py +21 -30
- ophyd_async/epics/adaravis/_aravis_io.py +4 -4
- ophyd_async/epics/adcore/__init__.py +15 -8
- ophyd_async/epics/adcore/_core_detector.py +41 -0
- ophyd_async/epics/adcore/_core_io.py +56 -31
- ophyd_async/epics/adcore/_core_logic.py +99 -86
- ophyd_async/epics/adcore/_core_writer.py +219 -0
- ophyd_async/epics/adcore/_hdf_writer.py +33 -59
- ophyd_async/epics/adcore/_jpeg_writer.py +26 -0
- ophyd_async/epics/adcore/_single_trigger.py +5 -4
- ophyd_async/epics/adcore/_tiff_writer.py +26 -0
- ophyd_async/epics/adcore/_utils.py +37 -36
- ophyd_async/epics/adkinetix/_kinetix.py +29 -24
- ophyd_async/epics/adkinetix/_kinetix_controller.py +15 -27
- ophyd_async/epics/adkinetix/_kinetix_io.py +7 -7
- ophyd_async/epics/adpilatus/__init__.py +2 -2
- ophyd_async/epics/adpilatus/_pilatus.py +28 -40
- ophyd_async/epics/adpilatus/_pilatus_controller.py +47 -25
- ophyd_async/epics/adpilatus/_pilatus_io.py +5 -5
- ophyd_async/epics/adsimdetector/__init__.py +3 -3
- ophyd_async/epics/adsimdetector/_sim.py +33 -17
- ophyd_async/epics/advimba/_vimba.py +23 -23
- ophyd_async/epics/advimba/_vimba_controller.py +21 -35
- ophyd_async/epics/advimba/_vimba_io.py +23 -23
- ophyd_async/epics/core/_aioca.py +52 -21
- ophyd_async/epics/core/_p4p.py +59 -16
- ophyd_async/epics/core/_pvi_connector.py +4 -2
- ophyd_async/epics/core/_signal.py +9 -2
- ophyd_async/epics/core/_util.py +10 -1
- ophyd_async/epics/eiger/_eiger_controller.py +4 -4
- ophyd_async/epics/eiger/_eiger_io.py +3 -3
- ophyd_async/epics/motor.py +26 -15
- ophyd_async/epics/sim/_ioc.py +29 -0
- ophyd_async/epics/{demo → sim}/_mover.py +12 -6
- ophyd_async/epics/{demo → sim}/_sensor.py +2 -2
- ophyd_async/epics/testing/__init__.py +14 -14
- ophyd_async/epics/testing/_example_ioc.py +53 -67
- ophyd_async/epics/testing/_utils.py +17 -45
- ophyd_async/epics/testing/test_records.db +22 -0
- ophyd_async/fastcs/core.py +2 -2
- ophyd_async/fastcs/panda/__init__.py +0 -2
- ophyd_async/fastcs/panda/_block.py +9 -9
- ophyd_async/fastcs/panda/_control.py +9 -4
- ophyd_async/fastcs/panda/_hdf_panda.py +7 -2
- ophyd_async/fastcs/panda/_table.py +4 -1
- ophyd_async/fastcs/panda/_trigger.py +7 -7
- ophyd_async/plan_stubs/__init__.py +14 -0
- ophyd_async/plan_stubs/_ensure_connected.py +11 -17
- ophyd_async/plan_stubs/_fly.py +2 -2
- ophyd_async/plan_stubs/_nd_attributes.py +7 -5
- ophyd_async/plan_stubs/_panda.py +13 -0
- ophyd_async/plan_stubs/_settings.py +125 -0
- ophyd_async/plan_stubs/_wait_for_awaitable.py +13 -0
- ophyd_async/sim/__init__.py +19 -0
- ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_controller.py +9 -2
- ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_generator.py +13 -6
- ophyd_async/sim/{demo/_sim_motor.py → _sim_motor.py} +34 -32
- ophyd_async/tango/core/_signal.py +3 -1
- ophyd_async/tango/core/_tango_transport.py +13 -15
- ophyd_async/tango/{demo → sim}/_mover.py +5 -2
- ophyd_async/testing/__init__.py +52 -0
- ophyd_async/testing/__pytest_assert_rewrite.py +4 -0
- ophyd_async/testing/_assert.py +176 -0
- ophyd_async/{core → testing}/_mock_signal_utils.py +15 -11
- ophyd_async/testing/_one_of_everything.py +126 -0
- ophyd_async/testing/_wait_for_pending.py +22 -0
- {ophyd_async-0.8.0a6.dist-info → ophyd_async-0.9.0.dist-info}/METADATA +4 -2
- ophyd_async-0.9.0.dist-info/RECORD +129 -0
- {ophyd_async-0.8.0a6.dist-info → ophyd_async-0.9.0.dist-info}/WHEEL +1 -1
- ophyd_async/core/_device_save_loader.py +0 -274
- ophyd_async/epics/adsimdetector/_sim_controller.py +0 -51
- ophyd_async/fastcs/panda/_utils.py +0 -16
- ophyd_async/sim/demo/__init__.py +0 -19
- ophyd_async/sim/testing/__init__.py +0 -0
- ophyd_async-0.8.0a6.dist-info/RECORD +0 -116
- ophyd_async-0.8.0a6.dist-info/entry_points.txt +0 -2
- /ophyd_async/epics/{demo → sim}/__init__.py +0 -0
- /ophyd_async/epics/{demo → sim}/mover.db +0 -0
- /ophyd_async/epics/{demo → sim}/sensor.db +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/__init__.py +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector.py +0 -0
- /ophyd_async/sim/{demo/_pattern_detector → _pattern_detector}/_pattern_detector_writer.py +0 -0
- /ophyd_async/tango/{demo → sim}/__init__.py +0 -0
- /ophyd_async/tango/{demo → sim}/_counter.py +0 -0
- /ophyd_async/tango/{demo → sim}/_detector.py +0 -0
- /ophyd_async/tango/{demo → sim}/_tango/__init__.py +0 -0
- /ophyd_async/tango/{demo → sim}/_tango/_servers.py +0 -0
- {ophyd_async-0.8.0a6.dist-info → ophyd_async-0.9.0.dist-info}/LICENSE +0 -0
- {ophyd_async-0.8.0a6.dist-info → ophyd_async-0.9.0.dist-info}/top_level.txt +0 -0

ophyd_async/epics/adcore/_core_logic.py
@@ -1,108 +1,121 @@
 import asyncio
+from typing import Generic, TypeVar
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
     AsyncStatus,
-    DatasetDescriber,
     DetectorController,
+    DetectorTrigger,
+    TriggerInfo,
     set_and_wait_for_value,
 )
-from ophyd_async.epics.adcore._utils import convert_ad_dtype_to_np
 
 from ._core_io import ADBaseIO, DetectorState
+from ._utils import ImageMode, stop_busy_record
 
 # Default set of states that we should consider "good" i.e. the acquisition
 # is complete and went well
 DEFAULT_GOOD_STATES: frozenset[DetectorState] = frozenset(
-    [DetectorState.Idle, DetectorState.Aborted]
+    [DetectorState.IDLE, DetectorState.ABORTED]
 )
 
- [old lines 20-31 removed; content not captured in the source rendering]
+ADBaseIOT = TypeVar("ADBaseIOT", bound=ADBaseIO)
+ADBaseControllerT = TypeVar("ADBaseControllerT", bound="ADBaseController")
+
+
+class ADBaseController(DetectorController, Generic[ADBaseIOT]):
+    def __init__(
+        self,
+        driver: ADBaseIOT,
+        good_states: frozenset[DetectorState] = DEFAULT_GOOD_STATES,
+    ) -> None:
+        self.driver = driver
+        self.good_states = good_states
+        self.frame_timeout = DEFAULT_TIMEOUT
+        self._arm_status: AsyncStatus | None = None
+
+    async def prepare(self, trigger_info: TriggerInfo) -> None:
+        if trigger_info.trigger != DetectorTrigger.INTERNAL:
+            msg = (
+                "fly scanning (i.e. external triggering) is not supported for this "
+                "device"
+            )
+            raise TypeError(msg)
+        self.frame_timeout = (
+            DEFAULT_TIMEOUT + await self.driver.acquire_time.get_value()
         )
-        return shape
-
-
-async def set_exposure_time_and_acquire_period_if_supplied(
-    controller: DetectorController,
-    driver: ADBaseIO,
-    exposure: float | None = None,
-    timeout: float = DEFAULT_TIMEOUT,
-) -> None:
-    """
-    Sets the exposure time if it is not None and the acquire period to the
-    exposure time plus the deadtime. This is expected behavior for most
-    AreaDetectors, but some may require more specialized handling.
-
-    Parameters
-    ----------
-    controller:
-        Controller that can supply a deadtime.
-    driver:
-        The driver to start acquiring. Must subclass ADBaseIO.
-    exposure:
-        Desired exposure time, this is a noop if it is None.
-    timeout:
-        How long to wait for the exposure time and acquire period to be set.
-    """
-    if exposure is not None:
-        full_frame_time = exposure + controller.get_deadtime(exposure)
         await asyncio.gather(
-            driver.acquire_time.set(exposure, timeout=timeout),
-            driver.acquire_period.set(full_frame_time, timeout=timeout),
+            self.driver.num_images.set(trigger_info.total_number_of_triggers),
+            self.driver.image_mode.set(ImageMode.MULTIPLE),
         )
 
- [old lines 65-98 removed; content not captured in the source rendering]
-        """NOTE: possible race condition here between the callback from
-        set_and_wait_for_value and the detector state updating."""
-        await status
-        state = await driver.detector_state.get_value()
-        if state not in good_states:
-            raise ValueError(
-                f"Final detector state {state} not in valid end states: {good_states}"
+    async def arm(self):
+        self._arm_status = await self.start_acquiring_driver_and_ensure_status()
+
+    async def wait_for_idle(self):
+        if self._arm_status:
+            await self._arm_status
+
+    async def disarm(self):
+        # We can't use caput callback as we already used it in arm() and we can't have
+        # 2 or they will deadlock
+        await stop_busy_record(self.driver.acquire, False, timeout=1)
+
+    async def set_exposure_time_and_acquire_period_if_supplied(
+        self,
+        exposure: float | None = None,
+        timeout: float = DEFAULT_TIMEOUT,
+    ) -> None:
+        """
+        Sets the exposure time if it is not None and the acquire period to the
+        exposure time plus the deadtime. This is expected behavior for most
+        AreaDetectors, but some may require more specialized handling.
+
+        Parameters
+        ----------
+        exposure:
+            Desired exposure time, this is a noop if it is None.
+        timeout:
+            How long to wait for the exposure time and acquire period to be set.
+        """
+        if exposure is not None:
+            full_frame_time = exposure + self.get_deadtime(exposure)
+            await asyncio.gather(
+                self.driver.acquire_time.set(exposure, timeout=timeout),
+                self.driver.acquire_period.set(full_frame_time, timeout=timeout),
             )
 
-
+    async def start_acquiring_driver_and_ensure_status(self) -> AsyncStatus:
+        """
+        Start acquiring driver, raising ValueError if the detector is in a bad state.
+
+        This sets driver.acquire to True, and waits for it to be True up to a timeout.
+        Then, it checks that the DetectorState PV is in DEFAULT_GOOD_STATES,
+        and otherwise raises a ValueError.
+
+        Returns
+        -------
+        AsyncStatus:
+            An AsyncStatus that can be awaited to set driver.acquire to True and perform
+            subsequent raising (if applicable) due to detector state.
+        """
+
+        status = await set_and_wait_for_value(
+            self.driver.acquire,
+            True,
+            timeout=DEFAULT_TIMEOUT,
+            wait_for_set_completion=False,
+        )
+
+        async def complete_acquisition() -> None:
+            """NOTE: possible race condition here between the callback from
+            set_and_wait_for_value and the detector state updating."""
+            await status
+            state = await self.driver.detector_state.get_value()
+            if state not in self.good_states:
+                raise ValueError(
+                    f"Final detector state {state.value} not "
+                    "in valid end states: {self.good_states}"
+                )
+
+        return AsyncStatus(complete_acquisition())
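
The module-level helpers from 0.8 are folded into the `ADBaseController` class, which owns the driver and its good-state check. Below is a minimal sketch of the new lifecycle, run against mock signals so no IOC is needed. The PV prefix is made up, and it assumes `ADBaseController`, `ADBaseIO`, and `DetectorState` are re-exported from `ophyd_async.epics.adcore` (whose `__init__.py` is reworked in this release), that `init_devices` supports `mock=True`, and that `TriggerInfo` takes a `number_of_triggers` field:

import asyncio

from ophyd_async.core import DetectorTrigger, TriggerInfo, init_devices
from ophyd_async.epics.adcore import ADBaseController, ADBaseIO, DetectorState
from ophyd_async.testing import set_mock_value  # moved from core to testing in 0.9.0


async def main() -> None:
    # Mock-mode connect: no IOC required; "SIM:DRV:" is a hypothetical prefix
    async with init_devices(mock=True):
        drv = ADBaseIO("SIM:DRV:")

    controller = ADBaseController(drv)
    # prepare() accepts internal triggering only; anything else raises TypeError
    await controller.prepare(
        TriggerInfo(number_of_triggers=5, trigger=DetectorTrigger.INTERNAL)
    )
    await controller.arm()
    # Pretend the driver finished cleanly so the good-state check passes
    set_mock_value(drv.detector_state, DetectorState.IDLE)
    await controller.wait_for_idle()
    await controller.disarm()


asyncio.run(main())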

ophyd_async/epics/adcore/_core_writer.py (new file)
@@ -0,0 +1,219 @@
+import asyncio
+from collections.abc import AsyncGenerator, AsyncIterator
+from pathlib import Path
+from typing import Generic, TypeVar, get_args
+from urllib.parse import urlunparse
+
+from bluesky.protocols import Hints, StreamAsset
+from event_model import (
+    ComposeStreamResource,
+    DataKey,
+    StreamRange,
+)
+
+from ophyd_async.core._detector import DetectorWriter
+from ophyd_async.core._providers import DatasetDescriber, NameProvider, PathProvider
+from ophyd_async.core._signal import (
+    observe_value,
+    set_and_wait_for_value,
+    wait_for_value,
+)
+from ophyd_async.core._status import AsyncStatus
+from ophyd_async.core._utils import DEFAULT_TIMEOUT
+
+# from ophyd_async.epics.adcore._core_logic import ADBaseDatasetDescriber
+from ._core_io import (
+    ADBaseDatasetDescriber,
+    Callback,
+    NDArrayBaseIO,
+    NDFileIO,
+    NDPluginBaseIO,
+)
+from ._utils import FileWriteMode
+
+NDFileIOT = TypeVar("NDFileIOT", bound=NDFileIO)
+ADWriterT = TypeVar("ADWriterT", bound="ADWriter")
+
+
+class ADWriter(DetectorWriter, Generic[NDFileIOT]):
+    default_suffix: str = "FILE1:"
+
+    def __init__(
+        self,
+        fileio: NDFileIOT,
+        path_provider: PathProvider,
+        name_provider: NameProvider,
+        dataset_describer: DatasetDescriber,
+        file_extension: str = "",
+        mimetype: str = "",
+        plugins: dict[str, NDPluginBaseIO] | None = None,
+    ) -> None:
+        self._plugins = plugins or {}
+        self.fileio = fileio
+        self._path_provider = path_provider
+        self._name_provider = name_provider
+        self._dataset_describer = dataset_describer
+        self._file_extension = file_extension
+        self._mimetype = mimetype
+        self._last_emitted = 0
+        self._emitted_resource = None
+
+        self._capture_status: AsyncStatus | None = None
+        self._multiplier = 1
+        self._filename_template = "%s%s_%6.6d"
+
+    @classmethod
+    def with_io(
+        cls: type[ADWriterT],
+        prefix: str,
+        path_provider: PathProvider,
+        dataset_source: NDArrayBaseIO | None = None,
+        fileio_suffix: str | None = None,
+        plugins: dict[str, NDPluginBaseIO] | None = None,
+    ) -> ADWriterT:
+        try:
+            fileio_cls = get_args(cls.__orig_bases__[0])[0]  # type: ignore
+        except IndexError as err:
+            raise RuntimeError("File IO class for writer not specified!") from err
+
+        fileio = fileio_cls(prefix + (fileio_suffix or cls.default_suffix))
+        dataset_describer = ADBaseDatasetDescriber(dataset_source or fileio)
+
+        def name_provider() -> str:
+            if fileio.parent == "Not attached to a detector":
+                raise RuntimeError("Initializing writer without parent detector!")
+            return fileio.parent.name
+
+        writer = cls(
+            fileio, path_provider, name_provider, dataset_describer, plugins=plugins
+        )
+        return writer
+
+    async def begin_capture(self) -> None:
+        info = self._path_provider(device_name=self._name_provider())
+
+        await self.fileio.enable_callbacks.set(Callback.ENABLE)
+
+        # Set the directory creation depth first, since dir creation callback happens
+        # when directory path PV is processed.
+        await self.fileio.create_directory.set(info.create_dir_depth)
+
+        await asyncio.gather(
+            # See https://github.com/bluesky/ophyd-async/issues/122
+            self.fileio.file_path.set(str(info.directory_path)),
+            self.fileio.file_name.set(info.filename),
+            self.fileio.file_write_mode.set(FileWriteMode.STREAM),
+            # For non-HDF file writers, use AD file templating mechanism
+            # for generating multi-image datasets
+            self.fileio.file_template.set(
+                self._filename_template + self._file_extension
+            ),
+            self.fileio.auto_increment.set(True),
+            self.fileio.file_number.set(0),
+        )
+
+        if not await self.fileio.file_path_exists.get_value():
+            msg = f"File path {info.directory_path} for file plugin does not exist"
+            raise FileNotFoundError(msg)
+
+        # Overwrite num_capture to go forever
+        await self.fileio.num_capture.set(0)
+        # Wait for it to start, stashing the status that tells us when it finishes
+        self._capture_status = await set_and_wait_for_value(
+            self.fileio.capture, True, wait_for_set_completion=False
+        )
+
+    async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
+        self._emitted_resource = None
+        self._last_emitted = 0
+        self._multiplier = multiplier
+        frame_shape = await self._dataset_describer.shape()
+        dtype_numpy = await self._dataset_describer.np_datatype()
+
+        await self.begin_capture()
+
+        describe = {
+            self._name_provider(): DataKey(
+                source=self._name_provider(),
+                shape=list(frame_shape),
+                dtype="array",
+                dtype_numpy=dtype_numpy,
+                external="STREAM:",
+            )  # type: ignore
+        }
+        return describe
+
+    async def observe_indices_written(
+        self, timeout=DEFAULT_TIMEOUT
+    ) -> AsyncGenerator[int, None]:
+        """Wait until a specific index is ready to be collected"""
+        async for num_captured in observe_value(self.fileio.num_captured, timeout):
+            yield num_captured // self._multiplier
+
+    async def get_indices_written(self) -> int:
+        num_captured = await self.fileio.num_captured.get_value()
+        return num_captured // self._multiplier
+
+    async def collect_stream_docs(
+        self, indices_written: int
+    ) -> AsyncIterator[StreamAsset]:
+        if indices_written:
+            if not self._emitted_resource:
+                file_path = Path(await self.fileio.file_path.get_value())
+                file_name = await self.fileio.file_name.get_value()
+                file_template = file_name + "_{:06d}" + self._file_extension
+
+                frame_shape = await self._dataset_describer.shape()
+
+                uri = urlunparse(
+                    (
+                        "file",
+                        "localhost",
+                        str(file_path.absolute()) + "/",
+                        "",
+                        "",
+                        None,
+                    )
+                )
+
+                bundler_composer = ComposeStreamResource()
+
+                self._emitted_resource = bundler_composer(
+                    mimetype=self._mimetype,
+                    uri=uri,
+                    data_key=self._name_provider(),
+                    parameters={
+                        # Assume that we always write 1 frame per file/chunk
+                        "chunk_shape": (1, *frame_shape),
+                        # Include file template for reconstruction in consolidator
+                        "template": file_template,
+                    },
+                    uid=None,
+                    validate=True,
+                )
+
+                yield "stream_resource", self._emitted_resource.stream_resource_doc
+
+            # Indices are relative to resource
+            if indices_written > self._last_emitted:
+                indices: StreamRange = {
+                    "start": self._last_emitted,
+                    "stop": indices_written,
+                }
+                self._last_emitted = indices_written
+                yield (
+                    "stream_datum",
+                    self._emitted_resource.compose_stream_datum(indices),
+                )
+
+    async def close(self):
+        # Already done a caput callback in _capture_status, so can't do one here
+        await self.fileio.capture.set(False, wait=False)
+        await wait_for_value(self.fileio.capture, False, DEFAULT_TIMEOUT)
+        if self._capture_status:
+            # We kicked off an open, so wait for it to return
+            await self._capture_status
+
+    @property
+    def hints(self) -> Hints:
+        return {"fields": [self._name_provider()]}
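
`ADWriter.with_io` discovers which NDFile IO class to instantiate by reading the subclass's generic parameter from `__orig_bases__`. Here is a standalone sketch of just that typing trick, with illustrative stand-in names rather than the package's classes:

from typing import Generic, TypeVar, get_args

IOT = TypeVar("IOT")


class FileIO:  # stand-in for NDFileIO
    def __init__(self, prefix: str) -> None:
        self.prefix = prefix


class Writer(Generic[IOT]):  # stand-in for ADWriter
    default_suffix = "FILE1:"

    @classmethod
    def with_io(cls, prefix: str):
        # get_args(cls.__orig_bases__[0])[0] is the type argument the subclass
        # bound, e.g. HDFIO in "class HDFWriter(Writer[HDFIO])"
        io_cls = get_args(cls.__orig_bases__[0])[0]
        return io_cls(prefix + cls.default_suffix)


class HDFIO(FileIO): ...


class HDFWriter(Writer[HDFIO]):
    default_suffix = "HDF1:"


io = HDFWriter.with_io("SIM:")
assert type(io) is HDFIO and io.prefix == "SIM:HDF1:"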

ophyd_async/epics/adcore/_hdf_writer.py
@@ -1,5 +1,5 @@
 import asyncio
-from collections.abc import AsyncGenerator, AsyncIterator
+from collections.abc import AsyncIterator
 from pathlib import Path
 from xml.etree import ElementTree as ET
 
@@ -8,79 +8,64 @@ from event_model import DataKey
 
 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
-    AsyncStatus,
     DatasetDescriber,
-    DetectorWriter,
     HDFDataset,
     HDFFile,
     NameProvider,
     PathProvider,
-    observe_value,
-    set_and_wait_for_value,
     wait_for_value,
 )
 
-from ._core_io import …
+from ._core_io import NDFileHDFIO, NDPluginBaseIO
+from ._core_writer import ADWriter
 from ._utils import (
-    FileWriteMode,
    convert_param_dtype_to_np,
     convert_pv_dtype_to_np,
 )
 
 
-class ADHDFWriter(DetectorWriter):
+class ADHDFWriter(ADWriter[NDFileHDFIO]):
+    default_suffix: str = "HDF1:"
+
     def __init__(
         self,
-        hdf: NDFileHDFIO,
+        fileio: NDFileHDFIO,
         path_provider: PathProvider,
         name_provider: NameProvider,
         dataset_describer: DatasetDescriber,
-        *plugins: NDPluginBaseIO,
+        plugins: dict[str, NDPluginBaseIO] | None = None,
     ) -> None:
- [old lines 40-46 removed; content not captured in the source rendering]
+        super().__init__(
+            fileio,
+            path_provider,
+            name_provider,
+            dataset_describer,
+            plugins=plugins,
+            file_extension=".h5",
+            mimetype="application/x-hdf5",
+        )
         self._datasets: list[HDFDataset] = []
         self._file: HDFFile | None = None
-        self.…
+        self._filename_template = "%s%s"
 
     async def open(self, multiplier: int = 1) -> dict[str, DataKey]:
         self._file = None
-        info = self._path_provider(device_name=self._name_provider())
 
-        # Set the directory creation depth first, since dir creation callback happens
-        # when directory path PV is processed.
-        await self.hdf.create_directory.set(info.create_dir_depth)
+        # Setting HDF writer specific signals
 
         # Make sure we are using chunk auto-sizing
-        await asyncio.gather(self.hdf.chunk_size_auto.set(True))
+        await asyncio.gather(self.fileio.chunk_size_auto.set(True))
 
         await asyncio.gather(
-            self.hdf.num_extra_dims.set(0),
-            self.hdf.lazy_open.set(True),
-            self.hdf.swmr_mode.set(True),
-            # See https://github.com/bluesky/ophyd-async/issues/122
-            self.hdf.file_path.set(str(info.directory_path)),
-            self.hdf.file_name.set(info.filename),
-            self.hdf.file_template.set("%s/%s.h5"),
-            self.hdf.file_write_mode.set(FileWriteMode.stream),
-            # Never use custom xml layout file but use the one defined
-            # in the source code file NDFileHDF5LayoutXML.cpp
-            self.hdf.xml_file_name.set(""),
+            self.fileio.num_extra_dims.set(0),
+            self.fileio.lazy_open.set(True),
+            self.fileio.swmr_mode.set(True),
+            self.fileio.xml_file_name.set(""),
         )
 
-        assert (
-            await self.hdf.file_path_exists.get_value()
-        ), f"File path {info.directory_path} for hdf plugin does not exist"
+        # Set common AD file plugin params, begin capturing
+        await self.begin_capture()
 
-        # Overwrite num_capture to go forever
-        await self.hdf.num_capture.set(0)
-        # Wait for it to start, stashing the status that tells us when it finishes
-        self._capture_status = await set_and_wait_for_value(self.hdf.capture, True)
         name = self._name_provider()
         detector_shape = await self._dataset_describer.shape()
         np_dtype = await self._dataset_describer.np_datatype()
@@ -88,7 +73,7 @@ class ADHDFWriter(DetectorWriter):
         outer_shape = (multiplier,) if multiplier > 1 else ()
 
         # Determine number of frames that will be saved per HDF chunk
-        frames_per_chunk = await self.hdf.num_frames_chunks.get_value()
+        frames_per_chunk = await self.fileio.num_frames_chunks.get_value()
 
         # Add the main data
         self._datasets = [
@@ -102,7 +87,7 @@ class ADHDFWriter(DetectorWriter):
             )
         ]
         # And all the scalar datasets
-        for plugin in self._plugins:
+        for plugin in self._plugins.values():
             maybe_xml = await plugin.nd_attributes_file.get_value()
             # This is the check that ADCore does to see if it is an XML string
             # rather than a filename to parse
@@ -133,7 +118,7 @@ class ADHDFWriter(DetectorWriter):
 
         describe = {
             ds.data_key: DataKey(
-                source=self.hdf.full_file_name.source,
+                source=self.fileio.full_file_name.source,
                 shape=list(outer_shape + tuple(ds.shape)),
                 dtype="array" if ds.shape else "number",
                 dtype_numpy=ds.dtype_numpy,
@@ -143,25 +128,14 @@ class ADHDFWriter(DetectorWriter):
         }
         return describe
 
-    async def observe_indices_written(
-        self, timeout=DEFAULT_TIMEOUT
-    ) -> AsyncGenerator[int, None]:
-        """Wait until a specific index is ready to be collected"""
-        async for num_captured in observe_value(self.hdf.num_captured, timeout):
-            yield num_captured // self._multiplier
-
-    async def get_indices_written(self) -> int:
-        num_captured = await self.hdf.num_captured.get_value()
-        return num_captured // self._multiplier
-
     async def collect_stream_docs(
         self, indices_written: int
     ) -> AsyncIterator[StreamAsset]:
         # TODO: fail if we get dropped frames
-        await self.hdf.flush_now.set(True)
+        await self.fileio.flush_now.set(True)
         if indices_written:
             if not self._file:
-                path = Path(await self.hdf.full_file_name.get_value())
+                path = Path(await self.fileio.full_file_name.get_value())
                 self._file = HDFFile(
                     # See https://github.com/bluesky/ophyd-async/issues/122
                     path,
@@ -178,8 +152,8 @@ class ADHDFWriter(DetectorWriter):
 
     async def close(self):
         # Already done a caput callback in _capture_status, so can't do one here
-        await self.hdf.capture.set(False, wait=False)
-        await wait_for_value(self.hdf.capture, False, DEFAULT_TIMEOUT)
+        await self.fileio.capture.set(False, wait=False)
+        await wait_for_value(self.fileio.capture, False, DEFAULT_TIMEOUT)
         if self._capture_status:
             # We kicked off an open, so wait for it to return
             await self._capture_status
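
`ADHDFWriter` is now thin configuration over `ADWriter`, and construction typically goes through `with_io`, which appends `default_suffix` ("HDF1:") to the prefix and builds the `NDFileHDFIO` itself. A sketch with a hypothetical PV prefix, using the existing `StaticPathProvider`/`StaticFilenameProvider` from `ophyd_async.core`:

from pathlib import Path

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider
from ophyd_async.epics.adcore import ADHDFWriter

# Every acquisition writes to /tmp/data with the fixed filename stem "scan"
path_provider = StaticPathProvider(StaticFilenameProvider("scan"), Path("/tmp/data"))

# "SIM:" is a hypothetical prefix; with_io creates NDFileHDFIO("SIM:HDF1:") for us.
# Pass fileio_suffix="HDF5:" (for example) if the plugin uses a different suffix.
writer = ADHDFWriter.with_io("SIM:", path_provider)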

ophyd_async/epics/adcore/_jpeg_writer.py (new file)
@@ -0,0 +1,26 @@
+from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
+
+from ._core_io import NDFileIO, NDPluginBaseIO
+from ._core_writer import ADWriter
+
+
+class ADJPEGWriter(ADWriter[NDFileIO]):
+    default_suffix: str = "JPEG1:"
+
+    def __init__(
+        self,
+        fileio: NDFileIO,
+        path_provider: PathProvider,
+        name_provider: NameProvider,
+        dataset_describer: DatasetDescriber,
+        plugins: dict[str, NDPluginBaseIO] | None = None,
+    ) -> None:
+        super().__init__(
+            fileio,
+            path_provider,
+            name_provider,
+            dataset_describer,
+            plugins=plugins,
+            file_extension=".jpg",
+            mimetype="multipart/related;type=image/jpeg",
+        )
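
With the shared logic in `ADWriter`, a writer for another AD file plugin is little more than a default suffix, a file extension, and a mimetype (the TIFF writer further below is the same shape). A hypothetical netCDF variant as a sketch; the class name, suffix, and mimetype here are illustrative and not part of the package:

from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
from ophyd_async.epics.adcore import ADWriter
from ophyd_async.epics.adcore._core_io import NDFileIO, NDPluginBaseIO


class ADNetCDFWriter(ADWriter[NDFileIO]):
    """Hypothetical writer for the netCDF file plugin."""

    default_suffix: str = "netCDF1:"

    def __init__(
        self,
        fileio: NDFileIO,
        path_provider: PathProvider,
        name_provider: NameProvider,
        dataset_describer: DatasetDescriber,
        plugins: dict[str, NDPluginBaseIO] | None = None,
    ) -> None:
        super().__init__(
            fileio,
            path_provider,
            name_provider,
            dataset_describer,
            plugins=plugins,
            file_extension=".nc",
            mimetype="application/x-netcdf",
        )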

ophyd_async/epics/adcore/_single_trigger.py
@@ -16,11 +16,12 @@ class SingleTriggerDetector(StandardReadable, Triggerable):
         drv: ADBaseIO,
         read_uncached: Sequence[SignalR] = (),
         name="",
- [old line 19 removed; content not captured in the source rendering]
+        plugins: dict[str, NDPluginBaseIO] | None = None,
     ) -> None:
         self.drv = drv
- [old lines 22-23 removed; content not captured in the source rendering]
+        if plugins is not None:
+            for k, v in plugins.items():
+                setattr(self, k, v)
 
         self.add_readables(
             [self.drv.array_counter, *read_uncached],
@@ -34,7 +35,7 @@ class SingleTriggerDetector(StandardReadable, Triggerable):
     @AsyncStatus.wrap
     async def stage(self) -> None:
         await asyncio.gather(
-            self.drv.image_mode.set(ImageMode.single),
+            self.drv.image_mode.set(ImageMode.SINGLE),
             self.drv.wait_for_plugins.set(True),
         )
         await super().stage()
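
Plugins are now passed as a dict and set as attributes, so each becomes a named child of the device. A sketch in mock mode; the prefixes are hypothetical, and it assumes `SingleTriggerDetector` and the stats-plugin IO class `NDPluginStatsIO` are exported from `ophyd_async.epics.adcore`:

import asyncio

from ophyd_async.core import init_devices
from ophyd_async.epics.adcore import ADBaseIO, NDPluginStatsIO, SingleTriggerDetector


async def main() -> None:
    async with init_devices(mock=True):
        det = SingleTriggerDetector(
            drv=ADBaseIO("SIM:DRV:"),
            plugins={"stats": NDPluginStatsIO("SIM:STAT:")},
        )
    # The dict key becomes the attribute name, so the plugin is connected
    # and named as a child of det
    print(det.stats.name)


asyncio.run(main())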

ophyd_async/epics/adcore/_tiff_writer.py (new file)
@@ -0,0 +1,26 @@
+from ophyd_async.core import DatasetDescriber, NameProvider, PathProvider
+
+from ._core_io import NDFileIO, NDPluginBaseIO
+from ._core_writer import ADWriter
+
+
+class ADTIFFWriter(ADWriter[NDFileIO]):
+    default_suffix: str = "TIFF1:"
+
+    def __init__(
+        self,
+        fileio: NDFileIO,
+        path_provider: PathProvider,
+        name_provider: NameProvider,
+        dataset_describer: DatasetDescriber,
+        plugins: dict[str, NDPluginBaseIO] | None = None,
+    ) -> None:
+        super().__init__(
+            fileio,
+            path_provider,
+            name_provider,
+            dataset_describer,
+            plugins=plugins,
+            file_extension=".tiff",
+            mimetype="multipart/related;type=image/tiff",
+        )