ophyd-async 0.3a1__py3-none-any.whl → 0.3a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +1 -1
- ophyd_async/core/__init__.py +12 -2
- ophyd_async/core/_providers.py +3 -1
- ophyd_async/core/detector.py +65 -38
- ophyd_async/core/device.py +8 -0
- ophyd_async/core/flyer.py +10 -19
- ophyd_async/core/signal.py +36 -17
- ophyd_async/core/signal_backend.py +5 -2
- ophyd_async/core/sim_signal_backend.py +28 -16
- ophyd_async/core/standard_readable.py +4 -2
- ophyd_async/core/utils.py +18 -1
- ophyd_async/epics/_backend/_aioca.py +13 -11
- ophyd_async/epics/_backend/_p4p.py +19 -16
- ophyd_async/epics/_backend/common.py +16 -11
- ophyd_async/epics/areadetector/__init__.py +4 -0
- ophyd_async/epics/areadetector/aravis.py +69 -0
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +73 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +36 -24
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +154 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +4 -4
- ophyd_async/epics/areadetector/pilatus.py +50 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +4 -4
- ophyd_async/epics/areadetector/writers/hdf_writer.py +6 -1
- ophyd_async/epics/demo/__init__.py +33 -3
- ophyd_async/epics/motion/motor.py +20 -14
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/signal.py +26 -9
- ophyd_async/panda/__init__.py +17 -6
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/{panda_controller.py → _panda_controller.py} +3 -7
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/planstubs/__init__.py +5 -0
- ophyd_async/planstubs/prepare_trigger_and_dets.py +57 -0
- ophyd_async/protocols.py +73 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +116 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a2.dist-info}/METADATA +19 -75
- ophyd_async-0.3a2.dist-info/RECORD +76 -0
- ophyd_async/epics/pvi.py +0 -70
- ophyd_async/panda/panda.py +0 -241
- ophyd_async-0.3a1.dist-info/RECORD +0 -56
- /ophyd_async/panda/{table.py → _table.py} +0 -0
- /ophyd_async/panda/{utils.py → _utils.py} +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a2.dist-info}/LICENSE +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a2.dist-info}/WHEEL +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a2.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.3a1.dist-info → ophyd_async-0.3a2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,116 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
import time
|
|
3
|
+
from typing import Callable, List, Optional
|
|
4
|
+
|
|
5
|
+
from bluesky.protocols import Movable, Stoppable
|
|
6
|
+
|
|
7
|
+
from ophyd_async.core import StandardReadable
|
|
8
|
+
from ophyd_async.core.async_status import AsyncStatus
|
|
9
|
+
from ophyd_async.core.signal import soft_signal_r_and_backend, soft_signal_rw
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
class SimMotor(StandardReadable, Movable, Stoppable):
    """Simulated motor that optionally takes a finite time to travel."""

    def __init__(self, name="", instant=True) -> None:
        """
        Simulated motor device

        args:
        - name: str: name of device
        - instant: bool: whether to move instantly, or with a delay
        """
        self._instant = instant
        self._move_task: Optional[asyncio.Task] = None

        # Define some signals
        self.user_setpoint = soft_signal_rw(float, 0)
        self.user_readback, self._user_readback = soft_signal_r_and_backend(float, 0)
        self.velocity = soft_signal_rw(float, 1.0)
        # Bug fix: engineering units are a string ("mm"), so the signal
        # datatype must be str, not float.
        self.egu = soft_signal_rw(str, "mm")

        # Set name and signals for read() and read_configuration()
        self.set_readable_signals(
            read=[self.user_readback],
            config=[self.velocity, self.egu],
        )
        super().__init__(name=name)

        # Whether set() should complete successfully or not
        self._set_success = True

    def stop(self, success=False):
        """
        Stop the motor if it is moving
        """
        if self._move_task:
            self._move_task.cancel()
            self._move_task = None

        self._set_success = success

    def set(self, new_position: float, timeout: Optional[float] = None) -> AsyncStatus:  # noqa: F821
        """
        Asynchronously move the motor to a new position.
        """
        watchers: List[Callable] = []
        coro = asyncio.wait_for(self._move(new_position, watchers), timeout=timeout)
        return AsyncStatus(coro, watchers)

    async def _move(
        self, new_position: float, watchers: Optional[List[Callable]] = None
    ):
        """
        Start the motor moving to a new position.

        If the motor is already moving, it will stop first.
        If this is an instant motor the move will be instantaneous.
        """
        # Bug fix: the previous mutable default argument ([]) was shared
        # between calls; use None and build a fresh list per call.
        if watchers is None:
            watchers = []
        self.stop()
        start = time.monotonic()

        current_position = await self.user_readback.get_value()
        distance = abs(new_position - current_position)
        # NOTE(review): velocity == 0 with instant=False would divide by zero;
        # assumed non-zero by callers - confirm.
        travel_time = 0 if self._instant else distance / await self.velocity.get_value()

        old_position, units = await asyncio.gather(
            self.user_setpoint.get_value(),
            self.egu.get_value(),
        )

        async def update_position():
            while True:
                time_elapsed = round(time.monotonic() - start, 2)

                # update position based on time elapsed
                if time_elapsed >= travel_time:
                    # successfully reached our target position
                    await self._user_readback.put(new_position)
                    self._set_success = True
                    break
                else:
                    # Bug fix: interpolate with the signed displacement so that
                    # moves in the negative direction work; the previous
                    # abs() distance made the readback walk the wrong way.
                    current_position = (
                        old_position
                        + (new_position - old_position) * time_elapsed / travel_time
                    )

                await self._user_readback.put(current_position)

                # notify watchers of the new position
                for watcher in watchers:
                    watcher(
                        name=self.name,
                        current=current_position,
                        initial=old_position,
                        target=new_position,
                        unit=units,
                        time_elapsed=time.monotonic() - start,
                    )

                # 10hz update loop
                await asyncio.sleep(0.1)

        # set up a task that updates the motor position at 10hz
        self._move_task = asyncio.create_task(update_position())

        try:
            await self._move_task
        finally:
            if not self._set_success:
                raise RuntimeError("Motor was stopped")
|
|
@@ -0,0 +1,318 @@
|
|
|
1
|
+
from dataclasses import dataclass
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from typing import (
|
|
4
|
+
Any,
|
|
5
|
+
AsyncGenerator,
|
|
6
|
+
AsyncIterator,
|
|
7
|
+
Dict,
|
|
8
|
+
Iterator,
|
|
9
|
+
List,
|
|
10
|
+
Optional,
|
|
11
|
+
Sequence,
|
|
12
|
+
)
|
|
13
|
+
|
|
14
|
+
import h5py
|
|
15
|
+
import numpy as np
|
|
16
|
+
from bluesky.protocols import Descriptor, StreamAsset
|
|
17
|
+
from event_model import (
|
|
18
|
+
ComposeStreamResource,
|
|
19
|
+
ComposeStreamResourceBundle,
|
|
20
|
+
StreamDatum,
|
|
21
|
+
StreamRange,
|
|
22
|
+
StreamResource,
|
|
23
|
+
)
|
|
24
|
+
|
|
25
|
+
from ophyd_async.core import DirectoryInfo, DirectoryProvider
|
|
26
|
+
from ophyd_async.core.signal import SignalR, observe_value
|
|
27
|
+
from ophyd_async.core.sim_signal_backend import SimSignalBackend
|
|
28
|
+
from ophyd_async.core.utils import DEFAULT_TIMEOUT
|
|
29
|
+
|
|
30
|
+
# HDF5 dataset path for the raw image frames
DATA_PATH = "/entry/data/data"

# HDF5 dataset path for the per-frame pixel sum
SUM_PATH = "/entry/sum"

# Largest value storable in an unsigned 8-bit pixel (255)
MAX_UINT8_VALUE = np.iinfo(np.uint8).max

# Stream-resource "spec" identifier for SWMR HDF5 slices
SLICE_NAME = "AD_HDF5_SWMR_SLICE"
|
|
39
|
+
|
|
40
|
+
|
|
41
|
+
@dataclass
class DatasetConfig:
    """Static description of one HDF5 dataset to create in the output file."""

    # HDF5 dataset path within the file, e.g. "/entry/data/data"
    name: str
    # Initial dataset shape; the first axis grows as frames are appended
    shape: Sequence[int]
    # Resizable limits passed to h5py; None means unbounded along that axis
    maxshape: tuple[Any, ...] = (None,)
    # Sub-path recorded in stream-resource kwargs (may differ from name)
    path: Optional[str] = None
    # Presumably frames produced per index - confirm against the writer
    multiplier: Optional[int] = 1
    # numpy dtype for the dataset; None lets h5py pick a default
    dtype: Optional[Any] = None
    # Fill value for unwritten elements
    fillvalue: Optional[int] = None
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
def get_full_file_description(
    datasets: List[DatasetConfig], outer_shape: tuple[int, ...]
):
    """Build a bluesky Descriptor per dataset, keyed by the dataset name
    with slashes replaced by underscores.

    The full event shape is the outer (trigger) shape prepended to the
    dataset's own shape; a [1]-shaped dataset is reported as a scalar.
    """
    return {
        d.name.replace("/", "_"): Descriptor(
            source=f"soft://{d.name}",
            shape=outer_shape + tuple(d.shape),
            dtype="number" if d.shape == [1] else "array",
            external="STREAM:",
        )
        for d in datasets
    }
|
|
66
|
+
|
|
67
|
+
|
|
68
|
+
def generate_gaussian_blob(height: int, width: int) -> np.ndarray:
    """Make a Gaussian Blob with float values in range 0..1"""
    # Sample a unit grid spanning [-1, 1] on both axes
    xs = np.linspace(-1, 1, width)
    ys = np.linspace(-1, 1, height)
    grid_x, grid_y = np.meshgrid(xs, ys)
    # exp(-(r^2)) peaks at 1.0 in the centre and falls off radially
    radius_squared = grid_x * grid_x + grid_y * grid_y
    return np.exp(-radius_squared)
|
|
74
|
+
|
|
75
|
+
|
|
76
|
+
def generate_interesting_pattern(x: float, y: float) -> float:
    """This function is interesting in x and y in range -10..10, returning
    a float value in range 0..1
    """
    # Oscillating term in [-1, 1], shifted and scaled into [0, 1]
    oscillation = np.sin(x) ** 10 + np.cos(10 + y * x) * np.cos(x)
    return 0.5 + oscillation / 2
|
|
82
|
+
|
|
83
|
+
|
|
84
|
+
class HdfStreamProvider:
    """Composes event-model stream-resource and stream-datum documents for a
    set of datasets written to one HDF5 file."""

    def __init__(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[DatasetConfig],
    ) -> None:
        # Index one past the last frame already emitted as a StreamDatum
        self._last_emitted = 0
        self._bundles: List[ComposeStreamResourceBundle] = self._compose_bundles(
            directory_info, full_file_name, datasets
        )

    def _compose_bundles(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[DatasetConfig],
    ) -> List[ComposeStreamResourceBundle]:
        """Create one stream-resource bundle per dataset, all pointing at the
        same file (path relative to the directory root)."""
        path = str(full_file_name.relative_to(directory_info.root))
        root = str(directory_info.root)
        bundler_composer = ComposeStreamResource()

        bundles: List[ComposeStreamResourceBundle] = []

        bundles = [
            bundler_composer(
                spec=SLICE_NAME,
                root=root,
                resource_path=path,
                # Slashes are not valid in data keys, so flatten the name
                data_key=d.name.replace("/", "_"),
                resource_kwargs={
                    "path": d.path,
                    "multiplier": d.multiplier,
                    "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
                },
            )
            for d in datasets
        ]
        return bundles

    def stream_resources(self) -> Iterator[StreamResource]:
        """Yield the stream-resource document for every dataset."""
        for bundle in self._bundles:
            yield bundle.stream_resource_doc

    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
        """Yield stream-datum documents covering frames emitted since the
        last call. Yields nothing if no new frames have been written."""
        # Indices are relative to resource
        if indices_written > self._last_emitted:
            updated_stream_range = StreamRange(
                start=self._last_emitted,
                stop=indices_written,
            )
            self._last_emitted = indices_written
            for bundle in self._bundles:
                yield bundle.compose_stream_datum(indices=updated_stream_range)
        return None

    def close(self) -> None:
        """Close every bundle, finalising the stream documents."""
        for bundle in self._bundles:
            bundle.close()
|
|
143
|
+
|
|
144
|
+
|
|
145
|
+
class PatternGenerator:
    """Generates a moving Gaussian-blob test pattern and writes it
    frame-by-frame to a SWMR HDF5 file, emitting bluesky stream documents.

    The blob's brightness depends on the (x, y) position via
    ``generate_interesting_pattern`` and on the configured exposure.
    """

    def __init__(
        self,
        saturation_exposure_time: float = 1,
        detector_width: int = 320,
        detector_height: int = 240,
    ) -> None:
        self.saturation_exposure_time = saturation_exposure_time
        self.exposure = saturation_exposure_time
        self.x = 0.0
        self.y = 0.0
        self.height = detector_height
        self.width = detector_width
        # Number of frames fully written (and visible) so far
        self.written_images_counter: int = 0

        # it automatically initializes to 0
        self.signal_backend = SimSignalBackend(int)
        self.sim_signal = SignalR(self.signal_backend)
        # Template frame: unit Gaussian blob scaled to uint8 full range
        blob = np.array(
            generate_gaussian_blob(width=detector_width, height=detector_height)
            * MAX_UINT8_VALUE
        )
        self.STARTING_BLOB = blob
        self._hdf_stream_provider: Optional[HdfStreamProvider] = None
        self._handle_for_h5_file: Optional[h5py.File] = None
        self.target_path: Optional[Path] = None

    async def write_image_to_file(self) -> None:
        """Append one simulated frame and its pixel sum to the open file.

        Raises AssertionError if open_file() has not been called.
        """
        assert self._handle_for_h5_file, "no file has been opened!"
        # prepare - resize the fixed hdf5 data structure
        # so that the new image can be written
        new_layer = self.written_images_counter + 1
        target_dimensions = (new_layer, self.height, self.width)

        # generate the simulated data
        intensity: float = generate_interesting_pattern(self.x, self.y)
        detector_data: np.uint8 = np.uint8(
            self.STARTING_BLOB
            * intensity
            * self.exposure
            / self.saturation_exposure_time
        )

        # Bug fix: the DATA_PATH resize and the open-file assert were
        # previously duplicated; each is needed exactly once.
        self._handle_for_h5_file[DATA_PATH].resize(target_dimensions)
        self._handle_for_h5_file[SUM_PATH].resize((new_layer,))

        print(f"writing image {new_layer}")

        # write data to disc (intermediate step)
        self._handle_for_h5_file[DATA_PATH][self.written_images_counter] = detector_data
        self._handle_for_h5_file[SUM_PATH][self.written_images_counter] = np.sum(
            detector_data
        )

        # save metadata - so that it's discoverable
        self._handle_for_h5_file[DATA_PATH].flush()
        self._handle_for_h5_file[SUM_PATH].flush()

        # counter increment is last
        # as only at this point the new data is visible from the outside
        self.written_images_counter += 1
        await self.signal_backend.put(self.written_images_counter)

    def set_exposure(self, value: float) -> None:
        self.exposure = value

    def set_x(self, value: float) -> None:
        self.x = value

    def set_y(self, value: float) -> None:
        self.y = value

    async def open_file(
        self, directory: DirectoryProvider, multiplier: int = 1
    ) -> Dict[str, Descriptor]:
        """Create the HDF5 file in SWMR mode and return its descriptors.

        Caches the dataset list, multiplier and directory provider on self
        for use by collect_stream_docs()/observe_indices_written().
        """
        await self.sim_signal.connect()

        self.target_path = self._get_new_path(directory)

        self._handle_for_h5_file = h5py.File(self.target_path, "w", libver="latest")

        datasets = self._get_datasets()
        for d in datasets:
            self._handle_for_h5_file.create_dataset(
                name=d.name,
                shape=d.shape,
                dtype=d.dtype,
                maxshape=d.maxshape,
            )

        # once datasets written, can switch the model to single writer multiple reader
        self._handle_for_h5_file.swmr_mode = True

        outer_shape = (multiplier,) if multiplier > 1 else ()
        full_file_description = get_full_file_description(datasets, outer_shape)

        # cache state to self
        self._datasets = datasets
        self.multiplier = multiplier
        self._directory_provider = directory
        return full_file_description

    def _get_new_path(self, directory: DirectoryProvider) -> Path:
        """Build the output path from the provider's root/resource_dir and
        its filename prefix/suffix."""
        info = directory()
        filename = f"{info.prefix}pattern{info.suffix}.h5"
        new_path: Path = info.root / info.resource_dir / filename
        return new_path

    def _get_datasets(self) -> List[DatasetConfig]:
        """Describe the two datasets written per frame: raw image and sum."""
        raw_dataset = DatasetConfig(
            name=DATA_PATH,
            dtype=np.uint8,
            shape=(1, self.height, self.width),
            maxshape=(None, self.height, self.width),
        )

        sum_dataset = DatasetConfig(
            name=SUM_PATH,
            dtype=np.float64,
            shape=(1,),
            maxshape=(None,),
            fillvalue=-1,
        )

        datasets: List[DatasetConfig] = [raw_dataset, sum_dataset]
        return datasets

    async def collect_stream_docs(
        self, indices_written: int
    ) -> AsyncIterator[StreamAsset]:
        """
        stream resource says "here is a dataset",
        stream datum says "here are N frames in that stream resource",
        you get one stream resource and many stream datums per scan
        """
        if self._handle_for_h5_file:
            self._handle_for_h5_file.flush()
        # when already something was written to the file
        if indices_written:
            # if no frames arrived yet, there's no file to speak of
            # cannot get the full filename the HDF writer will write
            # until the first frame comes in
            if not self._hdf_stream_provider:
                assert self.target_path, "open file has not been called"
                datasets = self._get_datasets()
                self._datasets = datasets
                self._hdf_stream_provider = HdfStreamProvider(
                    self._directory_provider(),
                    self.target_path,
                    self._datasets,
                )
                for doc in self._hdf_stream_provider.stream_resources():
                    yield "stream_resource", doc
            if self._hdf_stream_provider:
                for doc in self._hdf_stream_provider.stream_data(indices_written):
                    yield "stream_datum", doc

    def close(self) -> None:
        """Close the HDF5 file handle, if open."""
        if self._handle_for_h5_file:
            self._handle_for_h5_file.close()
            print("file closed")
            self._handle_for_h5_file = None

    async def observe_indices_written(
        self, timeout=DEFAULT_TIMEOUT
    ) -> AsyncGenerator[int, None]:
        """Yield the number of complete indices each time a frame lands."""
        async for num_captured in observe_value(self.sim_signal, timeout=timeout):
            yield num_captured // self.multiplier
|
|
@@ -0,0 +1,55 @@
|
|
|
1
|
+
import asyncio
|
|
2
|
+
from typing import Optional
|
|
3
|
+
|
|
4
|
+
from ophyd_async.core import DirectoryProvider
|
|
5
|
+
from ophyd_async.core.async_status import AsyncStatus
|
|
6
|
+
from ophyd_async.core.detector import DetectorControl, DetectorTrigger
|
|
7
|
+
from ophyd_async.sim.pattern_generator import PatternGenerator
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SimPatternDetectorControl(DetectorControl):
    """Detector controller that drives a PatternGenerator in the background.

    Frames are written by an asyncio task so that arm() returns immediately.
    """

    def __init__(
        self,
        pattern_generator: PatternGenerator,
        directory_provider: DirectoryProvider,
        exposure: float = 0.1,
    ) -> None:
        self.pattern_generator: PatternGenerator = pattern_generator
        self.pattern_generator.set_exposure(exposure)
        self.directory_provider: DirectoryProvider = directory_provider
        self.task: Optional[asyncio.Task] = None
        super().__init__()

    async def arm(
        self,
        num: int,
        trigger: DetectorTrigger = DetectorTrigger.internal,
        exposure: Optional[float] = 0.01,
    ) -> AsyncStatus:
        """Start a background task writing `num` frames; return its status."""
        assert exposure is not None
        # One frame period = exposure time plus the readout deadtime
        frame_period: float = exposure + self.get_deadtime(exposure)
        self.task = asyncio.create_task(
            self._coroutine_for_image_writing(exposure, frame_period, num)
        )
        return AsyncStatus(self.task)

    async def disarm(self):
        """Cancel the frame-writing task, if one is running."""
        if self.task is not None:
            self.task.cancel()
            try:
                await self.task
            except asyncio.CancelledError:
                pass
            self.task = None

    def get_deadtime(self, exposure: float) -> float:
        # Fixed nominal readout deadtime for the simulated detector
        return 0.001

    async def _coroutine_for_image_writing(
        self, exposure: float, period: float, frames_number: int
    ):
        """Write `frames_number` frames, one every `period` seconds."""
        for _ in range(frames_number):
            self.pattern_generator.set_exposure(exposure)
            await asyncio.sleep(period)
            await self.pattern_generator.write_image_to_file()
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
from typing import AsyncGenerator, AsyncIterator, Dict
|
|
2
|
+
|
|
3
|
+
from bluesky.protocols import Descriptor
|
|
4
|
+
|
|
5
|
+
from ophyd_async.core import DirectoryProvider
|
|
6
|
+
from ophyd_async.core.detector import DetectorWriter
|
|
7
|
+
from ophyd_async.sim.pattern_generator import PatternGenerator
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
class SimPatternDetectorWriter(DetectorWriter):
    """Thin DetectorWriter adapter that delegates to a PatternGenerator."""

    pattern_generator: PatternGenerator

    def __init__(
        self, pattern_generator: PatternGenerator, directoryProvider: DirectoryProvider
    ) -> None:
        self.pattern_generator = pattern_generator
        self.directory_provider = directoryProvider

    async def open(self, multiplier: int = 1) -> Dict[str, Descriptor]:
        """Open the HDF5 file and return its dataset descriptors."""
        return await self.pattern_generator.open_file(
            self.directory_provider, multiplier
        )

    async def close(self) -> None:
        self.pattern_generator.close()

    def collect_stream_docs(self, indices_written: int) -> AsyncIterator:
        return self.pattern_generator.collect_stream_docs(indices_written)

    def observe_indices_written(self, timeout=...) -> AsyncGenerator[int, None]:
        """Yield the number of indices written as frames arrive.

        Bug fix: the timeout argument was previously accepted but silently
        ignored. The Ellipsis default preserves the old signature and means
        "use the generator's own default timeout".
        """
        if timeout is ...:
            return self.pattern_generator.observe_indices_written()
        return self.pattern_generator.observe_indices_written(timeout=timeout)

    async def get_indices_written(self) -> int:
        return self.pattern_generator.written_images_counter
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
from typing import Sequence
|
|
3
|
+
|
|
4
|
+
from ophyd_async.core import DirectoryProvider, StaticDirectoryProvider
|
|
5
|
+
from ophyd_async.core.detector import StandardDetector
|
|
6
|
+
from ophyd_async.core.signal import SignalR
|
|
7
|
+
from ophyd_async.sim.pattern_generator import PatternGenerator
|
|
8
|
+
|
|
9
|
+
from .sim_pattern_detector_control import SimPatternDetectorControl
|
|
10
|
+
from .sim_pattern_detector_writer import SimPatternDetectorWriter
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
class SimPatternDetector(StandardDetector):
    """StandardDetector wiring a PatternGenerator to sim control/writer,
    saving files under a static directory rooted at `path`."""

    def __init__(
        self,
        path: Path,
        # Bug fix: mutable default [] replaced with an immutable tuple so the
        # default cannot be shared/mutated across instances; Sequence accepts
        # both, so callers are unaffected.
        config_sigs: Sequence[SignalR] = (),
        name: str = "sim_pattern_detector",
        writer_timeout: float = 1,
    ) -> None:
        self.directory_provider: DirectoryProvider = StaticDirectoryProvider(path)
        self.pattern_generator = PatternGenerator()
        writer = SimPatternDetectorWriter(
            pattern_generator=self.pattern_generator,
            directoryProvider=self.directory_provider,
        )
        controller = SimPatternDetectorControl(
            pattern_generator=self.pattern_generator,
            directory_provider=self.directory_provider,
        )
        super().__init__(
            controller=controller,
            writer=writer,
            config_sigs=config_sigs,
            name=name,
            writer_timeout=writer_timeout,
        )
|