ophyd-async 0.2.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (79):
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +2 -2
  3. ophyd_async/core/__init__.py +52 -19
  4. ophyd_async/core/_providers.py +38 -5
  5. ophyd_async/core/async_status.py +86 -40
  6. ophyd_async/core/detector.py +214 -72
  7. ophyd_async/core/device.py +91 -50
  8. ophyd_async/core/device_save_loader.py +96 -23
  9. ophyd_async/core/flyer.py +32 -246
  10. ophyd_async/core/mock_signal_backend.py +82 -0
  11. ophyd_async/core/mock_signal_utils.py +145 -0
  12. ophyd_async/core/signal.py +225 -58
  13. ophyd_async/core/signal_backend.py +8 -5
  14. ophyd_async/core/{sim_signal_backend.py → soft_signal_backend.py} +51 -49
  15. ophyd_async/core/standard_readable.py +212 -23
  16. ophyd_async/core/utils.py +123 -30
  17. ophyd_async/epics/_backend/_aioca.py +42 -44
  18. ophyd_async/epics/_backend/_p4p.py +96 -52
  19. ophyd_async/epics/_backend/common.py +25 -0
  20. ophyd_async/epics/areadetector/__init__.py +8 -4
  21. ophyd_async/epics/areadetector/aravis.py +63 -0
  22. ophyd_async/epics/areadetector/controllers/__init__.py +2 -1
  23. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
  24. ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
  25. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  26. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +37 -25
  27. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  28. ophyd_async/epics/areadetector/drivers/__init__.py +6 -0
  29. ophyd_async/epics/areadetector/drivers/ad_base.py +8 -12
  30. ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
  31. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  32. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +8 -5
  33. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  34. ophyd_async/epics/areadetector/kinetix.py +46 -0
  35. ophyd_async/epics/areadetector/pilatus.py +45 -0
  36. ophyd_async/epics/areadetector/single_trigger_det.py +14 -6
  37. ophyd_async/epics/areadetector/utils.py +2 -12
  38. ophyd_async/epics/areadetector/vimba.py +43 -0
  39. ophyd_async/epics/areadetector/writers/_hdffile.py +21 -7
  40. ophyd_async/epics/areadetector/writers/hdf_writer.py +32 -17
  41. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +19 -18
  42. ophyd_async/epics/areadetector/writers/nd_plugin.py +15 -7
  43. ophyd_async/epics/demo/__init__.py +75 -49
  44. ophyd_async/epics/motion/motor.py +67 -53
  45. ophyd_async/epics/pvi/__init__.py +3 -0
  46. ophyd_async/epics/pvi/pvi.py +318 -0
  47. ophyd_async/epics/signal/__init__.py +8 -3
  48. ophyd_async/epics/signal/signal.py +26 -9
  49. ophyd_async/log.py +130 -0
  50. ophyd_async/panda/__init__.py +21 -5
  51. ophyd_async/panda/_common_blocks.py +49 -0
  52. ophyd_async/panda/_hdf_panda.py +48 -0
  53. ophyd_async/panda/_panda_controller.py +37 -0
  54. ophyd_async/panda/_trigger.py +39 -0
  55. ophyd_async/panda/_utils.py +15 -0
  56. ophyd_async/panda/writers/__init__.py +3 -0
  57. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  58. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  59. ophyd_async/plan_stubs/__init__.py +13 -0
  60. ophyd_async/plan_stubs/ensure_connected.py +22 -0
  61. ophyd_async/plan_stubs/fly.py +149 -0
  62. ophyd_async/protocols.py +126 -0
  63. ophyd_async/sim/__init__.py +11 -0
  64. ophyd_async/sim/demo/__init__.py +3 -0
  65. ophyd_async/sim/demo/sim_motor.py +103 -0
  66. ophyd_async/sim/pattern_generator.py +318 -0
  67. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  68. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  69. ophyd_async/sim/sim_pattern_generator.py +37 -0
  70. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +31 -70
  71. ophyd_async-0.3.0.dist-info/RECORD +86 -0
  72. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
  73. ophyd_async/epics/signal/pvi_get.py +0 -22
  74. ophyd_async/panda/panda.py +0 -294
  75. ophyd_async-0.2.0.dist-info/RECORD +0 -53
  76. /ophyd_async/panda/{table.py → _table.py} +0 -0
  77. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
  78. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
  79. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,11 @@
1
+ from .pattern_generator import PatternGenerator
2
+ from .sim_pattern_detector_control import SimPatternDetectorControl
3
+ from .sim_pattern_detector_writer import SimPatternDetectorWriter
4
+ from .sim_pattern_generator import SimPatternDetector
5
+
6
+ __all__ = [
7
+ "PatternGenerator",
8
+ "SimPatternDetectorControl",
9
+ "SimPatternDetectorWriter",
10
+ "SimPatternDetector",
11
+ ]
@@ -0,0 +1,3 @@
1
+ from .sim_motor import SimMotor
2
+
3
+ __all__ = ["SimMotor"]
@@ -0,0 +1,103 @@
1
+ import asyncio
2
+ import contextlib
3
+ import time
4
+
5
+ from bluesky.protocols import Movable, Stoppable
6
+
7
+ from ophyd_async.core import StandardReadable
8
+ from ophyd_async.core.async_status import AsyncStatus, WatchableAsyncStatus
9
+ from ophyd_async.core.signal import (
10
+ observe_value,
11
+ soft_signal_r_and_setter,
12
+ soft_signal_rw,
13
+ )
14
+ from ophyd_async.core.standard_readable import ConfigSignal, HintedSignal
15
+ from ophyd_async.core.utils import WatcherUpdate
16
+
17
+
18
class SimMotor(StandardReadable, Movable, Stoppable):
    def __init__(self, name="", instant=True) -> None:
        """
        Simulated motor device.

        args:
        - name: str: name of device
        - instant: bool: whether to move instantly, or with a delay
        """
        # Readback is driven by this device itself, so keep the private setter
        with self.add_children_as_readables(HintedSignal):
            self.user_readback, self._user_readback_set = soft_signal_r_and_setter(
                float, 0
            )
        with self.add_children_as_readables(ConfigSignal):
            # velocity of 0 means "move instantly"
            self.velocity = soft_signal_rw(float, 0 if instant else 1.0)
            self.units = soft_signal_rw(str, "mm")
        self.user_setpoint = soft_signal_rw(float, 0)

        # Whether set() should complete successfully or not
        self._set_success = True
        self._move_status: AsyncStatus | None = None

        super().__init__(name=name)

    async def _move(self, old_position: float, new_position: float, move_time: float):
        """Drive user_readback from old_position to new_position over move_time."""
        start = time.monotonic()
        # Signed displacement, so a move in the negative direction heads towards
        # the target. (The previous abs() always moved the readback upwards.)
        displacement = new_position - old_position
        while True:
            time_elapsed = round(time.monotonic() - start, 2)

            # update position based on time elapsed
            if time_elapsed >= move_time:
                # successfully reached our target position
                self._user_readback_set(new_position)
                break
            else:
                current_position = (
                    old_position + displacement * time_elapsed / move_time
                )
                self._user_readback_set(current_position)

            # 10hz update loop
            await asyncio.sleep(0.1)

    @WatchableAsyncStatus.wrap
    async def set(self, new_position: float):
        """
        Asynchronously move the motor to a new position, yielding WatcherUpdates.
        """
        # Make sure any existing move tasks are stopped. stop() also copies the
        # current readback into user_setpoint, so the setpoint is a valid start.
        await self.stop()
        old_position, units, velocity = await asyncio.gather(
            self.user_setpoint.get_value(),
            self.units.get_value(),
            self.velocity.get_value(),
        )
        # If zero velocity, do instant move
        move_time = abs(new_position - old_position) / velocity if velocity else 0
        self._move_status = AsyncStatus(
            self._move(old_position, new_position, move_time)
        )
        # If stop is called then this will raise a CancelledError, ignore it
        with contextlib.suppress(asyncio.CancelledError):
            async for current_position in observe_value(
                self.user_readback, done_status=self._move_status
            ):
                yield WatcherUpdate(
                    current=current_position,
                    initial=old_position,
                    target=new_position,
                    name=self.name,
                    unit=units,
                )
        if not self._set_success:
            raise RuntimeError("Motor was stopped")

    async def stop(self, success=True):
        """
        Stop the motor if it is moving.

        args:
        - success: bool: whether an in-flight set() should report success
        """
        self._set_success = success
        if self._move_status:
            self._move_status.task.cancel()
            self._move_status = None
        # Keep the setpoint in sync with where the motor actually stopped
        await self.user_setpoint.set(await self.user_readback.get_value())
@@ -0,0 +1,318 @@
1
+ from dataclasses import dataclass
2
+ from pathlib import Path
3
+ from typing import (
4
+ Any,
5
+ AsyncGenerator,
6
+ AsyncIterator,
7
+ Dict,
8
+ Iterator,
9
+ List,
10
+ Optional,
11
+ Sequence,
12
+ )
13
+
14
+ import h5py
15
+ import numpy as np
16
+ from bluesky.protocols import DataKey, StreamAsset
17
+ from event_model import (
18
+ ComposeStreamResource,
19
+ ComposeStreamResourceBundle,
20
+ StreamDatum,
21
+ StreamRange,
22
+ StreamResource,
23
+ )
24
+
25
+ from ophyd_async.core import DirectoryInfo, DirectoryProvider
26
+ from ophyd_async.core.mock_signal_backend import MockSignalBackend
27
+ from ophyd_async.core.signal import SignalR, observe_value
28
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT
29
+
30
+ # raw data path
31
+ DATA_PATH = "/entry/data/data"
32
+
33
+ # pixel sum path
34
+ SUM_PATH = "/entry/sum"
35
+
36
+ MAX_UINT8_VALUE = np.iinfo(np.uint8).max
37
+
38
+ SLICE_NAME = "AD_HDF5_SWMR_SLICE"
39
+
40
+
41
@dataclass
class DatasetConfig:
    """Configuration for one HDF5 dataset written by the pattern generator."""

    # dataset name, also used as the path within the HDF5 file
    name: str
    # per-frame shape of the dataset
    shape: Sequence[int]
    # resizable first axis by default, so frames can be appended
    maxshape: tuple[Any, ...] = (None,)
    path: Optional[str] = None
    # number of frames written per collection event
    multiplier: Optional[int] = 1
    # numpy dtype passed through to h5py; None lets h5py choose
    dtype: Optional[Any] = None
    fillvalue: Optional[int] = None
50
+
51
+
52
def get_full_file_description(
    datasets: List[DatasetConfig], outer_shape: tuple[int, ...]
):
    """Build a DataKey for each dataset, keyed by its name with "/" -> "_".

    Args:
        datasets: dataset configurations to describe
        outer_shape: extra leading dimensions (e.g. a multiplier) prepended
            to each dataset's per-frame shape

    Returns:
        Dict mapping sanitised dataset name to its DataKey descriptor.
    """
    full_file_description: Dict[str, DataKey] = {}
    for d in datasets:
        source = f"soft://{d.name}"
        shape = outer_shape + tuple(d.shape)
        # Compare as a tuple: shapes are declared as tuples (e.g. (1,)), so
        # the previous list comparison `d.shape == [1]` never matched and
        # scalar datasets were wrongly described as "array".
        dtype = "number" if tuple(d.shape) == (1,) else "array"
        descriptor = DataKey(
            source=source, shape=shape, dtype=dtype, external="STREAM:"
        )
        key = d.name.replace("/", "_")
        full_file_description[key] = descriptor
    return full_file_description
66
+
67
+
68
def generate_gaussian_blob(height: int, width: int) -> np.ndarray:
    """Make a Gaussian Blob with float values in range 0..1"""
    # Coordinate grids spanning -1..1 in each direction
    xs = np.linspace(-1, 1, width)
    ys = np.linspace(-1, 1, height)
    x, y = np.meshgrid(xs, ys)
    # Radial distance from the centre, then a Gaussian falloff
    distance = np.sqrt(x * x + y * y)
    return np.exp(-(distance**2))
74
+
75
+
76
def generate_interesting_pattern(x: float, y: float) -> float:
    """This function is interesting in x and y in range -10..10, returning
    a float value in range 0..1
    """
    # Oscillating term in -1..1, shifted and scaled into 0..1
    wave = np.sin(x) ** 10 + np.cos(10 + y * x) * np.cos(x)
    return 0.5 + wave / 2
82
+
83
+
84
class HdfStreamProvider:
    """Composes stream-resource and stream-datum documents for the datasets
    of a single HDF5 file."""

    def __init__(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[DatasetConfig],
    ) -> None:
        # Index up to which stream datum documents have already been emitted
        self._last_emitted = 0
        self._bundles: List[ComposeStreamResourceBundle] = self._compose_bundles(
            directory_info, full_file_name, datasets
        )

    def _compose_bundles(
        self,
        directory_info: DirectoryInfo,
        full_file_name: Path,
        datasets: List[DatasetConfig],
    ) -> List[ComposeStreamResourceBundle]:
        # Fixed annotation: this returns bundles, not StreamAssets; also
        # dropped a dead `bundles = []` that was immediately overwritten.
        path = str(full_file_name.relative_to(directory_info.root))
        root = str(directory_info.root)
        bundler_composer = ComposeStreamResource()

        # One bundle (resource doc + datum composer) per dataset
        return [
            bundler_composer(
                spec=SLICE_NAME,
                root=root,
                resource_path=path,
                data_key=d.name.replace("/", "_"),
                resource_kwargs={
                    "path": d.path,
                    "multiplier": d.multiplier,
                    "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
                },
            )
            for d in datasets
        ]

    def stream_resources(self) -> Iterator[StreamResource]:
        """Yield one stream resource document per dataset."""
        for bundle in self._bundles:
            yield bundle.stream_resource_doc

    def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
        """Yield stream datum documents covering indices not yet emitted."""
        # Indices are relative to resource
        if indices_written > self._last_emitted:
            updated_stream_range = StreamRange(
                start=self._last_emitted,
                stop=indices_written,
            )
            self._last_emitted = indices_written
            for bundle in self._bundles:
                yield bundle.compose_stream_datum(indices=updated_stream_range)

    def close(self) -> None:
        for bundle in self._bundles:
            bundle.close()
143
+
144
+
145
class PatternGenerator:
    """Simulates a detector by writing a scaled Gaussian-blob image (and its
    pixel sum) per frame into a SWMR HDF5 file, and exposes the written-frame
    count through a mock signal so it can be observed asynchronously."""

    def __init__(
        self,
        saturation_exposure_time: float = 1,
        detector_width: int = 320,
        detector_height: int = 240,
    ) -> None:
        self.saturation_exposure_time = saturation_exposure_time
        self.exposure = saturation_exposure_time
        self.x = 0.0
        self.y = 0.0
        self.height = detector_height
        self.width = detector_width
        self.written_images_counter: int = 0

        # it automatically initializes to 0
        self.signal_backend = MockSignalBackend(int)
        self.mock_signal = SignalR(self.signal_backend)
        # Base image scaled to the full uint8 range; frames are this blob
        # attenuated by pattern intensity and exposure
        blob = np.array(
            generate_gaussian_blob(width=detector_width, height=detector_height)
            * MAX_UINT8_VALUE
        )
        self.STARTING_BLOB = blob
        self._hdf_stream_provider: Optional[HdfStreamProvider] = None
        self._handle_for_h5_file: Optional[h5py.File] = None
        self.target_path: Optional[Path] = None

    async def write_image_to_file(self) -> None:
        """Append one simulated frame and its pixel sum to the open file."""
        assert self._handle_for_h5_file, "no file has been opened!"
        # prepare - resize the fixed hdf5 data structure
        # so that the new image can be written
        new_layer = self.written_images_counter + 1
        target_dimensions = (new_layer, self.height, self.width)

        # generate the simulated data
        intensity: float = generate_interesting_pattern(self.x, self.y)
        detector_data: np.uint8 = np.uint8(
            self.STARTING_BLOB
            * intensity
            * self.exposure
            / self.saturation_exposure_time
        )

        print(f"writing image {new_layer}")
        # (previously the DATA_PATH resize and the assert above were
        # duplicated; a single resize per dataset is sufficient)
        self._handle_for_h5_file[DATA_PATH].resize(target_dimensions)
        self._handle_for_h5_file[SUM_PATH].resize((new_layer,))

        # write data to disc (intermediate step)
        self._handle_for_h5_file[DATA_PATH][self.written_images_counter] = detector_data
        self._handle_for_h5_file[SUM_PATH][self.written_images_counter] = np.sum(
            detector_data
        )

        # save metadata - so that it's discoverable
        self._handle_for_h5_file[DATA_PATH].flush()
        self._handle_for_h5_file[SUM_PATH].flush()

        # counter increment is last
        # as only at this point the new data is visible from the outside
        self.written_images_counter += 1
        await self.signal_backend.put(self.written_images_counter)

    def set_exposure(self, value: float) -> None:
        self.exposure = value

    def set_x(self, value: float) -> None:
        self.x = value

    def set_y(self, value: float) -> None:
        self.y = value

    async def open_file(
        self, directory: DirectoryProvider, multiplier: int = 1
    ) -> Dict[str, DataKey]:
        """Create the HDF5 file with empty datasets and return its DataKeys."""
        await self.mock_signal.connect()

        self.target_path = self._get_new_path(directory)

        self._handle_for_h5_file = h5py.File(self.target_path, "w", libver="latest")

        assert self._handle_for_h5_file, "not loaded the file right"

        datasets = self._get_datasets()
        for d in datasets:
            self._handle_for_h5_file.create_dataset(
                name=d.name,
                shape=d.shape,
                dtype=d.dtype,
                maxshape=d.maxshape,
            )

        # once datasets written, can switch the model to single writer multiple reader
        self._handle_for_h5_file.swmr_mode = True

        outer_shape = (multiplier,) if multiplier > 1 else ()
        full_file_description = get_full_file_description(datasets, outer_shape)

        # cache state to self
        self._datasets = datasets
        self.multiplier = multiplier
        self._directory_provider = directory
        return full_file_description

    def _get_new_path(self, directory: DirectoryProvider) -> Path:
        """Compute the file path for a new HDF5 file from the provider."""
        info = directory()
        filename = f"{info.prefix}pattern{info.suffix}.h5"
        new_path: Path = info.root / info.resource_dir / filename
        return new_path

    def _get_datasets(self) -> List[DatasetConfig]:
        """Describe the raw-image and per-frame-sum datasets."""
        raw_dataset = DatasetConfig(
            name=DATA_PATH,
            dtype=np.uint8,
            shape=(1, self.height, self.width),
            maxshape=(None, self.height, self.width),
        )

        sum_dataset = DatasetConfig(
            name=SUM_PATH,
            dtype=np.float64,
            shape=(1,),
            maxshape=(None,),
            fillvalue=-1,
        )

        datasets: List[DatasetConfig] = [raw_dataset, sum_dataset]
        return datasets

    async def collect_stream_docs(
        self, indices_written: int
    ) -> AsyncIterator[StreamAsset]:
        """
        stream resource says "here is a dataset",
        stream datum says "here are N frames in that stream resource",
        you get one stream resource and many stream datums per scan
        """
        if self._handle_for_h5_file:
            self._handle_for_h5_file.flush()
        # when already something was written to the file
        if indices_written:
            # if no frames arrived yet, there's no file to speak of
            # cannot get the full filename the HDF writer will write
            # until the first frame comes in
            if not self._hdf_stream_provider:
                assert self.target_path, "open file has not been called"
                datasets = self._get_datasets()
                self._datasets = datasets
                self._hdf_stream_provider = HdfStreamProvider(
                    self._directory_provider(),
                    self.target_path,
                    self._datasets,
                )
                for doc in self._hdf_stream_provider.stream_resources():
                    yield "stream_resource", doc
            if self._hdf_stream_provider:
                for doc in self._hdf_stream_provider.stream_data(indices_written):
                    yield "stream_datum", doc

    def close(self) -> None:
        if self._handle_for_h5_file:
            self._handle_for_h5_file.close()
            print("file closed")
            self._handle_for_h5_file = None

    async def observe_indices_written(
        self, timeout=DEFAULT_TIMEOUT
    ) -> AsyncGenerator[int, None]:
        """Yield the number of complete collection events written so far.

        NOTE(review): relies on self.multiplier, which is only set by
        open_file() — confirm callers always open the file first.
        """
        async for num_captured in observe_value(self.mock_signal, timeout=timeout):
            yield num_captured // self.multiplier
@@ -0,0 +1,55 @@
1
+ import asyncio
2
+ from typing import Optional
3
+
4
+ from ophyd_async.core import DirectoryProvider
5
+ from ophyd_async.core.async_status import AsyncStatus
6
+ from ophyd_async.core.detector import DetectorControl, DetectorTrigger
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+
10
class SimPatternDetectorControl(DetectorControl):
    """Detector control that drives a PatternGenerator to write frames."""

    def __init__(
        self,
        pattern_generator: PatternGenerator,
        directory_provider: DirectoryProvider,
        exposure: float = 0.1,
    ) -> None:
        self.pattern_generator: PatternGenerator = pattern_generator
        self.pattern_generator.set_exposure(exposure)
        self.directory_provider: DirectoryProvider = directory_provider
        # Background acquisition task, present only while armed
        self.task: Optional[asyncio.Task] = None
        super().__init__()

    async def arm(
        self,
        num: int,
        trigger: DetectorTrigger = DetectorTrigger.internal,
        exposure: Optional[float] = 0.01,
    ) -> AsyncStatus:
        assert exposure is not None
        # Frame period is exposure plus the detector deadtime
        period: float = exposure + self.get_deadtime(exposure)
        self.task = asyncio.create_task(
            self._coroutine_for_image_writing(exposure, period, num)
        )
        return AsyncStatus(self.task)

    async def disarm(self):
        if self.task is None:
            return
        self.task.cancel()
        try:
            await self.task
        except asyncio.CancelledError:
            pass
        self.task = None

    def get_deadtime(self, exposure: float) -> float:
        """Fixed simulated deadtime of 1ms regardless of exposure."""
        return 0.001

    async def _coroutine_for_image_writing(
        self, exposure: float, period: float, frames_number: int
    ):
        # Write one frame per period until the requested count is reached
        for _ in range(frames_number):
            self.pattern_generator.set_exposure(exposure)
            await asyncio.sleep(period)
            await self.pattern_generator.write_image_to_file()
@@ -0,0 +1,34 @@
1
+ from typing import AsyncGenerator, AsyncIterator, Dict
2
+
3
+ from bluesky.protocols import DataKey
4
+
5
+ from ophyd_async.core import DirectoryProvider
6
+ from ophyd_async.core.detector import DetectorWriter
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+
10
class SimPatternDetectorWriter(DetectorWriter):
    """DetectorWriter that delegates all file handling to a PatternGenerator."""

    pattern_generator: PatternGenerator

    def __init__(
        self, pattern_generator: PatternGenerator, directoryProvider: DirectoryProvider
    ) -> None:
        self.pattern_generator = pattern_generator
        self.directory_provider = directoryProvider

    async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
        return await self.pattern_generator.open_file(
            self.directory_provider, multiplier
        )

    async def close(self) -> None:
        self.pattern_generator.close()

    def collect_stream_docs(self, indices_written: int) -> AsyncIterator:
        return self.pattern_generator.collect_stream_docs(indices_written)

    def observe_indices_written(self, timeout=...) -> AsyncGenerator[int, None]:
        # Bug fix: the timeout argument was previously accepted but ignored.
        # Ellipsis sentinel preserves "use the generator's default" behaviour
        # for existing callers that pass nothing.
        if timeout is ...:
            return self.pattern_generator.observe_indices_written()
        return self.pattern_generator.observe_indices_written(timeout)

    async def get_indices_written(self) -> int:
        return self.pattern_generator.written_images_counter
@@ -0,0 +1,37 @@
1
+ from pathlib import Path
2
+ from typing import Sequence
3
+
4
+ from ophyd_async.core import DirectoryProvider, StaticDirectoryProvider
5
+ from ophyd_async.core.detector import StandardDetector
6
+ from ophyd_async.protocols import AsyncReadable
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+ from .sim_pattern_detector_control import SimPatternDetectorControl
10
+ from .sim_pattern_detector_writer import SimPatternDetectorWriter
11
+
12
+
13
class SimPatternDetector(StandardDetector):
    """StandardDetector that simulates acquisition by writing pattern frames
    to an HDF5 file under a static directory."""

    def __init__(
        self,
        path: Path,
        # Immutable default instead of a shared mutable [] (backward
        # compatible: both are empty Sequences)
        config_sigs: Sequence[AsyncReadable] = (),
        name: str = "sim_pattern_detector",
        writer_timeout: float = 1,
    ) -> None:
        """
        Args:
            path: directory used by the static directory provider
            config_sigs: extra signals reported in read_configuration()
            name: device name
            writer_timeout: timeout passed through to the base detector
        """
        self.directory_provider: DirectoryProvider = StaticDirectoryProvider(path)
        self.pattern_generator = PatternGenerator()
        # Writer and controller share the same generator and directory provider
        writer = SimPatternDetectorWriter(
            pattern_generator=self.pattern_generator,
            directoryProvider=self.directory_provider,
        )
        controller = SimPatternDetectorControl(
            pattern_generator=self.pattern_generator,
            directory_provider=self.directory_provider,
        )
        super().__init__(
            controller=controller,
            writer=writer,
            config_sigs=config_sigs,
            name=name,
            writer_timeout=writer_timeout,
        )