ophyd-async 0.1.0__py3-none-any.whl → 0.3.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. ophyd_async/__init__.py +1 -4
  2. ophyd_async/_version.py +2 -2
  3. ophyd_async/core/__init__.py +91 -19
  4. ophyd_async/core/_providers.py +68 -0
  5. ophyd_async/core/async_status.py +90 -42
  6. ophyd_async/core/detector.py +341 -0
  7. ophyd_async/core/device.py +226 -0
  8. ophyd_async/core/device_save_loader.py +286 -0
  9. ophyd_async/core/flyer.py +85 -0
  10. ophyd_async/core/mock_signal_backend.py +82 -0
  11. ophyd_async/core/mock_signal_utils.py +145 -0
  12. ophyd_async/core/{_device/_signal/signal.py → signal.py} +249 -61
  13. ophyd_async/core/{_device/_backend/signal_backend.py → signal_backend.py} +12 -5
  14. ophyd_async/core/{_device/_backend/sim_signal_backend.py → soft_signal_backend.py} +54 -48
  15. ophyd_async/core/standard_readable.py +261 -0
  16. ophyd_async/core/utils.py +127 -30
  17. ophyd_async/epics/_backend/_aioca.py +62 -43
  18. ophyd_async/epics/_backend/_p4p.py +100 -52
  19. ophyd_async/epics/_backend/common.py +25 -0
  20. ophyd_async/epics/areadetector/__init__.py +16 -15
  21. ophyd_async/epics/areadetector/aravis.py +63 -0
  22. ophyd_async/epics/areadetector/controllers/__init__.py +5 -0
  23. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +52 -0
  24. ophyd_async/epics/areadetector/controllers/aravis_controller.py +78 -0
  25. ophyd_async/epics/areadetector/controllers/kinetix_controller.py +49 -0
  26. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +61 -0
  27. ophyd_async/epics/areadetector/controllers/vimba_controller.py +66 -0
  28. ophyd_async/epics/areadetector/drivers/__init__.py +21 -0
  29. ophyd_async/epics/areadetector/drivers/ad_base.py +107 -0
  30. ophyd_async/epics/areadetector/drivers/aravis_driver.py +38 -0
  31. ophyd_async/epics/areadetector/drivers/kinetix_driver.py +27 -0
  32. ophyd_async/epics/areadetector/drivers/pilatus_driver.py +21 -0
  33. ophyd_async/epics/areadetector/drivers/vimba_driver.py +63 -0
  34. ophyd_async/epics/areadetector/kinetix.py +46 -0
  35. ophyd_async/epics/areadetector/pilatus.py +45 -0
  36. ophyd_async/epics/areadetector/single_trigger_det.py +18 -10
  37. ophyd_async/epics/areadetector/utils.py +91 -13
  38. ophyd_async/epics/areadetector/vimba.py +43 -0
  39. ophyd_async/epics/areadetector/writers/__init__.py +5 -0
  40. ophyd_async/epics/areadetector/writers/_hdfdataset.py +10 -0
  41. ophyd_async/epics/areadetector/writers/_hdffile.py +54 -0
  42. ophyd_async/epics/areadetector/writers/hdf_writer.py +142 -0
  43. ophyd_async/epics/areadetector/writers/nd_file_hdf.py +40 -0
  44. ophyd_async/epics/areadetector/writers/nd_plugin.py +38 -0
  45. ophyd_async/epics/demo/__init__.py +78 -51
  46. ophyd_async/epics/demo/demo_ad_sim_detector.py +35 -0
  47. ophyd_async/epics/motion/motor.py +67 -52
  48. ophyd_async/epics/pvi/__init__.py +3 -0
  49. ophyd_async/epics/pvi/pvi.py +318 -0
  50. ophyd_async/epics/signal/__init__.py +8 -3
  51. ophyd_async/epics/signal/signal.py +27 -10
  52. ophyd_async/log.py +130 -0
  53. ophyd_async/panda/__init__.py +24 -7
  54. ophyd_async/panda/_common_blocks.py +49 -0
  55. ophyd_async/panda/_hdf_panda.py +48 -0
  56. ophyd_async/panda/_panda_controller.py +37 -0
  57. ophyd_async/panda/_table.py +158 -0
  58. ophyd_async/panda/_trigger.py +39 -0
  59. ophyd_async/panda/_utils.py +15 -0
  60. ophyd_async/panda/writers/__init__.py +3 -0
  61. ophyd_async/panda/writers/_hdf_writer.py +220 -0
  62. ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
  63. ophyd_async/plan_stubs/__init__.py +13 -0
  64. ophyd_async/plan_stubs/ensure_connected.py +22 -0
  65. ophyd_async/plan_stubs/fly.py +149 -0
  66. ophyd_async/protocols.py +126 -0
  67. ophyd_async/sim/__init__.py +11 -0
  68. ophyd_async/sim/demo/__init__.py +3 -0
  69. ophyd_async/sim/demo/sim_motor.py +103 -0
  70. ophyd_async/sim/pattern_generator.py +318 -0
  71. ophyd_async/sim/sim_pattern_detector_control.py +55 -0
  72. ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
  73. ophyd_async/sim/sim_pattern_generator.py +37 -0
  74. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/METADATA +35 -67
  75. ophyd_async-0.3.0.dist-info/RECORD +86 -0
  76. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/WHEEL +1 -1
  77. ophyd_async/core/_device/__init__.py +0 -0
  78. ophyd_async/core/_device/_backend/__init__.py +0 -0
  79. ophyd_async/core/_device/_signal/__init__.py +0 -0
  80. ophyd_async/core/_device/device.py +0 -60
  81. ophyd_async/core/_device/device_collector.py +0 -121
  82. ophyd_async/core/_device/device_vector.py +0 -14
  83. ophyd_async/core/_device/standard_readable.py +0 -72
  84. ophyd_async/epics/areadetector/ad_driver.py +0 -18
  85. ophyd_async/epics/areadetector/directory_provider.py +0 -18
  86. ophyd_async/epics/areadetector/hdf_streamer_det.py +0 -167
  87. ophyd_async/epics/areadetector/nd_file_hdf.py +0 -22
  88. ophyd_async/epics/areadetector/nd_plugin.py +0 -13
  89. ophyd_async/epics/signal/pvi_get.py +0 -22
  90. ophyd_async/panda/panda.py +0 -332
  91. ophyd_async-0.1.0.dist-info/RECORD +0 -45
  92. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/LICENSE +0 -0
  93. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/entry_points.txt +0 -0
  94. {ophyd_async-0.1.0.dist-info → ophyd_async-0.3.0.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,318 @@
1
+ from dataclasses import dataclass
2
+ from pathlib import Path
3
+ from typing import (
4
+ Any,
5
+ AsyncGenerator,
6
+ AsyncIterator,
7
+ Dict,
8
+ Iterator,
9
+ List,
10
+ Optional,
11
+ Sequence,
12
+ )
13
+
14
+ import h5py
15
+ import numpy as np
16
+ from bluesky.protocols import DataKey, StreamAsset
17
+ from event_model import (
18
+ ComposeStreamResource,
19
+ ComposeStreamResourceBundle,
20
+ StreamDatum,
21
+ StreamRange,
22
+ StreamResource,
23
+ )
24
+
25
+ from ophyd_async.core import DirectoryInfo, DirectoryProvider
26
+ from ophyd_async.core.mock_signal_backend import MockSignalBackend
27
+ from ophyd_async.core.signal import SignalR, observe_value
28
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT
29
+
30
+ # raw data path
31
+ DATA_PATH = "/entry/data/data"
32
+
33
+ # pixel sum path
34
+ SUM_PATH = "/entry/sum"
35
+
36
+ MAX_UINT8_VALUE = np.iinfo(np.uint8).max
37
+
38
+ SLICE_NAME = "AD_HDF5_SWMR_SLICE"
39
+
40
+
41
+ @dataclass
42
+ class DatasetConfig:
43
+ name: str
44
+ shape: Sequence[int]
45
+ maxshape: tuple[Any, ...] = (None,)
46
+ path: Optional[str] = None
47
+ multiplier: Optional[int] = 1
48
+ dtype: Optional[Any] = None
49
+ fillvalue: Optional[int] = None
50
+
51
+
52
+ def get_full_file_description(
53
+ datasets: List[DatasetConfig], outer_shape: tuple[int, ...]
54
+ ):
55
+ full_file_description: Dict[str, DataKey] = {}
56
+ for d in datasets:
57
+ source = f"soft://{d.name}"
58
+ shape = outer_shape + tuple(d.shape)
59
+ dtype = "number" if d.shape == [1] else "array"
60
+ descriptor = DataKey(
61
+ source=source, shape=shape, dtype=dtype, external="STREAM:"
62
+ )
63
+ key = d.name.replace("/", "_")
64
+ full_file_description[key] = descriptor
65
+ return full_file_description
66
+
67
+
68
+ def generate_gaussian_blob(height: int, width: int) -> np.ndarray:
69
+ """Make a Gaussian Blob with float values in range 0..1"""
70
+ x, y = np.meshgrid(np.linspace(-1, 1, width), np.linspace(-1, 1, height))
71
+ d = np.sqrt(x * x + y * y)
72
+ blob = np.exp(-(d**2))
73
+ return blob
74
+
75
+
76
+ def generate_interesting_pattern(x: float, y: float) -> float:
77
+ """This function is interesting in x and y in range -10..10, returning
78
+ a float value in range 0..1
79
+ """
80
+ z = 0.5 + (np.sin(x) ** 10 + np.cos(10 + y * x) * np.cos(x)) / 2
81
+ return z
82
+
83
+
84
+ class HdfStreamProvider:
85
+ def __init__(
86
+ self,
87
+ directory_info: DirectoryInfo,
88
+ full_file_name: Path,
89
+ datasets: List[DatasetConfig],
90
+ ) -> None:
91
+ self._last_emitted = 0
92
+ self._bundles: List[ComposeStreamResourceBundle] = self._compose_bundles(
93
+ directory_info, full_file_name, datasets
94
+ )
95
+
96
+ def _compose_bundles(
97
+ self,
98
+ directory_info: DirectoryInfo,
99
+ full_file_name: Path,
100
+ datasets: List[DatasetConfig],
101
+ ) -> List[StreamAsset]:
102
+ path = str(full_file_name.relative_to(directory_info.root))
103
+ root = str(directory_info.root)
104
+ bundler_composer = ComposeStreamResource()
105
+
106
+ bundles: List[ComposeStreamResourceBundle] = []
107
+
108
+ bundles = [
109
+ bundler_composer(
110
+ spec=SLICE_NAME,
111
+ root=root,
112
+ resource_path=path,
113
+ data_key=d.name.replace("/", "_"),
114
+ resource_kwargs={
115
+ "path": d.path,
116
+ "multiplier": d.multiplier,
117
+ "timestamps": "/entry/instrument/NDAttributes/NDArrayTimeStamp",
118
+ },
119
+ )
120
+ for d in datasets
121
+ ]
122
+ return bundles
123
+
124
+ def stream_resources(self) -> Iterator[StreamResource]:
125
+ for bundle in self._bundles:
126
+ yield bundle.stream_resource_doc
127
+
128
+ def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
129
+ # Indices are relative to resource
130
+ if indices_written > self._last_emitted:
131
+ updated_stream_range = StreamRange(
132
+ start=self._last_emitted,
133
+ stop=indices_written,
134
+ )
135
+ self._last_emitted = indices_written
136
+ for bundle in self._bundles:
137
+ yield bundle.compose_stream_datum(indices=updated_stream_range)
138
+ return None
139
+
140
+ def close(self) -> None:
141
+ for bundle in self._bundles:
142
+ bundle.close()
143
+
144
+
145
+ class PatternGenerator:
146
+ def __init__(
147
+ self,
148
+ saturation_exposure_time: float = 1,
149
+ detector_width: int = 320,
150
+ detector_height: int = 240,
151
+ ) -> None:
152
+ self.saturation_exposure_time = saturation_exposure_time
153
+ self.exposure = saturation_exposure_time
154
+ self.x = 0.0
155
+ self.y = 0.0
156
+ self.height = detector_height
157
+ self.width = detector_width
158
+ self.written_images_counter: int = 0
159
+
160
+ # it automatically initializes to 0
161
+ self.signal_backend = MockSignalBackend(int)
162
+ self.mock_signal = SignalR(self.signal_backend)
163
+ blob = np.array(
164
+ generate_gaussian_blob(width=detector_width, height=detector_height)
165
+ * MAX_UINT8_VALUE
166
+ )
167
+ self.STARTING_BLOB = blob
168
+ self._hdf_stream_provider: Optional[HdfStreamProvider] = None
169
+ self._handle_for_h5_file: Optional[h5py.File] = None
170
+ self.target_path: Optional[Path] = None
171
+
172
+ async def write_image_to_file(self) -> None:
173
+ assert self._handle_for_h5_file, "no file has been opened!"
174
+ # prepare - resize the fixed hdf5 data structure
175
+ # so that the new image can be written
176
+ new_layer = self.written_images_counter + 1
177
+ target_dimensions = (new_layer, self.height, self.width)
178
+
179
+ # generate the simulated data
180
+ intensity: float = generate_interesting_pattern(self.x, self.y)
181
+ detector_data: np.uint8 = np.uint8(
182
+ self.STARTING_BLOB
183
+ * intensity
184
+ * self.exposure
185
+ / self.saturation_exposure_time
186
+ )
187
+
188
+ self._handle_for_h5_file[DATA_PATH].resize(target_dimensions)
189
+
190
+ print(f"writing image {new_layer}")
191
+ assert self._handle_for_h5_file, "no file has been opened!"
192
+ self._handle_for_h5_file[DATA_PATH].resize(target_dimensions)
193
+
194
+ self._handle_for_h5_file[SUM_PATH].resize((new_layer,))
195
+
196
+ # write data to disc (intermediate step)
197
+ self._handle_for_h5_file[DATA_PATH][self.written_images_counter] = detector_data
198
+ self._handle_for_h5_file[SUM_PATH][self.written_images_counter] = np.sum(
199
+ detector_data
200
+ )
201
+
202
+ # save metadata - so that it's discoverable
203
+ self._handle_for_h5_file[DATA_PATH].flush()
204
+ self._handle_for_h5_file[SUM_PATH].flush()
205
+
206
+ # counter increment is last
207
+ # as only at this point the new data is visible from the outside
208
+ self.written_images_counter += 1
209
+ await self.signal_backend.put(self.written_images_counter)
210
+
211
+ def set_exposure(self, value: float) -> None:
212
+ self.exposure = value
213
+
214
+ def set_x(self, value: float) -> None:
215
+ self.x = value
216
+
217
+ def set_y(self, value: float) -> None:
218
+ self.y = value
219
+
220
+ async def open_file(
221
+ self, directory: DirectoryProvider, multiplier: int = 1
222
+ ) -> Dict[str, DataKey]:
223
+ await self.mock_signal.connect()
224
+
225
+ self.target_path = self._get_new_path(directory)
226
+
227
+ self._handle_for_h5_file = h5py.File(self.target_path, "w", libver="latest")
228
+
229
+ assert self._handle_for_h5_file, "not loaded the file right"
230
+
231
+ datasets = self._get_datasets()
232
+ for d in datasets:
233
+ self._handle_for_h5_file.create_dataset(
234
+ name=d.name,
235
+ shape=d.shape,
236
+ dtype=d.dtype,
237
+ maxshape=d.maxshape,
238
+ )
239
+
240
+ # once datasets written, can switch the model to single writer multiple reader
241
+ self._handle_for_h5_file.swmr_mode = True
242
+
243
+ outer_shape = (multiplier,) if multiplier > 1 else ()
244
+ full_file_description = get_full_file_description(datasets, outer_shape)
245
+
246
+ # cache state to self
247
+ self._datasets = datasets
248
+ self.multiplier = multiplier
249
+ self._directory_provider = directory
250
+ return full_file_description
251
+
252
+ def _get_new_path(self, directory: DirectoryProvider) -> Path:
253
+ info = directory()
254
+ filename = f"{info.prefix}pattern{info.suffix}.h5"
255
+ new_path: Path = info.root / info.resource_dir / filename
256
+ return new_path
257
+
258
+ def _get_datasets(self) -> List[DatasetConfig]:
259
+ raw_dataset = DatasetConfig(
260
+ # name=data_name,
261
+ name=DATA_PATH,
262
+ dtype=np.uint8,
263
+ shape=(1, self.height, self.width),
264
+ maxshape=(None, self.height, self.width),
265
+ )
266
+
267
+ sum_dataset = DatasetConfig(
268
+ name=SUM_PATH,
269
+ dtype=np.float64,
270
+ shape=(1,),
271
+ maxshape=(None,),
272
+ fillvalue=-1,
273
+ )
274
+
275
+ datasets: List[DatasetConfig] = [raw_dataset, sum_dataset]
276
+ return datasets
277
+
278
+ async def collect_stream_docs(
279
+ self, indices_written: int
280
+ ) -> AsyncIterator[StreamAsset]:
281
+ """
282
+ stream resource says "here is a dataset",
283
+ stream datum says "here are N frames in that stream resource",
284
+ you get one stream resource and many stream datums per scan
285
+ """
286
+ if self._handle_for_h5_file:
287
+ self._handle_for_h5_file.flush()
288
+ # when already something was written to the file
289
+ if indices_written:
290
+ # if no frames arrived yet, there's no file to speak of
291
+ # cannot get the full filename the HDF writer will write
292
+ # until the first frame comes in
293
+ if not self._hdf_stream_provider:
294
+ assert self.target_path, "open file has not been called"
295
+ datasets = self._get_datasets()
296
+ self._datasets = datasets
297
+ self._hdf_stream_provider = HdfStreamProvider(
298
+ self._directory_provider(),
299
+ self.target_path,
300
+ self._datasets,
301
+ )
302
+ for doc in self._hdf_stream_provider.stream_resources():
303
+ yield "stream_resource", doc
304
+ if self._hdf_stream_provider:
305
+ for doc in self._hdf_stream_provider.stream_data(indices_written):
306
+ yield "stream_datum", doc
307
+
308
+ def close(self) -> None:
309
+ if self._handle_for_h5_file:
310
+ self._handle_for_h5_file.close()
311
+ print("file closed")
312
+ self._handle_for_h5_file = None
313
+
314
+ async def observe_indices_written(
315
+ self, timeout=DEFAULT_TIMEOUT
316
+ ) -> AsyncGenerator[int, None]:
317
+ async for num_captured in observe_value(self.mock_signal, timeout=timeout):
318
+ yield num_captured // self.multiplier
@@ -0,0 +1,55 @@
1
+ import asyncio
2
+ from typing import Optional
3
+
4
+ from ophyd_async.core import DirectoryProvider
5
+ from ophyd_async.core.async_status import AsyncStatus
6
+ from ophyd_async.core.detector import DetectorControl, DetectorTrigger
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+
10
+ class SimPatternDetectorControl(DetectorControl):
11
+ def __init__(
12
+ self,
13
+ pattern_generator: PatternGenerator,
14
+ directory_provider: DirectoryProvider,
15
+ exposure: float = 0.1,
16
+ ) -> None:
17
+ self.pattern_generator: PatternGenerator = pattern_generator
18
+ self.pattern_generator.set_exposure(exposure)
19
+ self.directory_provider: DirectoryProvider = directory_provider
20
+ self.task: Optional[asyncio.Task] = None
21
+ super().__init__()
22
+
23
+ async def arm(
24
+ self,
25
+ num: int,
26
+ trigger: DetectorTrigger = DetectorTrigger.internal,
27
+ exposure: Optional[float] = 0.01,
28
+ ) -> AsyncStatus:
29
+ assert exposure is not None
30
+ period: float = exposure + self.get_deadtime(exposure)
31
+ task = asyncio.create_task(
32
+ self._coroutine_for_image_writing(exposure, period, num)
33
+ )
34
+ self.task = task
35
+ return AsyncStatus(task)
36
+
37
+ async def disarm(self):
38
+ if self.task:
39
+ self.task.cancel()
40
+ try:
41
+ await self.task
42
+ except asyncio.CancelledError:
43
+ pass
44
+ self.task = None
45
+
46
+ def get_deadtime(self, exposure: float) -> float:
47
+ return 0.001
48
+
49
+ async def _coroutine_for_image_writing(
50
+ self, exposure: float, period: float, frames_number: int
51
+ ):
52
+ for _ in range(frames_number):
53
+ self.pattern_generator.set_exposure(exposure)
54
+ await asyncio.sleep(period)
55
+ await self.pattern_generator.write_image_to_file()
@@ -0,0 +1,34 @@
1
+ from typing import AsyncGenerator, AsyncIterator, Dict
2
+
3
+ from bluesky.protocols import DataKey
4
+
5
+ from ophyd_async.core import DirectoryProvider
6
+ from ophyd_async.core.detector import DetectorWriter
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+
10
+ class SimPatternDetectorWriter(DetectorWriter):
11
+ pattern_generator: PatternGenerator
12
+
13
+ def __init__(
14
+ self, pattern_generator: PatternGenerator, directoryProvider: DirectoryProvider
15
+ ) -> None:
16
+ self.pattern_generator = pattern_generator
17
+ self.directory_provider = directoryProvider
18
+
19
+ async def open(self, multiplier: int = 1) -> Dict[str, DataKey]:
20
+ return await self.pattern_generator.open_file(
21
+ self.directory_provider, multiplier
22
+ )
23
+
24
+ async def close(self) -> None:
25
+ self.pattern_generator.close()
26
+
27
+ def collect_stream_docs(self, indices_written: int) -> AsyncIterator:
28
+ return self.pattern_generator.collect_stream_docs(indices_written)
29
+
30
+ def observe_indices_written(self, timeout=...) -> AsyncGenerator[int, None]:
31
+ return self.pattern_generator.observe_indices_written()
32
+
33
+ async def get_indices_written(self) -> int:
34
+ return self.pattern_generator.written_images_counter
@@ -0,0 +1,37 @@
1
+ from pathlib import Path
2
+ from typing import Sequence
3
+
4
+ from ophyd_async.core import DirectoryProvider, StaticDirectoryProvider
5
+ from ophyd_async.core.detector import StandardDetector
6
+ from ophyd_async.protocols import AsyncReadable
7
+ from ophyd_async.sim.pattern_generator import PatternGenerator
8
+
9
+ from .sim_pattern_detector_control import SimPatternDetectorControl
10
+ from .sim_pattern_detector_writer import SimPatternDetectorWriter
11
+
12
+
13
+ class SimPatternDetector(StandardDetector):
14
+ def __init__(
15
+ self,
16
+ path: Path,
17
+ config_sigs: Sequence[AsyncReadable] = [],
18
+ name: str = "sim_pattern_detector",
19
+ writer_timeout: float = 1,
20
+ ) -> None:
21
+ self.directory_provider: DirectoryProvider = StaticDirectoryProvider(path)
22
+ self.pattern_generator = PatternGenerator()
23
+ writer = SimPatternDetectorWriter(
24
+ pattern_generator=self.pattern_generator,
25
+ directoryProvider=self.directory_provider,
26
+ )
27
+ controller = SimPatternDetectorControl(
28
+ pattern_generator=self.pattern_generator,
29
+ directory_provider=self.directory_provider,
30
+ )
31
+ super().__init__(
32
+ controller=controller,
33
+ writer=writer,
34
+ config_sigs=config_sigs,
35
+ name=name,
36
+ writer_timeout=writer_timeout,
37
+ )
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: ophyd-async
3
- Version: 0.1.0
3
+ Version: 0.3.0
4
4
  Summary: Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
5
5
  Author-email: Tom Cobb <tom.cobb@diamond.ac.uk>
6
6
  License: BSD 3-Clause License
@@ -35,33 +35,26 @@ License: BSD 3-Clause License
35
35
  Project-URL: GitHub, https://github.com/bluesky/ophyd-async
36
36
  Classifier: Development Status :: 3 - Alpha
37
37
  Classifier: License :: OSI Approved :: BSD License
38
- Classifier: Programming Language :: Python :: 3.9
39
38
  Classifier: Programming Language :: Python :: 3.10
40
39
  Classifier: Programming Language :: Python :: 3.11
41
- Requires-Python: >=3.9
42
- Description-Content-Type: text/x-rst
40
+ Requires-Python: >=3.10
41
+ Description-Content-Type: text/markdown
43
42
  License-File: LICENSE
44
43
  Requires-Dist: networkx >=2.0
45
44
  Requires-Dist: numpy
46
45
  Requires-Dist: packaging
47
46
  Requires-Dist: pint
48
- Requires-Dist: bluesky
49
- Requires-Dist: event-model
47
+ Requires-Dist: bluesky >=1.13.0a3
48
+ Requires-Dist: event-model <1.21.0
50
49
  Requires-Dist: p4p
51
- Requires-Dist: typing-extensions ; python_version < "3.8"
50
+ Requires-Dist: pyyaml
51
+ Requires-Dist: colorlog
52
52
  Provides-Extra: ca
53
53
  Requires-Dist: aioca >=1.6 ; extra == 'ca'
54
54
  Provides-Extra: dev
55
55
  Requires-Dist: ophyd-async[pva] ; extra == 'dev'
56
56
  Requires-Dist: ophyd-async[ca] ; extra == 'dev'
57
- Requires-Dist: attrs >=19.3.0 ; extra == 'dev'
58
57
  Requires-Dist: black ; extra == 'dev'
59
- Requires-Dist: bluesky >=1.11.0 ; extra == 'dev'
60
- Requires-Dist: caproto[standard] >=0.4.2rc1 ; extra == 'dev'
61
- Requires-Dist: pytest-codecov ; extra == 'dev'
62
- Requires-Dist: databroker >=1.0.0b1 ; extra == 'dev'
63
- Requires-Dist: doctr ; extra == 'dev'
64
- Requires-Dist: epics-pypdb ; extra == 'dev'
65
58
  Requires-Dist: flake8 ; extra == 'dev'
66
59
  Requires-Dist: flake8-isort ; extra == 'dev'
67
60
  Requires-Dist: Flake8-pyproject ; extra == 'dev'
@@ -70,87 +63,62 @@ Requires-Dist: inflection ; extra == 'dev'
70
63
  Requires-Dist: ipython ; extra == 'dev'
71
64
  Requires-Dist: ipywidgets ; extra == 'dev'
72
65
  Requires-Dist: matplotlib ; extra == 'dev'
73
- Requires-Dist: mypy ; extra == 'dev'
74
66
  Requires-Dist: myst-parser ; extra == 'dev'
75
67
  Requires-Dist: numpydoc ; extra == 'dev'
76
68
  Requires-Dist: ophyd ; extra == 'dev'
69
+ Requires-Dist: pickleshare ; extra == 'dev'
77
70
  Requires-Dist: pipdeptree ; extra == 'dev'
78
71
  Requires-Dist: pre-commit ; extra == 'dev'
79
72
  Requires-Dist: pydata-sphinx-theme >=0.12 ; extra == 'dev'
80
73
  Requires-Dist: pyepics >=3.4.2 ; extra == 'dev'
81
- Requires-Dist: pyside6 ; extra == 'dev'
74
+ Requires-Dist: pyside6 ==6.7.0 ; extra == 'dev'
82
75
  Requires-Dist: pytest ; extra == 'dev'
83
76
  Requires-Dist: pytest-asyncio ; extra == 'dev'
84
77
  Requires-Dist: pytest-cov ; extra == 'dev'
85
78
  Requires-Dist: pytest-faulthandler ; extra == 'dev'
86
79
  Requires-Dist: pytest-rerunfailures ; extra == 'dev'
87
80
  Requires-Dist: pytest-timeout ; extra == 'dev'
81
+ Requires-Dist: ruff ; extra == 'dev'
88
82
  Requires-Dist: sphinx-autobuild ; extra == 'dev'
83
+ Requires-Dist: sphinxcontrib-mermaid ; extra == 'dev'
89
84
  Requires-Dist: sphinx-copybutton ; extra == 'dev'
90
85
  Requires-Dist: sphinx-design ; extra == 'dev'
91
86
  Requires-Dist: tox-direct ; extra == 'dev'
92
87
  Requires-Dist: types-mock ; extra == 'dev'
88
+ Requires-Dist: types-pyyaml ; extra == 'dev'
93
89
  Provides-Extra: pva
94
90
  Requires-Dist: p4p ; extra == 'pva'
95
91
 
96
- ***********
97
- Ophyd Async
98
- ***********
92
+ [![CI](https://github.com/bluesky/ophyd-async/actions/workflows/ci.yml/badge.svg)](https://github.com/bluesky/ophyd-async/actions/workflows/ci.yml)
93
+ [![Coverage](https://codecov.io/gh/bluesky/ophyd-async/branch/main/graph/badge.svg)](https://codecov.io/gh/bluesky/ophyd-async)
94
+ [![PyPI](https://img.shields.io/pypi/v/ophyd-async.svg)](https://pypi.org/project/ophyd-async)
95
+ [![License](https://img.shields.io/badge/License-BSD_3--Clause-blue.svg)](https://opensource.org/licenses/BSD-3-Clause)
99
96
 
100
- |build_status| |coverage| |pypi_version| |license|
97
+ # ophyd-async
101
98
 
102
- Ophyd is a Python library for interfacing with hardware. It provides an
103
- abstraction layer that enables experiment orchestration and data acquisition
104
- code to operate above the specifics of particular devices and control systems.
99
+ Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango.
105
100
 
106
- Ophyd is typically used with the `Bluesky Run Engine`_ for experiment
107
- orchestration and data acquisition. It is also sometimes used in a stand-alone
108
- fashion.
101
+ | Source | <https://github.com/bluesky/ophyd-async> |
102
+ | :-----------: | :-----------------------------------------------: |
103
+ | PyPI | `pip install ophyd-async` |
104
+ | Documentation | <https://bluesky.github.io/ophyd-async> |
105
+ | Releases | <https://github.com/bluesky/ophyd-async/releases> |
109
106
 
110
- Many facilities use ophyd to integrate with control systems that use `EPICS`_ ,
111
- but ophyd's design and some of its objects are also used to integrate with
112
- other control systems.
107
+ Ophyd-async is a Python library for asynchronously interfacing with hardware, intended to
108
+ be used as an abstraction layer that enables experiment orchestration and data acquisition code to operate above the specifics of particular devices and control
109
+ systems.
113
110
 
114
- * Put the details specific to a device or control system behind a **high-level
115
- interface** with methods like ``trigger()``, ``read()``, and ``set(...)``.
116
- * **Group** individual control channels (such as EPICS V3 PVs) into logical
117
- "Devices" to be configured and used as units with internal coordination.
118
- * Assign readings with **names meaningful for data analysis** that will
119
- propagate into metadata.
120
- * **Categorize** readings by "kind" (primary reading, configuration,
121
- engineering/debugging) which can be read selectively.
111
+ Both ophyd and ophyd-async are typically used with the [Bluesky Run Engine][] for experiment orchestration and data acquisition.
122
112
 
123
- ============== ==============================================================
124
- PyPI ``pip install ophyd``
125
- Conda ``conda install -c conda-forge ophyd``
126
- Source code https://github.com/bluesky/ophyd
127
- Documentation https://blueskyproject.io/ophyd
128
- ============== ==============================================================
113
+ While [EPICS][] is the most common control system layer that ophyd-async can interface with, support for other control systems like [Tango][] will be supported in the future. The focus of ophyd-async is:
129
114
 
130
- See the tutorials for usage examples.
115
+ * Asynchronous signal access, opening the possibility for hardware-triggered scanning (also known as fly-scanning)
116
+ * Simpler instantiation of devices (groupings of signals) with less reliance upon complex class hierarchies
131
117
 
132
- .. |build_status| image:: https://github.com/bluesky/ophyd/workflows/Unit%20Tests/badge.svg?branch=master
133
- :target: https://github.com/bluesky/ophyd/actions?query=workflow%3A%22Unit+Tests%22
134
- :alt: Build Status
118
+ [Bluesky Run Engine]: http://blueskyproject.io/bluesky
119
+ [EPICS]: http://www.aps.anl.gov/epics/
120
+ [Tango]: https://www.tango-controls.org/
135
121
 
136
- .. |coverage| image:: https://codecov.io/gh/bluesky/ophyd/branch/master/graph/badge.svg
137
- :target: https://codecov.io/gh/bluesky/ophyd
138
- :alt: Test Coverage
122
+ <!-- README only content. Anything below this line won't be included in index.md -->
139
123
 
140
- .. |pypi_version| image:: https://img.shields.io/pypi/v/ophyd.svg
141
- :target: https://pypi.org/project/ophyd
142
- :alt: Latest PyPI version
143
-
144
- .. |license| image:: https://img.shields.io/badge/License-BSD%203--Clause-blue.svg
145
- :target: https://opensource.org/licenses/BSD-3-Clause
146
- :alt: BSD 3-Clause License
147
-
148
- .. _Bluesky Run Engine: http://blueskyproject.io/bluesky
149
-
150
- .. _EPICS: http://www.aps.anl.gov/epics/
151
-
152
- ..
153
- Anything below this line is used when viewing README.rst and will be replaced
154
- when included in index.rst
155
-
156
- See https://blueskyproject.io/ophyd-async for more detailed documentation.
124
+ See https://bluesky.github.io/ophyd-async for more detailed documentation.