ophyd-async 0.11__py3-none-any.whl → 0.12.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
ophyd_async/_version.py CHANGED
@@ -17,5 +17,5 @@ __version__: str
 __version_tuple__: VERSION_TUPLE
 version_tuple: VERSION_TUPLE

-__version__ = version = '0.11'
-__version_tuple__ = version_tuple = (0, 11)
+__version__ = version = '0.12.1'
+__version_tuple__ = version_tuple = (0, 12, 1)
@@ -17,15 +17,15 @@ class FlyerController(ABC, Generic[T]):

     @abstractmethod
     async def prepare(self, value: T) -> Any:
-        """Move to the start of the flyscan."""
+        """Move to the start of the fly scan."""

     @abstractmethod
     async def kickoff(self):
-        """Start the flyscan."""
+        """Start the fly scan."""

     @abstractmethod
     async def complete(self):
-        """Block until the flyscan is done."""
+        """Block until the fly scan is done."""

     @abstractmethod
     async def stop(self):
@@ -101,7 +101,7 @@ class StandardFlyer(
         return AsyncStatus(self._prepare(value))

     async def _prepare(self, value: T) -> None:
-        # Move to start and setup the flyscan
+        # Move to start and setup the fly scan
         await self._trigger_logic.prepare(value)

     @AsyncStatus.wrap
@@ -1,6 +1,4 @@
 from collections.abc import Iterator
-from pathlib import Path
-from urllib.parse import urlunparse

 from bluesky.protocols import StreamAsset
 from event_model import (  # type: ignore
@@ -48,22 +46,11 @@ class HDFDocumentComposer:

     def __init__(
         self,
-        full_file_name: Path,
+        file_uri: str,
         datasets: list[HDFDatasetDescription],
-        hostname: str = "localhost",
     ) -> None:
         self._last_emitted = 0
-        self._hostname = hostname
-        uri = urlunparse(
-            (
-                "file",
-                self._hostname,
-                str(full_file_name.absolute()),
-                "",
-                "",
-                None,
-            )
-        )
+        uri = file_uri
         bundler_composer = ComposeStreamResource()
         self._bundles: list[ComposeStreamResourceBundle] = [
             bundler_composer(
@@ -1,11 +1,11 @@
-import os
 import uuid
 from abc import abstractmethod
 from collections.abc import Callable
 from dataclasses import dataclass
 from datetime import date
-from pathlib import Path
+from pathlib import PurePath, PureWindowsPath
 from typing import Protocol
+from urllib.parse import urlunparse


 @dataclass
@@ -16,11 +16,36 @@ class PathInfo:
     :param filename: Base filename to use generated by FilenameProvider, w/o extension
     :param create_dir_depth: Optional depth of directories to create if they do not
         exist
+    :param directory_uri: Optional URI to use for reading back resources. If not set,
+        it will be generated from the directory path.
     """

-    directory_path: Path
+    directory_path: PurePath
     filename: str
     create_dir_depth: int = 0
+    directory_uri: str | None = None
+
+    def __post_init__(self):
+        if not self.directory_path.is_absolute():
+            raise ValueError(
+                f"directory_path must be an absolute path, got {self.directory_path}"
+            )
+
+        # If directory uri is not set, set it using the directory path.
+        if self.directory_uri is None:
+            self.directory_uri = urlunparse(
+                (
+                    "file",
+                    "localhost",
+                    f"{self.directory_path.as_posix()}/",
+                    "",
+                    "",
+                    None,
+                )
+            )
+        elif not self.directory_uri.endswith("/"):
+            # Ensure the directory URI ends with a slash.
+            self.directory_uri += "/"


 class FilenameProvider(Protocol):
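
A minimal sketch (not part of the package) of how the new `PathInfo` fills in `directory_uri` from an absolute `directory_path` when none is supplied; it assumes `PathInfo` is still re-exported from `ophyd_async.core`:

```python
from pathlib import PurePosixPath

from ophyd_async.core import PathInfo

# With no directory_uri given, __post_init__ derives a file:// URI
# from the (required absolute) directory path.
info = PathInfo(directory_path=PurePosixPath("/data/2024"), filename="scan_0001")
print(info.directory_uri)  # file://localhost/data/2024/

# A relative directory path is now rejected outright with a ValueError.
```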
@@ -112,11 +137,13 @@ class StaticPathProvider(PathProvider):
     def __init__(
         self,
         filename_provider: FilenameProvider,
-        directory_path: Path | str,
+        directory_path: PurePath,
+        directory_uri: str | None = None,
         create_dir_depth: int = 0,
     ) -> None:
         self._filename_provider = filename_provider
-        self._directory_path = Path(directory_path)
+        self._directory_path = directory_path
+        self._directory_uri = directory_uri
         self._create_dir_depth = create_dir_depth

     def __call__(self, device_name: str | None = None) -> PathInfo:
@@ -124,6 +151,7 @@ class StaticPathProvider(PathProvider):

         return PathInfo(
             directory_path=self._directory_path,
+            directory_uri=self._directory_uri,
             filename=filename,
             create_dir_depth=self._create_dir_depth,
         )
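
An illustrative sketch of the new keyword: an explicit `directory_uri` lets emitted stream resources point at a different location (for example a mounted share) than the path the IOC writes to. The provider, filename, and URI below are assumptions, not taken from the diff:

```python
from pathlib import PurePosixPath

from ophyd_async.core import StaticFilenameProvider, StaticPathProvider

provider = StaticPathProvider(
    StaticFilenameProvider("scan"),
    directory_path=PurePosixPath("/exports/data"),  # where the detector writes
    directory_uri="file://fileserver/data/",        # how readers reach the files
)
info = provider()
assert info.directory_uri == "file://fileserver/data/"
```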
@@ -135,7 +163,8 @@ class AutoIncrementingPathProvider(PathProvider):
     def __init__(
         self,
         filename_provider: FilenameProvider,
-        base_directory_path: Path,
+        base_directory_path: PurePath,
+        base_directory_uri: str | None = None,
         create_dir_depth: int = 0,
         max_digits: int = 5,
         starting_value: int = 0,
@@ -146,6 +175,12 @@ class AutoIncrementingPathProvider(PathProvider):
     ) -> None:
         self._filename_provider = filename_provider
         self._base_directory_path = base_directory_path
+        self._base_directory_uri = base_directory_uri
+        if (
+            self._base_directory_uri is not None
+            and not self._base_directory_uri.endswith("/")
+        ):
+            self._base_directory_uri += "/"
         self._create_dir_depth = create_dir_depth
         self._base_name = base_name
         self._starting_value = starting_value
@@ -174,8 +209,13 @@ class AutoIncrementingPathProvider(PathProvider):
             self._inc_counter = 0
             self._current_value += self._increment

+        directory_uri = None
+        if self._base_directory_uri is not None:
+            directory_uri = f"{self._base_directory_uri}{auto_inc_dir_name}"
+
         return PathInfo(
             directory_path=self._base_directory_path / auto_inc_dir_name,
+            directory_uri=directory_uri,
             filename=filename,
             create_dir_depth=self._create_dir_depth,
         )
@@ -187,34 +227,52 @@ class YMDPathProvider(PathProvider):
     def __init__(
         self,
         filename_provider: FilenameProvider,
-        base_directory_path: Path,
+        base_directory_path: PurePath,
+        base_directory_uri: str | None = None,
         create_dir_depth: int = -3,  # Default to -3 to create YMD dirs
         device_name_as_base_dir: bool = False,
     ) -> None:
         self._filename_provider = filename_provider
-        self._base_directory_path = Path(base_directory_path)
+        self._base_directory_path = base_directory_path
+        self._base_directory_uri = base_directory_uri
+        if (
+            self._base_directory_uri is not None
+            and not self._base_directory_uri.endswith("/")
+        ):
+            self._base_directory_uri += "/"
         self._create_dir_depth = create_dir_depth
         self._device_name_as_base_dir = device_name_as_base_dir

     def __call__(self, device_name: str | None = None) -> PathInfo:
-        sep = os.path.sep
+        path_type = type(self._base_directory_path)
+        if path_type == PureWindowsPath:
+            sep = "\\"
+        else:
+            sep = "/"
+
         current_date = date.today().strftime(f"%Y{sep}%m{sep}%d")
         if device_name is None:
             ymd_dir_path = current_date
         elif self._device_name_as_base_dir:
-            ymd_dir_path = os.path.join(
+            ymd_dir_path = path_type(
                 current_date,
                 device_name,
             )
         else:
-            ymd_dir_path = os.path.join(
+            ymd_dir_path = path_type(
                 device_name,
                 current_date,
             )

         filename = self._filename_provider(device_name)
+
+        directory_uri = None
+        if self._base_directory_uri is not None:
+            directory_uri = f"{self._base_directory_uri}{ymd_dir_path}"
+
         return PathInfo(
             directory_path=self._base_directory_path / ymd_dir_path,
+            directory_uri=directory_uri,
             filename=filename,
             create_dir_depth=self._create_dir_depth,
         )
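
A rough sketch of the Windows-aware behaviour, assuming `YMDPathProvider` and `StaticFilenameProvider` are still exported from `ophyd_async.core`; the base path and URI below are made up:

```python
from pathlib import PureWindowsPath

from ophyd_async.core import StaticFilenameProvider, YMDPathProvider

provider = YMDPathProvider(
    StaticFilenameProvider("scan"),
    PureWindowsPath("C:/data"),
    base_directory_uri="file://windows-ioc/data/",
)
info = provider(device_name="det")
# The date sub-directories are joined with "\\" because the base path is a
# PureWindowsPath, independent of the OS ophyd-async itself runs on.
print(info.directory_path)  # e.g. C:\data\det\2025\11\03
```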
@@ -17,6 +17,7 @@ from bluesky.protocols import (
     Subscribable,
 )
 from event_model import DataKey
+from stamina import retry_context

 from ._device import Device, DeviceConnector
 from ._mock_signal_backend import MockSignalBackend
@@ -89,9 +90,11 @@ class Signal(Device, Generic[SignalDatatypeT]):
         backend: SignalBackend[SignalDatatypeT],
         timeout: float | None = DEFAULT_TIMEOUT,
         name: str = "",
+        attempts: int = 1,
     ) -> None:
         super().__init__(name=name, connector=SignalConnector(backend))
         self._timeout = timeout
+        self._attempts = attempts

     @property
     def source(self) -> str:
@@ -144,7 +147,8 @@ class _SignalCache(Generic[SignalDatatypeT]):
         )
         self._reading = reading
         self._valid.set()
-        for function, want_value in self._listeners.items():
+        items = self._listeners.copy().items()
+        for function, want_value in items:
             self._notify(function, want_value)

     def _notify(
@@ -287,7 +291,16 @@ class SignalW(Signal[SignalDatatypeT], Movable):
             timeout = self._timeout
         source = self._connector.backend.source(self.name, read=False)
         self.log.debug(f"Putting value {value} to backend at source {source}")
-        await _wait_for(self._connector.backend.put(value, wait=wait), timeout, source)
+        async for attempt in retry_context(
+            on=asyncio.TimeoutError,
+            attempts=self._attempts,
+            wait_initial=0,
+            wait_jitter=0,
+        ):
+            with attempt:
+                await _wait_for(
+                    self._connector.backend.put(value, wait=wait), timeout, source
+                )
         self.log.debug(f"Successfully put value {value} to backend at source {source}")

@@ -148,7 +148,7 @@ class SoftSignalBackend(SignalBackend[SignalDatatypeT]):
         """Set the current value, alarm and timestamp."""
         self.reading = Reading(
             value=self.converter.write_value(value),
-            timestamp=time.monotonic(),
+            timestamp=time.time(),
             alarm_severity=0,
         )
         if self.callback:
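
A small sketch of the visible effect: soft-signal readings now carry wall-clock (epoch) timestamps rather than monotonic ones, so they can be compared with timestamps from other sources. It assumes the usual `soft_signal_rw` helper:

```python
import asyncio
import time

from ophyd_async.core import soft_signal_rw


async def main():
    sig = soft_signal_rw(float, initial_value=1.0, name="demo")
    await sig.connect()
    await sig.set(2.0)
    reading = await sig.read()
    # Timestamp is now seconds since the Unix epoch, close to time.time().
    print(reading["demo"]["timestamp"], time.time())


asyncio.run(main())
```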
@@ -14,6 +14,7 @@ from ._core_io import (
     NDCBFlushOnSoftTrgMode,
     NDFileHDFIO,
     NDFileIO,
+    NDFilePluginIO,
     NDPluginBaseIO,
     NDPluginCBIO,
     NDPluginStatsIO,
@@ -45,6 +46,7 @@ __all__ = [
     "ContAcqAreaDetector",
     "NDArrayBaseIO",
     "NDFileIO",
+    "NDFilePluginIO",
     "NDFileHDFIO",
     "NDPluginBaseIO",
     "NDPluginStatsIO",
@@ -131,11 +131,11 @@ class ADCompression(StrictEnum):
     JPEG = "JPEG"


-class NDFileIO(NDPluginBaseIO):
-    """Base class from which file plugins are derived.
+class NDFileIO(NDArrayBaseIO):
+    """Base class from which file writing drivers are derived.

-    This mirrors the interface provided by ADCore/db/NDFile.template.
-    See HTML docs at https://areadetector.github.io/areaDetector/ADCore/NDPluginFile.html
+    This mirrors the interface provided by ADCore/ADApp/Db/NDFile.template.
+    It does not include any plugin-related fields, for that see NDFilePluginIO.
     """

     file_path: A[SignalRW[str], PvSuffix.rbv("FilePath")]
@@ -154,7 +154,17 @@ class NDFileIO(NDPluginBaseIO):
     create_directory: A[SignalRW[int], PvSuffix("CreateDirectory")]


-class NDFileHDFIO(NDFileIO):
+class NDFilePluginIO(NDPluginBaseIO, NDFileIO):
+    """Base class from which file plugins are derived.
+
+    This mirrors the interface provided by ADCore/db/NDFilePlugin.template.
+    See HTML docs at https://areadetector.github.io/areaDetector/ADCore/NDPluginFile.html
+    """
+
+    ...
+
+
+class NDFileHDFIO(NDFilePluginIO):
     """Plugin for storing data in HDF5 file format.

     This mirrors the interface provided by ADCore/db/NDFileHDF5.template.
@@ -1,8 +1,7 @@
 import asyncio
 from collections.abc import AsyncGenerator, AsyncIterator
-from pathlib import Path
+from pathlib import PureWindowsPath
 from typing import Generic, TypeVar, get_args
-from urllib.parse import urlunparse

 from bluesky.protocols import Hints, StreamAsset
 from event_model import (  # type: ignore
@@ -13,14 +12,14 @@ from event_model import ( # type: ignore
 from pydantic import PositiveInt

 from ophyd_async.core._detector import DetectorWriter
-from ophyd_async.core._providers import DatasetDescriber, PathProvider
+from ophyd_async.core._providers import DatasetDescriber, PathInfo, PathProvider
 from ophyd_async.core._signal import (
     observe_value,
     set_and_wait_for_value,
     wait_for_value,
 )
 from ophyd_async.core._status import AsyncStatus
-from ophyd_async.core._utils import DEFAULT_TIMEOUT
+from ophyd_async.core._utils import DEFAULT_TIMEOUT, error_if_none

 # from ophyd_async.epics.adcore._core_logic import ADBaseDatasetDescriber
 from ._core_io import (
@@ -28,6 +27,7 @@ from ._core_io import (
     ADCallbacks,
     NDArrayBaseIO,
     NDFileIO,
+    NDFilePluginIO,
     NDPluginBaseIO,
 )
 from ._utils import ADFileWriteMode
@@ -52,7 +52,8 @@ class ADWriter(DetectorWriter, Generic[NDFileIOT]):
     ) -> None:
         self._plugins = plugins or {}
         self.fileio = fileio
-        self._path_provider = path_provider
+        self._path_provider: PathProvider = path_provider
+        self._path_info: PathInfo | None = None
         self._dataset_describer = dataset_describer
         self._file_extension = file_extension
         self._mimetype = mimetype
@@ -82,19 +83,32 @@ class ADWriter(DetectorWriter, Generic[NDFileIOT]):
         writer = cls(fileio, path_provider, dataset_describer, plugins=plugins)
         return writer

-    async def begin_capture(self, name: str) -> None:
-        info = self._path_provider(device_name=name)
+    async def _begin_capture(self, name: str) -> None:
+        path_info = error_if_none(
+            self._path_info, "Writer must be opened before beginning capture!"
+        )

-        await self.fileio.enable_callbacks.set(ADCallbacks.ENABLE)
+        if isinstance(self.fileio, NDFilePluginIO):
+            await self.fileio.enable_callbacks.set(ADCallbacks.ENABLE)

         # Set the directory creation depth first, since dir creation callback happens
         # when directory path PV is processed.
-        await self.fileio.create_directory.set(info.create_dir_depth)
+        await self.fileio.create_directory.set(path_info.create_dir_depth)
+
+        # Need to ensure that trailing separator is added to the directory path.
+        # When setting the path for windows based AD IOCs, a '/' is added rather than
+        # a '\\', which will cause the readback to never register the same value.
+        dir_path_as_str = str(path_info.directory_path)
+        separator = "/"
+        if isinstance(path_info.directory_path, PureWindowsPath):
+            separator = "\\"
+
+        dir_path_as_str += separator

         await asyncio.gather(
             # See https://github.com/bluesky/ophyd-async/issues/122
-            self.fileio.file_path.set(str(info.directory_path)),
-            self.fileio.file_name.set(info.filename),
+            self.fileio.file_path.set(dir_path_as_str),
+            self.fileio.file_name.set(path_info.filename),
             self.fileio.file_write_mode.set(ADFileWriteMode.STREAM),
             # For non-HDF file writers, use AD file templating mechanism
             # for generating multi-image datasets
@@ -106,7 +120,7 @@ class ADWriter(DetectorWriter, Generic[NDFileIOT]):
         )

         if not await self.fileio.file_path_exists.get_value():
-            msg = f"File path {info.directory_path} for file plugin does not exist"
+            msg = f"Path {dir_path_as_str} doesn't exist or not writable!"
             raise FileNotFoundError(msg)

         # Overwrite num_capture to go forever
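
The trailing-separator handling can be illustrated in isolation; this is a stand-alone sketch mirroring the logic above, not code from the package:

```python
from pathlib import PurePath, PurePosixPath, PureWindowsPath


def dir_with_separator(directory_path: PurePath) -> str:
    # Append the separator matching the path flavour, so a Windows IOC's
    # file_path readback matches what was written.
    separator = "\\" if isinstance(directory_path, PureWindowsPath) else "/"
    return str(directory_path) + separator


print(dir_with_separator(PureWindowsPath("C:/data/2024")))  # C:\data\2024\
print(dir_with_separator(PurePosixPath("/data/2024")))      # /data/2024/
```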
@@ -125,7 +139,9 @@ class ADWriter(DetectorWriter, Generic[NDFileIOT]):
         frame_shape = await self._dataset_describer.shape()
         dtype_numpy = await self._dataset_describer.np_datatype()

-        await self.begin_capture(name)
+        self._path_info = self._path_provider(device_name=name)
+
+        await self._begin_capture(name)

         describe = {
             name: DataKey(
@@ -152,30 +168,22 @@ class ADWriter(DetectorWriter, Generic[NDFileIOT]):
     async def collect_stream_docs(
         self, name: str, indices_written: int
     ) -> AsyncIterator[StreamAsset]:
+        path_info = error_if_none(
+            self._path_info, "Writer must be opened before collecting stream docs!"
+        )
+
         if indices_written:
             if not self._emitted_resource:
-                file_path = Path(await self.fileio.file_path.get_value())
                 file_name = await self.fileio.file_name.get_value()
                 file_template = file_name + "_{:06d}" + self._file_extension

                 frame_shape = await self._dataset_describer.shape()

-                uri = urlunparse(
-                    (
-                        "file",
-                        "localhost",
-                        str(file_path.absolute()) + "/",
-                        "",
-                        "",
-                        None,
-                    )
-                )
-
                 bundler_composer = ComposeStreamResource()

                 self._emitted_resource = bundler_composer(
                     mimetype=self._mimetype,
-                    uri=uri,
+                    uri=str(path_info.directory_uri),
                     # TODO no reference to detector's name
                     data_key=name,
                     parameters={
@@ -1,6 +1,5 @@
 import asyncio
 from collections.abc import AsyncIterator
-from pathlib import Path
 from typing import TypeGuard
 from xml.etree import ElementTree as ET

@@ -65,8 +64,10 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):
             self.fileio.xml_file_name.set(""),
         )

+        self._path_info = self._path_provider(device_name=name)
+
         # Set common AD file plugin params, begin capturing
-        await self.begin_capture(name)
+        await self._begin_capture(name)

         detector_shape = await self._dataset_describer.shape()
         np_dtype = await self._dataset_describer.np_datatype()
@@ -100,7 +101,7 @@ class ADHDFWriter(ADWriter[NDFileHDFIO]):

         self._composer = HDFDocumentComposer(
             # See https://github.com/bluesky/ophyd-async/issues/122
-            Path(await self.fileio.full_file_name.get_value()),
+            f"{self._path_info.directory_uri}{self._path_info.filename}{self._file_extension}",
             self._datasets,
         )

@@ -1,15 +1,15 @@
 from ophyd_async.core import DatasetDescriber, PathProvider

-from ._core_io import NDFileIO, NDPluginBaseIO
+from ._core_io import NDFilePluginIO, NDPluginBaseIO
 from ._core_writer import ADWriter


-class ADJPEGWriter(ADWriter[NDFileIO]):
+class ADJPEGWriter(ADWriter[NDFilePluginIO]):
     default_suffix: str = "JPEG1:"

     def __init__(
         self,
-        fileio: NDFileIO,
+        fileio: NDFilePluginIO,
         path_provider: PathProvider,
         dataset_describer: DatasetDescriber,
         plugins: dict[str, NDPluginBaseIO] | None = None,
@@ -1,15 +1,15 @@
 from ophyd_async.core import DatasetDescriber, PathProvider

-from ._core_io import NDFileIO, NDPluginBaseIO
+from ._core_io import NDFilePluginIO, NDPluginBaseIO
 from ._core_writer import ADWriter


-class ADTIFFWriter(ADWriter[NDFileIO]):
+class ADTIFFWriter(ADWriter[NDFilePluginIO]):
     default_suffix: str = "TIFF1:"

     def __init__(
         self,
-        fileio: NDFileIO,
+        fileio: NDFilePluginIO,
         path_provider: PathProvider,
         dataset_describer: DatasetDescriber,
         plugins: dict[str, NDPluginBaseIO] | None = None,
@@ -250,10 +250,12 @@ class CaSignalBackend(EpicsSignalBackend[SignalDatatypeT]):
         datatype: type[SignalDatatypeT] | None,
         read_pv: str = "",
         write_pv: str = "",
+        all_updates: bool = True,
     ):
         self.converter: CaConverter = DisconnectedCaConverter(float, dbr.DBR_DOUBLE)
         self.initial_values: dict[str, AugmentedValue] = {}
         self.subscription: Subscription | None = None
+        self._all_updates = all_updates
         super().__init__(datatype, read_pv, write_pv)

     def source(self, name: str, read: bool):
@@ -356,4 +358,5 @@ class CaSignalBackend(EpicsSignalBackend[SignalDatatypeT]):
             lambda v: callback(self._make_reading(v)),
             datatype=self.converter.read_dbr,
             format=FORMAT_TIME,
+            all_updates=self._all_updates,
         )
@@ -94,6 +94,7 @@ def epics_signal_rw(
     write_pv: str | None = None,
     name: str = "",
     timeout: float = DEFAULT_TIMEOUT,
+    attempts: int = 1,
 ) -> SignalRW[SignalDatatypeT]:
     """Create a `SignalRW` backed by 1 or 2 EPICS PVs.

@@ -104,7 +105,7 @@ def epics_signal_rw(
     :param timeout: A timeout to be used when reading (not connecting) this signal
     """
     backend = _epics_signal_backend(datatype, read_pv, write_pv or read_pv)
-    return SignalRW(backend, name=name, timeout=timeout)
+    return SignalRW(backend, name=name, timeout=timeout, attempts=attempts)


 def epics_signal_rw_rbv(
@@ -113,6 +114,7 @@ def epics_signal_rw_rbv(
     read_suffix: str = "_RBV",
     name: str = "",
     timeout: float = DEFAULT_TIMEOUT,
+    attempts: int = 1,
 ) -> SignalRW[SignalDatatypeT]:
     """Create a `SignalRW` backed by 1 or 2 EPICS PVs, with a suffix on the readback pv.

@@ -128,7 +130,9 @@ def epics_signal_rw_rbv(
     else:
         read_pv = f"{write_pv}{read_suffix}"

-    return epics_signal_rw(datatype, read_pv, write_pv, name, timeout=timeout)
+    return epics_signal_rw(
+        datatype, read_pv, write_pv, name, timeout=timeout, attempts=attempts
+    )


 def epics_signal_r(
@@ -153,6 +157,7 @@ def epics_signal_w(
     write_pv: str,
     name: str = "",
     timeout: float = DEFAULT_TIMEOUT,
+    attempts: int = 1,
 ) -> SignalW[SignalDatatypeT]:
     """Create a `SignalW` backed by 1 EPICS PVs.

@@ -162,7 +167,7 @@ def epics_signal_w(
     :param timeout: A timeout to be used when reading (not connecting) this signal
     """
     backend = _epics_signal_backend(datatype, write_pv, write_pv)
-    return SignalW(backend, name=name, timeout=timeout)
+    return SignalW(backend, name=name, timeout=timeout, attempts=attempts)


 def epics_signal_x(
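
A sketch of the new retry knob on the EPICS signal factories: with `attempts > 1`, a put that times out is retried (via stamina's `retry_context`) before the `TimeoutError` propagates. The PV name is made up and the import path assumes the usual `ophyd_async.epics.core` module:

```python
from ophyd_async.epics.core import epics_signal_rw

# Each attempt gets the signal's normal put timeout; only a timeout triggers
# a retry, other exceptions are raised immediately.
threshold = epics_signal_rw(float, "SIM:THRESHOLD", attempts=3)
# await threshold.set(4.2)   # inside an async context / RunEngine plan
```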
@@ -108,7 +108,7 @@ class Motor(

     @AsyncStatus.wrap
     async def prepare(self, value: FlyMotorInfo):
-        """Move to the beginning of a suitable run-up distance ready for a flyscan."""
+        """Move to the beginning of a suitable run-up distance ready for a fly scan."""
         self._fly_info = value

         # Velocity, at which motor travels from start_position to end_position, in motor
@@ -147,7 +147,7 @@ class Motor(
         await self.velocity.set(abs(max_speed))
         await self.set(ramp_up_start_pos)

-        # Set velocity we will be using for the flyscan
+        # Set velocity we will be using for the fly scan
         await self.velocity.set(abs(value.velocity))

     @AsyncStatus.wrap
@@ -40,7 +40,7 @@ class TestingIOC:
         assert self._process.stdout  # noqa: S101 # this is to make Pylance happy
         start_time = time.monotonic()
         while "iocRun: All initialization complete" not in self.output:
-            if time.monotonic() - start_time > 10:
+            if time.monotonic() - start_time > 15:
                 self.stop()
                 raise TimeoutError(f"IOC did not start in time:\n{self.output}")
             self.output += self._process.stdout.readline()