ophyd-async 0.2.0__py3-none-any.whl → 0.3a1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (32)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +5 -9
  3. ophyd_async/core/_providers.py +36 -5
  4. ophyd_async/core/async_status.py +3 -3
  5. ophyd_async/core/detector.py +159 -37
  6. ophyd_async/core/device.py +37 -38
  7. ophyd_async/core/device_save_loader.py +96 -23
  8. ophyd_async/core/flyer.py +32 -237
  9. ophyd_async/core/signal.py +11 -4
  10. ophyd_async/core/signal_backend.py +2 -2
  11. ophyd_async/core/sim_signal_backend.py +2 -2
  12. ophyd_async/core/utils.py +75 -29
  13. ophyd_async/epics/_backend/_aioca.py +18 -26
  14. ophyd_async/epics/_backend/_p4p.py +58 -27
  15. ophyd_async/epics/_backend/common.py +20 -0
  16. ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
  17. ophyd_async/epics/areadetector/controllers/pilatus_controller.py +1 -1
  18. ophyd_async/epics/areadetector/writers/_hdffile.py +17 -3
  19. ophyd_async/epics/areadetector/writers/hdf_writer.py +21 -15
  20. ophyd_async/epics/pvi.py +70 -0
  21. ophyd_async/epics/signal/__init__.py +0 -2
  22. ophyd_async/panda/__init__.py +5 -2
  23. ophyd_async/panda/panda.py +41 -94
  24. ophyd_async/panda/panda_controller.py +41 -0
  25. ophyd_async/panda/utils.py +15 -0
  26. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/METADATA +2 -2
  27. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/RECORD +31 -28
  28. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/WHEEL +1 -1
  29. ophyd_async/epics/signal/pvi_get.py +0 -22
  30. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/LICENSE +0 -0
  31. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/entry_points.txt +0 -0
  32. {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a1.dist-info}/top_level.txt +0 -0
ophyd_async/core/utils.py CHANGED
@@ -1,5 +1,18 @@
1
+ from __future__ import annotations
2
+
1
3
  import asyncio
2
- from typing import Awaitable, Callable, Dict, Iterable, List, Optional, Type, TypeVar
4
+ import logging
5
+ from typing import (
6
+ Awaitable,
7
+ Callable,
8
+ Dict,
9
+ Iterable,
10
+ List,
11
+ Optional,
12
+ Type,
13
+ TypeVar,
14
+ Union,
15
+ )
3
16
 
4
17
  import numpy as np
5
18
  from bluesky.protocols import Reading
@@ -11,46 +24,79 @@ Callback = Callable[[T], None]
11
24
  #: monitor updates
12
25
  ReadingValueCallback = Callable[[Reading, T], None]
13
26
  DEFAULT_TIMEOUT = 10.0
27
+ ErrorText = Union[str, Dict[str, Exception]]
14
28
 
15
29
 
16
30
  class NotConnected(Exception):
17
31
  """Exception to be raised if a `Device.connect` is cancelled"""
18
32
 
19
- def __init__(self, *lines: str):
20
- self.lines = list(lines)
33
+ _indent_width = " "
34
+
35
+ def __init__(self, errors: ErrorText):
36
+ """
37
+ NotConnected holds a mapping of device/signal names to
38
+ errors.
39
+
40
+ Parameters
41
+ ----------
42
+ errors: ErrorText
43
+ Mapping of device name to Exception or another NotConnected.
44
+ Alternatively a string with the signal error text.
45
+ """
46
+
47
+ self._errors = errors
48
+
49
+ def _format_sub_errors(self, name: str, error: Exception, indent="") -> str:
50
+ if isinstance(error, NotConnected):
51
+ error_txt = ":" + error.format_error_string(indent + self._indent_width)
52
+ elif isinstance(error, Exception):
53
+ error_txt = ": " + err_str + "\n" if (err_str := str(error)) else "\n"
54
+ else:
55
+ raise RuntimeError(
56
+ f"Unexpected type `{type(error)}`, expected an Exception"
57
+ )
58
+
59
+ string = f"{indent}{name}: {type(error).__name__}" + error_txt
60
+ return string
61
+
62
+ def format_error_string(self, indent="") -> str:
63
+ if not isinstance(self._errors, dict) and not isinstance(self._errors, str):
64
+ raise RuntimeError(
65
+ f"Unexpected type `{type(self._errors)}` " "expected `str` or `dict`"
66
+ )
67
+
68
+ if isinstance(self._errors, str):
69
+ return " " + self._errors + "\n"
70
+
71
+ string = "\n"
72
+ for name, error in self._errors.items():
73
+ string += self._format_sub_errors(name, error, indent=indent)
74
+ return string
21
75
 
22
76
  def __str__(self) -> str:
23
- return "\n".join(self.lines)
77
+ return self.format_error_string(indent="")
24
78
 
25
79
 
26
80
  async def wait_for_connection(**coros: Awaitable[None]):
27
- """Call many underlying signals, accumulating `NotConnected` exceptions
81
+ """Call many underlying signals, accumulating exceptions and returning them
28
82
 
29
- Raises
30
- ------
31
- `NotConnected` if cancelled
83
+ Expected kwargs should be a mapping of names to coroutine tasks to execute.
32
84
  """
33
- ts = {k: asyncio.create_task(c) for (k, c) in coros.items()} # type: ignore
34
- try:
35
- done, pending = await asyncio.wait(ts.values())
36
- except asyncio.CancelledError:
37
- for t in ts.values():
38
- t.cancel()
39
- lines: List[str] = []
40
- for k, t in ts.items():
41
- try:
42
- await t
43
- except NotConnected as e:
44
- if len(e.lines) == 1:
45
- lines.append(f"{k}: {e.lines[0]}")
46
- else:
47
- lines.append(f"{k}:")
48
- lines += [f" {line}" for line in e.lines]
49
- raise NotConnected(*lines)
50
- else:
51
- # Wait for everything to foreground the exceptions
52
- for f in list(done) + list(pending):
53
- await f
85
+ results = await asyncio.gather(*coros.values(), return_exceptions=True)
86
+ exceptions = {}
87
+
88
+ for name, result in zip(coros, results):
89
+ if isinstance(result, Exception):
90
+ exceptions[name] = result
91
+ if not isinstance(result, NotConnected):
92
+ logging.exception(
93
+ f"device `{name}` raised unexpected exception "
94
+ f"{type(result).__name__}",
95
+ exc_info=result,
96
+ )
97
+
98
+ if exceptions:
99
+ raise NotConnected(exceptions)
54
100
 
55
101
 
56
102
  def get_dtype(typ: Type) -> Optional[np.dtype]:
@@ -1,5 +1,5 @@
1
+ import logging
1
2
  import sys
2
- from asyncio import CancelledError
3
3
  from dataclasses import dataclass
4
4
  from enum import Enum
5
5
  from typing import Any, Dict, Optional, Sequence, Type, Union
@@ -8,6 +8,7 @@ from aioca import (
8
8
  FORMAT_CTRL,
9
9
  FORMAT_RAW,
10
10
  FORMAT_TIME,
11
+ CANothing,
11
12
  Subscription,
12
13
  caget,
13
14
  camonitor,
@@ -18,7 +19,6 @@ from bluesky.protocols import Descriptor, Dtype, Reading
18
19
  from epicscorelibs.ca import dbr
19
20
 
20
21
  from ophyd_async.core import (
21
- NotConnected,
22
22
  ReadingValueCallback,
23
23
  SignalBackend,
24
24
  T,
@@ -26,6 +26,9 @@ from ophyd_async.core import (
26
26
  get_unique,
27
27
  wait_for_connection,
28
28
  )
29
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
30
+
31
+ from .common import get_supported_enum_class
29
32
 
30
33
  dbr_to_dtype: Dict[Dbr, Dtype] = {
31
34
  dbr.DBR_STRING: "string",
@@ -89,9 +92,7 @@ class CaEnumConverter(CaConverter):
89
92
 
90
93
  def descriptor(self, source: str, value: AugmentedValue) -> Descriptor:
91
94
  choices = [e.value for e in self.enum_class]
92
- return dict(
93
- source=source, dtype="string", shape=[], choices=choices
94
- ) # type: ignore
95
+ return dict(source=source, dtype="string", shape=[], choices=choices)
95
96
 
96
97
 
97
98
  class DisconnectedCaConverter(CaConverter):
@@ -137,19 +138,7 @@ def make_converter(
137
138
  pv_choices = get_unique(
138
139
  {k: tuple(v.enums) for k, v in values.items()}, "choices"
139
140
  )
140
- if datatype:
141
- if not issubclass(datatype, Enum):
142
- raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
143
- if not issubclass(datatype, str):
144
- raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
145
- choices = tuple(v.value for v in datatype)
146
- if set(choices) != set(pv_choices):
147
- raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
148
- enum_class = datatype
149
- else:
150
- enum_class = Enum( # type: ignore
151
- "GeneratedChoices", {x: x for x in pv_choices}, type=str
152
- )
141
+ enum_class = get_supported_enum_class(pv, datatype, pv_choices)
153
142
  return CaEnumConverter(dbr.DBR_STRING, None, enum_class)
154
143
  else:
155
144
  value = list(values.values())[0]
@@ -184,23 +173,26 @@ class CaSignalBackend(SignalBackend[T]):
184
173
  self.source = f"ca://{self.read_pv}"
185
174
  self.subscription: Optional[Subscription] = None
186
175
 
187
- async def _store_initial_value(self, pv):
176
+ async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
188
177
  try:
189
- self.initial_values[pv] = await caget(pv, format=FORMAT_CTRL, timeout=None)
190
- except CancelledError:
191
- raise NotConnected(self.source)
178
+ self.initial_values[pv] = await caget(
179
+ pv, format=FORMAT_CTRL, timeout=timeout
180
+ )
181
+ except CANothing as exc:
182
+ logging.debug(f"signal ca://{pv} timed out")
183
+ raise NotConnected(f"ca://{pv}") from exc
192
184
 
193
- async def connect(self):
185
+ async def connect(self, timeout: float = DEFAULT_TIMEOUT):
194
186
  _use_pyepics_context_if_imported()
195
187
  if self.read_pv != self.write_pv:
196
188
  # Different, need to connect both
197
189
  await wait_for_connection(
198
- read_pv=self._store_initial_value(self.read_pv),
199
- write_pv=self._store_initial_value(self.write_pv),
190
+ read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
191
+ write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
200
192
  )
201
193
  else:
202
194
  # The same, so only need to connect one
203
- await self._store_initial_value(self.read_pv)
195
+ await self._store_initial_value(self.read_pv, timeout=timeout)
204
196
  self.converter = make_converter(self.datatype, self.initial_values)
205
197
 
206
198
  async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -1,6 +1,7 @@
1
1
  import asyncio
2
2
  import atexit
3
- from asyncio import CancelledError
3
+ import logging
4
+ import time
4
5
  from dataclasses import dataclass
5
6
  from enum import Enum
6
7
  from typing import Any, Dict, List, Optional, Sequence, Type, Union
@@ -10,7 +11,6 @@ from p4p import Value
10
11
  from p4p.client.asyncio import Context, Subscription
11
12
 
12
13
  from ophyd_async.core import (
13
- NotConnected,
14
14
  ReadingValueCallback,
15
15
  SignalBackend,
16
16
  T,
@@ -18,6 +18,9 @@ from ophyd_async.core import (
18
18
  get_unique,
19
19
  wait_for_connection,
20
20
  )
21
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
22
+
23
+ from .common import get_supported_enum_class
21
24
 
22
25
  # https://mdavidsaver.github.io/p4p/values.html
23
26
  specifier_to_dtype: Dict[str, Dtype] = {
@@ -119,9 +122,7 @@ class PvaEnumConverter(PvaConverter):
119
122
 
120
123
  def descriptor(self, source: str, value) -> Descriptor:
121
124
  choices = [e.value for e in self.enum_class]
122
- return dict(
123
- source=source, dtype="string", shape=[], choices=choices
124
- ) # type: ignore
125
+ return dict(source=source, dtype="string", shape=[], choices=choices)
125
126
 
126
127
 
127
128
  class PvaEnumBoolConverter(PvaConverter):
@@ -141,6 +142,32 @@ class PvaTableConverter(PvaConverter):
141
142
  return dict(source=source, dtype="object", shape=[]) # type: ignore
142
143
 
143
144
 
145
+ class PvaDictConverter(PvaConverter):
146
+ def reading(self, value):
147
+ ts = time.time()
148
+ value = value.todict()
149
+ # Alarm severity is vacuously 0 for a table
150
+ return dict(value=value, timestamp=ts, alarm_severity=0)
151
+
152
+ def value(self, value: Value):
153
+ return value.todict()
154
+
155
+ def descriptor(self, source: str, value) -> Descriptor:
156
+ raise NotImplementedError("Describing Dict signals not currently supported")
157
+
158
+ def metadata_fields(self) -> List[str]:
159
+ """
160
+ Fields to request from PVA for metadata.
161
+ """
162
+ return []
163
+
164
+ def value_fields(self) -> List[str]:
165
+ """
166
+ Fields to request from PVA for the value.
167
+ """
168
+ return []
169
+
170
+
144
171
  class DisconnectedPvaConverter(PvaConverter):
145
172
  def __getattribute__(self, __name: str) -> Any:
146
173
  raise NotImplementedError("No PV has been set as connect() has not been called")
@@ -149,7 +176,9 @@ class DisconnectedPvaConverter(PvaConverter):
149
176
  def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
150
177
  pv = list(values)[0]
151
178
  typeid = get_unique({k: v.getID() for k, v in values.items()}, "typeids")
152
- typ = get_unique({k: type(v["value"]) for k, v in values.items()}, "value types")
179
+ typ = get_unique(
180
+ {k: type(v.get("value")) for k, v in values.items()}, "value types"
181
+ )
153
182
  if "NTScalarArray" in typeid and typ == list:
154
183
  # Waveform of strings, check we wanted this
155
184
  if datatype and datatype != Sequence[str]:
@@ -185,24 +214,15 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConve
185
214
  pv_choices = get_unique(
186
215
  {k: tuple(v["value"]["choices"]) for k, v in values.items()}, "choices"
187
216
  )
188
- if datatype:
189
- if not issubclass(datatype, Enum):
190
- raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
191
- choices = tuple(v.value for v in datatype)
192
- if set(choices) != set(pv_choices):
193
- raise TypeError(f"{pv} has choices {pv_choices} not {choices}")
194
- enum_class = datatype
195
- else:
196
- enum_class = Enum( # type: ignore
197
- "GeneratedChoices", {x or "_": x for x in pv_choices}, type=str
198
- )
199
- return PvaEnumConverter(enum_class)
217
+ return PvaEnumConverter(get_supported_enum_class(pv, datatype, pv_choices))
200
218
  elif "NTScalar" in typeid:
201
219
  if datatype and not issubclass(typ, datatype):
202
220
  raise TypeError(f"{pv} has type {typ.__name__} not {datatype.__name__}")
203
221
  return PvaConverter()
204
222
  elif "NTTable" in typeid:
205
223
  return PvaTableConverter()
224
+ elif "structure" in typeid:
225
+ return PvaDictConverter()
206
226
  else:
207
227
  raise TypeError(f"{pv}: Unsupported typeid {typeid}")
208
228
 
@@ -233,22 +253,25 @@ class PvaSignalBackend(SignalBackend[T]):
233
253
 
234
254
  return PvaSignalBackend._ctxt
235
255
 
236
- async def _store_initial_value(self, pv):
256
+ async def _store_initial_value(self, pv, timeout: float = DEFAULT_TIMEOUT):
237
257
  try:
238
- self.initial_values[pv] = await self.ctxt.get(pv)
239
- except CancelledError:
240
- raise NotConnected(self.source)
258
+ self.initial_values[pv] = await asyncio.wait_for(
259
+ self.ctxt.get(pv), timeout=timeout
260
+ )
261
+ except asyncio.TimeoutError as exc:
262
+ logging.debug(f"signal pva://{pv} timed out", exc_info=True)
263
+ raise NotConnected(f"pva://{pv}") from exc
241
264
 
242
- async def connect(self):
265
+ async def connect(self, timeout: float = DEFAULT_TIMEOUT):
243
266
  if self.read_pv != self.write_pv:
244
267
  # Different, need to connect both
245
268
  await wait_for_connection(
246
- read_pv=self._store_initial_value(self.read_pv),
247
- write_pv=self._store_initial_value(self.write_pv),
269
+ read_pv=self._store_initial_value(self.read_pv, timeout=timeout),
270
+ write_pv=self._store_initial_value(self.write_pv, timeout=timeout),
248
271
  )
249
272
  else:
250
273
  # The same, so only need to connect one
251
- await self._store_initial_value(self.read_pv)
274
+ await self._store_initial_value(self.read_pv, timeout=timeout)
252
275
  self.converter = make_converter(self.datatype, self.initial_values)
253
276
 
254
277
  async def put(self, value: Optional[T], wait=True, timeout=None):
@@ -257,7 +280,15 @@ class PvaSignalBackend(SignalBackend[T]):
257
280
  else:
258
281
  write_value = self.converter.write_value(value)
259
282
  coro = self.ctxt.put(self.write_pv, dict(value=write_value), wait=wait)
260
- await asyncio.wait_for(coro, timeout)
283
+ try:
284
+ await asyncio.wait_for(coro, timeout)
285
+ except asyncio.TimeoutError as exc:
286
+ logging.debug(
287
+ f"signal pva://{self.write_pv} timed out \
288
+ put value: {write_value}",
289
+ exc_info=True,
290
+ )
291
+ raise NotConnected(f"pva://{self.write_pv}") from exc
261
292
 
262
293
  async def get_descriptor(self) -> Descriptor:
263
294
  value = await self.ctxt.get(self.read_pv)
@@ -0,0 +1,20 @@
1
+ from enum import Enum
2
+ from typing import Any, Optional, Tuple, Type
3
+
4
+
5
+ def get_supported_enum_class(
6
+ pv: str,
7
+ datatype: Optional[Type[Enum]],
8
+ pv_choices: Tuple[Any, ...],
9
+ ) -> Type[Enum]:
10
+ if datatype:
11
+ if not issubclass(datatype, Enum):
12
+ raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
13
+ if not issubclass(datatype, str):
14
+ raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
15
+ choices = tuple(v.value for v in datatype)
16
+ if set(choices).difference(pv_choices):
17
+ raise TypeError(f"{pv} has choices {pv_choices}: not all in {choices}")
18
+ return Enum(
19
+ "GeneratedChoices", {x or "_": x for x in pv_choices}, type=str
20
+ ) # type: ignore
@@ -30,8 +30,8 @@ class ADSimController(DetectorControl):
30
30
 
31
31
  async def arm(
32
32
  self,
33
+ num: int,
33
34
  trigger: DetectorTrigger = DetectorTrigger.internal,
34
- num: int = 0,
35
35
  exposure: Optional[float] = None,
36
36
  ) -> AsyncStatus:
37
37
  assert (
@@ -32,8 +32,8 @@ class PilatusController(DetectorControl):
32
32
 
33
33
  async def arm(
34
34
  self,
35
+ num: int,
35
36
  trigger: DetectorTrigger = DetectorTrigger.internal,
36
- num: int = 0,
37
37
  exposure: Optional[float] = None,
38
38
  ) -> AsyncStatus:
39
39
  await asyncio.gather(
@@ -1,19 +1,33 @@
1
+ from pathlib import Path
1
2
  from typing import Iterator, List
2
3
 
3
4
  from event_model import StreamDatum, StreamResource, compose_stream_resource
4
5
 
6
+ from ophyd_async.core import DirectoryInfo
7
+
5
8
  from ._hdfdataset import _HDFDataset
6
9
 
7
10
 
8
11
  class _HDFFile:
9
- def __init__(self, full_file_name: str, datasets: List[_HDFDataset]) -> None:
12
+ """
13
+ :param directory_info: Contains information about how to construct a StreamResource
14
+ :param full_file_name: Absolute path to the file to be written
15
+ :param datasets: Datasets to write into the file
16
+ """
17
+
18
+ def __init__(
19
+ self,
20
+ directory_info: DirectoryInfo,
21
+ full_file_name: Path,
22
+ datasets: List[_HDFDataset],
23
+ ) -> None:
10
24
  self._last_emitted = 0
11
25
  self._bundles = [
12
26
  compose_stream_resource(
13
27
  spec="AD_HDF5_SWMR_SLICE",
14
- root="/",
28
+ root=str(directory_info.root),
15
29
  data_key=ds.name,
16
- resource_path=full_file_name,
30
+ resource_path=str(full_file_name.relative_to(directory_info.root)),
17
31
  resource_kwargs={
18
32
  "path": ds.path,
19
33
  "multiplier": ds.multiplier,
@@ -1,7 +1,8 @@
1
1
  import asyncio
2
- from typing import AsyncIterator, Dict, List, Optional
2
+ from pathlib import Path
3
+ from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional
3
4
 
4
- from bluesky.protocols import Asset, Descriptor, Hints
5
+ from bluesky.protocols import Descriptor, Hints, StreamAsset
5
6
 
6
7
  from ophyd_async.core import (
7
8
  DEFAULT_TIMEOUT,
@@ -13,6 +14,7 @@ from ophyd_async.core import (
13
14
  set_and_wait_for_value,
14
15
  wait_for_value,
15
16
  )
17
+ from ophyd_async.core.signal import observe_value
16
18
 
17
19
  from ._hdfdataset import _HDFDataset
18
20
  from ._hdffile import _HDFFile
@@ -45,15 +47,16 @@ class HDFWriter(DetectorWriter):
45
47
  self.hdf.num_extra_dims.set(0),
46
48
  self.hdf.lazy_open.set(True),
47
49
  self.hdf.swmr_mode.set(True),
48
- self.hdf.file_path.set(info.directory_path),
49
- self.hdf.file_name.set(f"{info.filename_prefix}{self.hdf.name}"),
50
+ # See https://github.com/bluesky/ophyd-async/issues/122
51
+ self.hdf.file_path.set(str(info.root / info.resource_dir)),
52
+ self.hdf.file_name.set(f"{info.prefix}{self.hdf.name}{info.suffix}"),
50
53
  self.hdf.file_template.set("%s/%s.h5"),
51
54
  self.hdf.file_write_mode.set(FileWriteMode.stream),
52
55
  )
53
56
 
54
57
  assert (
55
58
  await self.hdf.file_path_exists.get_value()
56
- ), f"File path {info.directory_path} for hdf plugin does not exist"
59
+ ), f"File path {self.hdf.file_path.get_value()} for hdf plugin does not exist"
57
60
 
58
61
  # Overwrite num_capture to go forever
59
62
  await self.hdf.num_capture.set(0)
@@ -88,26 +91,29 @@ class HDFWriter(DetectorWriter):
88
91
  }
89
92
  return describe
90
93
 
91
- async def wait_for_index(
92
- self, index: int, timeout: Optional[float] = DEFAULT_TIMEOUT
93
- ):
94
- def matcher(value: int) -> bool:
95
- return value // self._multiplier >= index
96
-
97
- matcher.__name__ = f"index_at_least_{index}"
98
- await wait_for_value(self.hdf.num_captured, matcher, timeout=timeout)
94
+ async def observe_indices_written(
95
+ self, timeout=DEFAULT_TIMEOUT
96
+ ) -> AsyncGenerator[int, None]:
97
+ """Wait until a specific index is ready to be collected"""
98
+ async for num_captured in observe_value(self.hdf.num_captured, timeout):
99
+ yield num_captured // self._multiplier
99
100
 
100
101
  async def get_indices_written(self) -> int:
101
102
  num_captured = await self.hdf.num_captured.get_value()
102
103
  return num_captured // self._multiplier
103
104
 
104
- async def collect_stream_docs(self, indices_written: int) -> AsyncIterator[Asset]:
105
+ async def collect_stream_docs(
106
+ self, indices_written: int
107
+ ) -> AsyncIterator[StreamAsset]:
105
108
  # TODO: fail if we get dropped frames
106
109
  await self.hdf.flush_now.set(True)
107
110
  if indices_written:
108
111
  if not self._file:
109
112
  self._file = _HDFFile(
110
- await self.hdf.full_file_name.get_value(), self._datasets
113
+ self._directory_provider(),
114
+ # See https://github.com/bluesky/ophyd-async/issues/122
115
+ Path(await self.hdf.full_file_name.get_value()),
116
+ self._datasets,
111
117
  )
112
118
  for doc in self._file.stream_resources():
113
119
  yield "stream_resource", doc
@@ -0,0 +1,70 @@
1
+ from typing import Callable, Dict, FrozenSet, Optional, Type, TypedDict, TypeVar
2
+
3
+ from ophyd_async.core.signal import Signal
4
+ from ophyd_async.core.signal_backend import SignalBackend
5
+ from ophyd_async.core.utils import DEFAULT_TIMEOUT
6
+ from ophyd_async.epics._backend._p4p import PvaSignalBackend
7
+ from ophyd_async.epics.signal.signal import (
8
+ epics_signal_r,
9
+ epics_signal_rw,
10
+ epics_signal_w,
11
+ epics_signal_x,
12
+ )
13
+
14
+ T = TypeVar("T")
15
+
16
+
17
+ _pvi_mapping: Dict[FrozenSet[str], Callable[..., Signal]] = {
18
+ frozenset({"r", "w"}): lambda dtype, read_pv, write_pv: epics_signal_rw(
19
+ dtype, read_pv, write_pv
20
+ ),
21
+ frozenset({"rw"}): lambda dtype, read_pv, write_pv: epics_signal_rw(
22
+ dtype, read_pv, write_pv
23
+ ),
24
+ frozenset({"r"}): lambda dtype, read_pv, _: epics_signal_r(dtype, read_pv),
25
+ frozenset({"w"}): lambda dtype, _, write_pv: epics_signal_w(dtype, write_pv),
26
+ frozenset({"x"}): lambda _, __, write_pv: epics_signal_x(write_pv),
27
+ }
28
+
29
+
30
+ class PVIEntry(TypedDict, total=False):
31
+ d: str
32
+ r: str
33
+ rw: str
34
+ w: str
35
+ x: str
36
+
37
+
38
+ async def pvi_get(
39
+ read_pv: str, timeout: float = DEFAULT_TIMEOUT
40
+ ) -> Dict[str, PVIEntry]:
41
+ """Makes a PvaSignalBackend purely to connect to PVI information.
42
+
43
+ This backend is simply thrown away at the end of this method. This is useful
44
+ because the backend handles a CancelledError exception that gets thrown on
45
+ timeout, and therefore can be used for error reporting."""
46
+ backend: SignalBackend = PvaSignalBackend(None, read_pv, read_pv)
47
+ await backend.connect(timeout=timeout)
48
+ d: Dict[str, Dict[str, Dict[str, str]]] = await backend.get_value()
49
+ pv_info = d.get("pvi") or {}
50
+ result = {}
51
+
52
+ for attr_name, attr_info in pv_info.items():
53
+ result[attr_name] = PVIEntry(**attr_info) # type: ignore
54
+
55
+ return result
56
+
57
+
58
+ def make_signal(signal_pvi: PVIEntry, dtype: Optional[Type[T]] = None) -> Signal[T]:
59
+ """Make a signal.
60
+
61
+ This assumes datatype is None so it can be used to create dynamic signals.
62
+ """
63
+ operations = frozenset(signal_pvi.keys())
64
+ pvs = [signal_pvi[i] for i in operations] # type: ignore
65
+ signal_factory = _pvi_mapping[operations]
66
+
67
+ write_pv = "pva://" + pvs[0]
68
+ read_pv = write_pv if len(pvs) < 2 else "pva://" + pvs[1]
69
+
70
+ return signal_factory(dtype, read_pv, write_pv)
@@ -1,8 +1,6 @@
1
- from .pvi_get import pvi_get
2
1
  from .signal import epics_signal_r, epics_signal_rw, epics_signal_w, epics_signal_x
3
2
 
4
3
  __all__ = [
5
- "pvi_get",
6
4
  "epics_signal_r",
7
5
  "epics_signal_rw",
8
6
  "epics_signal_w",
@@ -1,4 +1,5 @@
1
- from .panda import PandA, PcapBlock, PulseBlock, PVIEntry, SeqBlock, SeqTable, pvi
1
+ from .panda import PandA, PcapBlock, PulseBlock, PVIEntry, SeqBlock, SeqTable
2
+ from .panda_controller import PandaPcapController
2
3
  from .table import (
3
4
  SeqTable,
4
5
  SeqTableRow,
@@ -6,6 +7,7 @@ from .table import (
6
7
  seq_table_from_arrays,
7
8
  seq_table_from_rows,
8
9
  )
10
+ from .utils import phase_sorter
9
11
 
10
12
  __all__ = [
11
13
  "PandA",
@@ -18,5 +20,6 @@ __all__ = [
18
20
  "SeqTable",
19
21
  "SeqTableRow",
20
22
  "SeqTrigger",
21
- "pvi",
23
+ "phase_sorter",
24
+ "PandaPcapController",
22
25
  ]