ophyd-async 0.7.0__py3-none-any.whl → 0.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. ophyd_async/_version.py +2 -2
  2. ophyd_async/core/__init__.py +34 -9
  3. ophyd_async/core/_detector.py +5 -10
  4. ophyd_async/core/_device.py +170 -68
  5. ophyd_async/core/_device_filler.py +269 -0
  6. ophyd_async/core/_device_save_loader.py +6 -7
  7. ophyd_async/core/_mock_signal_backend.py +35 -40
  8. ophyd_async/core/_mock_signal_utils.py +25 -16
  9. ophyd_async/core/_protocol.py +28 -8
  10. ophyd_async/core/_readable.py +133 -134
  11. ophyd_async/core/_signal.py +219 -163
  12. ophyd_async/core/_signal_backend.py +131 -64
  13. ophyd_async/core/_soft_signal_backend.py +131 -194
  14. ophyd_async/core/_status.py +22 -6
  15. ophyd_async/core/_table.py +102 -100
  16. ophyd_async/core/_utils.py +143 -32
  17. ophyd_async/epics/adaravis/_aravis_controller.py +2 -2
  18. ophyd_async/epics/adaravis/_aravis_io.py +8 -6
  19. ophyd_async/epics/adcore/_core_io.py +5 -7
  20. ophyd_async/epics/adcore/_core_logic.py +3 -1
  21. ophyd_async/epics/adcore/_hdf_writer.py +2 -2
  22. ophyd_async/epics/adcore/_single_trigger.py +6 -10
  23. ophyd_async/epics/adcore/_utils.py +15 -10
  24. ophyd_async/epics/adkinetix/__init__.py +2 -1
  25. ophyd_async/epics/adkinetix/_kinetix_controller.py +6 -3
  26. ophyd_async/epics/adkinetix/_kinetix_io.py +4 -5
  27. ophyd_async/epics/adpilatus/_pilatus_controller.py +2 -2
  28. ophyd_async/epics/adpilatus/_pilatus_io.py +3 -4
  29. ophyd_async/epics/adsimdetector/_sim_controller.py +2 -2
  30. ophyd_async/epics/advimba/__init__.py +4 -1
  31. ophyd_async/epics/advimba/_vimba_controller.py +6 -3
  32. ophyd_async/epics/advimba/_vimba_io.py +8 -9
  33. ophyd_async/epics/core/__init__.py +26 -0
  34. ophyd_async/epics/core/_aioca.py +323 -0
  35. ophyd_async/epics/core/_epics_connector.py +53 -0
  36. ophyd_async/epics/core/_epics_device.py +13 -0
  37. ophyd_async/epics/core/_p4p.py +383 -0
  38. ophyd_async/epics/core/_pvi_connector.py +91 -0
  39. ophyd_async/epics/core/_signal.py +171 -0
  40. ophyd_async/epics/core/_util.py +61 -0
  41. ophyd_async/epics/demo/_mover.py +4 -5
  42. ophyd_async/epics/demo/_sensor.py +14 -13
  43. ophyd_async/epics/eiger/_eiger.py +1 -2
  44. ophyd_async/epics/eiger/_eiger_controller.py +7 -2
  45. ophyd_async/epics/eiger/_eiger_io.py +3 -5
  46. ophyd_async/epics/eiger/_odin_io.py +5 -5
  47. ophyd_async/epics/motor.py +4 -5
  48. ophyd_async/epics/signal.py +11 -0
  49. ophyd_async/epics/testing/__init__.py +24 -0
  50. ophyd_async/epics/testing/_example_ioc.py +105 -0
  51. ophyd_async/epics/testing/_utils.py +78 -0
  52. ophyd_async/epics/testing/test_records.db +152 -0
  53. ophyd_async/epics/testing/test_records_pva.db +177 -0
  54. ophyd_async/fastcs/core.py +9 -0
  55. ophyd_async/fastcs/panda/__init__.py +4 -4
  56. ophyd_async/fastcs/panda/_block.py +18 -13
  57. ophyd_async/fastcs/panda/_control.py +3 -5
  58. ophyd_async/fastcs/panda/_hdf_panda.py +5 -19
  59. ophyd_async/fastcs/panda/_table.py +30 -52
  60. ophyd_async/fastcs/panda/_trigger.py +8 -8
  61. ophyd_async/fastcs/panda/_writer.py +2 -5
  62. ophyd_async/plan_stubs/_ensure_connected.py +20 -13
  63. ophyd_async/plan_stubs/_fly.py +2 -2
  64. ophyd_async/plan_stubs/_nd_attributes.py +5 -4
  65. ophyd_async/py.typed +0 -0
  66. ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py +1 -2
  67. ophyd_async/sim/demo/_sim_motor.py +3 -4
  68. ophyd_async/tango/__init__.py +0 -45
  69. ophyd_async/tango/{signal → core}/__init__.py +9 -6
  70. ophyd_async/tango/core/_base_device.py +132 -0
  71. ophyd_async/tango/{signal → core}/_signal.py +42 -53
  72. ophyd_async/tango/{base_devices → core}/_tango_readable.py +3 -4
  73. ophyd_async/tango/{signal → core}/_tango_transport.py +38 -40
  74. ophyd_async/tango/demo/_counter.py +12 -23
  75. ophyd_async/tango/demo/_mover.py +13 -13
  76. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/METADATA +52 -55
  77. ophyd_async-0.8.0.dist-info/RECORD +116 -0
  78. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/WHEEL +1 -1
  79. ophyd_async/epics/pvi/__init__.py +0 -3
  80. ophyd_async/epics/pvi/_pvi.py +0 -338
  81. ophyd_async/epics/signal/__init__.py +0 -21
  82. ophyd_async/epics/signal/_aioca.py +0 -378
  83. ophyd_async/epics/signal/_common.py +0 -57
  84. ophyd_async/epics/signal/_epics_transport.py +0 -34
  85. ophyd_async/epics/signal/_p4p.py +0 -518
  86. ophyd_async/epics/signal/_signal.py +0 -114
  87. ophyd_async/tango/base_devices/__init__.py +0 -4
  88. ophyd_async/tango/base_devices/_base_device.py +0 -225
  89. ophyd_async-0.7.0.dist-info/RECORD +0 -108
  90. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/LICENSE +0 -0
  91. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/entry_points.txt +0 -0
  92. {ophyd_async-0.7.0.dist-info → ophyd_async-0.8.0.dist-info}/top_level.txt +0 -0
ophyd_async/epics/testing/test_records_pva.db ADDED
@@ -0,0 +1,177 @@
+ record(waveform, "$(device)int8a") {
+     field(NELM, "3")
+     field(FTVL, "CHAR")
+     field(INP, {const:[-128, 127]})
+     field(PINI, "YES")
+ }
+
+ record(waveform, "$(device)uint16a") {
+     field(NELM, "3")
+     field(FTVL, "USHORT")
+     field(INP, {const:[0, 65535]})
+     field(PINI, "YES")
+ }
+
+ record(waveform, "$(device)uint32a") {
+     field(NELM, "3")
+     field(FTVL, "ULONG")
+     field(INP, {const:[0, 4294967295]})
+     field(PINI, "YES")
+ }
+
+ record(waveform, "$(device)int64a") {
+     field(NELM, "3")
+     field(FTVL, "INT64")
+     # Can't do 64-bit int with JSON numbers in a const link...
+     field(INP, {const:[-2147483649, 2147483648]})
+     field(PINI, "YES")
+ }
+
+ record(waveform, "$(device)uint64a") {
+     field(NELM, "3")
+     field(FTVL, "UINT64")
+     field(INP, {const:[0, 4294967297]})
+     field(PINI, "YES")
+ }
+
+ record(waveform, "$(device)table:labels") {
+     field(FTVL, "STRING")
+     field(NELM, "5")
+     field(INP, {const:["Bool", "Int", "Float", "Str", "Enum"]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "+id": "epics:nt/NTTable:1.0",
+             "labels": {
+                 "+type": "plain",
+                 "+channel": "VAL"
+             }
+         }
+     })
+ }
+
+ record(waveform, "$(device)table:bool")
+ {
+     field(FTVL, "UCHAR")
+     field(NELM, "4096")
+     field(INP, {const:[false, false, true, true]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "value.bool": {
+                 "+type": "plain",
+                 "+channel": "VAL",
+                 "+putorder": 1
+             }
+         }
+     })
+ }
+
+ record(waveform, "$(device)table:int")
+ {
+     field(FTVL, "LONG")
+     field(NELM, "4096")
+     field(INP, {const:[1, 8, -9, 32]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "value.int": {
+                 "+type": "plain",
+                 "+channel": "VAL",
+                 "+putorder": 2
+             }
+         }
+     })
+ }
+
+ record(waveform, "$(device)table:float")
+ {
+     field(FTVL, "DOUBLE")
+     field(NELM, "4096")
+     field(INP, {const:[1.8, 8.2, -6, 32.9887]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "value.float": {
+                 "+type": "plain",
+                 "+channel": "VAL",
+                 "+putorder": 3
+             }
+         }
+     })
+ }
+
+ record(waveform, "$(device)table:str")
+ {
+     field(FTVL, "STRING")
+     field(NELM, "4096")
+     field(INP, {const:["Hello", "World", "Foo", "Bar"]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "value.str": {
+                 "+type": "plain",
+                 "+channel": "VAL",
+                 "+putorder": 4
+             }
+         }
+     })
+ }
+
+ record(waveform, "$(device)table:enum")
+ {
+     field(FTVL, "STRING")
+     field(NELM, "4096")
+     field(INP, {const:["Aaa", "Bbb", "Aaa", "Ccc"]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)table": {
+             "value.enum": {
+                 "+type": "plain",
+                 "+channel": "VAL",
+                 "+putorder": 5,
+                 "+trigger": "*",
+             },
+             "": {"+type": "meta", "+channel": "VAL"}
+         }
+     })
+ }
+
+ record(longout, "$(device)ntndarray:ArraySize0_RBV") {
+     field(VAL, "3")
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)ntndarray":{
+             "dimension[0].size":{+channel:"VAL", +type:"plain", +putorder:0}
+         }
+     })
+ }
+
+ record(longout, "$(device)ntndarray:ArraySize1_RBV") {
+     field(VAL, "2")
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)ntndarray":{
+             "dimension[1].size":{+channel:"VAL", +type:"plain", +putorder:0}
+         }
+     })
+ }
+
+ record(waveform, "$(device)ntndarray:data")
+ {
+     field(FTVL, "INT64")
+     field(NELM, "6")
+     field(INP, {const:[0, 0, 0, 0, 0, 0]})
+     field(PINI, "YES")
+     info(Q:group, {
+         "$(device)ntndarray":{
+             +id:"epics:nt/NTNDArray:1.0",
+             "value":{
+                 +type:"any",
+                 +channel:"VAL",
+                 +trigger:"*",
+             },
+             "": {+type:"meta", +channel:"SEVR"}
+         }
+     })
+ }
ophyd_async/fastcs/core.py ADDED
@@ -0,0 +1,9 @@
+ from ophyd_async.core import Device, DeviceConnector
+ from ophyd_async.epics.core import PviDeviceConnector
+
+
+ def fastcs_connector(device: Device, uri: str) -> DeviceConnector:
+     # TODO: add Tango support based on uri scheme
+     connector = PviDeviceConnector(uri)
+     connector.create_children_from_annotations(device)
+     return connector
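
Note: `fastcs_connector` replaces the old `fill_pvi_entries` / `create_children_from_annotations` pair (compare the `HDFPanda` changes below). A minimal sketch of how a device might adopt it; `MyFastCSDevice` and its `gain` signal are hypothetical, and passing `connector=` to the base `__init__` is assumed to behave as it does for `StandardDetector` in the `HDFPanda` diff:

    from ophyd_async.core import Device, SignalRW
    from ophyd_async.fastcs.core import fastcs_connector


    class MyFastCSDevice(Device):
        # Children are declared as annotations; the connector creates them
        gain: SignalRW[float]

        def __init__(self, uri: str, name: str = ""):
            # Children exist after this call; PVI introspection fills them
            # in later, when the device is connected
            super().__init__(name=name, connector=fastcs_connector(self, uri))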
ophyd_async/fastcs/panda/__init__.py CHANGED
@@ -1,10 +1,10 @@
  from ._block import (
+     BitMux,
      CommonPandaBlocks,
      DataBlock,
-     EnableDisableOptions,
      PcapBlock,
      PcompBlock,
-     PcompDirectionOptions,
+     PcompDirection,
      PulseBlock,
      SeqBlock,
      TimeUnits,
@@ -29,10 +29,10 @@ from ._writer import PandaHDFWriter
  __all__ = [
      "CommonPandaBlocks",
      "DataBlock",
-     "EnableDisableOptions",
+     "BitMux",
      "PcapBlock",
      "PcompBlock",
-     "PcompDirectionOptions",
+     "PcompDirection",
      "PulseBlock",
      "SeqBlock",
      "TimeUnits",
ophyd_async/fastcs/panda/_block.py CHANGED
@@ -1,13 +1,16 @@
- from __future__ import annotations
-
- from enum import Enum
-
- from ophyd_async.core import Device, DeviceVector, SignalR, SignalRW, SubsetEnum
+ from ophyd_async.core import (
+     Device,
+     DeviceVector,
+     SignalR,
+     SignalRW,
+     StrictEnum,
+     SubsetEnum,
+ )

  from ._table import DatasetTable, SeqTable


- class CaptureMode(str, Enum):
+ class CaptureMode(StrictEnum):
      FIRST_N = "FIRST_N"
      LAST_N = "LAST_N"
      FOREVER = "FOREVER"
@@ -32,26 +35,28 @@ class PulseBlock(Device):
      width: SignalRW[float]


- class PcompDirectionOptions(str, Enum):
+ class PcompDirection(StrictEnum):
      positive = "Positive"
      negative = "Negative"
      either = "Either"


- EnableDisableOptions = SubsetEnum["ZERO", "ONE"]
+ class BitMux(SubsetEnum):
+     zero = "ZERO"
+     one = "ONE"


  class PcompBlock(Device):
      active: SignalR[bool]
-     dir: SignalRW[PcompDirectionOptions]
-     enable: SignalRW[EnableDisableOptions]
+     dir: SignalRW[PcompDirection]
+     enable: SignalRW[BitMux]
      pulses: SignalRW[int]
      start: SignalRW[int]
      step: SignalRW[int]
      width: SignalRW[int]


- class TimeUnits(str, Enum):
+ class TimeUnits(StrictEnum):
      min = "min"
      s = "s"
      ms = "ms"
@@ -60,11 +65,11 @@ class TimeUnits(str, Enum):

  class SeqBlock(Device):
      table: SignalRW[SeqTable]
-     active: SignalRW[bool]
+     active: SignalR[bool]
      repeats: SignalRW[int]
      prescale: SignalRW[float]
      prescale_units: SignalRW[TimeUnits]
-     enable: SignalRW[EnableDisableOptions]
+     enable: SignalRW[BitMux]


  class PcapBlock(Device):
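
Note: the PandA enums now derive from the new core enum bases rather than `str, Enum`. A short sketch of the intent, assuming `StrictEnum` declares the exact set of choices a signal must offer while `SubsetEnum` only declares the members the client relies on; `ArmOptions`, `GateInput` and `MyBlock` are illustrative names, not part of the package:

    from ophyd_async.core import Device, SignalRW, StrictEnum, SubsetEnum


    class ArmOptions(StrictEnum):
        # The PV is expected to offer exactly these choices
        ARMED = "Armed"
        DISARMED = "Disarmed"


    class GateInput(SubsetEnum):
        # The PV may offer more choices; only these two are required
        ZERO = "ZERO"
        ONE = "ONE"


    class MyBlock(Device):
        arm: SignalRW[ArmOptions]
        gate: SignalRW[GateInput]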
ophyd_async/fastcs/panda/_control.py CHANGED
@@ -1,12 +1,10 @@
- import asyncio
-
  from ophyd_async.core import (
+     AsyncStatus,
      DetectorController,
      DetectorTrigger,
+     TriggerInfo,
      wait_for_value,
  )
- from ophyd_async.core._detector import TriggerInfo
- from ophyd_async.core._status import AsyncStatus

  from ._block import PcapBlock

@@ -33,5 +31,5 @@ class PandaPcapController(DetectorController):
          pass

      async def disarm(self):
-         await asyncio.gather(self.pcap.arm.set(False))
+         await self.pcap.arm.set(False)
          await wait_for_value(self.pcap.active, False, timeout=1)
ophyd_async/fastcs/panda/_hdf_panda.py CHANGED
@@ -2,8 +2,8 @@ from __future__ import annotations

  from collections.abc import Sequence

- from ophyd_async.core import DEFAULT_TIMEOUT, PathProvider, SignalR, StandardDetector
- from ophyd_async.epics.pvi import create_children_from_annotations, fill_pvi_entries
+ from ophyd_async.core import PathProvider, SignalR, StandardDetector
+ from ophyd_async.fastcs.core import fastcs_connector

  from ._block import CommonPandaBlocks
  from ._control import PandaPcapController
@@ -18,12 +18,10 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
          config_sigs: Sequence[SignalR] = (),
          name: str = "",
      ):
-         self._prefix = prefix
-
-         create_children_from_annotations(self)
+         # This has to be first so we make self.pcap
+         connector = fastcs_connector(self, prefix)
          controller = PandaPcapController(pcap=self.pcap)
          writer = PandaHDFWriter(
-             prefix=prefix,
              path_provider=path_provider,
              name_provider=lambda: name,
              panda_data_block=self.data,
@@ -33,17 +31,5 @@ class HDFPanda(CommonPandaBlocks, StandardDetector):
              writer=writer,
              config_sigs=config_sigs,
              name=name,
-         )
-
-     async def connect(
-         self,
-         mock: bool = False,
-         timeout: float = DEFAULT_TIMEOUT,
-         force_reconnect: bool = False,
-     ):
-         # TODO: this doesn't support caching
-         # https://github.com/bluesky/ophyd-async/issues/472
-         await fill_pvi_entries(self, self._prefix + "PVI", timeout=timeout, mock=mock)
-         await super().connect(
-             mock=mock, timeout=timeout, force_reconnect=force_reconnect
+             connector=connector,
          )
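
Note: with the connector performing the PVI fill, `HDFPanda` no longer overrides `connect()`. A small usage sketch; the prefix, path provider and name are placeholders:

    from ophyd_async.fastcs.panda import HDFPanda

    # path_provider is assumed to be an existing ophyd-async PathProvider
    panda = HDFPanda("PANDA1:", path_provider=path_provider, name="panda")
    # Connection (and the PVI introspection) now happens through the normal
    # Device.connect() / ensure_connected() path instead of a custom override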
ophyd_async/fastcs/panda/_table.py CHANGED
@@ -1,27 +1,22 @@
  from collections.abc import Sequence
- from enum import Enum
- from typing import Annotated

  import numpy as np
- import numpy.typing as npt
- from pydantic import Field, model_validator
- from pydantic_numpy.helper.annotation import NpArrayPydanticAnnotation
- from typing_extensions import TypedDict
+ from pydantic import model_validator

- from ophyd_async.core import Table
+ from ophyd_async.core import Array1D, StrictEnum, Table


- class PandaHdf5DatasetType(str, Enum):
+ class PandaHdf5DatasetType(StrictEnum):
      FLOAT_64 = "float64"
      UINT_32 = "uint32"


- class DatasetTable(TypedDict):
-     name: npt.NDArray[np.str_]
-     hdf5_type: Sequence[PandaHdf5DatasetType]
+ class DatasetTable(Table):
+     name: Sequence[str]
+     dtype: Sequence[PandaHdf5DatasetType]


- class SeqTrigger(str, Enum):
+ class SeqTrigger(StrictEnum):
      IMMEDIATE = "Immediate"
      BITA_0 = "BITA=0"
      BITA_1 = "BITA=1"
@@ -37,45 +32,27 @@ class SeqTrigger(str, Enum):
      POSC_LT = "POSC<=POSITION"


- PydanticNp1DArrayInt32 = Annotated[
-     np.ndarray[tuple[int], np.dtype[np.int32]],
-     NpArrayPydanticAnnotation.factory(
-         data_type=np.int32, dimensions=1, strict_data_typing=False
-     ),
-     Field(default_factory=lambda: np.array([], np.int32)),
- ]
- PydanticNp1DArrayBool = Annotated[
-     np.ndarray[tuple[int], np.dtype[np.bool_]],
-     NpArrayPydanticAnnotation.factory(
-         data_type=np.bool_, dimensions=1, strict_data_typing=False
-     ),
-     Field(default_factory=lambda: np.array([], dtype=np.bool_)),
- ]
- TriggerStr = Annotated[Sequence[SeqTrigger], Field(default_factory=list)]
-
-
  class SeqTable(Table):
-     repeats: PydanticNp1DArrayInt32
-     trigger: TriggerStr
-     position: PydanticNp1DArrayInt32
-     time1: PydanticNp1DArrayInt32
-     outa1: PydanticNp1DArrayBool
-     outb1: PydanticNp1DArrayBool
-     outc1: PydanticNp1DArrayBool
-     outd1: PydanticNp1DArrayBool
-     oute1: PydanticNp1DArrayBool
-     outf1: PydanticNp1DArrayBool
-     time2: PydanticNp1DArrayInt32
-     outa2: PydanticNp1DArrayBool
-     outb2: PydanticNp1DArrayBool
-     outc2: PydanticNp1DArrayBool
-     outd2: PydanticNp1DArrayBool
-     oute2: PydanticNp1DArrayBool
-     outf2: PydanticNp1DArrayBool
+     repeats: Array1D[np.uint16]
+     trigger: Sequence[SeqTrigger]
+     position: Array1D[np.int32]
+     time1: Array1D[np.uint32]
+     outa1: Array1D[np.bool_]
+     outb1: Array1D[np.bool_]
+     outc1: Array1D[np.bool_]
+     outd1: Array1D[np.bool_]
+     oute1: Array1D[np.bool_]
+     outf1: Array1D[np.bool_]
+     time2: Array1D[np.uint32]
+     outa2: Array1D[np.bool_]
+     outb2: Array1D[np.bool_]
+     outc2: Array1D[np.bool_]
+     outd2: Array1D[np.bool_]
+     oute2: Array1D[np.bool_]
+     outf2: Array1D[np.bool_]

-     @classmethod
-     def row(  # type: ignore
-         cls,
+     @staticmethod
+     def row(
          *,
          repeats: int = 1,
          trigger: str = SeqTrigger.IMMEDIATE,
@@ -95,7 +72,8 @@ class SeqTable(Table):
          oute2: bool = False,
          outf2: bool = False,
      ) -> "SeqTable":
-         return Table.row(**locals())
+         # Let pydantic do the conversions for us
+         return SeqTable(**{k: [v] for k, v in locals().items()})  # type: ignore

      @model_validator(mode="after")
      def validate_max_length(self) -> "SeqTable":
@@ -104,6 +82,6 @@ class SeqTable(Table):
          the pydantic field doesn't work
          """

-         first_length = len(next(iter(self))[1])
-         assert 0 <= first_length < 4096, f"Length {first_length} not in range."
+         first_length = len(self)
+         assert first_length <= 4096, f"Length {first_length} is too long"
          return self
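
Note: with the typed `Array1D` columns, row construction goes through pydantic validation. A small sketch, assuming tables still concatenate with `+` as in 0.7; the trigger values and times are arbitrary:

    from ophyd_async.fastcs.panda import SeqTable, SeqTrigger

    table = SeqTable.row(trigger=SeqTrigger.BITA_1, time1=100, outa1=True)
    table = table + SeqTable.row(trigger=SeqTrigger.IMMEDIATE, time2=50, outa2=True)
    assert len(table) == 2  # validate_max_length keeps len(self) <= 4096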
ophyd_async/fastcs/panda/_trigger.py CHANGED
@@ -4,7 +4,7 @@ from pydantic import BaseModel, Field

  from ophyd_async.core import FlyerController, wait_for_value

- from ._block import PcompBlock, PcompDirectionOptions, SeqBlock, TimeUnits
+ from ._block import BitMux, PcompBlock, PcompDirection, SeqBlock, TimeUnits
  from ._table import SeqTable


@@ -21,7 +21,7 @@ class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
      async def prepare(self, value: SeqTableInfo):
          await asyncio.gather(
              self.seq.prescale_units.set(TimeUnits.us),
-             self.seq.enable.set("ZERO"),
+             self.seq.enable.set(BitMux.zero),
          )
          await asyncio.gather(
              self.seq.prescale.set(value.prescale_as_us),
@@ -30,14 +30,14 @@ class StaticSeqTableTriggerLogic(FlyerController[SeqTableInfo]):
          )

      async def kickoff(self) -> None:
-         await self.seq.enable.set("ONE")
+         await self.seq.enable.set(BitMux.one)
          await wait_for_value(self.seq.active, True, timeout=1)

      async def complete(self) -> None:
          await wait_for_value(self.seq.active, False, timeout=None)

      async def stop(self):
-         await self.seq.enable.set("ZERO")
+         await self.seq.enable.set(BitMux.zero)
          await wait_for_value(self.seq.active, False, timeout=1)


@@ -54,7 +54,7 @@ class PcompInfo(BaseModel):
          ),
          ge=0,
      )
-     direction: PcompDirectionOptions = Field(
+     direction: PcompDirection = Field(
          description=(
              "Specifies which direction the motor counts should be "
              "moving. Pulses won't be sent unless the values are moving in "
@@ -68,7 +68,7 @@ class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
          self.pcomp = pcomp

      async def prepare(self, value: PcompInfo):
-         await self.pcomp.enable.set("ZERO")
+         await self.pcomp.enable.set(BitMux.zero)
          await asyncio.gather(
              self.pcomp.start.set(value.start_postion),
              self.pcomp.width.set(value.pulse_width),
@@ -78,12 +78,12 @@ class StaticPcompTriggerLogic(FlyerController[PcompInfo]):
          )

      async def kickoff(self) -> None:
-         await self.pcomp.enable.set("ONE")
+         await self.pcomp.enable.set(BitMux.one)
          await wait_for_value(self.pcomp.active, True, timeout=1)

      async def complete(self, timeout: float | None = None) -> None:
          await wait_for_value(self.pcomp.active, False, timeout=timeout)

      async def stop(self):
-         await self.pcomp.enable.set("ZERO")
+         await self.pcomp.enable.set(BitMux.zero)
          await wait_for_value(self.pcomp.active, False, timeout=1)
ophyd_async/fastcs/panda/_writer.py CHANGED
@@ -25,13 +25,11 @@ class PandaHDFWriter(DetectorWriter):

      def __init__(
          self,
-         prefix: str,
          path_provider: PathProvider,
          name_provider: NameProvider,
          panda_data_block: DataBlock,
      ) -> None:
          self.panda_data_block = panda_data_block
-         self._prefix = prefix
          self._path_provider = path_provider
          self._name_provider = name_provider
          self._datasets: list[HDFDataset] = []
@@ -89,8 +87,7 @@ class PandaHDFWriter(DetectorWriter):
                  shape=list(ds.shape),
                  dtype="array" if ds.shape != [1] else "number",
                  # PandA data should always be written as Float64
-                 # Ignore type check until https://github.com/bluesky/event-model/issues/308
-                 dtype_numpy="<f8",  # type: ignore
+                 dtype_numpy="<f8",
                  external="STREAM:",
              )
              for ds in self._datasets
@@ -110,7 +107,7 @@ class PandaHDFWriter(DetectorWriter):
              HDFDataset(
                  dataset_name, "/" + dataset_name, [1], multiplier=1, chunk_shape=(1024,)
              )
-             for dataset_name in capture_table["name"]
+             for dataset_name in capture_table.name
          ]

          # Warn user if dataset table is empty in PandA
ophyd_async/plan_stubs/_ensure_connected.py CHANGED
@@ -1,26 +1,33 @@
+ from collections.abc import Awaitable
+
  import bluesky.plan_stubs as bps

- from ophyd_async.core import DEFAULT_TIMEOUT, Device, wait_for_connection
+ from ophyd_async.core import DEFAULT_TIMEOUT, Device, LazyMock, wait_for_connection


  def ensure_connected(
      *devices: Device,
-     mock: bool = False,
+     mock: bool | LazyMock = False,
      timeout: float = DEFAULT_TIMEOUT,
      force_reconnect=False,
  ):
-     (connect_task,) = yield from bps.wait_for(
-         [
-             lambda: wait_for_connection(
-                 **{
-                     device.name: device.connect(
-                         mock=mock, timeout=timeout, force_reconnect=force_reconnect
-                     )
-                     for device in devices
-                 }
+     device_names = [device.name for device in devices]
+     non_unique = {
+         device: device.name for device in devices if device_names.count(device.name) > 1
+     }
+     if non_unique:
+         raise ValueError(f"Devices do not have unique names {non_unique}")
+
+     def connect_devices() -> Awaitable[None]:
+         coros = {
+             device.name: device.connect(
+                 mock=mock, timeout=timeout, force_reconnect=force_reconnect
              )
-         ]
-     )
+             for device in devices
+         }
+         return wait_for_connection(**coros)
+
+     (connect_task,) = yield from bps.wait_for([connect_devices])

      if connect_task and connect_task.exception() is not None:
          raise connect_task.exception()
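
Note: the stub now rejects duplicate device names up front and accepts a `LazyMock` for `mock`. A minimal sketch of calling it from a plan; the devices and timeout are placeholders:

    from ophyd_async.plan_stubs import ensure_connected


    def connect_then_count(*devices):
        # Raises ValueError if two devices share a name, otherwise connects
        # them concurrently before the rest of the plan runs
        yield from ensure_connected(*devices, timeout=10)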
ophyd_async/plan_stubs/_fly.py CHANGED
@@ -9,7 +9,7 @@ from ophyd_async.core import (
      in_micros,
  )
  from ophyd_async.fastcs.panda import (
-     PcompDirectionOptions,
+     PcompDirection,
      PcompInfo,
      SeqTable,
      SeqTableInfo,
@@ -147,7 +147,7 @@ def fly_and_collect_with_static_pcomp(
      number_of_pulses: int,
      pulse_width: int,
      rising_edge_step: int,
-     direction: PcompDirectionOptions,
+     direction: PcompDirection,
      trigger_info: TriggerInfo,
  ):
      # Set up scan and prepare trigger
ophyd_async/plan_stubs/_nd_attributes.py CHANGED
@@ -16,12 +16,12 @@ from ophyd_async.epics.adcore import (
  def setup_ndattributes(
      device: NDArrayBaseIO, ndattributes: Sequence[NDAttributePv | NDAttributeParam]
  ):
-     xml_text = ET.Element("Attributes")
+     root = ET.Element("Attributes")

      for ndattribute in ndattributes:
          if isinstance(ndattribute, NDAttributeParam):
              ET.SubElement(
-                 xml_text,
+                 root,
                  "Attribute",
                  name=ndattribute.name,
                  type="PARAM",
@@ -32,7 +32,7 @@ def setup_ndattributes(
              )
          elif isinstance(ndattribute, NDAttributePv):
              ET.SubElement(
-                 xml_text,
+                 root,
                  "Attribute",
                  name=ndattribute.name,
                  type="EPICS_PV",
@@ -45,7 +45,8 @@ def setup_ndattributes(
              f"Invalid type for ndattributes: {type(ndattribute)}. "
              "Expected NDAttributePv or NDAttributeParam."
          )
-     yield from bps.mv(device.nd_attributes_file, xml_text)
+     xml_text = ET.tostring(root, encoding="unicode")
+     yield from bps.abs_set(device.nd_attributes_file, xml_text, wait=True)


  def setup_ndstats_sum(detector: Device):
ophyd_async/py.typed ADDED
File without changes
ophyd_async/sim/demo/_pattern_detector/_pattern_detector_controller.py CHANGED
@@ -1,7 +1,6 @@
  import asyncio

- from ophyd_async.core import DetectorController, PathProvider
- from ophyd_async.core._detector import TriggerInfo
+ from ophyd_async.core import DetectorController, PathProvider, TriggerInfo

  from ._pattern_generator import PatternGenerator