ophyd-async 0.2.0__py3-none-any.whl → 0.3a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- ophyd_async/__init__.py +1 -4
- ophyd_async/_version.py +2 -2
- ophyd_async/core/__init__.py +16 -10
- ophyd_async/core/_providers.py +38 -5
- ophyd_async/core/async_status.py +3 -3
- ophyd_async/core/detector.py +211 -62
- ophyd_async/core/device.py +45 -38
- ophyd_async/core/device_save_loader.py +96 -23
- ophyd_async/core/flyer.py +30 -244
- ophyd_async/core/signal.py +47 -21
- ophyd_async/core/signal_backend.py +7 -4
- ophyd_async/core/sim_signal_backend.py +30 -18
- ophyd_async/core/standard_readable.py +4 -2
- ophyd_async/core/utils.py +93 -30
- ophyd_async/epics/_backend/_aioca.py +30 -36
- ophyd_async/epics/_backend/_p4p.py +75 -41
- ophyd_async/epics/_backend/common.py +25 -0
- ophyd_async/epics/areadetector/__init__.py +4 -0
- ophyd_async/epics/areadetector/aravis.py +69 -0
- ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +1 -1
- ophyd_async/epics/areadetector/controllers/aravis_controller.py +73 -0
- ophyd_async/epics/areadetector/controllers/pilatus_controller.py +37 -25
- ophyd_async/epics/areadetector/drivers/aravis_driver.py +154 -0
- ophyd_async/epics/areadetector/drivers/pilatus_driver.py +4 -4
- ophyd_async/epics/areadetector/pilatus.py +50 -0
- ophyd_async/epics/areadetector/writers/_hdffile.py +21 -7
- ophyd_async/epics/areadetector/writers/hdf_writer.py +26 -15
- ophyd_async/epics/demo/__init__.py +33 -3
- ophyd_async/epics/motion/motor.py +20 -14
- ophyd_async/epics/pvi/__init__.py +3 -0
- ophyd_async/epics/pvi/pvi.py +318 -0
- ophyd_async/epics/signal/__init__.py +0 -2
- ophyd_async/epics/signal/signal.py +26 -9
- ophyd_async/panda/__init__.py +19 -5
- ophyd_async/panda/_common_blocks.py +49 -0
- ophyd_async/panda/_hdf_panda.py +48 -0
- ophyd_async/panda/_panda_controller.py +37 -0
- ophyd_async/panda/_trigger.py +39 -0
- ophyd_async/panda/_utils.py +15 -0
- ophyd_async/panda/writers/__init__.py +3 -0
- ophyd_async/panda/writers/_hdf_writer.py +220 -0
- ophyd_async/panda/writers/_panda_hdf_file.py +58 -0
- ophyd_async/planstubs/__init__.py +5 -0
- ophyd_async/planstubs/prepare_trigger_and_dets.py +57 -0
- ophyd_async/protocols.py +73 -0
- ophyd_async/sim/__init__.py +11 -0
- ophyd_async/sim/demo/__init__.py +3 -0
- ophyd_async/sim/demo/sim_motor.py +116 -0
- ophyd_async/sim/pattern_generator.py +318 -0
- ophyd_async/sim/sim_pattern_detector_control.py +55 -0
- ophyd_async/sim/sim_pattern_detector_writer.py +34 -0
- ophyd_async/sim/sim_pattern_generator.py +37 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a2.dist-info}/METADATA +20 -76
- ophyd_async-0.3a2.dist-info/RECORD +76 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a2.dist-info}/WHEEL +1 -1
- ophyd_async/epics/signal/pvi_get.py +0 -22
- ophyd_async/panda/panda.py +0 -294
- ophyd_async-0.2.0.dist-info/RECORD +0 -53
- /ophyd_async/panda/{table.py → _table.py} +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a2.dist-info}/LICENSE +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a2.dist-info}/entry_points.txt +0 -0
- {ophyd_async-0.2.0.dist-info → ophyd_async-0.3a2.dist-info}/top_level.txt +0 -0
ophyd_async/epics/_backend/common.py (new file)
@@ -0,0 +1,25 @@
+from enum import Enum
+from typing import Any, Optional, Tuple, Type
+
+
+def get_supported_enum_class(
+    pv: str,
+    datatype: Optional[Type[Enum]],
+    pv_choices: Tuple[Any, ...],
+) -> Type[Enum]:
+    if not datatype:
+        return Enum("GeneratedChoices", {x or "_": x for x in pv_choices}, type=str)  # type: ignore
+
+    if not issubclass(datatype, Enum):
+        raise TypeError(f"{pv} has type Enum not {datatype.__name__}")
+    if not issubclass(datatype, str):
+        raise TypeError(f"{pv} has type Enum but doesn't inherit from String")
+    choices = tuple(v.value for v in datatype)
+    if set(choices) != set(pv_choices):
+        raise TypeError(
+            (
+                f"{pv} has choices {pv_choices}, "
+                f"which do not match {datatype}, which has {choices}"
+            )
+        )
+    return datatype
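The new `_backend/common.py` centralises the enum validation that the CA and PVA backends both need. A minimal usage sketch (the PV name and its choices below are hypothetical; the import path is taken from the file list above): with no user-supplied `datatype` a `str`-valued Enum is generated from the PV's choices, while a user Enum is returned unchanged only if its values exactly match those choices.

```python
from enum import Enum

from ophyd_async.epics._backend.common import get_supported_enum_class

# Hypothetical enum-like PV and its reported choices
pv = "ca://SIM:DET:TriggerMode"
pv_choices = ("Internal", "External", "Gate")

# No datatype supplied: a str-valued Enum is generated from the PV choices
generated = get_supported_enum_class(pv, None, pv_choices)
assert [member.value for member in generated] == list(pv_choices)


# A user-supplied Enum is accepted only if its values match the PV choices
class TriggerMode(str, Enum):
    internal = "Internal"
    external = "External"
    gate = "Gate"


assert get_supported_enum_class(pv, TriggerMode, pv_choices) is TriggerMode
```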

ophyd_async/epics/areadetector/__init__.py
@@ -1,3 +1,5 @@
+from .aravis import AravisDetector
+from .pilatus import PilatusDetector
 from .single_trigger_det import SingleTriggerDet
 from .utils import (
     FileWriteMode,
@@ -9,6 +11,7 @@ from .utils import (
 )

 __all__ = [
+    "AravisDetector",
     "SingleTriggerDet",
     "FileWriteMode",
     "ImageMode",
@@ -16,4 +19,5 @@ __all__ = [
     "ad_rw",
     "NDAttributeDataType",
     "NDAttributesXML",
+    "PilatusDetector",
 ]

ophyd_async/epics/areadetector/aravis.py (new file)
@@ -0,0 +1,69 @@
+from typing import get_args
+
+from bluesky.protocols import HasHints, Hints
+
+from ophyd_async.core import DirectoryProvider, StandardDetector, TriggerInfo
+from ophyd_async.epics.areadetector.controllers.aravis_controller import (
+    AravisController,
+)
+from ophyd_async.epics.areadetector.drivers import ADBaseShapeProvider
+from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver
+from ophyd_async.epics.areadetector.writers import HDFWriter, NDFileHDF
+
+
+class AravisDetector(StandardDetector, HasHints):
+    """
+    Ophyd-async implementation of an ADAravis Detector.
+    The detector may be configured for an external trigger on a GPIO port,
+    which must be done prior to preparing the detector
+    """
+
+    _controller: AravisController
+    _writer: HDFWriter
+
+    def __init__(
+        self,
+        name: str,
+        directory_provider: DirectoryProvider,
+        driver: AravisDriver,
+        hdf: NDFileHDF,
+        gpio_number: AravisController.GPIO_NUMBER = 1,
+        **scalar_sigs: str,
+    ):
+        # Must be child of Detector to pick up connect()
+        self.drv = driver
+        self.hdf = hdf
+
+        super().__init__(
+            AravisController(self.drv, gpio_number=gpio_number),
+            HDFWriter(
+                self.hdf,
+                directory_provider,
+                lambda: self.name,
+                ADBaseShapeProvider(self.drv),
+                **scalar_sigs,
+            ),
+            config_sigs=(self.drv.acquire_time, self.drv.acquire),
+            name=name,
+        )
+
+    async def _prepare(self, value: TriggerInfo) -> None:
+        await self.drv.fetch_deadtime()
+        await super()._prepare(value)
+
+    def get_external_trigger_gpio(self):
+        return self._controller.gpio_number
+
+    def set_external_trigger_gpio(self, gpio_number: AravisController.GPIO_NUMBER):
+        supported_gpio_numbers = get_args(AravisController.GPIO_NUMBER)
+        if gpio_number not in supported_gpio_numbers:
+            raise ValueError(
+                f"{self.__class__.__name__} only supports the following GPIO "
+                f"indices: {supported_gpio_numbers} but was asked to "
+                f"use {gpio_number}"
+            )
+        self._controller.gpio_number = gpio_number
+
+    @property
+    def hints(self) -> Hints:
+        return self._writer.hints
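A rough construction sketch for the new `AravisDetector` (the PV prefixes, the directory provider, and the `DeviceCollector` usage are illustrative assumptions, not part of this diff): the driver and HDF plugin are built separately and passed in, and the GPIO line used for hardware triggering can be changed after construction via `set_external_trigger_gpio()`.

```python
from ophyd_async.core import DeviceCollector, StaticDirectoryProvider
from ophyd_async.epics.areadetector import AravisDetector
from ophyd_async.epics.areadetector.drivers.aravis_driver import AravisDriver
from ophyd_async.epics.areadetector.writers import NDFileHDF

# Hypothetical output location and PV prefixes
directory_provider = StaticDirectoryProvider("/data/visit", "manta-")


async def make_detector() -> AravisDetector:
    async with DeviceCollector():
        drv = AravisDriver("BL01I-EA-DET-01:CAM:")
        hdf = NDFileHDF("BL01I-EA-DET-01:HDF5:")
        det = AravisDetector("manta", directory_provider, driver=drv, hdf=hdf)
    # Route the external trigger to GPIO line 3 before prepare()
    det.set_external_trigger_gpio(3)
    return det
```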

ophyd_async/epics/areadetector/controllers/aravis_controller.py (new file)
@@ -0,0 +1,73 @@
+import asyncio
+from typing import Literal, Optional, Tuple
+
+from ophyd_async.core import (
+    AsyncStatus,
+    DetectorControl,
+    DetectorTrigger,
+    set_and_wait_for_value,
+)
+from ophyd_async.epics.areadetector.drivers.aravis_driver import (
+    AravisDriver,
+    AravisTriggerMode,
+    AravisTriggerSource,
+)
+from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record
+
+
+class AravisController(DetectorControl):
+    GPIO_NUMBER = Literal[1, 2, 3, 4]
+
+    def __init__(self, driver: AravisDriver, gpio_number: GPIO_NUMBER) -> None:
+        self._drv = driver
+        self.gpio_number = gpio_number
+
+    def get_deadtime(self, exposure: float) -> float:
+        return self._drv.dead_time or 0
+
+    async def arm(
+        self,
+        num: int = 0,
+        trigger: DetectorTrigger = DetectorTrigger.internal,
+        exposure: Optional[float] = None,
+    ) -> AsyncStatus:
+        if num == 0:
+            image_mode = ImageMode.continuous
+        else:
+            image_mode = ImageMode.multiple
+        if exposure is not None:
+            await self._drv.acquire_time.set(exposure)
+
+        trigger_mode, trigger_source = self._get_trigger_info(trigger)
+        # trigger mode must be set first and on it's own!
+        await self._drv.trigger_mode.set(trigger_mode)
+
+        await asyncio.gather(
+            self._drv.trigger_source.set(trigger_source),
+            self._drv.num_images.set(num),
+            self._drv.image_mode.set(image_mode),
+        )
+
+        status = await set_and_wait_for_value(self._drv.acquire, True)
+        return status
+
+    def _get_trigger_info(
+        self, trigger: DetectorTrigger
+    ) -> Tuple[AravisTriggerMode, AravisTriggerSource]:
+        supported_trigger_types = (
+            DetectorTrigger.constant_gate,
+            DetectorTrigger.edge_trigger,
+        )
+        if trigger not in supported_trigger_types:
+            raise ValueError(
+                f"{self.__class__.__name__} only supports the following trigger "
+                f"types: {supported_trigger_types} but was asked to "
+                f"use {trigger}"
+            )
+        if trigger == DetectorTrigger.internal:
+            return AravisTriggerMode.off, "Freerun"
+        else:
+            return (AravisTriggerMode.on, f"Line{self.gpio_number}")
+
+    async def disarm(self):
+        await stop_busy_record(self._drv.acquire, False, timeout=1)

ophyd_async/epics/areadetector/controllers/pilatus_controller.py
@@ -1,49 +1,61 @@
 import asyncio
-from typing import Optional
+from typing import Optional

-from ophyd_async.core import AsyncStatus
+from ophyd_async.core.async_status import AsyncStatus
+from ophyd_async.core.detector import DetectorControl, DetectorTrigger
 from ophyd_async.epics.areadetector.drivers.ad_base import (
-    DEFAULT_GOOD_STATES,
-    DetectorState,
     start_acquiring_driver_and_ensure_status,
 )
-
-
-
-
-
-    DetectorTrigger.internal: TriggerMode.internal,
-    DetectorTrigger.constant_gate: TriggerMode.ext_enable,
-    DetectorTrigger.variable_gate: TriggerMode.ext_enable,
-}
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import (
+    PilatusDriver,
+    PilatusTriggerMode,
+)
+from ophyd_async.epics.areadetector.utils import ImageMode, stop_busy_record


 class PilatusController(DetectorControl):
+    _supported_trigger_types = {
+        DetectorTrigger.internal: PilatusTriggerMode.internal,
+        DetectorTrigger.constant_gate: PilatusTriggerMode.ext_enable,
+        DetectorTrigger.variable_gate: PilatusTriggerMode.ext_enable,
+    }
+
     def __init__(
         self,
         driver: PilatusDriver,
-        good_states: Set[DetectorState] = set(DEFAULT_GOOD_STATES),
     ) -> None:
-        self.
-        self.good_states = good_states
+        self._drv = driver

     def get_deadtime(self, exposure: float) -> float:
-
+        # Cite: https://media.dectris.com/User_Manual-PILATUS2-V1_4.pdf
+        """The required minimum time difference between ExpPeriod and ExpTime
+        (readout time) is 2.28 ms"""
+        return 2.28e-3

     async def arm(
         self,
+        num: int,
         trigger: DetectorTrigger = DetectorTrigger.internal,
-        num: int = 0,
         exposure: Optional[float] = None,
     ) -> AsyncStatus:
+        if exposure is not None:
+            await self._drv.acquire_time.set(exposure)
         await asyncio.gather(
-            self.
-            self.
-            self.
-        )
-        return await start_acquiring_driver_and_ensure_status(
-            self.driver, good_states=self.good_states
+            self._drv.trigger_mode.set(self._get_trigger_mode(trigger)),
+            self._drv.num_images.set(999_999 if num == 0 else num),
+            self._drv.image_mode.set(ImageMode.multiple),
         )
+        return await start_acquiring_driver_and_ensure_status(self._drv)
+
+    @classmethod
+    def _get_trigger_mode(cls, trigger: DetectorTrigger) -> PilatusTriggerMode:
+        if trigger not in cls._supported_trigger_types.keys():
+            raise ValueError(
+                f"{cls.__name__} only supports the following trigger "
+                f"types: {cls._supported_trigger_types.keys()} but was asked to "
+                f"use {trigger}"
+            )
+        return cls._supported_trigger_types[trigger]

     async def disarm(self):
-        await stop_busy_record(self.
+        await stop_busy_record(self._drv.acquire, False, timeout=1)

ophyd_async/epics/areadetector/drivers/aravis_driver.py (new file)
@@ -0,0 +1,154 @@
+from enum import Enum
+from typing import Callable, Dict, Literal, Optional, Tuple
+
+from ophyd_async.epics.areadetector.drivers import ADBase
+from ophyd_async.epics.areadetector.utils import ad_r, ad_rw
+
+
+class AravisTriggerMode(str, Enum):
+    """GigEVision GenICAM standard: on=externally triggered"""
+
+    on = "On"
+    off = "Off"
+
+
+"""A minimal set of TriggerSources that must be supported by the underlying record.
+To enable hardware triggered scanning, line_N must support each N in GPIO_NUMBER.
+To enable software triggered scanning, freerun must be supported.
+Other enumerated values may or may not be preset.
+To prevent requiring one Enum class per possible configuration, we set as this Enum
+but read from the underlying signal as a str.
+"""
+AravisTriggerSource = Literal["Freerun", "Line1", "Line2", "Line3", "Line4"]
+
+
+def _reverse_lookup(
+    model_deadtimes: Dict[float, Tuple[str, ...]],
+) -> Callable[[str], float]:
+    def inner(pixel_format: str, model_name: str) -> float:
+        for deadtime, pixel_formats in model_deadtimes.items():
+            if pixel_format in pixel_formats:
+                return deadtime
+        raise ValueError(
+            f"Model {model_name} does not have a defined deadtime "
+            f"for pixel format {pixel_format}"
+        )
+
+    return inner
+
+
+_deadtimes: Dict[str, Callable[[str, str], float]] = {
+    # cite: https://cdn.alliedvision.com/fileadmin/content/documents/products/cameras/Manta/techman/Manta_TechMan.pdf retrieved 2024-04-05 # noqa: E501
+    "Manta G-125": lambda _, __: 63e-6,
+    "Manta G-145": lambda _, __: 106e-6,
+    "Manta G-235": _reverse_lookup(
+        {
+            118e-6: (
+                "Mono8",
+                "Mono12Packed",
+                "BayerRG8",
+                "BayerRG12",
+                "BayerRG12Packed",
+                "YUV411Packed",
+            ),
+            256e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
+            390e-6: ("RGB8Packed", "BGR8Packed", "YUV444Packed"),
+        }
+    ),
+    "Manta G-895": _reverse_lookup(
+        {
+            404e-6: (
+                "Mono8",
+                "Mono12Packed",
+                "BayerRG8",
+                "BayerRG12Packed",
+                "YUV411Packed",
+            ),
+            542e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
+            822e-6: ("RGB8Packed", "BGR8Packed", "YUV444Packed"),
+        }
+    ),
+    "Manta G-2460": _reverse_lookup(
+        {
+            979e-6: (
+                "Mono8",
+                "Mono12Packed",
+                "BayerRG8",
+                "BayerRG12Packed",
+                "YUV411Packed",
+            ),
+            1304e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
+            1961e-6: ("RGB8Packed", "BGR8Packed", "YUV444Packed"),
+        }
+    ),
+    # cite: https://cdn.alliedvision.com/fileadmin/content/documents/products/cameras/various/appnote/GigE/GigE-Cameras_AppNote_PIV-Min-Time-Between-Exposures.pdf retrieved 2024-04-05 # noqa: E501
+    "Manta G-609": lambda _, __: 47e-6,
+    # cite: https://cdn.alliedvision.com/fileadmin/content/documents/products/cameras/Mako/techman/Mako_TechMan_en.pdf retrieved 2024-04-05 # noqa: E501
+    "Mako G-040": _reverse_lookup(
+        {
+            101e-6: (
+                "Mono8",
+                "Mono12Packed",
+                "BayerRG8",
+                "BayerRG12Packed",
+                "YUV411Packed",
+            ),
+            140e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
+            217e-6: ("RGB8Packed", "BGR8Packed", "YUV444Packed"),
+        }
+    ),
+    "Mako G-125": lambda _, __: 70e-6,
+    # Assume 12 bits: 10 bits = 275e-6
+    "Mako G-234": _reverse_lookup(
+        {
+            356e-6: (
+                "Mono8",
+                "BayerRG8",
+                "BayerRG12",
+                "BayerRG12Packed",
+                "YUV411Packed",
+                "YUV422Packed",
+            ),
+            # Assume 12 bits: 10 bits = 563e-6
+            726e-6: ("RGB8Packed", "BRG8Packed", "YUV444Packed"),
+        }
+    ),
+    "Mako G-507": _reverse_lookup(
+        {
+            270e-6: (
+                "Mono8",
+                "Mono12Packed",
+                "BayerRG8",
+                "BayerRG12Packed",
+                "YUV411Packed",
+            ),
+            363e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
+            554e-6: ("RGB8Packed", "BGR8Packed", "YUV444Packed"),
+        }
+    ),
+}
+
+
+class AravisDriver(ADBase):
+    # If instantiating a new instance, ensure it is supported in the _deadtimes dict
+    """Generic Driver supporting the Manta and Mako drivers.
+    Fetches deadtime prior to use in a Streaming scan.
+    Requires driver firmware up to date:
+        - Model_RBV must be of the form "^(Mako|Manta) (model)$"
+    """
+
+    def __init__(self, prefix: str, name: str = "") -> None:
+        self.trigger_mode = ad_rw(AravisTriggerMode, prefix + "TriggerMode")
+        self.trigger_source = ad_rw(str, prefix + "TriggerSource")
+        self.model = ad_r(str, prefix + "Model")
+        self.pixel_format = ad_rw(str, prefix + "PixelFormat")
+        self.dead_time: Optional[float] = None
+        super().__init__(prefix, name=name)
+
+    async def fetch_deadtime(self) -> None:
+        # All known in-use version B/C have same deadtime as non-B/C
+        model: str = (await self.model.get_value()).removesuffix("B").removesuffix("C")
+        if model not in _deadtimes:
+            raise ValueError(f"Model {model} does not have defined deadtimes")
+        pixel_format: str = await self.pixel_format.get_value()
+        self.dead_time = _deadtimes.get(model)(pixel_format, model)
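The `_deadtimes` table maps each camera model to a callable of `(pixel_format, model_name)` so that one entry can cover pixel formats with different readout times. Below is a self-contained illustration of the same reverse-lookup idea (it deliberately re-implements the private helper rather than importing it), including the arithmetic it enables: a Manta G-235 in `Mono12` resolves to a 256 µs deadtime, so a 10 ms exposure needs a trigger period of at least 10.256 ms.

```python
from typing import Callable, Dict, Tuple


def reverse_lookup(
    deadtimes: Dict[float, Tuple[str, ...]],
) -> Callable[[str, str], float]:
    # Same shape as _reverse_lookup above: deadtime keyed by a group of pixel formats
    def inner(pixel_format: str, model_name: str) -> float:
        for deadtime, pixel_formats in deadtimes.items():
            if pixel_format in pixel_formats:
                return deadtime
        raise ValueError(f"{model_name}: no deadtime defined for {pixel_format}")

    return inner


manta_g235 = reverse_lookup(
    {
        118e-6: ("Mono8", "Mono12Packed"),
        256e-6: ("Mono12", "BayerRG12", "YUV422Packed"),
    }
)

deadtime = manta_g235("Mono12", "Manta G-235")  # 256e-6 s
exposure = 10e-3
min_trigger_period = exposure + deadtime  # 0.010256 s between hardware triggers
```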

ophyd_async/epics/areadetector/drivers/pilatus_driver.py
@@ -4,7 +4,7 @@ from ..utils import ad_rw
 from .ad_base import ADBase


-class TriggerMode(str, Enum):
+class PilatusTriggerMode(str, Enum):
     internal = "Internal"
     ext_enable = "Ext. Enable"
     ext_trigger = "Ext. Trigger"
@@ -13,6 +13,6 @@ class TriggerMode(str, Enum):


 class PilatusDriver(ADBase):
-    def __init__(self, prefix: str) -> None:
-        self.trigger_mode = ad_rw(
-        super().__init__(prefix)
+    def __init__(self, prefix: str, name: str = "") -> None:
+        self.trigger_mode = ad_rw(PilatusTriggerMode, prefix + "TriggerMode")
+        super().__init__(prefix, name)

ophyd_async/epics/areadetector/pilatus.py (new file)
@@ -0,0 +1,50 @@
+from typing import Optional, Sequence
+
+from bluesky.protocols import Hints
+
+from ophyd_async.core import DirectoryProvider
+from ophyd_async.core.detector import StandardDetector
+from ophyd_async.core.signal import SignalR
+from ophyd_async.epics.areadetector.controllers.pilatus_controller import (
+    PilatusController,
+)
+from ophyd_async.epics.areadetector.drivers.ad_base import ADBaseShapeProvider
+from ophyd_async.epics.areadetector.drivers.pilatus_driver import PilatusDriver
+from ophyd_async.epics.areadetector.writers.hdf_writer import HDFWriter
+from ophyd_async.epics.areadetector.writers.nd_file_hdf import NDFileHDF
+
+
+class PilatusDetector(StandardDetector):
+    """A Pilatus StandardDetector writing HDF files"""
+
+    _controller: PilatusController
+    _writer: HDFWriter
+
+    def __init__(
+        self,
+        name: str,
+        directory_provider: DirectoryProvider,
+        driver: PilatusDriver,
+        hdf: NDFileHDF,
+        config_sigs: Optional[Sequence[SignalR]] = None,
+        **scalar_sigs: str,
+    ):
+        self.drv = driver
+        self.hdf = hdf
+
+        super().__init__(
+            PilatusController(self.drv),
+            HDFWriter(
+                self.hdf,
+                directory_provider,
+                lambda: self.name,
+                ADBaseShapeProvider(self.drv),
+                **scalar_sigs,
+            ),
+            config_sigs=config_sigs or (self.drv.acquire_time,),
+            name=name,
+        )
+
+    @property
+    def hints(self) -> Hints:
+        return self._writer.hints

ophyd_async/epics/areadetector/writers/_hdffile.py
@@ -1,19 +1,33 @@
+from pathlib import Path
 from typing import Iterator, List

 from event_model import StreamDatum, StreamResource, compose_stream_resource

+from ophyd_async.core import DirectoryInfo
+
 from ._hdfdataset import _HDFDataset


 class _HDFFile:
-
+    """
+    :param directory_info: Contains information about how to construct a StreamResource
+    :param full_file_name: Absolute path to the file to be written
+    :param datasets: Datasets to write into the file
+    """
+
+    def __init__(
+        self,
+        directory_info: DirectoryInfo,
+        full_file_name: Path,
+        datasets: List[_HDFDataset],
+    ) -> None:
         self._last_emitted = 0
         self._bundles = [
             compose_stream_resource(
                 spec="AD_HDF5_SWMR_SLICE",
-                root=
+                root=str(directory_info.root),
                 data_key=ds.name,
-                resource_path=full_file_name,
+                resource_path=str(full_file_name.relative_to(directory_info.root)),
                 resource_kwargs={
                     "path": ds.path,
                     "multiplier": ds.multiplier,
@@ -30,10 +44,10 @@ class _HDFFile:
     def stream_data(self, indices_written: int) -> Iterator[StreamDatum]:
         # Indices are relative to resource
         if indices_written > self._last_emitted:
-            indices =
-            start
-            stop
-
+            indices = {
+                "start": self._last_emitted,
+                "stop": indices_written,
+            }
             self._last_emitted = indices_written
             for bundle in self._bundles:
                 yield bundle.compose_stream_datum(indices)
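The `_last_emitted` bookkeeping in `stream_data` means each call emits only the slice of frames written since the previous call, one `StreamDatum` per bundled dataset. A standalone sketch of just that bookkeeping (no `event_model` involved) shows the emitted index ranges:

```python
from typing import Dict, Iterator


class IndexTracker:
    """Mimics _HDFFile._last_emitted handling without composing real documents."""

    def __init__(self) -> None:
        self._last_emitted = 0

    def stream_data(self, indices_written: int) -> Iterator[Dict[str, int]]:
        if indices_written > self._last_emitted:
            indices = {"start": self._last_emitted, "stop": indices_written}
            self._last_emitted = indices_written
            yield indices


tracker = IndexTracker()
assert list(tracker.stream_data(5)) == [{"start": 0, "stop": 5}]
assert list(tracker.stream_data(8)) == [{"start": 5, "stop": 8}]
assert list(tracker.stream_data(8)) == []  # nothing new written, nothing emitted
```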

ophyd_async/epics/areadetector/writers/hdf_writer.py
@@ -1,7 +1,8 @@
 import asyncio
-from
+from pathlib import Path
+from typing import AsyncGenerator, AsyncIterator, Dict, List, Optional

-from bluesky.protocols import
+from bluesky.protocols import Descriptor, Hints, StreamAsset

 from ophyd_async.core import (
     DEFAULT_TIMEOUT,
@@ -13,6 +14,7 @@ from ophyd_async.core import (
     set_and_wait_for_value,
     wait_for_value,
 )
+from ophyd_async.core.signal import observe_value

 from ._hdfdataset import _HDFDataset
 from ._hdffile import _HDFFile
@@ -45,15 +47,16 @@ class HDFWriter(DetectorWriter):
             self.hdf.num_extra_dims.set(0),
             self.hdf.lazy_open.set(True),
             self.hdf.swmr_mode.set(True),
-
-            self.hdf.
+            # See https://github.com/bluesky/ophyd-async/issues/122
+            self.hdf.file_path.set(str(info.root / info.resource_dir)),
+            self.hdf.file_name.set(f"{info.prefix}{self.hdf.name}{info.suffix}"),
             self.hdf.file_template.set("%s/%s.h5"),
             self.hdf.file_write_mode.set(FileWriteMode.stream),
         )

         assert (
             await self.hdf.file_path_exists.get_value()
-        ), f"File path {
+        ), f"File path {self.hdf.file_path.get_value()} for hdf plugin does not exist"

         # Overwrite num_capture to go forever
         await self.hdf.num_capture.set(0)
@@ -88,27 +91,35 @@ class HDFWriter(DetectorWriter):
         }
         return describe

-    async def
-        self,
-    ):
-
-
-
-        matcher.__name__ = f"index_at_least_{index}"
-        await wait_for_value(self.hdf.num_captured, matcher, timeout=timeout)
+    async def observe_indices_written(
+        self, timeout=DEFAULT_TIMEOUT
+    ) -> AsyncGenerator[int, None]:
+        """Wait until a specific index is ready to be collected"""
+        async for num_captured in observe_value(self.hdf.num_captured, timeout):
+            yield num_captured // self._multiplier

     async def get_indices_written(self) -> int:
         num_captured = await self.hdf.num_captured.get_value()
         return num_captured // self._multiplier

-    async def collect_stream_docs(
+    async def collect_stream_docs(
+        self, indices_written: int
+    ) -> AsyncIterator[StreamAsset]:
         # TODO: fail if we get dropped frames
         await self.hdf.flush_now.set(True)
         if indices_written:
             if not self._file:
+                path = Path(await self.hdf.full_file_name.get_value())
                 self._file = _HDFFile(
-
+                    self._directory_provider(),
+                    # See https://github.com/bluesky/ophyd-async/issues/122
+                    path,
+                    self._datasets,
                 )
+                # stream resource says "here is a dataset",
+                # stream datum says "here are N frames in that stream resource",
+                # you get one stream resource and many stream datums per scan
+
         for doc in self._file.stream_resources():
             yield "stream_resource", doc
         for doc in self._file.stream_data(indices_written):