ophyd-async 0.3.1a1__tar.gz → 0.3.2__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/PKG-INFO +2 -2
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/pyproject.toml +1 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/_version.py +2 -2
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/device_save_loader.py +1 -23
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/signal.py +52 -19
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/soft_signal_backend.py +23 -9
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/_aioca.py +95 -18
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/_p4p.py +81 -16
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/common.py +24 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/pvi/pvi.py +26 -6
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_common_blocks.py +2 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_hdf_panda.py +0 -2
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_table.py +10 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/writers/_hdf_writer.py +22 -105
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/writers/_panda_hdf_file.py +4 -8
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/PKG-INFO +2 -2
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/SOURCES.txt +2 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/requires.txt +1 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_device_save_loader.py +99 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_mock_signal_backend.py +32 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_signal.py +31 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_soft_signal_backend.py +37 -1
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/test_pvi.py +58 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/test_records.db +28 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/test_signals.py +330 -82
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_hdf_panda.py +22 -24
- ophyd_async-0.3.2/tests/panda/test_writer.py +198 -0
- ophyd_async-0.3.2/tests/test_data/test_yaml_save.yml +22 -0
- ophyd_async-0.3.1a1/tests/panda/test_writer.py +0 -208
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.codecov.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.copier-answers.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.devcontainer/devcontainer.json +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.git-blame-ignore-revs +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/CONTRIBUTING.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/actions/install_requirements/action.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/dependabot.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/pages/index.html +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/pages/make_switcher.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_check.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_dist.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_docs.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_pypi.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_release.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_test.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/_tox.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/ci.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.github/workflows/periodic.yml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.gitignore +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.mailmap +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/.pre-commit-config.yaml +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/Dockerfile +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/LICENSE +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/README.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/_templates/README +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/_templates/custom-class-template.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/_templates/custom-module-template.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/conf.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/examples/epics_demo.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/examples/foo_detector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0001-record-architecture-decisions.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0002-switched-to-python-copier-template.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0003-ophyd-async-migration.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0004-repository-structure.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0005-respect-black-line-length.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/0006-procedural-device-definitions.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions/COPYME +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/decisions.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/design-goals.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/event-loop-choice.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations/flyscanning.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/explanations.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/genindex.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/choose-interfaces-for-devices.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/compound-devices.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/contribute.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/make-a-simple-device.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/make-a-standard-detector.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to/write-tests-for-devices.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/how-to.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/images/bluesky_ophyd_epics_devices_logo.svg +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/images/bluesky_ophyd_logo.svg +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/images/ophyd_favicon.svg +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/index.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/reference/api.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/reference.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/tutorials/installation.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/tutorials/using-existing-devices.rst +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/docs/tutorials.md +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/setup.cfg +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/__main__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/_providers.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/async_status.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/detector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/device.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/flyer.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/mock_signal_backend.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/mock_signal_utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/signal_backend.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/standard_readable.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/aravis.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/ad_sim_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/aravis_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/kinetix_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/pilatus_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/controllers/vimba_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/ad_base.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/aravis_driver.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/kinetix_driver.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/pilatus_driver.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/drivers/vimba_driver.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/kinetix.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/pilatus.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/single_trigger_det.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/vimba.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/_hdfdataset.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/_hdffile.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/hdf_writer.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/nd_file_hdf.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/areadetector/writers/nd_plugin.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/demo/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/demo/demo_ad_sim_detector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/demo/mover.db +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/demo/sensor.db +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/motion/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/motion/motor.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/pvi/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/signal/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/signal/_epics_transport.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/signal/signal.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/log.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_panda_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_trigger.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/_utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/panda/writers/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/plan_stubs/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/plan_stubs/ensure_connected.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/plan_stubs/fly.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/protocols.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/demo/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/demo/sim_motor.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/pattern_generator.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/sim_pattern_detector_control.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/sim_pattern_detector_writer.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/sim/sim_pattern_generator.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/dependency_links.txt +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/entry_points.txt +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async.egg-info/top_level.txt +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/conftest.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_async_status.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_device.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_device_collector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_flyer.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_standard_readable.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/core/test_watchable_async_status.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/_backend/test_common.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_aravis.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_controllers.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_drivers.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_kinetix.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_pilatus.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_scans.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_single_trigger_det.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_vimba.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/areadetector/test_writers.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/demo/test_demo.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/demo/test_demo_ad_sim_detector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/motion/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/epics/motion/test_motor.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/db/panda.db +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_panda_connect.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_panda_controller.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_panda_utils.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_table.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/panda/test_trigger.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/plan_stubs/test_fly.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/protocols/test_protocols.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/conftest.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/demo/__init__.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/demo/test_sim_motor.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/test_pattern_generator.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/test_sim_detector.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/test_sim_writer.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/sim/test_streaming_plan.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/test_cli.py +0 -0
- {ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/tests/test_log.py +0 -0
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ophyd-async
-Version: 0.3.1a1
+Version: 0.3.2
 Summary: Asynchronous Bluesky hardware abstraction code, compatible with control systems like EPICS and Tango
 Author-email: Tom Cobb <tom.cobb@diamond.ac.uk>
 License: BSD 3-Clause License
@@ -41,7 +41,7 @@ Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: networkx>=2.0
-Requires-Dist: numpy
+Requires-Dist: numpy<2.0.0
 Requires-Dist: packaging
 Requires-Dist: pint
 Requires-Dist: bluesky>=1.13.0a3
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/device_save_loader.py

@@ -1,6 +1,5 @@
 from enum import Enum
-from functools import partial
-from typing import Any, Callable, Dict, Generator, List, Optional, Sequence, Union
+from typing import Any, Callable, Dict, Generator, List, Optional, Sequence
 
 import numpy as np
 import numpy.typing as npt
@@ -8,13 +7,10 @@ import yaml
 from bluesky.plan_stubs import abs_set, wait
 from bluesky.protocols import Location
 from bluesky.utils import Msg
-from epicscorelibs.ca.dbr import ca_array, ca_float, ca_int, ca_str
 
 from .device import Device
 from .signal import SignalRW
 
-CaType = Union[ca_float, ca_int, ca_str, ca_array]
-
 
 def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
     return dumper.represent_sequence(
@@ -22,19 +18,6 @@ def ndarray_representer(dumper: yaml.Dumper, array: npt.NDArray[Any]) -> yaml.Node:
     )
 
 
-def ca_dbr_representer(dumper: yaml.Dumper, value: CaType) -> yaml.Node:
-    # if it's an array, just call ndarray_representer...
-    represent_array = partial(ndarray_representer, dumper)
-
-    representers: Dict[CaType, Callable[[CaType], yaml.Node]] = {
-        ca_float: dumper.represent_float,
-        ca_int: dumper.represent_int,
-        ca_str: dumper.represent_str,
-        ca_array: represent_array,
-    }
-    return representers[type(value)](value)
-
-
 class OphydDumper(yaml.Dumper):
     def represent_data(self, data: Any) -> Any:
         if isinstance(data, Enum):
@@ -152,11 +135,6 @@ def save_to_yaml(phases: Sequence[Dict[str, Any]], save_path: str) -> None:
 
     yaml.add_representer(np.ndarray, ndarray_representer, Dumper=yaml.Dumper)
 
-    yaml.add_representer(ca_float, ca_dbr_representer, Dumper=yaml.Dumper)
-    yaml.add_representer(ca_int, ca_dbr_representer, Dumper=yaml.Dumper)
-    yaml.add_representer(ca_str, ca_dbr_representer, Dumper=yaml.Dumper)
-    yaml.add_representer(ca_array, ca_dbr_representer, Dumper=yaml.Dumper)
-
     with open(save_path, "w") as file:
         yaml.dump(phases, file, Dumper=OphydDumper, default_flow_style=False)
 
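With the epicscorelibs representers removed, `save_to_yaml` now only registers the numpy array representer; plain scalars and strings fall back to PyYAML's defaults. A minimal usage sketch of the surviving API (the phase keys and values below are illustrative, not taken from this release):

```python
# Sketch: save a list of "phases" (dicts of signal name -> value) to YAML.
# save_to_yaml registers ndarray_representer so numpy arrays serialise as lists.
import numpy as np

from ophyd_async.core.device_save_loader import save_to_yaml

phases = [
    {"detector.exposure_time": 0.1, "detector.roi": np.array([0, 0, 128, 128])},
    {"panda.some_setting": "enabled"},
]
save_to_yaml(phases, "device_settings.yml")
```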
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/signal.py

@@ -31,7 +31,7 @@ from ophyd_async.protocols import AsyncConfigurable, AsyncReadable, AsyncStageable
 from .async_status import AsyncStatus
 from .device import Device
 from .signal_backend import SignalBackend
-from .soft_signal_backend import SoftSignalBackend
+from .soft_signal_backend import SignalMetadata, SoftSignalBackend
 from .utils import DEFAULT_TIMEOUT, CalculatableTimeout, CalculateTimeout, Callback, T
 
 
@@ -57,7 +57,7 @@ class Signal(Device, Generic[T]):
 
     def __init__(
         self,
-        backend: SignalBackend[T],
+        backend: Optional[SignalBackend[T]] = None,
         timeout: Optional[float] = DEFAULT_TIMEOUT,
         name: str = "",
     ) -> None:
@@ -66,13 +66,24 @@ class Signal(Device, Generic[T]):
         super().__init__(name)
 
     async def connect(
-        self,
+        self,
+        mock=False,
+        timeout=DEFAULT_TIMEOUT,
+        force_reconnect: bool = False,
+        backend: Optional[SignalBackend[T]] = None,
     ):
+        if backend:
+            if self._initial_backend and backend is not self._initial_backend:
+                raise ValueError(
+                    "Backend at connection different from initialised one."
+                )
+            self._backend = backend
         if mock and not isinstance(self._backend, MockSignalBackend):
             # Using a soft backend, look to the initial value
-            self._backend = MockSignalBackend(
-
-
+            self._backend = MockSignalBackend(initial_backend=self._backend)
+
+        if self._backend is None:
+            raise RuntimeError("`connect` called on signal without backend")
         self.log.debug(f"Connecting to {self.source}")
         await self._backend.connect(timeout=timeout)
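The new `connect()` signature allows a backend to be supplied at connection time, and refuses a backend that differs from the one the Signal was initialised with. A hedged sketch of the intended call pattern (the signal name and values are illustrative, not from this diff):

```python
# Sketch: construct a Signal without a backend and provide one on connect.
import asyncio

from ophyd_async.core import SignalRW, SoftSignalBackend


async def main() -> None:
    sig = SignalRW(name="temperature")  # backend is now Optional
    await sig.connect(backend=SoftSignalBackend(float, 20.0))

    await sig.set(21.0)
    assert await sig.get_value() == 21.0

    # Supplying a backend different from the one passed at __init__ raises
    # ValueError("Backend at connection different from initialised one.")


asyncio.run(main())
```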
@@ -261,9 +272,17 @@ def soft_signal_rw(
     datatype: Optional[Type[T]] = None,
     initial_value: Optional[T] = None,
     name: str = "",
+    units: str | None = None,
+    precision: int | None = None,
 ) -> SignalRW[T]:
-    """Creates a read-writable Signal with a SoftSignalBackend
-
+    """Creates a read-writable Signal with a SoftSignalBackend.
+    May pass metadata, which are propagated into describe.
+    """
+    metadata = SignalMetadata(units=units, precision=precision)
+    signal = SignalRW(
+        SoftSignalBackend(datatype, initial_value, metadata=metadata),
+        name=name,
+    )
     return signal
 
 
@@ -271,27 +290,31 @@ def soft_signal_r_and_setter(
     datatype: Optional[Type[T]] = None,
     initial_value: Optional[T] = None,
     name: str = "",
+    units: str | None = None,
+    precision: int | None = None,
 ) -> Tuple[SignalR[T], Callable[[T], None]]:
     """Returns a tuple of a read-only Signal and a callable through
-    which the signal can be internally modified within the device.
-
+    which the signal can be internally modified within the device.
+    May pass metadata, which are propagated into describe.
+    Use soft_signal_rw if you want a device that is externally modifiable
     """
-
+    metadata = SignalMetadata(units=units, precision=precision)
+    backend = SoftSignalBackend(datatype, initial_value, metadata=metadata)
     signal = SignalR(backend, name=name)
 
     return (signal, backend.set_value)
 
 
 def _generate_assert_error_msg(
-    name: str, expected_result: str,
+    name: str, expected_result: str, actual_result: str
 ) -> str:
     WARNING = "\033[93m"
     FAIL = "\033[91m"
     ENDC = "\033[0m"
     return (
         f"Expected {WARNING}{name}{ENDC} to produce"
-        + f"\n{FAIL}{
-        + f"\nbut actually got \n{FAIL}{
+        + f"\n{FAIL}{expected_result}{ENDC}"
+        + f"\nbut actually got \n{FAIL}{actual_result}{ENDC}"
     )
 
 
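`soft_signal_rw` and `soft_signal_r_and_setter` now accept `units` and `precision`, which travel through `SignalMetadata` into the backend and out of `describe()`. A minimal sketch, assuming the helpers are imported from `ophyd_async.core.signal` as defined above (the names and values are illustrative):

```python
# Sketch: soft signal metadata propagating into the DataKey returned by describe().
import asyncio

from ophyd_async.core.signal import soft_signal_rw


async def main() -> None:
    position = soft_signal_rw(float, 0.0, name="position", units="mm", precision=3)
    await position.connect()

    datakey = (await position.describe())["position"]
    assert datakey["units"] == "mm"
    assert datakey["precision"] == 3


asyncio.run(main())
```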
@@ -313,7 +336,9 @@ async def assert_value(signal: SignalR[T], value: Any) -> None:
     """
     actual_value = await signal.get_value()
     assert actual_value == value, _generate_assert_error_msg(
-        signal.name,
+        name=signal.name,
+        expected_result=value,
+        actual_result=actual_value,
     )
 
 
@@ -338,7 +363,9 @@ async def assert_reading(
     """
     actual_reading = await readable.read()
     assert expected_reading == actual_reading, _generate_assert_error_msg(
-        readable.name,
+        name=readable.name,
+        expected_result=expected_reading,
+        actual_result=actual_reading,
     )
 
 
@@ -364,7 +391,9 @@ async def assert_configuration(
     """
     actual_configurable = await configurable.read_configuration()
     assert configuration == actual_configurable, _generate_assert_error_msg(
-        configurable.name,
+        name=configurable.name,
+        expected_result=configuration,
+        actual_result=actual_configurable,
    )
 
 
@@ -386,11 +415,15 @@ def assert_emitted(docs: Mapping[str, list[dict]], **numbers: int):
         resource=1, datum=1, event=1, stop=1)
     """
     assert list(docs) == list(numbers), _generate_assert_error_msg(
-        "documents",
+        name="documents",
+        expected_result=list(numbers),
+        actual_result=list(docs),
     )
     actual_numbers = {name: len(d) for name, d in docs.items()}
     assert actual_numbers == numbers, _generate_assert_error_msg(
-        "emitted",
+        name="emitted",
+        expected_result=numbers,
+        actual_result=actual_numbers,
     )
 
 
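The assertion helpers now pass both `expected_result` and `actual_result` to `_generate_assert_error_msg`, so a failure reports both sides. A short sketch of how they are called in tests (the signal and document counts are illustrative):

```python
# Sketch: using the test helpers whose error reporting changed above.
import asyncio

from ophyd_async.core.signal import assert_emitted, assert_value, soft_signal_rw


async def main() -> None:
    sig = soft_signal_rw(int, 3, name="counter")
    await sig.connect()
    await assert_value(sig, 3)  # passes; a mismatch would show expected vs actual

    docs = {"start": [{}], "event": [{}, {}], "stop": [{}]}
    assert_emitted(docs, start=1, event=2, stop=1)


asyncio.run(main())
```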
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/core/soft_signal_backend.py

@@ -5,7 +5,7 @@ import time
 from collections import abc
 from dataclasses import dataclass
 from enum import Enum
-from typing import Dict, Generic, Optional, Type, Union, cast, get_origin
+from typing import Dict, Generic, Optional, Type, TypedDict, Union, cast, get_origin
 
 import numpy as np
 from bluesky.protocols import DataKey, Dtype, Reading
@@ -21,6 +21,11 @@ primitive_dtypes: Dict[type, Dtype] = {
 }
 
 
+class SignalMetadata(TypedDict):
+    units: str | None = None
+    precision: int | None = None
+
+
 class SoftConverter(Generic[T]):
     def value(self, value: T) -> T:
         return value
@@ -35,7 +40,8 @@ class SoftConverter(Generic[T]):
             alarm_severity=-1 if severity > 2 else severity,
         )
 
-    def get_datakey(self, source: str, value) -> DataKey:
+    def get_datakey(self, source: str, value, **metadata) -> DataKey:
+        dk = {"source": source, "shape": [], **metadata}
         dtype = type(value)
         if np.issubdtype(dtype, np.integer):
             dtype = int
@@ -44,8 +50,8 @@ class SoftConverter(Generic[T]):
         assert (
             dtype in primitive_dtypes
         ), f"invalid converter for value of type {type(value)}"
-
-        return
+        dk["dtype"] = primitive_dtypes[dtype]
+        return dk
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -55,8 +61,8 @@ class SoftConverter(Generic[T]):
 
 
 class SoftArrayConverter(SoftConverter):
-    def get_datakey(self, source: str, value) -> DataKey:
-        return {"source": source, "dtype": "array", "shape": [len(value)]}
+    def get_datakey(self, source: str, value, **metadata) -> DataKey:
+        return {"source": source, "dtype": "array", "shape": [len(value)], **metadata}
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -78,9 +84,15 @@ class SoftEnumConverter(SoftConverter):
         else:
             return self.enum_class(value)
 
-    def get_datakey(self, source: str, value) -> DataKey:
+    def get_datakey(self, source: str, value, **metadata) -> DataKey:
         choices = [e.value for e in self.enum_class]
-        return {
+        return {
+            "source": source,
+            "dtype": "string",
+            "shape": [],
+            "choices": choices,
+            **metadata,
+        }
 
     def make_initial_value(self, datatype: Optional[Type[T]]) -> T:
         if datatype is None:
@@ -114,9 +126,11 @@ class SoftSignalBackend(SignalBackend[T]):
         self,
         datatype: Optional[Type[T]],
         initial_value: Optional[T] = None,
+        metadata: SignalMetadata = None,
     ) -> None:
         self.datatype = datatype
         self._initial_value = initial_value
+        self._metadata = metadata or {}
         self.converter: SoftConverter = make_converter(datatype)
         if self._initial_value is None:
             self._initial_value = self.converter.make_initial_value(self.datatype)
@@ -155,7 +169,7 @@ class SoftSignalBackend(SignalBackend[T]):
             self.callback(reading, self._value)
 
     async def get_datakey(self, source: str) -> DataKey:
-        return self.converter.get_datakey(source, self._value)
+        return self.converter.get_datakey(source, self._value, **self._metadata)
 
     async def get_reading(self) -> Reading:
         return self.converter.reading(self._value, self._timestamp, self._severity)
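`SoftSignalBackend` now stores the metadata mapping and merges it into every DataKey its converters produce. A hedged sketch of using the backend directly (assuming `connect` accepts a `timeout` keyword, as it is called from `Signal.connect` above; the source string is arbitrary):

```python
# Sketch: SignalMetadata flowing from a SoftSignalBackend into its DataKey.
import asyncio

from ophyd_async.core.soft_signal_backend import SignalMetadata, SoftSignalBackend


async def main() -> None:
    backend = SoftSignalBackend(
        float, 1.23, metadata=SignalMetadata(units="V", precision=2)
    )
    await backend.connect(timeout=1)

    datakey = await backend.get_datakey("soft://demo")
    # Roughly: {"source": "soft://demo", "shape": [], "dtype": "number",
    #           "units": "V", "precision": 2}


asyncio.run(main())
```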
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/_aioca.py

@@ -2,8 +2,10 @@ import logging
 import sys
 from dataclasses import dataclass
 from enum import Enum
-from
+from math import isnan, nan
+from typing import Any, Dict, List, Optional, Type, Union
 
+import numpy as np
 from aioca import (
     FORMAT_CTRL,
     FORMAT_RAW,
@@ -28,7 +30,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_values
+from .common import LimitPair, Limits, common_meta, get_supported_values
 
 dbr_to_dtype: Dict[Dbr, Dtype] = {
     dbr.DBR_STRING: "string",
@@ -40,6 +42,66 @@ dbr_to_dtype: Dict[Dbr, Dtype] = {
 }
 
 
+def _data_key_from_augmented_value(
+    value: AugmentedValue,
+    *,
+    choices: Optional[List[str]] = None,
+    dtype: Optional[str] = None,
+) -> DataKey:
+    """Use the return value of get with FORMAT_CTRL to construct a DataKey
+    describing the signal. See docstring of AugmentedValue for expected
+    value fields by DBR type.
+
+    Args:
+        value (AugmentedValue): Description of the the return type of a DB record
+        choices: Optional list of enum choices to pass as metadata in the datakey
+        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+    Returns:
+        DataKey: A rich DataKey describing the DB record
+    """
+    source = f"ca://{value.name}"
+    assert value.ok, f"Error reading {source}: {value}"
+
+    scalar = value.element_count == 1
+    dtype = dtype or dbr_to_dtype[value.datatype]
+
+    d = DataKey(
+        source=source,
+        dtype=dtype if scalar else "array",
+        # strictly value.element_count >= len(value)
+        shape=[] if scalar else [len(value)],
+    )
+    for key in common_meta:
+        attr = getattr(value, key, nan)
+        if isinstance(attr, str) or not isnan(attr):
+            d[key] = attr
+
+    if choices is not None:
+        d["choices"] = choices
+
+    if limits := _limits_from_augmented_value(value):
+        d["limits"] = limits
+
+    return d
+
+
+def _limits_from_augmented_value(value: AugmentedValue) -> Limits:
+    def get_limits(limit: str) -> LimitPair:
+        low = getattr(value, f"lower_{limit}_limit", nan)
+        high = getattr(value, f"upper_{limit}_limit", nan)
+        return LimitPair(
+            low=None if isnan(low) else low, high=None if isnan(high) else high
+        )
+
+    return Limits(
+        alarm=get_limits("alarm"),
+        control=get_limits("ctrl"),
+        display=get_limits("disp"),
+        warning=get_limits("warning"),
+    )
+
+
 @dataclass
 class CaConverter:
     read_dbr: Optional[Dbr]
@@ -49,7 +111,10 @@ class CaConverter:
         return value
 
     def value(self, value: AugmentedValue):
-
+        # for channel access ca_xxx classes, this
+        # invokes __pos__ operator to return an instance of
+        # the builtin base class
+        return +value
 
     def reading(self, value: AugmentedValue):
         return {
@@ -58,8 +123,8 @@ class CaConverter:
             "alarm_severity": -1 if value.severity > 2 else value.severity,
         }
 
-    def get_datakey(self,
-        return
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        return _data_key_from_augmented_value(value)
 
 
 class CaLongStrConverter(CaConverter):
@@ -73,12 +138,17 @@ class CaLongStrConverter(CaConverter):
 
 
 class CaArrayConverter(CaConverter):
-    def
-        return
+    def value(self, value: AugmentedValue):
+        return np.array(value, copy=False)
 
 
 @dataclass
 class CaEnumConverter(CaConverter):
+    """To prevent issues when a signal is restarted and returns with different enum
+    values or orders, we put treat an Enum signal as a string, and cache the
+    choices on this class.
+    """
+
     choices: dict[str, str]
 
     def write_value(self, value: Union[Enum, str]):
@@ -90,13 +160,18 @@ class CaEnumConverter(CaConverter):
     def value(self, value: AugmentedValue):
         return self.choices[value]
 
-    def get_datakey(self,
-
-
-
-
-
-
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        # Sometimes DBR_TYPE returns as String, must pass choices still
+        return _data_key_from_augmented_value(value, choices=list(self.choices.keys()))
+
+
+@dataclass
+class CaBoolConverter(CaConverter):
+    def value(self, value: AugmentedValue) -> bool:
+        return bool(value)
+
+    def get_datakey(self, value: AugmentedValue) -> DataKey:
+        return _data_key_from_augmented_value(value, dtype="bool")
 
 
 class DisconnectedCaConverter(CaConverter):
@@ -115,8 +190,10 @@ def make_converter(
         return CaLongStrConverter()
     elif is_array and pv_dbr == dbr.DBR_STRING:
         # Waveform of strings, check we wanted this
-        if datatype
-
+        if datatype:
+            datatype_dtype = get_dtype(datatype)
+            if not datatype_dtype or not np.can_cast(datatype_dtype, np.str_):
+                raise TypeError(f"{pv} has type [str] not {datatype.__name__}")
         return CaArrayConverter(pv_dbr, None)
     elif is_array:
         pv_dtype = get_unique({k: v.dtype for k, v in values.items()}, "dtypes")
@@ -136,7 +213,7 @@ def make_converter(
         )
         if pv_choices_len != 2:
             raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-        return
+        return CaBoolConverter(dbr.DBR_SHORT, dbr.DBR_SHORT)
     elif pv_dbr == dbr.DBR_ENUM:
         # This is an Enum
         pv_choices = get_unique(
@@ -224,7 +301,7 @@ class CaSignalBackend(SignalBackend[T]):
 
     async def get_datakey(self, source: str) -> DataKey:
         value = await self._caget(FORMAT_CTRL)
-        return self.converter.get_datakey(
+        return self.converter.get_datakey(value)
 
     async def get_reading(self) -> Reading:
         value = await self._caget(FORMAT_TIME)
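On the Channel Access side, `_data_key_from_augmented_value` copies metadata fields (per `common_meta`, e.g. units and precision) from a FORMAT_CTRL read into the DataKey, and `_limits_from_augmented_value` maps the `lower_*_limit`/`upper_*_limit` attributes onto alarm, control, display and warning pairs. A rough sketch of that limit mapping using a stand-in object instead of a live aioca `AugmentedValue` (both helpers are private and require the CA extra installed; this is illustration only):

```python
# Sketch: how lower_/upper_ *_limit attributes become Limits (attribute names per the diff).
from math import nan
from types import SimpleNamespace

from ophyd_async.epics._backend._aioca import _limits_from_augmented_value

fake_ctrl_value = SimpleNamespace(
    lower_alarm_limit=nan, upper_alarm_limit=nan,    # unset -> None bounds
    lower_ctrl_limit=0.0, upper_ctrl_limit=10.0,
    lower_disp_limit=0.0, upper_disp_limit=10.0,
    lower_warning_limit=1.0, upper_warning_limit=9.0,
)
limits = _limits_from_augmented_value(fake_ctrl_value)
# limits now pairs low/high for alarm ("alarm"), control ("ctrl"),
# display ("disp") and warning ("warning") fields of the record.
```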
{ophyd_async-0.3.1a1 → ophyd_async-0.3.2}/src/ophyd_async/epics/_backend/_p4p.py

@@ -4,6 +4,7 @@ import logging
 import time
 from dataclasses import dataclass
 from enum import Enum
+from math import isnan, nan
 from typing import Any, Dict, List, Optional, Sequence, Type, Union
 
 from bluesky.protocols import DataKey, Dtype, Reading
@@ -20,7 +21,7 @@ from ophyd_async.core import (
 )
 from ophyd_async.core.utils import DEFAULT_TIMEOUT, NotConnected
 
-from .common import get_supported_values
+from .common import LimitPair, Limits, common_meta, get_supported_values
 
 # https://mdavidsaver.github.io/p4p/values.html
 specifier_to_dtype: Dict[str, Dtype] = {
@@ -39,6 +40,67 @@ specifier_to_dtype: Dict[str, Dtype] = {
 }
 
 
+def _data_key_from_value(
+    source: str,
+    value: Value,
+    *,
+    shape: Optional[list[int]] = None,
+    choices: Optional[list[str]] = None,
+    dtype: Optional[str] = None,
+) -> DataKey:
+    """
+    Args:
+        value (Value): Description of the the return type of a DB record
+        shape: Optional override shape when len(shape) > 1
+        choices: Optional list of enum choices to pass as metadata in the datakey
+        dtype: Optional override dtype when AugmentedValue is ambiguous, e.g. booleans
+
+    Returns:
+        DataKey: A rich DataKey describing the DB record
+    """
+    shape = shape or []
+    dtype = dtype or specifier_to_dtype[value.type().aspy("value")]
+    display_data = getattr(value, "display", None)
+
+    d = DataKey(
+        source=source,
+        dtype=dtype,
+        shape=shape,
+    )
+    if display_data is not None:
+        for key in common_meta:
+            attr = getattr(display_data, key, nan)
+            if isinstance(attr, str) or not isnan(attr):
+                d[key] = attr
+
+    if choices is not None:
+        d["choices"] = choices
+
+    if limits := _limits_from_value(value):
+        d["limits"] = limits
+
+    return d
+
+
+def _limits_from_value(value: Value) -> Limits:
+    def get_limits(
+        substucture_name: str, low_name: str = "limitLow", high_name: str = "limitHigh"
+    ) -> LimitPair:
+        substructure = getattr(value, substucture_name, None)
+        low = getattr(substructure, low_name, nan)
+        high = getattr(substructure, high_name, nan)
+        return LimitPair(
+            low=None if isnan(low) else low, high=None if isnan(high) else high
+        )
+
+    return Limits(
+        alarm=get_limits("valueAlarm", "lowAlarmLimit", "highAlarmLimit"),
+        control=get_limits("control"),
+        display=get_limits("display"),
+        warning=get_limits("valueAlarm", "lowWarningLimit", "highWarningLimit"),
+    )
+
+
 class PvaConverter:
     def write_value(self, value):
         return value
@@ -56,8 +118,7 @@ class PvaConverter:
         }
 
     def get_datakey(self, source: str, value) -> DataKey:
-
-        return {"source": source, "dtype": dtype, "shape": []}
+        return _data_key_from_value(source, value)
 
     def metadata_fields(self) -> List[str]:
         """
@@ -74,7 +135,9 @@ class PvaConverter:
 
 class PvaArrayConverter(PvaConverter):
     def get_datakey(self, source: str, value) -> DataKey:
-        return
+        return _data_key_from_value(
+            source, value, dtype="array", shape=[len(value["value"])]
+        )
 
 
 class PvaNDArrayConverter(PvaConverter):
@@ -98,7 +161,7 @@ class PvaNDArrayConverter(PvaConverter):
 
     def get_datakey(self, source: str, value) -> DataKey:
         dims = self._get_dimensions(value)
-        return
+        return _data_key_from_value(source, value, dtype="array", shape=dims)
 
     def write_value(self, value):
         # No clear use-case for writing directly to an NDArray, and some
@@ -109,6 +172,11 @@ class PvaNDArrayConverter(PvaConverter):
 
 @dataclass
 class PvaEnumConverter(PvaConverter):
+    """To prevent issues when a signal is restarted and returns with different enum
+    values or orders, we put treat an Enum signal as a string, and cache the
+    choices on this class.
+    """
+
     def __init__(self, choices: dict[str, str]):
         self.choices = tuple(choices.values())
 
@@ -122,20 +190,17 @@ class PvaEnumConverter(PvaConverter):
         return self.choices[value["value"]["index"]]
 
     def get_datakey(self, source: str, value) -> DataKey:
-        return
-
-
-            "shape": [],
-            "choices": list(self.choices),
-        }
+        return _data_key_from_value(
+            source, value, choices=list(self.choices), dtype="string"
+        )
 
 
-class
+class PvaEmumBoolConverter(PvaConverter):
     def value(self, value):
-        return value["value"]["index"]
+        return bool(value["value"]["index"])
 
     def get_datakey(self, source: str, value) -> DataKey:
-        return
+        return _data_key_from_value(source, value, dtype="bool")
 
 
 class PvaTableConverter(PvaConverter):
@@ -144,7 +209,7 @@ class PvaTableConverter(PvaConverter):
 
     def get_datakey(self, source: str, value) -> DataKey:
         # This is wrong, but defer until we know how to actually describe a table
-        return
+        return _data_key_from_value(source, value, dtype="object")
 
 
 class PvaDictConverter(PvaConverter):
@@ -213,7 +278,7 @@ def make_converter(datatype: Optional[Type], values: Dict[str, Any]) -> PvaConverter:
         )
         if pv_choices_len != 2:
             raise TypeError(f"{pv} has {pv_choices_len} choices, can't map to bool")
-        return
+        return PvaEmumBoolConverter()
     elif "NTEnum" in typeid:
         # This is an Enum
         pv_choices = get_unique(
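The PVA backend mirrors the CA change: units and precision come from the `display` substructure of the p4p `Value`, while limits are read from `control`, `display` and `valueAlarm`. A rough sketch of `_limits_from_value` with stand-in objects (illustration only; the helper is private and a real `Value` requires a live PV):

```python
# Sketch: which PVA substructure fields feed the Limits mapping (names per the diff).
from math import nan
from types import SimpleNamespace

from ophyd_async.epics._backend._p4p import _limits_from_value

fake_value = SimpleNamespace(
    control=SimpleNamespace(limitLow=0.0, limitHigh=10.0),
    display=SimpleNamespace(limitLow=0.0, limitHigh=10.0),
    valueAlarm=SimpleNamespace(
        lowAlarmLimit=nan, highAlarmLimit=nan,       # unset -> None bounds
        lowWarningLimit=1.0, highWarningLimit=9.0,
    ),
)
limits = _limits_from_value(fake_value)
# alarm and warning come from valueAlarm's low/high*Limit fields;
# control and display come from their limitLow/limitHigh fields.
```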