np-services 0.1.69__py3-none-any.whl → 0.1.71__py3-none-any.whl
This diff compares publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
- np_services/__init__.py +8 -8
- np_services/open_ephys.py +377 -378
- np_services/protocols.py +185 -185
- np_services/proxies.py +1489 -1489
- np_services/resources/mvr_connector.py +260 -260
- np_services/resources/zro.py +325 -325
- np_services/scripts/pretest.py +412 -389
- np_services/stim_computer_theme_changer.py +41 -41
- np_services/utils.py +167 -167
- {np_services-0.1.69.dist-info → np_services-0.1.71.dist-info}/METADATA +5 -5
- np_services-0.1.71.dist-info/RECORD +15 -0
- {np_services-0.1.69.dist-info → np_services-0.1.71.dist-info}/WHEEL +2 -1
- {np_services-0.1.69.dist-info → np_services-0.1.71.dist-info}/entry_points.txt +1 -1
- np_services-0.1.71.dist-info/top_level.txt +1 -0
- np_services/.mypy_cache/.gitignore +0 -2
- np_services/.mypy_cache/3.9/@plugins_snapshot.json +0 -1
- np_services/.mypy_cache/3.9/__future__.data.json +0 -1
- np_services/.mypy_cache/3.9/__future__.meta.json +0 -1
- np_services/.mypy_cache/3.9/_ast.data.json +0 -1
- np_services/.mypy_cache/3.9/_ast.meta.json +0 -1
- np_services/.mypy_cache/3.9/_codecs.data.json +0 -1
- np_services/.mypy_cache/3.9/_codecs.meta.json +0 -1
- np_services/.mypy_cache/3.9/_collections_abc.data.json +0 -1
- np_services/.mypy_cache/3.9/_collections_abc.meta.json +0 -1
- np_services/.mypy_cache/3.9/_ctypes.data.json +0 -1
- np_services/.mypy_cache/3.9/_ctypes.meta.json +0 -1
- np_services/.mypy_cache/3.9/_decimal.data.json +0 -1
- np_services/.mypy_cache/3.9/_decimal.meta.json +0 -1
- np_services/.mypy_cache/3.9/_random.data.json +0 -1
- np_services/.mypy_cache/3.9/_random.meta.json +0 -1
- np_services/.mypy_cache/3.9/_socket.data.json +0 -1
- np_services/.mypy_cache/3.9/_socket.meta.json +0 -1
- np_services/.mypy_cache/3.9/_thread.data.json +0 -1
- np_services/.mypy_cache/3.9/_thread.meta.json +0 -1
- np_services/.mypy_cache/3.9/_typeshed/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/_typeshed/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/_warnings.data.json +0 -1
- np_services/.mypy_cache/3.9/_warnings.meta.json +0 -1
- np_services/.mypy_cache/3.9/_weakref.data.json +0 -1
- np_services/.mypy_cache/3.9/_weakref.meta.json +0 -1
- np_services/.mypy_cache/3.9/_weakrefset.data.json +0 -1
- np_services/.mypy_cache/3.9/_weakrefset.meta.json +0 -1
- np_services/.mypy_cache/3.9/_winapi.data.json +0 -1
- np_services/.mypy_cache/3.9/_winapi.meta.json +0 -1
- np_services/.mypy_cache/3.9/abc.data.json +0 -1
- np_services/.mypy_cache/3.9/abc.meta.json +0 -1
- np_services/.mypy_cache/3.9/array.data.json +0 -1
- np_services/.mypy_cache/3.9/array.meta.json +0 -1
- np_services/.mypy_cache/3.9/atexit.data.json +0 -1
- np_services/.mypy_cache/3.9/atexit.meta.json +0 -1
- np_services/.mypy_cache/3.9/builtins.data.json +0 -1
- np_services/.mypy_cache/3.9/builtins.meta.json +0 -1
- np_services/.mypy_cache/3.9/codecs.data.json +0 -1
- np_services/.mypy_cache/3.9/codecs.meta.json +0 -1
- np_services/.mypy_cache/3.9/collections/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/collections/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/collections/abc.data.json +0 -1
- np_services/.mypy_cache/3.9/collections/abc.meta.json +0 -1
- np_services/.mypy_cache/3.9/contextlib.data.json +0 -1
- np_services/.mypy_cache/3.9/contextlib.meta.json +0 -1
- np_services/.mypy_cache/3.9/ctypes/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/ctypes/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/datetime.data.json +0 -1
- np_services/.mypy_cache/3.9/datetime.meta.json +0 -1
- np_services/.mypy_cache/3.9/decimal.data.json +0 -1
- np_services/.mypy_cache/3.9/decimal.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/email/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/charset.data.json +0 -1
- np_services/.mypy_cache/3.9/email/charset.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/contentmanager.data.json +0 -1
- np_services/.mypy_cache/3.9/email/contentmanager.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/errors.data.json +0 -1
- np_services/.mypy_cache/3.9/email/errors.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/header.data.json +0 -1
- np_services/.mypy_cache/3.9/email/header.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/message.data.json +0 -1
- np_services/.mypy_cache/3.9/email/message.meta.json +0 -1
- np_services/.mypy_cache/3.9/email/policy.data.json +0 -1
- np_services/.mypy_cache/3.9/email/policy.meta.json +0 -1
- np_services/.mypy_cache/3.9/enum.data.json +0 -1
- np_services/.mypy_cache/3.9/enum.meta.json +0 -1
- np_services/.mypy_cache/3.9/errno.data.json +0 -1
- np_services/.mypy_cache/3.9/errno.meta.json +0 -1
- np_services/.mypy_cache/3.9/fractions.data.json +0 -1
- np_services/.mypy_cache/3.9/fractions.meta.json +0 -1
- np_services/.mypy_cache/3.9/genericpath.data.json +0 -1
- np_services/.mypy_cache/3.9/genericpath.meta.json +0 -1
- np_services/.mypy_cache/3.9/importlib/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/importlib/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/importlib/abc.data.json +0 -1
- np_services/.mypy_cache/3.9/importlib/abc.meta.json +0 -1
- np_services/.mypy_cache/3.9/importlib/machinery.data.json +0 -1
- np_services/.mypy_cache/3.9/importlib/machinery.meta.json +0 -1
- np_services/.mypy_cache/3.9/importlib/metadata/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/importlib/metadata/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/io.data.json +0 -1
- np_services/.mypy_cache/3.9/io.meta.json +0 -1
- np_services/.mypy_cache/3.9/json/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/json/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/json/decoder.data.json +0 -1
- np_services/.mypy_cache/3.9/json/decoder.meta.json +0 -1
- np_services/.mypy_cache/3.9/json/encoder.data.json +0 -1
- np_services/.mypy_cache/3.9/json/encoder.meta.json +0 -1
- np_services/.mypy_cache/3.9/logging/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/logging/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/math.data.json +0 -1
- np_services/.mypy_cache/3.9/math.meta.json +0 -1
- np_services/.mypy_cache/3.9/mmap.data.json +0 -1
- np_services/.mypy_cache/3.9/mmap.meta.json +0 -1
- np_services/.mypy_cache/3.9/np_services/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/np_services/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/np_services/config.data.json +0 -1
- np_services/.mypy_cache/3.9/np_services/config.meta.json +0 -1
- np_services/.mypy_cache/3.9/np_services/protocols.data.json +0 -1
- np_services/.mypy_cache/3.9/np_services/protocols.meta.json +0 -1
- np_services/.mypy_cache/3.9/np_services/zro.data.json +0 -1
- np_services/.mypy_cache/3.9/np_services/zro.meta.json +0 -1
- np_services/.mypy_cache/3.9/ntpath.data.json +0 -1
- np_services/.mypy_cache/3.9/ntpath.meta.json +0 -1
- np_services/.mypy_cache/3.9/numbers.data.json +0 -1
- np_services/.mypy_cache/3.9/numbers.meta.json +0 -1
- np_services/.mypy_cache/3.9/os/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/os/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/os/path.data.json +0 -1
- np_services/.mypy_cache/3.9/os/path.meta.json +0 -1
- np_services/.mypy_cache/3.9/pathlib.data.json +0 -1
- np_services/.mypy_cache/3.9/pathlib.meta.json +0 -1
- np_services/.mypy_cache/3.9/pickle.data.json +0 -1
- np_services/.mypy_cache/3.9/pickle.meta.json +0 -1
- np_services/.mypy_cache/3.9/platform.data.json +0 -1
- np_services/.mypy_cache/3.9/platform.meta.json +0 -1
- np_services/.mypy_cache/3.9/posixpath.data.json +0 -1
- np_services/.mypy_cache/3.9/posixpath.meta.json +0 -1
- np_services/.mypy_cache/3.9/random.data.json +0 -1
- np_services/.mypy_cache/3.9/random.meta.json +0 -1
- np_services/.mypy_cache/3.9/re.data.json +0 -1
- np_services/.mypy_cache/3.9/re.meta.json +0 -1
- np_services/.mypy_cache/3.9/shutil.data.json +0 -1
- np_services/.mypy_cache/3.9/shutil.meta.json +0 -1
- np_services/.mypy_cache/3.9/socket.data.json +0 -1
- np_services/.mypy_cache/3.9/socket.meta.json +0 -1
- np_services/.mypy_cache/3.9/sre_compile.data.json +0 -1
- np_services/.mypy_cache/3.9/sre_compile.meta.json +0 -1
- np_services/.mypy_cache/3.9/sre_constants.data.json +0 -1
- np_services/.mypy_cache/3.9/sre_constants.meta.json +0 -1
- np_services/.mypy_cache/3.9/sre_parse.data.json +0 -1
- np_services/.mypy_cache/3.9/sre_parse.meta.json +0 -1
- np_services/.mypy_cache/3.9/string.data.json +0 -1
- np_services/.mypy_cache/3.9/string.meta.json +0 -1
- np_services/.mypy_cache/3.9/subprocess.data.json +0 -1
- np_services/.mypy_cache/3.9/subprocess.meta.json +0 -1
- np_services/.mypy_cache/3.9/sys.data.json +0 -1
- np_services/.mypy_cache/3.9/sys.meta.json +0 -1
- np_services/.mypy_cache/3.9/threading.data.json +0 -1
- np_services/.mypy_cache/3.9/threading.meta.json +0 -1
- np_services/.mypy_cache/3.9/time.data.json +0 -1
- np_services/.mypy_cache/3.9/time.meta.json +0 -1
- np_services/.mypy_cache/3.9/types.data.json +0 -1
- np_services/.mypy_cache/3.9/types.meta.json +0 -1
- np_services/.mypy_cache/3.9/typing.data.json +0 -1
- np_services/.mypy_cache/3.9/typing.meta.json +0 -1
- np_services/.mypy_cache/3.9/typing_extensions.data.json +0 -1
- np_services/.mypy_cache/3.9/typing_extensions.meta.json +0 -1
- np_services/.mypy_cache/3.9/warnings.data.json +0 -1
- np_services/.mypy_cache/3.9/warnings.meta.json +0 -1
- np_services/.mypy_cache/3.9/weakref.data.json +0 -1
- np_services/.mypy_cache/3.9/weakref.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/_typing.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/_typing.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/backend/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/backend/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/backend/select.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/backend/select.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/constants.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/constants.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/error.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/error.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/attrsettr.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/attrsettr.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/context.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/context.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/frame.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/frame.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/poll.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/poll.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/socket.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/socket.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/tracker.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/tracker.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/version.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/sugar/version.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/__init__.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/__init__.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/interop.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/interop.meta.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/jsonapi.data.json +0 -1
- np_services/.mypy_cache/3.9/zmq/utils/jsonapi.meta.json +0 -1
- np_services/.mypy_cache/CACHEDIR.TAG +0 -3
- np_services/resources/black_desktop.ps1 +0 -66
- np_services/resources/grey_desktop.ps1 +0 -66
- np_services/resources/reset_desktop.ps1 +0 -66
- np_services-0.1.69.dist-info/RECORD +0 -206
np_services/proxies.py
CHANGED
|
@@ -1,1489 +1,1489 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Proxy classes for interacting with devices via zro/zmq.
|
|
3
|
-
|
|
4
|
-
Proxy class names must match the name of the proxy key in the config dict.
|
|
5
|
-
"""
|
|
6
|
-
import abc
|
|
7
|
-
import contextlib
|
|
8
|
-
import copy
|
|
9
|
-
import csv
|
|
10
|
-
import datetime
|
|
11
|
-
import functools
|
|
12
|
-
import itertools
|
|
13
|
-
import json # loading config from Sync proxy will instantiate datetime objects
|
|
14
|
-
import logging
|
|
15
|
-
import pathlib
|
|
16
|
-
import re
|
|
17
|
-
import tempfile
|
|
18
|
-
import time
|
|
19
|
-
from typing import Any, ClassVar, Literal, Mapping, Optional, Sequence
|
|
20
|
-
|
|
21
|
-
import fabric
|
|
22
|
-
import np_config
|
|
23
|
-
import np_logging
|
|
24
|
-
import np_session
|
|
25
|
-
import npc_stim
|
|
26
|
-
import npc_sync
|
|
27
|
-
import npc_mvr
|
|
28
|
-
import np_tools
|
|
29
|
-
import yaml
|
|
30
|
-
import pandas as pd
|
|
31
|
-
|
|
32
|
-
import np_services.resources.mvr_connector as mvr_connector
|
|
33
|
-
import np_services.utils as utils
|
|
34
|
-
import np_services.resources.zro as zro
|
|
35
|
-
from np_services.protocols import *
|
|
36
|
-
|
|
37
|
-
logger = np_logging.getLogger(__name__)
|
|
38
|
-
|
|
39
|
-
CONFIG = utils.config_from_zk()
|
|
40
|
-
|
|
41
|
-
ProxyState = tuple[Literal["", "READY", "BUSY"], str]
|
|
42
|
-
|
|
43
|
-
|
|
44
|
-
class Proxy(abc.ABC):
|
|
45
|
-
# req proxy config - hardcode or overload ensure_config()
|
|
46
|
-
host: ClassVar[str]
|
|
47
|
-
port: ClassVar[int]
|
|
48
|
-
timeout: ClassVar[float]
|
|
49
|
-
serialization: ClassVar[Literal["json", "pickle"]]
|
|
50
|
-
|
|
51
|
-
# if a program needs to be launched (e.g. via RSC):
|
|
52
|
-
rsc_app_id: str
|
|
53
|
-
|
|
54
|
-
# if device records:
|
|
55
|
-
gb_per_hr: ClassVar[int | float]
|
|
56
|
-
min_rec_hr: ClassVar[int | float]
|
|
57
|
-
pretest_duration_sec: ClassVar[int | float]
|
|
58
|
-
|
|
59
|
-
# for resulting data, if device records:
|
|
60
|
-
data_root: ClassVar[Optional[pathlib.Path]] = None
|
|
61
|
-
data_files: ClassVar[Optional[Sequence[pathlib.Path]]] = None
|
|
62
|
-
|
|
63
|
-
# info
|
|
64
|
-
exc: ClassVar[Optional[Exception]] = None
|
|
65
|
-
|
|
66
|
-
latest_start: ClassVar[float | int] = 0
|
|
67
|
-
"`time.time()` when the service was last started via `start()`."
|
|
68
|
-
|
|
69
|
-
@classmethod
|
|
70
|
-
def ensure_config(cls) -> None:
|
|
71
|
-
"""Updates any missing parameters for class proxy.
|
|
72
|
-
|
|
73
|
-
Is called in `get_proxy()` so any time we need the proxy, we have a
|
|
74
|
-
correct config, without remembering to run `initialize()` or some such.
|
|
75
|
-
"""
|
|
76
|
-
config = CONFIG.get(
|
|
77
|
-
__class__.__name__, {}
|
|
78
|
-
) # class where this function is defined
|
|
79
|
-
config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
|
|
80
|
-
|
|
81
|
-
# for proxy (reqd):
|
|
82
|
-
if not hasattr(cls, "host"):
|
|
83
|
-
cls.host = config["host"]
|
|
84
|
-
if not hasattr(cls, "port"):
|
|
85
|
-
cls.port = int(config["port"])
|
|
86
|
-
if not hasattr(cls, "timeout"):
|
|
87
|
-
cls.timeout = float(config.get("timeout", 10.0))
|
|
88
|
-
if not hasattr(cls, "serialization"):
|
|
89
|
-
cls.serialization = config.get("serialization", "json")
|
|
90
|
-
|
|
91
|
-
# for pretest (reqd, not used if device doesn't record)
|
|
92
|
-
if not hasattr(cls, "pretest_duration_sec"):
|
|
93
|
-
cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
|
|
94
|
-
if not hasattr(cls, "gb_per_hr"):
|
|
95
|
-
cls.gb_per_hr = config.get("gb_per_hr", 2.0)
|
|
96
|
-
if not hasattr(cls, "min_rec_hr"):
|
|
97
|
-
cls.min_rec_hr = config.get("min_rec_hr", 3.0)
|
|
98
|
-
|
|
99
|
-
# for resulting data (optional):
|
|
100
|
-
if not cls.data_root or cls.host not in cls.data_root.parts:
|
|
101
|
-
relative_path = config.get("data", None)
|
|
102
|
-
if relative_path:
|
|
103
|
-
root = pathlib.Path(f"//{cls.host}/{relative_path}")
|
|
104
|
-
try:
|
|
105
|
-
_ = root.exists()
|
|
106
|
-
except OSError as exc:
|
|
107
|
-
cls.exc = exc
|
|
108
|
-
logger.exception(
|
|
109
|
-
"Error accessing %s data path: %s", cls.__name__, root
|
|
110
|
-
)
|
|
111
|
-
raise FileNotFoundError(
|
|
112
|
-
f"{cls.__name__} data path is not accessible: {root}"
|
|
113
|
-
) from exc
|
|
114
|
-
else:
|
|
115
|
-
cls.data_root = root
|
|
116
|
-
if hasattr(cls, "data_root") and cls.data_root:
|
|
117
|
-
cls.data_root.mkdir(parents=True, exist_ok=True)
|
|
118
|
-
|
|
119
|
-
@classmethod
|
|
120
|
-
def launch(cls) -> None:
|
|
121
|
-
utils.start_rsc_app(cls.host, cls.rsc_app_id)
|
|
122
|
-
|
|
123
|
-
@classmethod
|
|
124
|
-
def kill(cls) -> None:
|
|
125
|
-
utils.kill_rsc_app(cls.host, cls.rsc_app_id)
|
|
126
|
-
|
|
127
|
-
@classmethod
|
|
128
|
-
def initialize(cls) -> None:
|
|
129
|
-
cls.launch()
|
|
130
|
-
with contextlib.suppress(AttributeError):
|
|
131
|
-
del cls.proxy
|
|
132
|
-
cls.proxy = cls.get_proxy()
|
|
133
|
-
if isinstance(cls, Startable) and not cls.is_ready_to_start():
|
|
134
|
-
if isinstance(cls, Finalizable):
|
|
135
|
-
cls.finalize()
|
|
136
|
-
if not cls.is_ready_to_start():
|
|
137
|
-
logger.warning(
|
|
138
|
-
"%s not ready to start: %s", cls.__name__, cls.get_state()
|
|
139
|
-
)
|
|
140
|
-
return
|
|
141
|
-
if cls.data_root:
|
|
142
|
-
cls.data_files = []
|
|
143
|
-
cls.sync_path = None
|
|
144
|
-
cls.initialization = time.time()
|
|
145
|
-
logger.info("%s(%s) initialized: ready for use", __class__.__name__, cls.__name__)
|
|
146
|
-
|
|
147
|
-
@classmethod
|
|
148
|
-
def test(cls) -> None:
|
|
149
|
-
"Quickly verify service is working and ready for use, or raise `TestError`."
|
|
150
|
-
logger.debug("Testing %s proxy", cls.__name__)
|
|
151
|
-
if not cls.is_connected():
|
|
152
|
-
raise TestError(
|
|
153
|
-
f"{cls.__name__} not connected to {cls.host}:{cls.port}"
|
|
154
|
-
) from cls.exc
|
|
155
|
-
logger.debug(
|
|
156
|
-
"%s proxy connection to %s:%s confirmed", cls.__name__, cls.host, cls.port
|
|
157
|
-
)
|
|
158
|
-
gb = cls.get_required_disk_gb()
|
|
159
|
-
if not cls.is_disk_space_ok():
|
|
160
|
-
raise TestError(
|
|
161
|
-
f"{cls.__name__} free disk space on {cls.data_root.drive} doesn't meet minimum of {gb} GB"
|
|
162
|
-
) from cls.exc
|
|
163
|
-
logger.debug("%s(%s) tested successfully", __class__.__name__, cls.__name__)
|
|
164
|
-
|
|
165
|
-
@classmethod
|
|
166
|
-
def get_proxy(cls) -> zro.DeviceProxy:
|
|
167
|
-
"Return a proxy to the service without re-creating unnecessarily."
|
|
168
|
-
with contextlib.suppress(AttributeError):
|
|
169
|
-
return cls.proxy
|
|
170
|
-
cls.ensure_config()
|
|
171
|
-
logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
|
|
172
|
-
cls.proxy = zro.DeviceProxy(cls.host, cls.port, cls.timeout, cls.serialization)
|
|
173
|
-
return cls.get_proxy()
|
|
174
|
-
|
|
175
|
-
@classmethod
|
|
176
|
-
def get_state(cls) -> ProxyState | dict:
|
|
177
|
-
"Dict may be deprecated: is no longer returned by Sync or Camstim proxies."
|
|
178
|
-
state = cls.get_proxy().get_state()
|
|
179
|
-
logger.debug("%s state: %s", cls.__name__, state)
|
|
180
|
-
return state
|
|
181
|
-
|
|
182
|
-
@classmethod
|
|
183
|
-
def get_latest_data(
|
|
184
|
-
cls: Recorder, glob: Optional[str] = None, subfolders: str = ""
|
|
185
|
-
) -> list[pathlib.Path] | None:
|
|
186
|
-
cls.ensure_config()
|
|
187
|
-
if not cls.data_root:
|
|
188
|
-
return None
|
|
189
|
-
if subfolders == "/": # can alter path to drive root
|
|
190
|
-
subfolders = ""
|
|
191
|
-
if not glob:
|
|
192
|
-
glob = f"*{cls.raw_suffix}" if hasattr(cls, "raw_suffix") else "*"
|
|
193
|
-
if not hasattr(cls, "latest_start"):
|
|
194
|
-
data_paths = utils.get_files_created_between(
|
|
195
|
-
cls.data_root / subfolders, glob
|
|
196
|
-
)
|
|
197
|
-
if not data_paths:
|
|
198
|
-
return None
|
|
199
|
-
return [
|
|
200
|
-
max(data_paths, key=lambda x: x.stat().st_mtime)
|
|
201
|
-
]
|
|
202
|
-
return utils.get_files_created_between(
|
|
203
|
-
cls.data_root / subfolders, glob, cls.latest_start
|
|
204
|
-
)
|
|
205
|
-
|
|
206
|
-
@classmethod
|
|
207
|
-
def get_required_disk_gb(cls) -> float:
|
|
208
|
-
"Return the minimum disk space required prior to start (to .1 GB). Returns `0.0` if service generates no data."
|
|
209
|
-
cls.ensure_config()
|
|
210
|
-
if not isinstance(cls, Startable):
|
|
211
|
-
return 0.0
|
|
212
|
-
return round(cls.min_rec_hr * cls.gb_per_hr, 1)
|
|
213
|
-
|
|
214
|
-
@classmethod
|
|
215
|
-
def is_disk_space_ok(cls) -> bool:
|
|
216
|
-
required = cls.get_required_disk_gb()
|
|
217
|
-
if required == 0.0:
|
|
218
|
-
return True
|
|
219
|
-
try:
|
|
220
|
-
free = utils.free_gb(cls.data_root)
|
|
221
|
-
except FileNotFoundError as exc:
|
|
222
|
-
cls.exc = exc
|
|
223
|
-
logger.exception(
|
|
224
|
-
f"{cls.__name__} data path not accessible: {cls.data_root}"
|
|
225
|
-
)
|
|
226
|
-
return False
|
|
227
|
-
else:
|
|
228
|
-
logger.debug(
|
|
229
|
-
"%s free disk space on %s: %s GB",
|
|
230
|
-
cls.__name__,
|
|
231
|
-
cls.data_root.drive,
|
|
232
|
-
free,
|
|
233
|
-
)
|
|
234
|
-
return free > required
|
|
235
|
-
|
|
236
|
-
@classmethod
|
|
237
|
-
def is_connected(cls) -> bool:
|
|
238
|
-
if not utils.is_online(cls.host):
|
|
239
|
-
cls.exc = ConnectionError(
|
|
240
|
-
f"No response from {cls.host}: may be offline or unreachable"
|
|
241
|
-
)
|
|
242
|
-
return False
|
|
243
|
-
try:
|
|
244
|
-
_ = cls.get_proxy().uptime
|
|
245
|
-
except zro.ZroError as exc:
|
|
246
|
-
cls.exc = exc
|
|
247
|
-
logger.exception(
|
|
248
|
-
f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
|
|
249
|
-
)
|
|
250
|
-
return False
|
|
251
|
-
try:
|
|
252
|
-
_ = cls.get_state()
|
|
253
|
-
except zro.ZroError as exc:
|
|
254
|
-
cls.exc = exc
|
|
255
|
-
logger.exception(
|
|
256
|
-
f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
|
|
257
|
-
)
|
|
258
|
-
return False
|
|
259
|
-
return True
|
|
260
|
-
|
|
261
|
-
|
|
262
|
-
class CamstimSyncShared(Proxy):
|
|
263
|
-
started_state: ClassVar[Sequence[str]]
|
|
264
|
-
|
|
265
|
-
@classmethod
|
|
266
|
-
def is_ready_to_start(cls) -> bool:
|
|
267
|
-
if cls.is_started():
|
|
268
|
-
return False
|
|
269
|
-
state = cls.get_state()
|
|
270
|
-
if isinstance(state, Mapping) and state.get("message", "") == "READY":
|
|
271
|
-
return True
|
|
272
|
-
if isinstance(state, Sequence) and "READY" in state:
|
|
273
|
-
return True
|
|
274
|
-
return False
|
|
275
|
-
|
|
276
|
-
@classmethod
|
|
277
|
-
def is_started(cls) -> bool:
|
|
278
|
-
return len(state := cls.get_state()) and all(
|
|
279
|
-
msg in state for msg in cls.started_state
|
|
280
|
-
)
|
|
281
|
-
|
|
282
|
-
@classmethod
|
|
283
|
-
def start(cls) -> None:
|
|
284
|
-
logger.info("%s | Starting recording", cls.__name__)
|
|
285
|
-
if cls.is_started():
|
|
286
|
-
logger.warning(
|
|
287
|
-
"%s already started - should be stopped manually", cls.__name__
|
|
288
|
-
)
|
|
289
|
-
return
|
|
290
|
-
# otherwise, Sync - for example - would stop current recording and start another
|
|
291
|
-
if not cls.is_ready_to_start():
|
|
292
|
-
logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
|
|
293
|
-
raise AssertionError(
|
|
294
|
-
f"{cls.__name__} not ready to start: {cls.get_state()}"
|
|
295
|
-
)
|
|
296
|
-
cls.latest_start = time.time()
|
|
297
|
-
cls.get_proxy().start()
|
|
298
|
-
|
|
299
|
-
@classmethod
|
|
300
|
-
def pretest(cls) -> None:
|
|
301
|
-
"Test all critical functions"
|
|
302
|
-
with np_logging.debug():
|
|
303
|
-
logger.debug("Starting %s pretest", cls.__name__)
|
|
304
|
-
cls.initialize() # calls test()
|
|
305
|
-
|
|
306
|
-
with utils.stop_on_error(cls):
|
|
307
|
-
cls.start()
|
|
308
|
-
time.sleep(1)
|
|
309
|
-
cls.verify()
|
|
310
|
-
time.sleep(cls.pretest_duration_sec)
|
|
311
|
-
# stop() called by context manager at exit, regardless
|
|
312
|
-
cls.finalize()
|
|
313
|
-
cls.validate()
|
|
314
|
-
logger.info("%s pretest complete", cls.__name__)
|
|
315
|
-
|
|
316
|
-
@classmethod
|
|
317
|
-
def verify(cls) -> None:
|
|
318
|
-
"Assert latest data file is currently increasing in size, or raise AssertionError."
|
|
319
|
-
if not cls.is_started():
|
|
320
|
-
logger.warning(
|
|
321
|
-
"Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
|
|
322
|
-
)
|
|
323
|
-
raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
|
|
324
|
-
|
|
325
|
-
@classmethod
|
|
326
|
-
def stop(cls) -> None:
|
|
327
|
-
logger.debug("Stopping %s", cls.__name__)
|
|
328
|
-
cls.get_proxy().stop()
|
|
329
|
-
logger.info("%s | Stopped recording", cls.__name__)
|
|
330
|
-
|
|
331
|
-
# --- End of possible Camstim/Sync shared methods ---
|
|
332
|
-
|
|
333
|
-
# --- Sync-specific methods ---
|
|
334
|
-
|
|
335
|
-
|
|
336
|
-
class Sync(CamstimSyncShared):
|
|
337
|
-
host = np_config.Rig().Sync
|
|
338
|
-
started_state = ("BUSY", "RECORDING")
|
|
339
|
-
raw_suffix: str = ".sync"
|
|
340
|
-
rsc_app_id: str = "sync_device"
|
|
341
|
-
|
|
342
|
-
@classmethod
|
|
343
|
-
def ensure_config(cls) -> None:
|
|
344
|
-
"""Updates any missing parameters for class proxy.
|
|
345
|
-
|
|
346
|
-
Is called in `get_proxy()` so any time we need the proxy, we have a
|
|
347
|
-
correct config, without remembering to run `initialize()` or some such.
|
|
348
|
-
"""
|
|
349
|
-
config = CONFIG.get(
|
|
350
|
-
__class__.__name__, {}
|
|
351
|
-
) # class where this function is defined
|
|
352
|
-
config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
|
|
353
|
-
|
|
354
|
-
# for proxy (reqd):
|
|
355
|
-
if not hasattr(cls, "host"):
|
|
356
|
-
cls.host = config["host"]
|
|
357
|
-
if not hasattr(cls, "port"):
|
|
358
|
-
cls.port = int(config["port"])
|
|
359
|
-
if not hasattr(cls, "timeout"):
|
|
360
|
-
cls.timeout = float(config.get("timeout", 10.0))
|
|
361
|
-
if not hasattr(cls, "serialization"):
|
|
362
|
-
cls.serialization = config.get("serialization", "json")
|
|
363
|
-
|
|
364
|
-
# for pretest (reqd, not used if device doesn't record)
|
|
365
|
-
if not hasattr(cls, "pretest_duration_sec"):
|
|
366
|
-
cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
|
|
367
|
-
if not hasattr(cls, "gb_per_hr"):
|
|
368
|
-
cls.gb_per_hr = config.get("gb_per_hr", 2.0)
|
|
369
|
-
if not hasattr(cls, "min_rec_hr"):
|
|
370
|
-
cls.min_rec_hr = config.get("min_rec_hr", 3.0)
|
|
371
|
-
|
|
372
|
-
# for resulting data (optional):
|
|
373
|
-
if not cls.data_root or cls.host not in cls.data_root.parts:
|
|
374
|
-
relative_path = config.get("data", None)
|
|
375
|
-
if relative_path:
|
|
376
|
-
root = pathlib.Path(f"//{cls.host}/{relative_path}")
|
|
377
|
-
try:
|
|
378
|
-
_ = root.exists()
|
|
379
|
-
except OSError as exc:
|
|
380
|
-
cls.exc = exc
|
|
381
|
-
logger.exception(
|
|
382
|
-
"Error accessing %s data path: %s", cls.__name__, root
|
|
383
|
-
)
|
|
384
|
-
raise FileNotFoundError(
|
|
385
|
-
f"{cls.__name__} data path is not accessible: {root}"
|
|
386
|
-
) from exc
|
|
387
|
-
else:
|
|
388
|
-
cls.data_root = root
|
|
389
|
-
if hasattr(cls, "data_root"):
|
|
390
|
-
cls.data_root.mkdir(parents=True, exist_ok=True)
|
|
391
|
-
|
|
392
|
-
@classmethod
|
|
393
|
-
def finalize(cls) -> None:
|
|
394
|
-
logger.debug("Finalizing %s", cls.__name__)
|
|
395
|
-
if cls.is_started():
|
|
396
|
-
cls.stop()
|
|
397
|
-
while not cls.is_ready_to_start():
|
|
398
|
-
logger.debug("Waiting for %s to finish processing", cls.__name__)
|
|
399
|
-
time.sleep(1) # TODO add backoff module
|
|
400
|
-
if not cls.data_files:
|
|
401
|
-
cls.data_files = []
|
|
402
|
-
cls.data_files.extend(new := cls.get_latest_data("*.h5"))
|
|
403
|
-
logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
|
|
404
|
-
|
|
405
|
-
@classmethod
|
|
406
|
-
def shutdown(cls) -> None:
|
|
407
|
-
logger.debug("Shutting down %s", cls.__name__)
|
|
408
|
-
cls.stop()
|
|
409
|
-
try:
|
|
410
|
-
del cls.proxy
|
|
411
|
-
except Exception as exc:
|
|
412
|
-
logger.debug("Failed to delete %s proxy: %s", cls.__name__, exc)
|
|
413
|
-
cls.exc = exc
|
|
414
|
-
|
|
415
|
-
@classmethod
|
|
416
|
-
def get_config(cls) -> dict[str, Any | datetime.datetime]:
|
|
417
|
-
"Sync config, including `line_labels` and `frequency`"
|
|
418
|
-
if cls.serialization in ("json", "j"):
|
|
419
|
-
return eval(cls.get_proxy().config)
|
|
420
|
-
if cls.serialization in ("pickle", "pkl", "p"):
|
|
421
|
-
return cls.get_proxy().config
|
|
422
|
-
|
|
423
|
-
@classmethod
|
|
424
|
-
def validate(cls, data: Optional[pathlib.Path] = None) -> None:
|
|
425
|
-
"Check that data file is valid, or raise AssertionError."
|
|
426
|
-
logger.debug("Validating %s data", cls.__name__)
|
|
427
|
-
if not data and bool(files := cls.get_latest_data("*.h5")):
|
|
428
|
-
data = files[-1]
|
|
429
|
-
logger.debug(
|
|
430
|
-
"No data file provided: validating most-recent data in %s: %s",
|
|
431
|
-
cls.data_root,
|
|
432
|
-
data.name,
|
|
433
|
-
)
|
|
434
|
-
if cls.is_started():
|
|
435
|
-
logger.warning(
|
|
436
|
-
f"Attempted to validate current data file while recording"
|
|
437
|
-
)
|
|
438
|
-
return
|
|
439
|
-
elif not cls.is_ready_to_start():
|
|
440
|
-
cls.finalize()
|
|
441
|
-
try:
|
|
442
|
-
import h5py
|
|
443
|
-
except ImportError:
|
|
444
|
-
logger.warning("h5py not installed: cannot open Sync data")
|
|
445
|
-
cls.min_validation(data)
|
|
446
|
-
else:
|
|
447
|
-
cls.full_validation(data)
|
|
448
|
-
|
|
449
|
-
@classmethod
|
|
450
|
-
def verify(cls) -> None:
|
|
451
|
-
"Assert latest data file is currently increasing in size, or raise AssertionError."
|
|
452
|
-
super().verify()
|
|
453
|
-
if cls.data_root and not utils.is_file_growing(cls.get_latest_data()[-1]):
|
|
454
|
-
raise AssertionError(
|
|
455
|
-
f"{cls.__name__} latest data file is not increasing in size: {cls.get_latest_data()[-1]}"
|
|
456
|
-
)
|
|
457
|
-
logger.info("%s | Verified: file on disk is increasing in size", cls.__name__)
|
|
458
|
-
|
|
459
|
-
@classmethod
|
|
460
|
-
def full_validation(cls, data: pathlib.Path) -> None:
|
|
461
|
-
npc_sync.get_sync_data(data).validate()
|
|
462
|
-
|
|
463
|
-
@classmethod
|
|
464
|
-
def min_validation(cls, data: pathlib.Path) -> None:
|
|
465
|
-
if data.stat().st_size == 0:
|
|
466
|
-
raise AssertionError(f"Empty file: {data}")
|
|
467
|
-
if data.suffix != ".h5":
|
|
468
|
-
raise FileNotFoundError(
|
|
469
|
-
f"Expected .sync to be converted to .h5 immediately after recording stopped: {data}"
|
|
470
|
-
)
|
|
471
|
-
logger.debug("%s minimal validation passed for %s", cls.__name__, data.name)
|
|
472
|
-
|
|
473
|
-
|
|
474
|
-
class Phidget(CamstimSyncShared):
|
|
475
|
-
host = np_config.Rig().Stim
|
|
476
|
-
rsc_app_id = "phidget_server"
|
|
477
|
-
|
|
478
|
-
|
|
479
|
-
class Camstim(CamstimSyncShared):
|
|
480
|
-
host = np_config.Rig().Stim
|
|
481
|
-
started_state = ("BUSY", "Script in progress.")
|
|
482
|
-
rsc_app_id = "camstim_agent"
|
|
483
|
-
sync_path: Optional[pathlib.Path] = None
|
|
484
|
-
|
|
485
|
-
@classmethod
|
|
486
|
-
def launch(cls) -> None:
|
|
487
|
-
super().launch()
|
|
488
|
-
Phidget.launch()
|
|
489
|
-
|
|
490
|
-
@classmethod
|
|
491
|
-
def get_config(cls) -> dict[str, Any]:
|
|
492
|
-
return cls.get_proxy().config
|
|
493
|
-
|
|
494
|
-
@classmethod
|
|
495
|
-
def ensure_config(cls) -> None:
|
|
496
|
-
"""Updates any missing parameters for class proxy.
|
|
497
|
-
|
|
498
|
-
Is called in `get_proxy()` so any time we need the proxy, we have a
|
|
499
|
-
correct config, without remembering to run `initialize()` or some such.
|
|
500
|
-
"""
|
|
501
|
-
config = CONFIG.get(
|
|
502
|
-
__class__.__name__, {}
|
|
503
|
-
) # class where this function is defined
|
|
504
|
-
config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
|
|
505
|
-
|
|
506
|
-
# for proxy (reqd):
|
|
507
|
-
if not hasattr(cls, "host"):
|
|
508
|
-
cls.host = config["host"]
|
|
509
|
-
if not hasattr(cls, "port"):
|
|
510
|
-
cls.port = int(config["port"])
|
|
511
|
-
if not hasattr(cls, "timeout"):
|
|
512
|
-
cls.timeout = float(config.get("timeout", 10.0))
|
|
513
|
-
if not hasattr(cls, "serialization"):
|
|
514
|
-
cls.serialization = config.get("serialization", "json")
|
|
515
|
-
|
|
516
|
-
# for pretest (reqd, not used if device doesn't record)
|
|
517
|
-
if not hasattr(cls, "pretest_duration_sec"):
|
|
518
|
-
cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
|
|
519
|
-
if not hasattr(cls, "gb_per_hr"):
|
|
520
|
-
cls.gb_per_hr = config.get("gb_per_hr", 2.0)
|
|
521
|
-
if not hasattr(cls, "min_rec_hr"):
|
|
522
|
-
cls.min_rec_hr = config.get("min_rec_hr", 3.0)
|
|
523
|
-
|
|
524
|
-
# for resulting data (optional):
|
|
525
|
-
if not cls.data_root:
|
|
526
|
-
relative_path = config.get("data", None)
|
|
527
|
-
if relative_path:
|
|
528
|
-
root = pathlib.Path(f"//{cls.host}/{relative_path}")
|
|
529
|
-
try:
|
|
530
|
-
_ = root.exists()
|
|
531
|
-
except OSError as exc:
|
|
532
|
-
cls.exc = exc
|
|
533
|
-
logger.exception(
|
|
534
|
-
"Error accessing %s data path: %s", cls.__name__, root
|
|
535
|
-
)
|
|
536
|
-
raise FileNotFoundError(
|
|
537
|
-
f"{cls.__name__} data path is not accessible: {root}"
|
|
538
|
-
) from exc
|
|
539
|
-
else:
|
|
540
|
-
cls.data_root = root
|
|
541
|
-
if hasattr(cls, "data_root") and cls.data_root is not None:
|
|
542
|
-
cls.data_root.mkdir(parents=True, exist_ok=True)
|
|
543
|
-
|
|
544
|
-
@classmethod
|
|
545
|
-
def finalize(cls) -> None:
|
|
546
|
-
logger.info("Finalizing %s", cls.__name__)
|
|
547
|
-
if cls.is_started():
|
|
548
|
-
cls.stop()
|
|
549
|
-
count = 0
|
|
550
|
-
while not cls.is_ready_to_start():
|
|
551
|
-
if count % 120 == 0:
|
|
552
|
-
logger.debug("Waiting for %s to finish processing", cls.__name__)
|
|
553
|
-
time.sleep(1) # TODO add backoff module
|
|
554
|
-
if not cls.data_files:
|
|
555
|
-
cls.data_files = []
|
|
556
|
-
cls.data_files.extend(new := itertools.chain(cls.get_latest_data("*pkl"), cls.get_latest_data("*hdf5")))
|
|
557
|
-
logger.info("%s added new data: %s", cls.__name__, [_.name for _ in new])
|
|
558
|
-
|
|
559
|
-
@classmethod
|
|
560
|
-
def validate(cls) -> None:
|
|
561
|
-
if not cls.sync_path:
|
|
562
|
-
logger.warning("Cannot validate stim without sync file: assign `stim.sync_path`")
|
|
563
|
-
return
|
|
564
|
-
logger.info("Validating %s", cls.__name__)
|
|
565
|
-
for file in cls.data_files:
|
|
566
|
-
npc_stim.validate_stim(file, sync=cls.sync_path)
|
|
567
|
-
logger.info(f"Validated {len(cls.data_files)} stim files with sync")
|
|
568
|
-
|
|
569
|
-
class ScriptCamstim(Camstim):
|
|
570
|
-
script: ClassVar[str]
|
|
571
|
-
"path to script on Stim computer"
|
|
572
|
-
params: ClassVar[dict[str, Any]] = {}
|
|
573
|
-
|
|
574
|
-
@classmethod
|
|
575
|
-
def pretest(cls) -> None:
|
|
576
|
-
pretest_mouse = "599657"
|
|
577
|
-
|
|
578
|
-
cls.script = "C:/ProgramData/StimulusFiles/dev/bi_script_pretest_v2.py"
|
|
579
|
-
|
|
580
|
-
# get params from MTrain, as if we were running `Agent.start_session`
|
|
581
|
-
cls.params = np_session.mtrain.MTrain(pretest_mouse).stage["parameters"]
|
|
582
|
-
cls.params.update(dict(user_name="ben.hardcastle", mouse_id=pretest_mouse))
|
|
583
|
-
|
|
584
|
-
logger.info(
|
|
585
|
-
"%s | Pretest: running %s with MTrain stage params for mouse %s",
|
|
586
|
-
cls.__name__,
|
|
587
|
-
cls.script,
|
|
588
|
-
pretest_mouse,
|
|
589
|
-
)
|
|
590
|
-
cls.initialize()
|
|
591
|
-
cls.test()
|
|
592
|
-
cls.start()
|
|
593
|
-
while not cls.is_ready_to_start():
|
|
594
|
-
logger.debug("Waiting for %s to finish processing", cls.__name__)
|
|
595
|
-
time.sleep(10)
|
|
596
|
-
cls.finalize()
|
|
597
|
-
# cls.validate()
|
|
598
|
-
cls.initialize()
|
|
599
|
-
|
|
600
|
-
@classmethod
|
|
601
|
-
def start(cls):
|
|
602
|
-
cls.latest_start = time.time()
|
|
603
|
-
cls.get_proxy().start_script(cls.script, cls.params)
|
|
604
|
-
|
|
605
|
-
|
|
606
|
-
class SessionCamstim(Camstim):
|
|
607
|
-
lims_user_id: ClassVar[str]
|
|
608
|
-
labtracks_mouse_id: ClassVar[int]
|
|
609
|
-
override_params: ClassVar[dict[str, Any] | None] = None
|
|
610
|
-
|
|
611
|
-
@classmethod
|
|
612
|
-
def start(cls):
|
|
613
|
-
cls.latest_start = time.time()
|
|
614
|
-
cls.get_proxy().start_session(
|
|
615
|
-
cls.labtracks_mouse_id, cls.lims_user_id, override_params=cls.override_params
|
|
616
|
-
)
|
|
617
|
-
|
|
618
|
-
@classmethod
|
|
619
|
-
def pretest(cls) -> None:
|
|
620
|
-
cls.labtracks_mouse_id = 598796
|
|
621
|
-
cls.lims_user_id = "ben.hardcastle"
|
|
622
|
-
logger.info(
|
|
623
|
-
"%s | Pretest with mouse %s, user %s",
|
|
624
|
-
cls.__name__,
|
|
625
|
-
cls.labtracks_mouse_id,
|
|
626
|
-
cls.lims_user_id,
|
|
627
|
-
)
|
|
628
|
-
super().pretest()
|
|
629
|
-
|
|
630
|
-
|
|
631
|
-
class NoCamstim(Camstim):
|
|
632
|
-
"Run remote files (e.g. .bat) without sending directly to Camstim Agent"
|
|
633
|
-
|
|
634
|
-
remote_file: ClassVar[str | pathlib.Path]
|
|
635
|
-
extra_args: ClassVar[list[str]] = []
|
|
636
|
-
ssh: ClassVar[fabric.Connection]
|
|
637
|
-
user: ClassVar[str] = "svc_neuropix"
|
|
638
|
-
password: ClassVar[str]
|
|
639
|
-
|
|
640
|
-
@classmethod
|
|
641
|
-
def pretest(cls) -> None:
|
|
642
|
-
logger.warning("%s | Pretest not implemented", cls.__name__)
|
|
643
|
-
|
|
644
|
-
@classmethod
|
|
645
|
-
def get_ssh(cls) -> fabric.Connection:
|
|
646
|
-
with contextlib.suppress(AttributeError):
|
|
647
|
-
return cls.ssh
|
|
648
|
-
cls.initialize()
|
|
649
|
-
return cls.ssh
|
|
650
|
-
|
|
651
|
-
@classmethod
|
|
652
|
-
def initialize(cls) -> None:
|
|
653
|
-
if not hasattr(cls, "password"):
|
|
654
|
-
cls.password = input(f"{cls.__name__} | Enter password for {cls.host}: ")
|
|
655
|
-
cls.remote_file = utils.unc_to_local(pathlib.Path(cls.remote_file))
|
|
656
|
-
cls.ssh = fabric.Connection(
|
|
657
|
-
cls.host, cls.user, connect_kwargs=dict(password=cls.password)
|
|
658
|
-
)
|
|
659
|
-
super().initialize()
|
|
660
|
-
cls.test()
|
|
661
|
-
|
|
662
|
-
@classmethod
|
|
663
|
-
def test(cls) -> None:
|
|
664
|
-
super().test()
|
|
665
|
-
logger.debug(f"{cls.__name__} | Testing")
|
|
666
|
-
try:
|
|
667
|
-
result = cls.get_ssh().run("hostname", hide=True)
|
|
668
|
-
except Exception as exc:
|
|
669
|
-
raise TestError(
|
|
670
|
-
f"{cls.__name__} Error connecting to {cls.host} via ssh: {exc!r}. Is this password correct? {cls.password}"
|
|
671
|
-
)
|
|
672
|
-
else:
|
|
673
|
-
if result.exited != 0:
|
|
674
|
-
raise TestError(
|
|
675
|
-
f"{cls.__name__} Error connecting to {cls.host} via ssh: {result}"
|
|
676
|
-
)
|
|
677
|
-
logger.debug(f"{cls.__name__} | Connected to {cls.host} via ssh")
|
|
678
|
-
|
|
679
|
-
try:
|
|
680
|
-
result = cls.get_ssh().run(f"type {cls.remote_file}", hide=True)
|
|
681
|
-
except Exception as exc:
|
|
682
|
-
extra = (
|
|
683
|
-
f" | '{exc.result.command}': {exc.result.stderr.strip()!r}"
|
|
684
|
-
if hasattr(exc, "result")
|
|
685
|
-
else ""
|
|
686
|
-
)
|
|
687
|
-
raise TestError(
|
|
688
|
-
f"{cls.__name__} | Error calling ssh-executed command{extra}"
|
|
689
|
-
)
|
|
690
|
-
else:
|
|
691
|
-
if result.exited != 0:
|
|
692
|
-
raise TestError(
|
|
693
|
-
f"{cls.__name__} Error accessing {cls.remote_file} on {cls.host} - is filepath correct? {result}"
|
|
694
|
-
)
|
|
695
|
-
logger.debug(
|
|
696
|
-
f"{cls.__name__} | {cls.remote_file} is accessible via ssh on {cls.host}"
|
|
697
|
-
)
|
|
698
|
-
|
|
699
|
-
@classmethod
|
|
700
|
-
def start(cls):
|
|
701
|
-
if cls.is_started():
|
|
702
|
-
logger.warning(f"{cls.__name__} already started")
|
|
703
|
-
return
|
|
704
|
-
logger.debug(f"{cls.__name__} | Starting {cls.remote_file} on {cls.host}")
|
|
705
|
-
cls.latest_start = time.time()
|
|
706
|
-
cls.get_ssh().run(f"call {cls.remote_file} {cls.extra_args}")
|
|
707
|
-
|
|
708
|
-
@classmethod
|
|
709
|
-
def verify(cls):
|
|
710
|
-
logger.warning(f"{cls.__name__} | No verification implemented")
|
|
711
|
-
|
|
712
|
-
|
|
713
|
-
class MouseDirector(Proxy):
|
|
714
|
-
"""Communicate with the ZMQ remote object specified here:
|
|
715
|
-
http://aibspi.corp.alleninstitute.org/braintv/visual_behavior/mouse_director/-/blob/master/src/mousedirector.py
|
|
716
|
-
|
|
717
|
-
::
|
|
718
|
-
MouseDirector.get_proxy().set_mouse_id(str(366122))
|
|
719
|
-
MouseDirector.get_proxy().set_user_id("ben.hardcastle")
|
|
720
|
-
"""
|
|
721
|
-
|
|
722
|
-
user: ClassVar[str | np_session.User]
|
|
723
|
-
mouse: ClassVar[str | int | np_session.Mouse]
|
|
724
|
-
|
|
725
|
-
rsc_app_id = CONFIG['MouseDirector']['rsc_app_id']
|
|
726
|
-
host = np_config.Rig().Mon
|
|
727
|
-
gb_per_hr = 0
|
|
728
|
-
serialization = "json"
|
|
729
|
-
started_state: ClassVar[ProxyState] = ("READY", "")
|
|
730
|
-
not_connected_state: ClassVar[ProxyState] = ("", "NOT_CONNECTED")
|
|
731
|
-
|
|
732
|
-
@classmethod
|
|
733
|
-
def pretest(cls):
|
|
734
|
-
with np_logging.debug():
|
|
735
|
-
logger.debug(f"{cls.__name__} | Pretest")
|
|
736
|
-
cls.user = "ben.hardcastle"
|
|
737
|
-
cls.mouse = 366122
|
|
738
|
-
cls.initialize()
|
|
739
|
-
cls.test()
|
|
740
|
-
cls.get_proxy().retract_lick_spout()
|
|
741
|
-
time.sleep(3)
|
|
742
|
-
cls.get_proxy().extend_lick_spout()
|
|
743
|
-
time.sleep(3)
|
|
744
|
-
cls.get_proxy().retract_lick_spout()
|
|
745
|
-
time.sleep(3)
|
|
746
|
-
logger.info(f"{cls.__name__} | Pretest passed")
|
|
747
|
-
|
|
748
|
-
@classmethod
|
|
749
|
-
def initialize(cls):
|
|
750
|
-
logger.debug(f"{cls.__name__} | Initializing")
|
|
751
|
-
super().initialize()
|
|
752
|
-
cls.get_proxy().set_mouse_id(str(cls.mouse))
|
|
753
|
-
time.sleep(1)
|
|
754
|
-
cls.get_proxy().set_user_id(str(cls.user))
|
|
755
|
-
time.sleep(1)
|
|
756
|
-
logger.debug(f"{cls.__name__} | Initialized with mouse {cls.mouse}, user {cls.user}")
|
|
757
|
-
|
|
758
|
-
@classmethod
|
|
759
|
-
def get_state(cls) -> ProxyState:
|
|
760
|
-
result: str = cls.get_proxy().rig_dict
|
|
761
|
-
if str(np_config.Rig()) in result:
|
|
762
|
-
return cls.started_state
|
|
763
|
-
return cls.not_connected_state
|
|
764
|
-
|
|
765
|
-
class Cam3d(CamstimSyncShared):
|
|
766
|
-
|
|
767
|
-
label: str
|
|
768
|
-
|
|
769
|
-
host = np_config.Rig().Mon
|
|
770
|
-
serialization = "json"
|
|
771
|
-
started_state = ["READY", "CAMERAS_OPEN,CAMERAS_ACQUIRING"]
|
|
772
|
-
rsc_app_id = CONFIG['Cam3d']['rsc_app_id']
|
|
773
|
-
data_files: ClassVar[list[pathlib.Path]] = []
|
|
774
|
-
|
|
775
|
-
@classmethod
|
|
776
|
-
def is_started(cls) -> bool:
|
|
777
|
-
return cls.get_state() == cls.started_state
|
|
778
|
-
|
|
779
|
-
@classmethod
|
|
780
|
-
def is_ready_to_start(cls) -> bool:
|
|
781
|
-
if cls.is_started():
|
|
782
|
-
return False
|
|
783
|
-
time.sleep(1)
|
|
784
|
-
if (
|
|
785
|
-
cls.get_state() == cls.started_state
|
|
786
|
-
or 'READY' not in cls.get_state()
|
|
787
|
-
):
|
|
788
|
-
return False
|
|
789
|
-
return True
|
|
790
|
-
|
|
791
|
-
@classmethod
|
|
792
|
-
def initialize(cls) -> None:
|
|
793
|
-
logger.debug(f"{cls.__name__} | Initializing")
|
|
794
|
-
super().initialize()
|
|
795
|
-
if not cls.is_ready_to_start():
|
|
796
|
-
cls.reenable_cameras()
|
|
797
|
-
|
|
798
|
-
time.sleep(1)
|
|
799
|
-
|
|
800
|
-
@classmethod
|
|
801
|
-
def reenable_cameras(cls) -> None:
|
|
802
|
-
cls.get_proxy().release_cameras()
|
|
803
|
-
time.sleep(.2)
|
|
804
|
-
cls.get_proxy().enable_cameras()
|
|
805
|
-
time.sleep(.2)
|
|
806
|
-
cls.get_proxy().stop_capture()
|
|
807
|
-
time.sleep(.2)
|
|
808
|
-
cls.get_proxy().start_capture()
|
|
809
|
-
time.sleep(.2)
|
|
810
|
-
|
|
811
|
-
@classmethod
|
|
812
|
-
def generate_image_paths(cls) -> tuple[pathlib.Path, pathlib.Path]:
|
|
813
|
-
if not hasattr(cls, 'label') or not cls.label:
|
|
814
|
-
logger.warning(f"{cls.__name__} | `cls.label` not specified")
|
|
815
|
-
def path(side: str) -> pathlib.Path:
|
|
816
|
-
return cls.data_root / f"{datetime.datetime.now():%Y%m%d_%H%M%S}_{getattr(cls, 'label', 'image')}_{side}.png"
|
|
817
|
-
return path('left'), path('right')
|
|
818
|
-
|
|
819
|
-
@classmethod
|
|
820
|
-
def start(cls) -> None:
|
|
821
|
-
logger.debug(f"{cls.__name__} | Starting")
|
|
822
|
-
cls.latest_start = time.time()
|
|
823
|
-
left, right = cls.generate_image_paths()
|
|
824
|
-
cls.get_proxy().save_left_image(str(left))
|
|
825
|
-
cls.get_proxy().save_right_image(str(right))
|
|
826
|
-
time.sleep(.5)
|
|
827
|
-
for path, side in zip((left, right), ('Left', 'Right')):
|
|
828
|
-
if path.exists():
|
|
829
|
-
logger.debug(f"{cls.__name__} | {side} image saved to {path}")
|
|
830
|
-
else:
|
|
831
|
-
logger.debug(f"{cls.__name__} | {side} image capture request sent, but image not saved")
|
|
832
|
-
|
|
833
|
-
@classmethod
|
|
834
|
-
def finalize(cls) -> None:
|
|
835
|
-
logger.debug(f"{cls.__name__} | Finalizing")
|
|
836
|
-
counter = 0
|
|
837
|
-
while (
|
|
838
|
-
not (latest := cls.get_latest_data('*'))
|
|
839
|
-
or cls.is_started()
|
|
840
|
-
):
|
|
841
|
-
time.sleep(1)
|
|
842
|
-
counter += 1
|
|
843
|
-
if counter == 3:
|
|
844
|
-
cls.reenable_cameras()
|
|
845
|
-
break
|
|
846
|
-
cls.data_files.extend(latest)
|
|
847
|
-
logger.debug(f"{cls.__name__} | Images captured: {latest}")
|
|
848
|
-
|
|
849
|
-
@classmethod
|
|
850
|
-
def validate(cls):
|
|
851
|
-
if not (latest := cls.get_latest_data('*')) or len(latest) != 2:
|
|
852
|
-
raise AssertionError(f"{cls.__name__} | Expected 2 images, got {len(latest)}: {latest}")
|
|
853
|
-
|
|
854
|
-
@classmethod
|
|
855
|
-
def stop(cls):
|
|
856
|
-
logger.debug("%s | `stop()` not implemented", cls.__name__)
|
|
857
|
-
|
|
858
|
-
@classmethod
|
|
859
|
-
def pretest(cls):
|
|
860
|
-
with np_logging.debug():
|
|
861
|
-
logger.debug(f"{cls.__name__} | Pretest")
|
|
862
|
-
cls.label = 'pretest'
|
|
863
|
-
cls.initialize()
|
|
864
|
-
cls.test()
|
|
865
|
-
cls.start()
|
|
866
|
-
cls.finalize()
|
|
867
|
-
cls.validate()
|
|
868
|
-
logger.info(f"{cls.__name__} | Pretest passed")
|
|
869
|
-
|
|
870
|
-
class MVR(CamstimSyncShared):
|
|
871
|
-
|
|
872
|
-
# req proxy config - hardcode or overload ensure_config()
|
|
873
|
-
host: ClassVar[str] = np_config.Rig().Mon
|
|
874
|
-
port: ClassVar[int] = CONFIG['MVR']['port']
|
|
875
|
-
|
|
876
|
-
re_aux: re.Pattern = re.compile("aux|USB!|none", re.IGNORECASE)
|
|
877
|
-
|
|
878
|
-
@classmethod
|
|
879
|
-
def is_connected(cls) -> bool:
|
|
880
|
-
if not utils.is_online(cls.host):
|
|
881
|
-
cls.exc = ConnectionError(
|
|
882
|
-
f"No response from {cls.host}: may be offline or unreachable"
|
|
883
|
-
)
|
|
884
|
-
return False
|
|
885
|
-
if not cls.get_proxy()._mvr_connected:
|
|
886
|
-
cls.exc = ConnectionError(f"MVR likely not running on {cls.host}")
|
|
887
|
-
return False
|
|
888
|
-
try:
|
|
889
|
-
_ = cls.get_camera_status()
|
|
890
|
-
except ConnectionError as exc:
|
|
891
|
-
cls.exc = exc
|
|
892
|
-
return False
|
|
893
|
-
return True
|
|
894
|
-
|
|
895
|
-
@classmethod
|
|
896
|
-
def initialize(cls) -> None:
|
|
897
|
-
with contextlib.suppress(AttributeError):
|
|
898
|
-
del cls.proxy
|
|
899
|
-
cls.proxy = cls.get_proxy()
|
|
900
|
-
cls.test()
|
|
901
|
-
cls.configure_cameras()
|
|
902
|
-
_ = cls.get_proxy().read() # empty buffer
|
|
903
|
-
if isinstance(cls, Startable) and not cls.is_ready_to_start():
|
|
904
|
-
if cls.is_started() and isinstance(cls, Stoppable):
|
|
905
|
-
cls.stop()
|
|
906
|
-
if isinstance(cls, Finalizable):
|
|
907
|
-
cls.finalize()
|
|
908
|
-
if not cls.is_ready_to_start():
|
|
909
|
-
logger.warning(
|
|
910
|
-
"%s not ready to start: %s", cls.__name__, cls.get_state()
|
|
911
|
-
)
|
|
912
|
-
return
|
|
913
|
-
if cls.data_root:
|
|
914
|
-
cls.data_files = []
|
|
915
|
-
cls.initialization = time.time()
|
|
916
|
-
logger.info("%s initialized: ready for use", cls.__name__)
|
|
917
|
-
|
|
918
|
-
@classmethod
|
|
919
|
-
def shutdown(cls) -> None:
|
|
920
|
-
cls.get_proxy()._mvr_sock.close()
|
|
921
|
-
del cls.proxy
|
|
922
|
-
|
|
923
|
-
@classmethod
|
|
924
|
-
def get_proxy(cls) -> mvr_connector.MVRConnector:
|
|
925
|
-
with contextlib.suppress(AttributeError):
|
|
926
|
-
return cls.proxy
|
|
927
|
-
cls.ensure_config()
|
|
928
|
-
logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
|
|
929
|
-
cls.proxy = mvr_connector.MVRConnector({"host": cls.host, "port": cls.port})
|
|
930
|
-
cls.proxy._mvr_sock.settimeout(cls.timeout)
|
|
931
|
-
return cls.get_proxy()
|
|
932
|
-
|
|
933
|
-
@classmethod
|
|
934
|
-
def get_cameras(cls) -> list[dict[str, str]]:
|
|
935
|
-
if not hasattr(cls, "all_cameras"):
|
|
936
|
-
cls.get_proxy().read()
|
|
937
|
-
cls.all_cameras = cls.get_proxy().request_camera_ids()[0]["value"]
|
|
938
|
-
return cls.all_cameras
|
|
939
|
-
|
|
940
|
-
@classmethod
|
|
941
|
-
def get_camera_status(cls) -> list[dict[str, str]]:
|
|
942
|
-
_ = cls.get_proxy().read() # empty buffer
|
|
943
|
-
_ = cls.get_proxy()._send({"mvr_request": "get_camera_status"})
|
|
944
|
-
for msg in reversed(cls.get_proxy().read()):
|
|
945
|
-
if msg.get("mvr_response", "") == "get_camera_status" and (
|
|
946
|
-
cams := msg.get("value", [])
|
|
947
|
-
):
|
|
948
|
-
break
|
|
949
|
-
else:
|
|
950
|
-
logger.error("Could not get camera status from %s", cls.host)
|
|
951
|
-
raise ConnectionError(f"Could not get camera status from {cls.host}")
|
|
952
|
-
return [
|
|
953
|
-
_
|
|
954
|
-
for _ in cams
|
|
955
|
-
if any(_["camera_id"] == __["id"] for __ in cls.get_cameras())
|
|
956
|
-
]
|
|
957
|
-
|
|
958
|
-
@classmethod
|
|
959
|
-
def get_state(cls) -> ProxyState:
|
|
960
|
-
if not cls.is_connected():
|
|
961
|
-
return "", "MVR_CLOSED"
|
|
962
|
-
status = cls.get_camera_status()
|
|
963
|
-
# cam status could change between calls, so only get once
|
|
964
|
-
if any(not _["is_open"] for _ in status):
|
|
965
|
-
return "", "CAMERA_CLOSED"
|
|
966
|
-
if any(not _["is_streaming"] for _ in status):
|
|
967
|
-
return "", "CAMERA_NOT_STREAMING"
|
|
968
|
-
if cls.get_cameras_recording(status):
|
|
969
|
-
return "BUSY", "RECORDING"
|
|
970
|
-
return "READY", ""
|
|
971
|
-
|
|
972
|
-
@classmethod
|
|
973
|
-
def get_cameras_recording(cls, status=None) -> list[dict[str, str]]:
|
|
974
|
-
return [_ for _ in status or cls.get_camera_status() if _["is_recording"]]
|
|
975
|
-
|
|
976
|
-
@classmethod
|
|
977
|
-
def is_ready_to_start(cls) -> bool:
|
|
978
|
-
if cls.is_started():
|
|
979
|
-
return False
|
|
980
|
-
return all(
|
|
981
|
-
_["is_open"] and _["is_streaming"] and not _["is_recording"]
|
|
982
|
-
for _ in cls.get_camera_status()
|
|
983
|
-
)
|
|
984
|
-
|
|
985
|
-
@classmethod
|
|
986
|
-
def configure_cameras(cls) -> None:
|
|
987
|
-
"Set MVR to record video from subset of all cameras, via `get_cameras` (implemented by subclass)"
|
|
988
|
-
cam_ids = [_["id"] for _ in cls.get_cameras()]
|
|
989
|
-
cls.get_proxy().define_hosts(cam_ids)
|
|
990
|
-
cls.get_proxy().start_display()
|
|
991
|
-
|
|
992
|
-
|
|
993
|
-
class ImageMVR(MVR):
|
|
994
|
-
|
|
995
|
-
gb_per_hr: ClassVar[int | float] = CONFIG['ImageMVR']["gb_per_hr"]
|
|
996
|
-
min_rec_hr: ClassVar[int | float] = CONFIG['ImageMVR']["min_rec_hr"]
|
|
997
|
-
|
|
998
|
-
label: ClassVar[str]
|
|
999
|
-
"Rename file after capture to include label"
|
|
1000
|
-
|
|
1001
|
-
# TODO ready state is if Aux cam is_open
|
|
1002
|
-
@classmethod
|
|
1003
|
-
def get_cameras(cls) -> list[dict[str, str]]:
|
|
1004
|
-
"Aux cam only"
|
|
1005
|
-
cams = super().get_cameras()
|
|
1006
|
-
return [_ for _ in cams if cls.re_aux.search(_["label"])]
|
|
1007
|
-
|
|
1008
|
-
@classmethod
|
|
1009
|
-
def start(cls):
|
|
1010
|
-
if not cls.is_ready_to_start():
|
|
1011
|
-
# TODO display state, wait on user input to continue
|
|
1012
|
-
logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
|
|
1013
|
-
raise AssertionError(
|
|
1014
|
-
f"{cls.__name__} not ready to start: {cls.get_state()}"
|
|
1015
|
-
)
|
|
1016
|
-
cls.latest_start = time.time()
|
|
1017
|
-
cls.get_proxy().take_snapshot()
|
|
1018
|
-
|
|
1019
|
-
@classmethod
|
|
1020
|
-
def stop(cls):
|
|
1021
|
-
"Overload parent method to do nothing"
|
|
1022
|
-
pass
|
|
1023
|
-
|
|
1024
|
-
@classmethod
|
|
1025
|
-
def is_started(cls) -> bool:
|
|
1026
|
-
for msg in cls.get_proxy().read():
|
|
1027
|
-
if msg.get("mvr_broadcast", "") == "snapshot_converted":
|
|
1028
|
-
return True
|
|
1029
|
-
if msg.get("mvr_broadcast", "") == "snapshot_failed":
|
|
1030
|
-
return False
|
|
1031
|
-
return False
|
|
1032
|
-
|
|
1033
|
-
@classmethod
|
|
1034
|
-
def verify(cls):
|
|
1035
|
-
"Overload parent method to do nothing"
|
|
1036
|
-
pass
|
|
1037
|
-
|
|
1038
|
-
# TODO
|
|
1039
|
-
@classmethod
|
|
1040
|
-
def validate(cls) -> None:
|
|
1041
|
-
logger.warning("%s.validate() not implemented", cls.__name__)
|
|
1042
|
-
|
|
1043
|
-
-    @classmethod
-    def finalize(cls) -> None:
-        logger.debug("Finalizing %s", cls.__name__)
-        t0 = time.time()
-        timedout = lambda: time.time() > t0 + 10
-        while (
-            cls.is_started()
-            or not cls.is_ready_to_start()
-            or not cls.get_latest_data("*")
-            or cls.get_latest_data(".bmp")
-        ) and not timedout():
-            logger.debug("Waiting for %s to finish processing", cls.__name__)
-            time.sleep(1)  # TODO add backoff module
-        if timedout():
-            logger.warning(
-                "Timed out waiting for %s to finish processing", cls.__name__
-            )
-            return
-        if not hasattr(cls, "data_files") or not cls.data_files:
-            cls.data_files = []
-        new = cls.get_latest_data("*")
-        if hasattr(cls, "label") and cls.label:
-            new = [_.rename(_.with_stem(f"{_.stem}_{cls.label}")) for _ in new]
-        cls.data_files.extend(new)
-        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
-
-
-class VideoMVR(MVR):
-
-    pretest_duration_sec: ClassVar[int | float] = CONFIG['VideoMVR']["pretest_duration_sec"]
-    gb_per_hr: ClassVar[int | float] = CONFIG['VideoMVR']["gb_per_hr"]
-    min_rec_hr: ClassVar[int | float] = CONFIG['VideoMVR']["min_rec_hr"]
-
-    raw_suffix: ClassVar[str] = ".mp4"
-
-    started_state = ("BUSY", "RECORDING")
-    sync_path: Optional[pathlib.Path] = None
-
-    @classmethod
-    def get_cameras(cls) -> list[dict[str, str]]:
-        "All available cams except Aux"
-        cams = super().get_cameras()
-        # check for camera labels with known Aux cam names
-        return [_ for _ in cams if cls.re_aux.search(_["label"]) is None]
-
-    @classmethod
-    def start(cls) -> None:
-        logger.info("%s | Starting recording", cls.__name__)
-        cls.latest_start = time.time()
-        cls.get_proxy().start_record(record_time=24 * 60 * 60,)  # sec
-
-    @classmethod
-    def verify(cls) -> None:
-        "Assert data exists since latest start, or raise AssertionError."
-        # files grow infrequently while MVR's recording - checking their size
-        # is unreliable
-        if not cls.is_started():
-            logger.warning(
-                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
-            )
-            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
-        if datetime.datetime.fromtimestamp(
-            cls.latest_start
-        ) < datetime.datetime.now() - datetime.timedelta(
-            seconds=cls.pretest_duration_sec
-        ):
-            time.sleep(cls.pretest_duration_sec)
-        if not (files := cls.get_latest_data()) or len(files) < len(
-            cls.get_cameras_recording()
-        ):
-            raise AssertionError(
-                f"{cls.__name__} files do not match the number of cameras: {files}"
-            )
-        logger.info(
-            "%s | Verified: %s cameras recording to disk", cls.__name__, len(files)
-        )
-    @classmethod
-    def stop(cls) -> None:
-        cls.get_proxy().stop_record()
-        logger.info("%s | Stopped recording", cls.__name__)
-
-    @classmethod
-    def is_started(cls) -> bool:
-        if len(state := cls.get_state()) and all(
-            msg in state for msg in cls.started_state
-        ):
-            return True
-        return False
-
-    @classmethod
-    def finalize(cls) -> None:
-        logger.debug("Finalizing %s", cls.__name__)
-        if cls.is_started():
-            cls.stop()
-        t0 = time.time()
-        timedout = lambda: time.time() > t0 + 30
-        while not cls.is_ready_to_start() and not timedout():
-            logger.debug("Waiting for %s to finish processing", cls.__name__)
-            time.sleep(1)  # TODO add backoff module
-        if timedout():
-            logger.warning(
-                "Timed out waiting for %s to finish processing", cls.__name__
-            )
-            return
-        if not hasattr(cls, "data_files"):
-            cls.data_files = []
-        cls.data_files.extend(
-            new := (cls.get_latest_data("*.mp4") + cls.get_latest_data("*.json"))
-        )
-        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
-
-    @classmethod
-    def validate(cls) -> None:
-        tempdir = pathlib.Path(tempfile.gettempdir())
-        tempfiles: list[pathlib.Path] = []
-        # currently can't pass individual files to mvrdataset - just a dir
-        for file in itertools.chain(cls.get_latest_data("*.mp4"), cls.get_latest_data("*.json")):
-            np_tools.copy(file, t := tempdir / file.name)
-            tempfiles.append(t)
-        npc_mvr.MVRDataset(
-            tempdir,
-            getattr(cls, "sync_path", None),
-        )
-        logger.info(f"Validated {len(tempfiles)} video/info files {'with' if getattr(cls, 'sync_path', None) else 'without'} sync")
-        for file in tempfiles:
-            file.unlink(missing_ok=True)
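The `finalize` methods above poll with a fixed `time.sleep(1)` and a hand-rolled `timedout` lambda; the repeated `# TODO add backoff module` comments point at the intended replacement. A minimal sketch of such a helper, with hypothetical names, not part of np_services:

    import time

    def wait_until(condition, timeout_sec: float = 30, max_interval_sec: float = 8) -> bool:
        """Poll `condition` with exponential backoff; True if it became truthy in time."""
        t0 = time.time()
        interval = 1.0
        while time.time() - t0 < timeout_sec:
            if condition():
                return True
            time.sleep(interval)
            interval = min(interval * 2, max_interval_sec)  # back off up to a cap
        return False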
-class JsonRecorder:
-    "Just needs a `start` method that calls `write()`."
-
-    log_name: ClassVar[str]
-    log_root: ClassVar[pathlib.Path]
-
-    @classmethod
-    @abc.abstractmethod
-    def start(cls) -> None:
-        pass
-
-    @classmethod
-    def pretest(cls) -> None:
-        with np_logging.debug():
-            cls.initialize()
-            cls.start()
-            cls.validate()
-        logger.info("%s | Pretest passed", cls.__name__)
-
-    @classmethod
-    def ensure_config(cls) -> None:
-        config = CONFIG.get(
-            __class__.__name__, {}
-        )  # class where this function is defined
-        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different
-
-        if not hasattr(cls, "log_name"):
-            cls.log_name = config.get("log_name", "{}_.json")
-        cls.log_name = cls.log_name.format(
-            datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
-        )
-
-        if not hasattr(cls, "log_root"):
-            cls.log_root = config.get("log_root", ".")
-        cls.log_root = pathlib.Path(cls.log_root).resolve()
-
-    @classmethod
-    def initialize(cls) -> None:
-        logger.debug("%s initializing", __class__.__name__)
-        cls.ensure_config()
-        cls.initialization = time.time()
-        log = (cls.log_root / cls.log_name).with_suffix(".json")
-        log.parent.mkdir(parents=True, exist_ok=True)
-        log.touch(exist_ok=True)
-        if log.read_text().strip() == "":
-            log.write_text("{}")
-        cls.all_files = [log]
-        cls.test()
-
-    @classmethod
-    def test(cls) -> None:
-        logger.debug("%s testing", __class__.__name__)
-        try:
-            _ = cls.get_current_log().read_bytes()
-        except OSError as exc:
-            raise TestError(
-                f"{__class__.__name__} failed to open {cls.get_current_log()}"
-            ) from exc
-
-    @classmethod
-    def get_current_log(cls) -> pathlib.Path:
-        if not hasattr(cls, "initialization"):
-            cls.initialize()
-        return cls.all_files[-1]
-
-    @classmethod
-    def read(cls) -> dict[str, str | float]:
-        try:
-            data = json.loads(cls.get_current_log().read_bytes())
-        except json.JSONDecodeError as exc:
-            if cls.get_current_log().stat().st_size:
-                raise
-            logger.debug("%s | Error encountered reading file %s: %r", cls.__name__, cls.get_current_log(), exc)
-            data = {}  # file was empty
-        else:
-            logger.debug("%s | Read from %s", cls.__name__, cls.get_current_log())
-        return data
-
-    @classmethod
-    def write(cls, value: dict) -> None:
-        try:
-            data = cls.read()
-        except json.JSONDecodeError:
-            data = {}
-            file = cls.get_current_log().with_suffix(".new.json")
-            file.touch()
-            cls.all_files.append(file)
-        else:
-            file = cls.get_current_log()
-        np_config.merge(data, value)
-        file.write_text(json.dumps(data, indent=4, sort_keys=False, default=str))
-        logger.debug("%s wrote to %s", cls.__name__, file)
-
-    @classmethod
-    def validate(cls) -> None:
-        if not (log := cls.read()):
-            cls.exc = TestError(
-                f"{cls.__name__} failed to validate because log is empty: {cls.get_current_log()}"
-            )
-            logger.error(
-                "%s failed to validate: log is empty %s",
-                cls.__name__,
-                cls.get_current_log(),
-                exc_info=cls.exc,
-            )
-        logger.debug("%s validated", __class__.__name__)
-
-
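Per the docstring, a concrete `JsonRecorder` only needs a `start` that calls `write()`; `write()` re-reads the current log, merges the new entry via `np_config.merge`, and rewrites the file rather than overwriting it. A minimal hypothetical subclass to illustrate:

    import datetime

    class NoteRecorder(JsonRecorder):
        "Hypothetical recorder: appends a timestamped note on each start()."

        @classmethod
        def start(cls) -> None:
            # merged into the existing JSON log, keyed by capture time
            cls.write({str(datetime.datetime.now()): {"note": "experiment started"}})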
-class YamlRecorder(JsonRecorder):
-    @classmethod
-    def test(cls) -> None:
-        logger.debug("%s testing", __class__.__name__)
-        super().test()
-        try:
-            import yaml
-        except ImportError as exc:
-            raise TestError(f"{__class__.__name__} failed to import yaml") from exc
-
-    @classmethod
-    def finalize(cls) -> None:
-        logger.debug("Finalizing %s", __class__.__name__)
-        log = json.loads(cls.get_current_log().read_bytes())
-        with contextlib.suppress(
-            AttributeError, OSError
-        ):  # if this fails we still have the json file
-            # write a yaml copy alongside the json log
-            with cls.get_current_log().with_suffix(".yaml").open("w") as f:
-                yaml.dump(log, f)
-
-
-class NewScaleCoordinateRecorder(JsonRecorder):
-    "Gets current manipulator coordinates and stores them in a file with a timestamp."
-
-    host: ClassVar[str] = np_config.Rig().Mon
-    data_root: ClassVar[pathlib.Path] = CONFIG['NewScaleCoordinateRecorder']['data']
-    data_name: ClassVar[str] = CONFIG['NewScaleCoordinateRecorder']['data_name']
-    data_fieldnames: ClassVar[Sequence[str]] = CONFIG['NewScaleCoordinateRecorder']['data_fieldnames']
-    data_files: ClassVar[list[pathlib.Path]] = []
-    "Files to be copied after exp"
-
-    max_z_travel: ClassVar[int] = CONFIG['NewScaleCoordinateRecorder']['max_z_travel']
-    num_probes: ClassVar[int] = 6
-    log_name: ClassVar[str] = "newscale_coords_{}.json"
-    log_root: ClassVar[pathlib.Path] = pathlib.Path(tempfile.gettempdir()).resolve()
-    label: ClassVar[str] = ""
-    "A label to tag each entry with"
-    latest_start: ClassVar[int] = 0
-    "`time.time()` when the service was last started via `start()`."
-    log_time_fmt: str = CONFIG['NewScaleCoordinateRecorder']['log_time_fmt']
-
-    @classmethod
-    def pretest(cls) -> None:
-        cls.label = 'pretest'
-        super().pretest()
-
-    @classmethod
-    def get_current_data(cls) -> pathlib.Path:
-        cls.ensure_config()
-        return cls.data_root / cls.data_name
-
-    @classmethod
-    def last_logged_coords_csv(cls) -> dict[str, float]:
-        "Get the most recent coordinates from the log file using the csv parser in the stdlib."
-        with cls.get_current_data().open("r") as _:
-            reader = csv.DictReader(_, fieldnames=cls.data_fieldnames)
-            rows = list(reader)
-        last_moved_label = cls.data_fieldnames[0]
-        coords = {}
-        for row in reversed(rows):  # search for the most recent coordinates
-            if len(coords.keys()) == cls.num_probes:
-                break  # we have an entry for each probe
-            if (m := row.pop(cls.data_fieldnames[1]).strip()) not in coords:
-                coords[m] = {}
-                for k, v in row.items():
-                    if "virtual" in k:
-                        continue
-                    if k == last_moved_label:
-                        v = datetime.datetime.strptime(v, cls.log_time_fmt)
-                    else:
-                        v = v.strip()
-                        with contextlib.suppress(ValueError):
-                            v = float(v)
-                    coords[m].update({k: v})
-        return coords
-
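`last_logged_coords_csv` assumes `data_fieldnames[0]` is a last-moved timestamp and `data_fieldnames[1]` is the manipulator serial number, with one row appended per movement and the newest rows last. A self-contained sketch of that reverse scan, with made-up fieldnames and rows (the real ones come from CONFIG):

    import csv, io

    fieldnames = ("last_moved", "serial_number", "x", "y", "z")  # hypothetical
    log = io.StringIO(
        "2024-01-02_10:00:00, SN32141, 100.0, 200.0, 3000.0\n"
        "2024-01-02_10:05:00, SN32141, 110.0, 210.0, 2990.0\n"
    )
    rows = list(csv.DictReader(log, fieldnames=fieldnames))
    newest = {}
    for row in reversed(rows):  # newest rows are appended last
        newest.setdefault(row["serial_number"].strip(), row)
    print(newest["SN32141"]["z"])  # ' 2990.0' - the most recent entry wins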
-    @classmethod
-    def last_logged_coords_pd(cls) -> dict[str, float]:
-        "Get the most recent coordinates from the log file using pandas."
-        coords = {}
-        manipulator_label = cls.data_fieldnames[1]
-        last_moved_label = cls.data_fieldnames[0]
-        df = pd.read_csv(cls.get_current_data(), names=cls.data_fieldnames, parse_dates=[last_moved_label])
-        # group by manipulator_label and get the maximum value in last_moved_label for each group
-        # (i.e. the most recent entry for each manipulator)
-        last_moved = df.loc[
-            df.groupby(manipulator_label)[last_moved_label].idxmax()
-        ].set_index(manipulator_label).sort_values(last_moved_label, ascending=False)
-        for serial_number, row in last_moved.iloc[:cls.num_probes].iterrows():
-            new = {key: row[key] for key in cls.data_fieldnames if (key != manipulator_label and 'virtual' not in key)}
-            new[last_moved_label] = row[last_moved_label].to_pydatetime()
-            coords[str(serial_number).strip()] = new
-        return coords
-
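The pandas path relies on `groupby(...)[col].idxmax()` returning, per group, the row index of the most recent timestamp. A self-contained demonstration of that idiom with toy data:

    import pandas as pd

    df = pd.DataFrame({
        "serial_number": ["A", "B", "A"],
        "last_moved": pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"]),
        "z": [10.0, 20.0, 30.0],
    })
    # index of the newest row within each serial_number group:
    latest = df.loc[df.groupby("serial_number")["last_moved"].idxmax()]
    print(latest.set_index("serial_number")["z"].to_dict())  # {'A': 30.0, 'B': 20.0}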
-    @classmethod
-    def convert_serial_numbers_to_probe_labels(cls, coords: dict[str, float]) -> None:
-        for k, v in CONFIG[cls.__name__].get("probe_to_serial_number", {}).items():
-            if v in coords:
-                coords[k] = coords.pop(v)
-                coords[k]['serial_number'] = v
-
-    @classmethod
-    def get_coordinates(cls) -> dict[str, float]:
-        try:
-            import pandas as pd
-        except ImportError:
-            coords = cls.last_logged_coords_csv()
-        else:
-            coords = cls.last_logged_coords_pd()
-
-        def adjust_z_travel(coords):
-            for v in coords.values():
-                if 'z' in v:
-                    v['z'] = cls.max_z_travel - v['z']
-        adjust_z_travel(coords)
-        cls.convert_serial_numbers_to_probe_labels(coords)
-        coords["label"] = cls.label
-        logger.debug("%s | Retrieved coordinates: %s", cls.__name__, coords)
-        return coords
-
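`adjust_z_travel` converts a raw motor reading into depth by subtracting it from the maximum travel. With a hypothetical `max_z_travel` of 6000, a raw reading of 2000 becomes 6000 - 2000 = 4000:

    max_z_travel = 6_000  # hypothetical value; the real one comes from CONFIG
    coords = {"probeA": {"z": 2_000.0}}
    for v in coords.values():
        if "z" in v:
            v["z"] = max_z_travel - v["z"]
    assert coords["probeA"]["z"] == 4_000.0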
-    @classmethod
-    def write_to_platform_json(cls):
-        coords = cls.get_coordinates()
-        for k, v in coords.items():
-            if isinstance(v, Mapping) and (last_moved := v.get('last_moved')):
-                del coords[k]['last_moved']
-                del coords[k]['serial_number']
-                continue
-            # if last_moved is kept, then normalize it depending on csv/pd method:
-            match last_moved:
-                case str():
-                    timestamp = datetime.datetime.strptime(last_moved, cls.log_time_fmt)
-                case datetime.datetime():
-                    timestamp = last_moved
-            coords[k]['last_moved'] = np_config.normalize_time(timestamp)
-
-        # rearrange so `label` is top-level key, or use capture-timestamp if no label
-        platform_json = np_session.PlatformJson(cls.get_current_log())
-        platform_json_entry = copy.deepcopy(platform_json.manipulator_coordinates)
-        coords = {str(coords.pop('label', np_config.normalize_time(cls.latest_start))): coords}
-        logger.debug("%s | Adding to platform json: %s", cls.__name__, coords)
-        platform_json.manipulator_coordinates = np_config.merge(platform_json_entry, coords)
-        if (csv := cls.get_current_data()) not in cls.data_files:
-            cls.data_files.append(csv)
-
-    @classmethod
-    def start(cls):
-        cls.latest_start = time.time()
-        if 'platformD1' in cls.log_name:
-            cls.write_to_platform_json()
-        else:
-            cls.write({str(datetime.datetime.now()): cls.get_coordinates()})
-
-    @classmethod
-    def test(cls) -> None:
-        super().test()
-        logger.debug("%s | Testing", __class__.__name__)
-        try:
-            _ = cls.get_current_data().open("r")
-        except OSError as exc:
-            raise TestError(
-                f"{cls.__name__} failed to open {cls.get_current_data()}"
-            ) from exc
-        try:
-            _ = cls.get_coordinates()
-        except Exception as exc:
-            raise TestError(f"{cls.__name__} failed to get coordinates") from exc
-        else:
-            logger.info("%s | Test passed", cls.__name__)
-
-    @classmethod
-    def ensure_config(cls) -> None:
-        super().ensure_config()
-
-        if CONFIG.get("services", {}):
-            config = CONFIG["services"].get(__class__.__name__, {})
-            config.update(**CONFIG["services"].get(cls.__name__, {}))
-        else:
-            config = CONFIG.get(
-                __class__.__name__, {}
-            )  # class where this function is defined
-            config.update(
-                **CONFIG.get(cls.__name__, {})
-            )  # the calling class, if different
-
-        if not hasattr(cls, "host"):
-            cls.host = config["host"]
-
-        # for resulting data
-        if (
-            not hasattr(cls, "data_root")
-            or cls.host not in pathlib.Path(cls.data_root).parts
-        ):
-            relative_path = config["data"]
-            if relative_path:
-                root = pathlib.Path(f"//{cls.host}/{relative_path}")
-                try:
-                    _ = root.exists()
-                except OSError as exc:
-                    cls.exc = exc
-                    logger.exception(
-                        "Error accessing %s data path: %s", cls.__name__, root
-                    )
-                    raise FileNotFoundError(
-                        f"{cls.__name__} data path is not accessible: {root}"
-                    ) from exc
-                else:
-                    cls.data_root = root
-
-        if not hasattr(cls, "data_name"):
-            cls.data_name = config["data_name"]
-        if not hasattr(cls, "data_fieldnames"):
-            cls.data_fieldnames = config["data_fieldnames"]
-
-
+"""
+Proxy classes for interacting with devices via zro/zmq.
+
+Proxy class names must match the name of the proxy key in the config dict.
+"""
+import abc
+import contextlib
+import copy
+import csv
+import datetime
+import functools
+import itertools
+import json  # loading config from Sync proxy will instantiate datetime objects
+import logging
+import pathlib
+import re
+import tempfile
+import time
+from typing import Any, ClassVar, Literal, Mapping, Optional, Sequence
+
+import fabric
+import np_config
+import np_logging
+import np_session
+import npc_stim
+import npc_sync
+import npc_mvr
+import np_tools
+import yaml
+import pandas as pd
+
+import np_services.resources.mvr_connector as mvr_connector
+import np_services.utils as utils
+import np_services.resources.zro as zro
+from np_services.protocols import *
+
+logger = np_logging.getLogger(__name__)
+
+CONFIG = utils.config_from_zk()
+
+ProxyState = tuple[Literal["", "READY", "BUSY"], str]
+
+
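The module docstring requires each proxy class name to match a key in the ZooKeeper-backed dict returned by `utils.config_from_zk()`. An illustrative sketch of the expected shape; the keys are taken from this file, but the values are invented:

    CONFIG = {
        "Proxy": {"timeout": 10.0, "serialization": "json"},
        "VideoMVR": {"pretest_duration_sec": 30, "gb_per_hr": 14.0, "min_rec_hr": 3.0},
        "MVR": {"port": 50000},
    }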
+class Proxy(abc.ABC):
+    # req proxy config - hardcode or overload ensure_config()
+    host: ClassVar[str]
+    port: ClassVar[int]
+    timeout: ClassVar[float]
+    serialization: ClassVar[Literal["json", "pickle"]]
+
+    # if a program needs to be launched (e.g. via RSC):
+    rsc_app_id: str
+
+    # if device records:
+    gb_per_hr: ClassVar[int | float]
+    min_rec_hr: ClassVar[int | float]
+    pretest_duration_sec: ClassVar[int | float]
+
+    # for resulting data, if device records:
+    data_root: ClassVar[Optional[pathlib.Path]] = None
+    data_files: ClassVar[Optional[Sequence[pathlib.Path]]] = None
+
+    # info
+    exc: ClassVar[Optional[Exception]] = None
+
+    latest_start: ClassVar[float | int] = 0
+    "`time.time()` when the service was last started via `start()`."
+
+    @classmethod
+    def ensure_config(cls) -> None:
+        """Updates any missing parameters for class proxy.
+
+        Is called in `get_proxy()` so any time we need the proxy, we have a
+        correct config, without remembering to run `initialize()` or some such.
+        """
+        config = CONFIG.get(
+            __class__.__name__, {}
+        )  # class where this function is defined
+        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different
+
+        # for proxy (reqd):
+        if not hasattr(cls, "host"):
+            cls.host = config["host"]
+        if not hasattr(cls, "port"):
+            cls.port = int(config["port"])
+        if not hasattr(cls, "timeout"):
+            cls.timeout = float(config.get("timeout", 10.0))
+        if not hasattr(cls, "serialization"):
+            cls.serialization = config.get("serialization", "json")
+
+        # for pretest (reqd, not used if device doesn't record)
+        if not hasattr(cls, "pretest_duration_sec"):
+            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
+        if not hasattr(cls, "gb_per_hr"):
+            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
+        if not hasattr(cls, "min_rec_hr"):
+            cls.min_rec_hr = config.get("min_rec_hr", 3.0)
+
+        # for resulting data (optional):
+        if not cls.data_root or cls.host not in cls.data_root.parts:
+            relative_path = config.get("data", None)
+            if relative_path:
+                root = pathlib.Path(f"//{cls.host}/{relative_path}")
+                try:
+                    _ = root.exists()
+                except OSError as exc:
+                    cls.exc = exc
+                    logger.exception(
+                        "Error accessing %s data path: %s", cls.__name__, root
+                    )
+                    raise FileNotFoundError(
+                        f"{cls.__name__} data path is not accessible: {root}"
+                    ) from exc
+                else:
+                    cls.data_root = root
+        if hasattr(cls, "data_root") and cls.data_root:
+            cls.data_root.mkdir(parents=True, exist_ok=True)
+
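`ensure_config` layers the calling subclass's config entry over the defining class's entry, so subclass keys win. The same two-step merge in isolation, with hypothetical values (note that `dict.get` returns the shared mapping, so the `update` also mutates `CONFIG["Proxy"]` in place):

    CONFIG = {"Proxy": {"timeout": 10.0}, "VideoMVR": {"timeout": 30.0, "gb_per_hr": 14.0}}
    config = CONFIG.get("Proxy", {})             # class where the method is defined
    config.update(**CONFIG.get("VideoMVR", {}))  # the calling subclass overrides
    assert config == {"timeout": 30.0, "gb_per_hr": 14.0}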
+    @classmethod
+    def launch(cls) -> None:
+        utils.start_rsc_app(cls.host, cls.rsc_app_id)
+
+    @classmethod
+    def kill(cls) -> None:
+        utils.kill_rsc_app(cls.host, cls.rsc_app_id)
+
+    @classmethod
+    def initialize(cls) -> None:
+        cls.launch()
+        with contextlib.suppress(AttributeError):
+            del cls.proxy
+        cls.proxy = cls.get_proxy()
+        if isinstance(cls, Startable) and not cls.is_ready_to_start():
+            if isinstance(cls, Finalizable):
+                cls.finalize()
+            if not cls.is_ready_to_start():
+                logger.warning(
+                    "%s not ready to start: %s", cls.__name__, cls.get_state()
+                )
+                return
+        if cls.data_root:
+            cls.data_files = []
+            cls.sync_path = None
+        cls.initialization = time.time()
+        logger.info("%s(%s) initialized: ready for use", __class__.__name__, cls.__name__)
+
+    @classmethod
+    def test(cls) -> None:
+        "Quickly verify service is working and ready for use, or raise `TestError`."
+        logger.debug("Testing %s proxy", cls.__name__)
+        if not cls.is_connected():
+            raise TestError(
+                f"{cls.__name__} not connected to {cls.host}:{cls.port}"
+            ) from cls.exc
+        logger.debug(
+            "%s proxy connection to %s:%s confirmed", cls.__name__, cls.host, cls.port
+        )
+        gb = cls.get_required_disk_gb()
+        if not cls.is_disk_space_ok():
+            raise TestError(
+                f"{cls.__name__} free disk space on {cls.data_root.drive} doesn't meet minimum of {gb} GB"
+            ) from cls.exc
+        logger.debug("%s(%s) tested successfully", __class__.__name__, cls.__name__)
+
+    @classmethod
+    def get_proxy(cls) -> zro.DeviceProxy:
+        "Return a proxy to the service without re-creating unnecessarily."
+        with contextlib.suppress(AttributeError):
+            return cls.proxy
+        cls.ensure_config()
+        logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
+        cls.proxy = zro.DeviceProxy(cls.host, cls.port, cls.timeout, cls.serialization)
+        return cls.get_proxy()
+
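`get_proxy` caches the connection as a class attribute and uses `contextlib.suppress(AttributeError)` as a create-once guard: the fast path returns the existing attribute, and a missing attribute falls through to construction. The pattern in miniature, with a hypothetical stand-in factory:

    import contextlib

    def make_connection():
        return object()  # stand-in for zro.DeviceProxy

    class Service:
        @classmethod
        def get_proxy(cls):
            with contextlib.suppress(AttributeError):
                return cls.proxy       # fast path: already created
            cls.proxy = make_connection()
            return cls.get_proxy()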
+    @classmethod
+    def get_state(cls) -> ProxyState | dict:
+        "Dict may be deprecated: is no longer returned by Sync or Camstim proxies."
+        state = cls.get_proxy().get_state()
+        logger.debug("%s state: %s", cls.__name__, state)
+        return state
+
+    @classmethod
+    def get_latest_data(
+        cls: Recorder, glob: Optional[str] = None, subfolders: str = ""
+    ) -> list[pathlib.Path] | None:
+        cls.ensure_config()
+        if not cls.data_root:
+            return None
+        if subfolders == "/":  # can alter path to drive root
+            subfolders = ""
+        if not glob:
+            glob = f"*{cls.raw_suffix}" if hasattr(cls, "raw_suffix") else "*"
+        if not hasattr(cls, "latest_start"):
+            data_paths = utils.get_files_created_between(
+                cls.data_root / subfolders, glob
+            )
+            if not data_paths:
+                return None
+            return [
+                max(data_paths, key=lambda x: x.stat().st_mtime)
+            ]
+        return utils.get_files_created_between(
+            cls.data_root / subfolders, glob, cls.latest_start
+        )
+
+    @classmethod
+    def get_required_disk_gb(cls) -> float:
+        "Return the minimum disk space required prior to start (to .1 GB). Returns `0.0` if service generates no data."
+        cls.ensure_config()
+        if not isinstance(cls, Startable):
+            return 0.0
+        return round(cls.min_rec_hr * cls.gb_per_hr, 1)
+
+    @classmethod
+    def is_disk_space_ok(cls) -> bool:
+        required = cls.get_required_disk_gb()
+        if required == 0.0:
+            return True
+        try:
+            free = utils.free_gb(cls.data_root)
+        except FileNotFoundError as exc:
+            cls.exc = exc
+            logger.exception(
+                f"{cls.__name__} data path not accessible: {cls.data_root}"
+            )
+            return False
+        else:
+            logger.debug(
+                "%s free disk space on %s: %s GB",
+                cls.__name__,
+                cls.data_root.drive,
+                free,
+            )
+            return free > required
+
+    @classmethod
+    def is_connected(cls) -> bool:
+        if not utils.is_online(cls.host):
+            cls.exc = ConnectionError(
+                f"No response from {cls.host}: may be offline or unreachable"
+            )
+            return False
+        try:
+            _ = cls.get_proxy().uptime
+        except zro.ZroError as exc:
+            cls.exc = exc
+            logger.exception(
+                f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
+            )
+            return False
+        try:
+            _ = cls.get_state()
+        except zro.ZroError as exc:
+            cls.exc = exc
+            logger.exception(
+                f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
+            )
+            return False
+        return True
+
+
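The disk check above is just `min_rec_hr * gb_per_hr`, rounded to 0.1 GB. For example, a VideoMVR-like service at a hypothetical 14 GB/hr with a 3 hr minimum needs 42 GB free before `start()`:

    gb_per_hr, min_rec_hr = 14.0, 3.0  # hypothetical VideoMVR-like values
    required = round(min_rec_hr * gb_per_hr, 1)
    assert required == 42.0  # free space must exceed this before start()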
+class CamstimSyncShared(Proxy):
+    started_state: ClassVar[Sequence[str]]
+
+    @classmethod
+    def is_ready_to_start(cls) -> bool:
+        if cls.is_started():
+            return False
+        state = cls.get_state()
+        if isinstance(state, Mapping) and state.get("message", "") == "READY":
+            return True
+        if isinstance(state, Sequence) and "READY" in state:
+            return True
+        return False
+
+    @classmethod
+    def is_started(cls) -> bool:
+        return len(state := cls.get_state()) and all(
+            msg in state for msg in cls.started_state
+        )
+
+    @classmethod
+    def start(cls) -> None:
+        logger.info("%s | Starting recording", cls.__name__)
+        if cls.is_started():
+            logger.warning(
+                "%s already started - should be stopped manually", cls.__name__
+            )
+            return
+        # otherwise, Sync - for example - would stop current recording and start another
+        if not cls.is_ready_to_start():
+            logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
+            raise AssertionError(
+                f"{cls.__name__} not ready to start: {cls.get_state()}"
+            )
+        cls.latest_start = time.time()
+        cls.get_proxy().start()
+
+    @classmethod
+    def pretest(cls) -> None:
+        "Test all critical functions"
+        with np_logging.debug():
+            logger.debug("Starting %s pretest", cls.__name__)
+            cls.initialize()  # calls test()
+
+            with utils.stop_on_error(cls):
+                cls.start()
+                time.sleep(1)
+                cls.verify()
+                time.sleep(cls.pretest_duration_sec)
+            # stop() called by context manager at exit, regardless
+            cls.finalize()
+            cls.validate()
+        logger.info("%s pretest complete", cls.__name__)
+
+    @classmethod
+    def verify(cls) -> None:
+        "Assert latest data file is currently increasing in size, or raise AssertionError."
+        if not cls.is_started():
+            logger.warning(
+                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
+            )
+            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
+
+    @classmethod
+    def stop(cls) -> None:
+        logger.debug("Stopping %s", cls.__name__)
+        cls.get_proxy().stop()
+        logger.info("%s | Stopped recording", cls.__name__)
+
+    # --- End of possible Camstim/Sync shared methods ---
+
+    # --- Sync-specific methods ---
+
+
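The lifecycle that `pretest` exercises is also the expected call order in a real session. A condensed usage sketch of the same sequence, with the context-manager error handling omitted:

    Sync.initialize()  # launch via RSC, build proxy, run test()
    Sync.start()       # refuses if already recording
    Sync.verify()      # data file must be growing
    Sync.stop()
    Sync.finalize()    # wait for processing, collect data_files
    Sync.validate()    # check the resulting file contents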
+class Sync(CamstimSyncShared):
+    host = np_config.Rig().Sync
+    started_state = ("BUSY", "RECORDING")
+    raw_suffix: str = ".sync"
+    rsc_app_id: str = "sync_device"
+
+    @classmethod
+    def ensure_config(cls) -> None:
+        """Updates any missing parameters for class proxy.
+
+        Is called in `get_proxy()` so any time we need the proxy, we have a
+        correct config, without remembering to run `initialize()` or some such.
+        """
+        config = CONFIG.get(
+            __class__.__name__, {}
+        )  # class where this function is defined
+        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different
+
+        # for proxy (reqd):
+        if not hasattr(cls, "host"):
+            cls.host = config["host"]
+        if not hasattr(cls, "port"):
+            cls.port = int(config["port"])
+        if not hasattr(cls, "timeout"):
+            cls.timeout = float(config.get("timeout", 10.0))
+        if not hasattr(cls, "serialization"):
+            cls.serialization = config.get("serialization", "json")
+
+        # for pretest (reqd, not used if device doesn't record)
+        if not hasattr(cls, "pretest_duration_sec"):
+            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
+        if not hasattr(cls, "gb_per_hr"):
+            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
+        if not hasattr(cls, "min_rec_hr"):
+            cls.min_rec_hr = config.get("min_rec_hr", 3.0)
+
+        # for resulting data (optional):
+        if not cls.data_root or cls.host not in cls.data_root.parts:
+            relative_path = config.get("data", None)
+            if relative_path:
+                root = pathlib.Path(f"//{cls.host}/{relative_path}")
+                try:
+                    _ = root.exists()
+                except OSError as exc:
+                    cls.exc = exc
+                    logger.exception(
+                        "Error accessing %s data path: %s", cls.__name__, root
+                    )
+                    raise FileNotFoundError(
+                        f"{cls.__name__} data path is not accessible: {root}"
+                    ) from exc
+                else:
+                    cls.data_root = root
+        if hasattr(cls, "data_root"):
+            cls.data_root.mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.debug("Finalizing %s", cls.__name__)
+        if cls.is_started():
+            cls.stop()
+        while not cls.is_ready_to_start():
+            logger.debug("Waiting for %s to finish processing", cls.__name__)
+            time.sleep(1)  # TODO add backoff module
+        if not cls.data_files:
+            cls.data_files = []
+        cls.data_files.extend(new := cls.get_latest_data("*.h5"))
+        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
+
+    @classmethod
+    def shutdown(cls) -> None:
+        logger.debug("Shutting down %s", cls.__name__)
+        cls.stop()
+        try:
+            del cls.proxy
+        except Exception as exc:
+            logger.debug("Failed to delete %s proxy: %s", cls.__name__, exc)
+            cls.exc = exc
+
+    @classmethod
+    def get_config(cls) -> dict[str, Any | datetime.datetime]:
+        "Sync config, including `line_labels` and `frequency`"
+        if cls.serialization in ("json", "j"):
+            return eval(cls.get_proxy().config)
+        if cls.serialization in ("pickle", "pkl", "p"):
+            return cls.get_proxy().config
+
+    @classmethod
+    def validate(cls, data: Optional[pathlib.Path] = None) -> None:
+        "Check that data file is valid, or raise AssertionError."
+        logger.debug("Validating %s data", cls.__name__)
+        if not data and bool(files := cls.get_latest_data("*.h5")):
+            data = files[-1]
+            logger.debug(
+                "No data file provided: validating most-recent data in %s: %s",
+                cls.data_root,
+                data.name,
+            )
+        if cls.is_started():
+            logger.warning(
+                f"Attempted to validate current data file while recording"
+            )
+            return
+        elif not cls.is_ready_to_start():
+            cls.finalize()
+        try:
+            import h5py
+        except ImportError:
+            logger.warning("h5py not installed: cannot open Sync data")
+            cls.min_validation(data)
+        else:
+            cls.full_validation(data)
+
+    @classmethod
+    def verify(cls) -> None:
+        "Assert latest data file is currently increasing in size, or raise AssertionError."
+        super().verify()
+        if cls.data_root and not utils.is_file_growing(cls.get_latest_data()[-1]):
+            raise AssertionError(
+                f"{cls.__name__} latest data file is not increasing in size: {cls.get_latest_data()[-1]}"
+            )
+        logger.info("%s | Verified: file on disk is increasing in size", cls.__name__)
+
+    @classmethod
+    def full_validation(cls, data: pathlib.Path) -> None:
+        npc_sync.get_sync_data(data).validate()
+
+    @classmethod
+    def min_validation(cls, data: pathlib.Path) -> None:
+        if data.stat().st_size == 0:
+            raise AssertionError(f"Empty file: {data}")
+        if data.suffix != ".h5":
+            raise FileNotFoundError(
+                f"Expected .sync to be converted to .h5 immediately after recording stopped: {data}"
+            )
+        logger.debug("%s minimal validation passed for %s", cls.__name__, data.name)
+
+
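`validate` degrades gracefully: full validation via `npc_sync` when `h5py` is importable, otherwise the size and suffix checks in `min_validation`. A sketch of the same guard in isolation, not the package's API:

    import pathlib

    def validate_sync_file(data: pathlib.Path) -> None:
        try:
            import h5py  # noqa: F401  # only probed for availability here
        except ImportError:
            assert data.stat().st_size > 0, f"Empty file: {data}"
            assert data.suffix == ".h5", f"Expected converted .h5: {data}"
        else:
            import npc_sync
            npc_sync.get_sync_data(data).validate()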
+class Phidget(CamstimSyncShared):
+    host = np_config.Rig().Stim
+    rsc_app_id = "phidget_server"
+
+
+class Camstim(CamstimSyncShared):
+    host = np_config.Rig().Stim
+    started_state = ("BUSY", "Script in progress.")
+    rsc_app_id = "camstim_agent"
+    sync_path: Optional[pathlib.Path] = None
+
+    @classmethod
+    def launch(cls) -> None:
+        super().launch()
+        Phidget.launch()
+
+    @classmethod
+    def get_config(cls) -> dict[str, Any]:
+        return cls.get_proxy().config
+
+    @classmethod
+    def ensure_config(cls) -> None:
+        """Updates any missing parameters for class proxy.
+
+        Is called in `get_proxy()` so any time we need the proxy, we have a
+        correct config, without remembering to run `initialize()` or some such.
+        """
+        config = CONFIG.get(
+            __class__.__name__, {}
+        )  # class where this function is defined
+        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different
+
+        # for proxy (reqd):
+        if not hasattr(cls, "host"):
+            cls.host = config["host"]
+        if not hasattr(cls, "port"):
+            cls.port = int(config["port"])
+        if not hasattr(cls, "timeout"):
+            cls.timeout = float(config.get("timeout", 10.0))
+        if not hasattr(cls, "serialization"):
+            cls.serialization = config.get("serialization", "json")
+
+        # for pretest (reqd, not used if device doesn't record)
+        if not hasattr(cls, "pretest_duration_sec"):
+            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
+        if not hasattr(cls, "gb_per_hr"):
+            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
+        if not hasattr(cls, "min_rec_hr"):
+            cls.min_rec_hr = config.get("min_rec_hr", 3.0)
+
+        # for resulting data (optional):
+        if not cls.data_root:
+            relative_path = config.get("data", None)
+            if relative_path:
+                root = pathlib.Path(f"//{cls.host}/{relative_path}")
+                try:
+                    _ = root.exists()
+                except OSError as exc:
+                    cls.exc = exc
+                    logger.exception(
+                        "Error accessing %s data path: %s", cls.__name__, root
+                    )
+                    raise FileNotFoundError(
+                        f"{cls.__name__} data path is not accessible: {root}"
+                    ) from exc
+                else:
+                    cls.data_root = root
+        if hasattr(cls, "data_root") and cls.data_root is not None:
+            cls.data_root.mkdir(parents=True, exist_ok=True)
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.info("Finalizing %s", cls.__name__)
+        if cls.is_started():
+            cls.stop()
+        count = 0
+        while not cls.is_ready_to_start():
+            if count % 120 == 0:
+                logger.debug("Waiting for %s to finish processing", cls.__name__)
+            count += 1
+            time.sleep(1)  # TODO add backoff module
+        if not cls.data_files:
+            cls.data_files = []
+        cls.data_files.extend(new := list(itertools.chain(cls.get_latest_data("*pkl"), cls.get_latest_data("*hdf5"))))
+        logger.info("%s added new data: %s", cls.__name__, [_.name for _ in new])
+
+    @classmethod
+    def validate(cls) -> None:
+        if not cls.sync_path:
+            logger.warning("Cannot validate stim without sync file: assign `stim.sync_path`")
+            return
+        logger.info("Validating %s", cls.__name__)
+        for file in cls.data_files:
+            npc_stim.validate_stim(file, sync=cls.sync_path)
+        logger.info(f"Validated {len(cls.data_files)} stim files with sync")
+
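`validate` above is a no-op until `sync_path` is assigned, as the warning message itself suggests. Hypothetical wiring after a session, assuming `Sync.finalize()` has already populated `Sync.data_files`:

    Camstim.sync_path = Sync.data_files[-1]  # the session's sync file
    Camstim.validate()  # now checks each stim file against sync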
+class ScriptCamstim(Camstim):
+    script: ClassVar[str]
+    "path to script on Stim computer"
+    params: ClassVar[dict[str, Any]] = {}
+
+    @classmethod
+    def pretest(cls) -> None:
+        pretest_mouse = "599657"
+
+        cls.script = "C:/ProgramData/StimulusFiles/dev/bi_script_pretest_v2.py"
+
+        # get params from MTrain, as if we were running `Agent.start_session`
+        cls.params = np_session.mtrain.MTrain(pretest_mouse).stage["parameters"]
+        cls.params.update(dict(user_name="ben.hardcastle", mouse_id=pretest_mouse))
+
+        logger.info(
+            "%s | Pretest: running %s with MTrain stage params for mouse %s",
+            cls.__name__,
+            cls.script,
+            pretest_mouse,
+        )
+        cls.initialize()
+        cls.test()
+        cls.start()
+        while not cls.is_ready_to_start():
+            logger.debug("Waiting for %s to finish processing", cls.__name__)
+            time.sleep(10)
+        cls.finalize()
+        # cls.validate()
+        cls.initialize()
+
+    @classmethod
+    def start(cls):
+        cls.latest_start = time.time()
+        cls.get_proxy().start_script(cls.script, cls.params)
+
+
+class SessionCamstim(Camstim):
+    lims_user_id: ClassVar[str]
+    labtracks_mouse_id: ClassVar[int]
+    override_params: ClassVar[dict[str, Any] | None] = None
+
+    @classmethod
+    def start(cls):
+        cls.latest_start = time.time()
+        cls.get_proxy().start_session(
+            cls.labtracks_mouse_id, cls.lims_user_id, override_params=cls.override_params
+        )
+
+    @classmethod
+    def pretest(cls) -> None:
+        cls.labtracks_mouse_id = 598796
+        cls.lims_user_id = "ben.hardcastle"
+        logger.info(
+            "%s | Pretest with mouse %s, user %s",
+            cls.__name__,
+            cls.labtracks_mouse_id,
+            cls.lims_user_id,
+        )
+        super().pretest()
+
+
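`SessionCamstim.start` forwards straight to the agent's `start_session`, so callers only set the two IDs first. A usage sketch with placeholder values:

    SessionCamstim.labtracks_mouse_id = 366122  # placeholder mouse ID
    SessionCamstim.lims_user_id = "jane.doe"    # placeholder user
    SessionCamstim.start()  # proxies to the camstim agent's start_session()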
+class NoCamstim(Camstim):
+    "Run remote files (e.g. .bat) without sending directly to Camstim Agent"
+
+    remote_file: ClassVar[str | pathlib.Path]
+    extra_args: ClassVar[list[str]] = []
+    ssh: ClassVar[fabric.Connection]
+    user: ClassVar[str] = "svc_neuropix"
+    password: ClassVar[str]
+
+    @classmethod
+    def pretest(cls) -> None:
+        logger.warning("%s | Pretest not implemented", cls.__name__)
+
+    @classmethod
+    def get_ssh(cls) -> fabric.Connection:
+        with contextlib.suppress(AttributeError):
+            return cls.ssh
+        cls.initialize()
+        return cls.ssh
+
+    @classmethod
+    def initialize(cls) -> None:
+        if not hasattr(cls, "password"):
+            cls.password = input(f"{cls.__name__} | Enter password for {cls.host}: ")
+        cls.remote_file = utils.unc_to_local(pathlib.Path(cls.remote_file))
+        cls.ssh = fabric.Connection(
+            cls.host, cls.user, connect_kwargs=dict(password=cls.password)
+        )
+        super().initialize()
+        cls.test()
+
+    @classmethod
+    def test(cls) -> None:
+        super().test()
+        logger.debug(f"{cls.__name__} | Testing")
+        try:
+            result = cls.get_ssh().run("hostname", hide=True)
+        except Exception as exc:
+            raise TestError(
+                f"{cls.__name__} Error connecting to {cls.host} via ssh: {exc!r}. Is this password correct? {cls.password}"
+            )
+        else:
+            if result.exited != 0:
+                raise TestError(
+                    f"{cls.__name__} Error connecting to {cls.host} via ssh: {result}"
+                )
+            logger.debug(f"{cls.__name__} | Connected to {cls.host} via ssh")
+
+        try:
+            result = cls.get_ssh().run(f"type {cls.remote_file}", hide=True)
+        except Exception as exc:
+            extra = (
+                f" | '{exc.result.command}': {exc.result.stderr.strip()!r}"
+                if hasattr(exc, "result")
+                else ""
+            )
+            raise TestError(
+                f"{cls.__name__} | Error calling ssh-executed command{extra}"
+            )
+        else:
+            if result.exited != 0:
+                raise TestError(
+                    f"{cls.__name__} Error accessing {cls.remote_file} on {cls.host} - is filepath correct? {result}"
+                )
+            logger.debug(
+                f"{cls.__name__} | {cls.remote_file} is accessible via ssh on {cls.host}"
+            )
+
+    @classmethod
+    def start(cls):
+        if cls.is_started():
+            logger.warning(f"{cls.__name__} already started")
+            return
+        logger.debug(f"{cls.__name__} | Starting {cls.remote_file} on {cls.host}")
+        cls.latest_start = time.time()
+        # join args so the shell doesn't receive a Python list repr
+        cls.get_ssh().run(f"call {cls.remote_file} {' '.join(cls.extra_args)}")
+
+    @classmethod
+    def verify(cls):
+        logger.warning(f"{cls.__name__} | No verification implemented")
+
+
+class MouseDirector(Proxy):
+    """Communicate with the ZMQ remote object specified here:
+    http://aibspi.corp.alleninstitute.org/braintv/visual_behavior/mouse_director/-/blob/master/src/mousedirector.py
+
+    ::
+        MouseDirector.get_proxy().set_mouse_id(str(366122))
+        MouseDirector.get_proxy().set_user_id("ben.hardcastle")
+    """
+
+    user: ClassVar[str | np_session.User]
+    mouse: ClassVar[str | int | np_session.Mouse]
+
+    rsc_app_id = CONFIG['MouseDirector']['rsc_app_id']
+    host = np_config.Rig().Mon
+    gb_per_hr = 0
+    serialization = "json"
+    started_state: ClassVar[ProxyState] = ("READY", "")
+    not_connected_state: ClassVar[ProxyState] = ("", "NOT_CONNECTED")
+
+    @classmethod
+    def pretest(cls):
+        with np_logging.debug():
+            logger.debug(f"{cls.__name__} | Pretest")
+            cls.user = "ben.hardcastle"
+            cls.mouse = 366122
+            cls.initialize()
+            cls.test()
+            cls.get_proxy().retract_lick_spout()
+            time.sleep(3)
+            cls.get_proxy().extend_lick_spout()
+            time.sleep(3)
+            cls.get_proxy().retract_lick_spout()
+            time.sleep(3)
+        logger.info(f"{cls.__name__} | Pretest passed")
+
+    @classmethod
+    def initialize(cls):
+        logger.debug(f"{cls.__name__} | Initializing")
+        super().initialize()
+        cls.get_proxy().set_mouse_id(str(cls.mouse))
+        time.sleep(1)
+        cls.get_proxy().set_user_id(str(cls.user))
+        time.sleep(1)
+        logger.debug(f"{cls.__name__} | Initialized with mouse {cls.mouse}, user {cls.user}")
+
+    @classmethod
+    def get_state(cls) -> ProxyState:
+        result: str = cls.get_proxy().rig_dict
+        if str(np_config.Rig()) in result:
+            return cls.started_state
+        return cls.not_connected_state
+
+class Cam3d(CamstimSyncShared):
+
+    label: str
+
+    host = np_config.Rig().Mon
+    serialization = "json"
+    started_state = ["READY", "CAMERAS_OPEN,CAMERAS_ACQUIRING"]
+    rsc_app_id = CONFIG['Cam3d']['rsc_app_id']
+    data_files: ClassVar[list[pathlib.Path]] = []
+
+    @classmethod
+    def is_started(cls) -> bool:
+        return cls.get_state() == cls.started_state
+
+    @classmethod
+    def is_ready_to_start(cls) -> bool:
+        if cls.is_started():
+            return False
+        time.sleep(1)
+        if (
+            cls.get_state() == cls.started_state
+            or 'READY' not in cls.get_state()
+        ):
+            return False
+        return True
+
+    @classmethod
+    def initialize(cls) -> None:
+        logger.debug(f"{cls.__name__} | Initializing")
+        super().initialize()
+        if not cls.is_ready_to_start():
+            cls.reenable_cameras()
+
+        time.sleep(1)
+
+    @classmethod
+    def reenable_cameras(cls) -> None:
+        cls.get_proxy().release_cameras()
+        time.sleep(.2)
+        cls.get_proxy().enable_cameras()
+        time.sleep(.2)
+        cls.get_proxy().stop_capture()
+        time.sleep(.2)
+        cls.get_proxy().start_capture()
+        time.sleep(.2)
+
+    @classmethod
+    def generate_image_paths(cls) -> tuple[pathlib.Path, pathlib.Path]:
+        if not hasattr(cls, 'label') or not cls.label:
+            logger.warning(f"{cls.__name__} | `cls.label` not specified")
+        def path(side: str) -> pathlib.Path:
+            return cls.data_root / f"{datetime.datetime.now():%Y%m%d_%H%M%S}_{getattr(cls, 'label', 'image')}_{side}.png"
+        return path('left'), path('right')
+
+    @classmethod
+    def start(cls) -> None:
+        logger.debug(f"{cls.__name__} | Starting")
+        cls.latest_start = time.time()
+        left, right = cls.generate_image_paths()
+        cls.get_proxy().save_left_image(str(left))
+        cls.get_proxy().save_right_image(str(right))
+        time.sleep(.5)
+        for path, side in zip((left, right), ('Left', 'Right')):
+            if path.exists():
+                logger.debug(f"{cls.__name__} | {side} image saved to {path}")
+            else:
+                logger.debug(f"{cls.__name__} | {side} image capture request sent, but image not saved")
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.debug(f"{cls.__name__} | Finalizing")
+        counter = 0
+        while (
+            not (latest := cls.get_latest_data('*'))
+            or cls.is_started()
+        ):
+            time.sleep(1)
+            counter += 1
+            if counter == 3:
+                cls.reenable_cameras()
+                break
+        cls.data_files.extend(latest)
+        logger.debug(f"{cls.__name__} | Images captured: {latest}")
+
+    @classmethod
+    def validate(cls):
+        if not (latest := cls.get_latest_data('*')) or len(latest) != 2:
+            raise AssertionError(f"{cls.__name__} | Expected 2 images, got {len(latest)}: {latest}")
+
+    @classmethod
+    def stop(cls):
+        logger.debug("%s | `stop()` not implemented", cls.__name__)
+
+    @classmethod
+    def pretest(cls):
+        with np_logging.debug():
+            logger.debug(f"{cls.__name__} | Pretest")
+            cls.label = 'pretest'
+            cls.initialize()
+            cls.test()
+            cls.start()
+            cls.finalize()
+            cls.validate()
+        logger.info(f"{cls.__name__} | Pretest passed")
+
+class MVR(CamstimSyncShared):
+
+    # req proxy config - hardcode or overload ensure_config()
+    host: ClassVar[str] = np_config.Rig().Mon
+    port: ClassVar[int] = CONFIG['MVR']['port']
+
+    re_aux: re.Pattern = re.compile("aux|USB!|none", re.IGNORECASE)
+
+    @classmethod
+    def is_connected(cls) -> bool:
+        if not utils.is_online(cls.host):
+            cls.exc = ConnectionError(
+                f"No response from {cls.host}: may be offline or unreachable"
+            )
+            return False
+        if not cls.get_proxy()._mvr_connected:
+            cls.exc = ConnectionError(f"MVR likely not running on {cls.host}")
+            return False
+        try:
+            _ = cls.get_camera_status()
+        except ConnectionError as exc:
+            cls.exc = exc
+            return False
+        return True
+
+    @classmethod
+    def initialize(cls) -> None:
+        with contextlib.suppress(AttributeError):
+            del cls.proxy
+        cls.proxy = cls.get_proxy()
+        cls.test()
+        cls.configure_cameras()
+        _ = cls.get_proxy().read()  # empty buffer
+        if isinstance(cls, Startable) and not cls.is_ready_to_start():
+            if cls.is_started() and isinstance(cls, Stoppable):
+                cls.stop()
+            if isinstance(cls, Finalizable):
+                cls.finalize()
+            if not cls.is_ready_to_start():
+                logger.warning(
+                    "%s not ready to start: %s", cls.__name__, cls.get_state()
+                )
+                return
+        if cls.data_root:
+            cls.data_files = []
+        cls.initialization = time.time()
+        logger.info("%s initialized: ready for use", cls.__name__)
+
+    @classmethod
+    def shutdown(cls) -> None:
+        cls.get_proxy()._mvr_sock.close()
+        del cls.proxy
+
+    @classmethod
+    def get_proxy(cls) -> mvr_connector.MVRConnector:
+        with contextlib.suppress(AttributeError):
+            return cls.proxy
+        cls.ensure_config()
+        logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
+        cls.proxy = mvr_connector.MVRConnector({"host": cls.host, "port": cls.port})
+        cls.proxy._mvr_sock.settimeout(cls.timeout)
+        return cls.get_proxy()
+
+    @classmethod
+    def get_cameras(cls) -> list[dict[str, str]]:
+        if not hasattr(cls, "all_cameras"):
+            cls.get_proxy().read()
+            cls.all_cameras = cls.get_proxy().request_camera_ids()[0]["value"]
+        return cls.all_cameras
+
+    @classmethod
+    def get_camera_status(cls) -> list[dict[str, str]]:
+        _ = cls.get_proxy().read()  # empty buffer
+        _ = cls.get_proxy()._send({"mvr_request": "get_camera_status"})
+        for msg in reversed(cls.get_proxy().read()):
+            if msg.get("mvr_response", "") == "get_camera_status" and (
+                cams := msg.get("value", [])
+            ):
+                break
+        else:
+            logger.error("Could not get camera status from %s", cls.host)
+            raise ConnectionError(f"Could not get camera status from {cls.host}")
+        return [
+            _
+            for _ in cams
+            if any(_["camera_id"] == __["id"] for __ in cls.get_cameras())
+        ]
+
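`get_camera_status` above uses Python's `for ... else`: the `else` branch runs only if the loop never hit `break`, turning a missing response in the read buffer into a `ConnectionError`. The idiom in miniature, with made-up messages:

    messages = [{"mvr_response": "other"}, {"mvr_response": "get_camera_status", "value": [1]}]
    for msg in reversed(messages):
        if msg.get("mvr_response") == "get_camera_status" and (cams := msg.get("value", [])):
            break
    else:  # no break: nothing in the buffer answered the request
        raise ConnectionError("no camera status in buffer")
    print(cams)  # [1]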
+    @classmethod
+    def get_state(cls) -> ProxyState:
+        if not cls.is_connected():
+            return "", "MVR_CLOSED"
+        status = cls.get_camera_status()
+        # cam status could change between calls, so only get once
+        if any(not _["is_open"] for _ in status):
+            return "", "CAMERA_CLOSED"
+        if any(not _["is_streaming"] for _ in status):
+            return "", "CAMERA_NOT_STREAMING"
+        if cls.get_cameras_recording(status):
+            return "BUSY", "RECORDING"
+        return "READY", ""
+
+    @classmethod
+    def get_cameras_recording(cls, status=None) -> list[dict[str, str]]:
+        return [_ for _ in status or cls.get_camera_status() if _["is_recording"]]
+
+    @classmethod
+    def is_ready_to_start(cls) -> bool:
+        if cls.is_started():
+            return False
+        return all(
+            _["is_open"] and _["is_streaming"] and not _["is_recording"]
+            for _ in cls.get_camera_status()
+        )
+
+    @classmethod
+    def configure_cameras(cls) -> None:
+        "Set MVR to record video from subset of all cameras, via `get_cameras` (implemented by subclass)"
+        cam_ids = [_["id"] for _ in cls.get_cameras()]
+        cls.get_proxy().define_hosts(cam_ids)
+        cls.get_proxy().start_display()
+
+
class ImageMVR(MVR):
|
|
994
|
+
|
|
995
|
+
gb_per_hr: ClassVar[int | float] = CONFIG['ImageMVR']["gb_per_hr"]
|
|
996
|
+
min_rec_hr: ClassVar[int | float] = CONFIG['ImageMVR']["min_rec_hr"]
|
|
997
|
+
|
|
998
|
+
label: ClassVar[str]
|
|
999
|
+
"Rename file after capture to include label"
|
|
1000
|
+
|
|
1001
|
+
# TODO ready state is if Aux cam is_open
|
|
1002
|
+
@classmethod
|
|
1003
|
+
def get_cameras(cls) -> list[dict[str, str]]:
|
|
1004
|
+
"Aux cam only"
|
|
1005
|
+
cams = super().get_cameras()
|
|
1006
|
+
return [_ for _ in cams if cls.re_aux.search(_["label"])]
|
|
1007
|
+
|
|
1008
|
+
@classmethod
|
|
1009
|
+
def start(cls):
|
|
1010
|
+
if not cls.is_ready_to_start():
|
|
1011
|
+
# TODO display state, wait on user input to continue
|
|
1012
|
+
logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
|
|
1013
|
+
raise AssertionError(
|
|
1014
|
+
f"{cls.__name__} not ready to start: {cls.get_state()}"
|
|
1015
|
+
)
|
|
1016
|
+
cls.latest_start = time.time()
|
|
1017
|
+
cls.get_proxy().take_snapshot()
|
|
1018
|
+
|
|
1019
|
+
@classmethod
|
|
1020
|
+
def stop(cls):
|
|
1021
|
+
"Overload parent method to do nothing"
|
|
1022
|
+
pass
|
|
1023
|
+
|
|
1024
|
+
@classmethod
|
|
1025
|
+
def is_started(cls) -> bool:
|
|
1026
|
+
for msg in cls.get_proxy().read():
|
|
1027
|
+
if msg.get("mvr_broadcast", "") == "snapshot_converted":
|
|
1028
|
+
return True
|
|
1029
|
+
if msg.get("mvr_broadcast", "") == "snapshot_failed":
|
|
1030
|
+
return False
|
|
1031
|
+
return False
|
|
1032
|
+
|
|
1033
|
+
+    @classmethod
+    def verify(cls):
+        "Override parent method to do nothing"
+        pass
+
+    # TODO
+    @classmethod
+    def validate(cls) -> None:
+        logger.warning("%s.validate() not implemented", cls.__name__)
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.debug("Finalizing %s", cls.__name__)
+        t0 = time.time()
+        timedout = lambda: time.time() > t0 + 10
+        while (
+            cls.is_started()
+            or not cls.is_ready_to_start()
+            or not cls.get_latest_data("*")
+            or cls.get_latest_data("*.bmp")  # raw snapshots not yet converted
+        ) and not timedout():
+            logger.debug("Waiting for %s to finish processing", cls.__name__)
+            time.sleep(1)  # TODO add backoff module
+        if timedout():
+            logger.warning(
+                "Timed out waiting for %s to finish processing", cls.__name__
+            )
+            return
+        if not hasattr(cls, "data_files") or not cls.data_files:
+            cls.data_files = []
+        new = cls.get_latest_data("*")
+        if hasattr(cls, "label") and cls.label:
+            new = [_.rename(_.with_stem(f"{_.stem}_{cls.label}")) for _ in new]
+        cls.data_files.extend(new)
+        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
+
+
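`finalize()` relies on `pathlib.Path.with_stem` (Python 3.9+) to append the class's `label` to each captured file. A worked example with a made-up filename:

    import pathlib

    p = pathlib.Path("C:/mvr/snapshot_20240101.png")
    labelled = p.with_stem(f"{p.stem}_pretest")
    assert labelled.name == "snapshot_20240101_pretest.png"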
+class VideoMVR(MVR):
+
+    pretest_duration_sec: ClassVar[int | float] = CONFIG['VideoMVR']["pretest_duration_sec"]
+    gb_per_hr: ClassVar[int | float] = CONFIG['VideoMVR']["gb_per_hr"]
+    min_rec_hr: ClassVar[int | float] = CONFIG['VideoMVR']["min_rec_hr"]
+
+    raw_suffix: ClassVar[str] = ".mp4"
+
+    started_state = ("BUSY", "RECORDING")
+    sync_path: Optional[pathlib.Path] = None
+
+    @classmethod
+    def get_cameras(cls) -> list[dict[str, str]]:
+        "All available cams except Aux"
+        cams = super().get_cameras()
+        # check for camera labels with known Aux cam names
+        return [_ for _ in cams if cls.re_aux.search(_["label"]) is None]
+
+    @classmethod
+    def start(cls) -> None:
+        logger.info("%s | Starting recording", cls.__name__)
+        cls.latest_start = time.time()
+        cls.get_proxy().start_record(record_time=24 * 60 * 60)  # sec
+
+    @classmethod
+    def verify(cls) -> None:
+        "Assert data exists since latest start, or raise AssertionError."
+        # files grow infrequently while MVR's recording - checking their size
+        # is unreliable
+        if not cls.is_started():
+            logger.warning(
+                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
+            )
+            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
+        if datetime.datetime.fromtimestamp(
+            cls.latest_start
+        ) > datetime.datetime.now() - datetime.timedelta(
+            seconds=cls.pretest_duration_sec
+        ):
+            # recording started within the last pretest_duration_sec:
+            # give MVR time to write files to disk before checking
+            time.sleep(cls.pretest_duration_sec)
+        if not (files := cls.get_latest_data()) or len(files) < len(
+            cls.get_cameras_recording()
+        ):
+            raise AssertionError(
+                f"{cls.__name__} files do not match the number of cameras: {files}"
+            )
+        logger.info(
+            "%s | Verified: %s cameras recording to disk", cls.__name__, len(files)
+        )
+
+    @classmethod
+    def stop(cls) -> None:
+        cls.get_proxy().stop_record()
+        logger.info("%s | Stopped recording", cls.__name__)
+
+    @classmethod
+    def is_started(cls) -> bool:
+        if len(state := cls.get_state()) and all(
+            msg in state for msg in cls.started_state
+        ):
+            return True
+        return False
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.debug("Finalizing %s", cls.__name__)
+        if cls.is_started():
+            cls.stop()
+        t0 = time.time()
+        timedout = lambda: time.time() > t0 + 30
+        while not cls.is_ready_to_start() and not timedout():
+            logger.debug("Waiting for %s to finish processing", cls.__name__)
+            time.sleep(1)  # TODO add backoff module
+        if timedout():
+            logger.warning(
+                "Timed out waiting for %s to finish processing", cls.__name__
+            )
+            return
+        if not hasattr(cls, "data_files"):
+            cls.data_files = []
+        cls.data_files.extend(
+            new := (cls.get_latest_data("*.mp4") + cls.get_latest_data("*.json"))
+        )
+        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
+
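`is_started()` treats the `(state, detail)` tuple as a container and requires every element of `started_state` to appear in it. Illustrative values:

    started_state = ("BUSY", "RECORDING")
    state = ("BUSY", "RECORDING")  # get_state() while recording
    assert all(msg in state for msg in started_state)
    state = ("READY", "")          # idle
    assert not all(msg in state for msg in started_state)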
+    @classmethod
+    def validate(cls) -> None:
+        tempdir = pathlib.Path(tempfile.gettempdir())
+        tempfiles: list[pathlib.Path] = []
+        # currently can't pass individual files to MVRDataset - just a dir
+        for file in itertools.chain(cls.get_latest_data("*.mp4"), cls.get_latest_data("*.json")):
+            np_tools.copy(file, t := tempdir / file.name)
+            tempfiles.append(t)
+        npc_mvr.MVRDataset(
+            tempdir,
+            getattr(cls, "sync_path", None),
+        )
+        logger.info(f"Validated {len(tempfiles)} video/info files {'with' if getattr(cls, 'sync_path', None) else 'without'} sync")
+        for file in tempfiles:
+            file.unlink(missing_ok=True)
+
+
+class JsonRecorder:
+    "Just needs a `start` method that calls `write()`."
+
+    log_name: ClassVar[str]
+    log_root: ClassVar[pathlib.Path]
+
+    # abc.abstractclassmethod is deprecated: stack classmethod + abstractmethod
+    @classmethod
+    @abc.abstractmethod
+    def start(cls) -> None:
+        pass
+
+    @classmethod
+    def pretest(cls) -> None:
+        with np_logging.debug():
+            cls.initialize()
+            cls.start()
+            cls.validate()
+        logger.info("%s | Pretest passed", cls.__name__)
+
+    @classmethod
+    def ensure_config(cls) -> None:
+        config = CONFIG.get(
+            __class__.__name__, {}
+        )  # class where this function is defined
+        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different
+
+        if not hasattr(cls, "log_name"):
+            cls.log_name = config.get("log_name", "{}_.json")
+            cls.log_name = cls.log_name.format(
+                datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
+            )
+
+        if not hasattr(cls, "log_root"):
+            cls.log_root = config.get("log_root", ".")
+            cls.log_root = pathlib.Path(cls.log_root).resolve()
+
+    @classmethod
+    def initialize(cls) -> None:
+        logger.debug("%s initializing", __class__.__name__)
+        cls.ensure_config()
+        cls.initialization = time.time()
+        log = (cls.log_root / cls.log_name).with_suffix(".json")
+        log.parent.mkdir(parents=True, exist_ok=True)
+        log.touch(exist_ok=True)
+        if log.read_text().strip() == "":
+            log.write_text("{}")
+        cls.all_files = [log]
+        cls.test()
+
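As the class docstring says, a concrete recorder only needs a `start()` that calls `write()`. A minimal hypothetical subclass (the name `NoteRecorder` and its payload are invented for illustration; `log_name`/`log_root` fall back to CONFIG via `ensure_config()`):

    import datetime

    class NoteRecorder(JsonRecorder):
        @classmethod
        def start(cls) -> None:
            # record a free-text note keyed by the current timestamp
            cls.write({str(datetime.datetime.now()): "experiment started"})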
+    @classmethod
+    def test(cls) -> None:
+        logger.debug("%s testing", __class__.__name__)
+        try:
+            _ = cls.get_current_log().read_bytes()
+        except OSError as exc:
+            raise TestError(
+                f"{__class__.__name__} failed to open {cls.get_current_log()}"
+            ) from exc
+
+    @classmethod
+    def get_current_log(cls) -> pathlib.Path:
+        if not hasattr(cls, "initialization"):
+            cls.initialize()
+        return cls.all_files[-1]
+
+    @classmethod
+    def read(cls) -> dict[str, str | float]:
+        try:
+            data = json.loads(cls.get_current_log().read_bytes())
+        except json.JSONDecodeError as exc:
+            if cls.get_current_log().stat().st_size:
+                raise
+            logger.debug("%s | Error encountered reading file %s: %r", cls.__name__, cls.get_current_log(), exc)
+            data = {}  # file was empty
+        else:
+            logger.debug("%s | Read from %s", cls.__name__, cls.get_current_log())
+        return data
+
+    @classmethod
+    def write(cls, value: dict) -> None:
+        try:
+            data = cls.read()
+        except json.JSONDecodeError:
+            # current log is corrupt: leave it in place and switch to a new file
+            data = {}
+            file = cls.get_current_log().with_suffix(".new.json")
+            file.touch()
+            cls.all_files.append(file)
+        else:
+            file = cls.get_current_log()
+        np_config.merge(data, value)
+        file.write_text(json.dumps(data, indent=4, sort_keys=False, default=str))
+        logger.debug("%s wrote to %s", cls.__name__, file)
+
+    @classmethod
+    def validate(cls) -> None:
+        if not (log := cls.read()):
+            cls.exc = TestError(
+                f"{cls.__name__} failed to validate because log is empty: {cls.get_current_log()}"
+            )
+            logger.error(
+                "%s failed to validate: log is empty %s",
+                cls.__name__,
+                cls.get_current_log(),
+                exc_info=cls.exc,
+            )
+        logger.debug("%s validated", __class__.__name__)
+
+
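`write()` folds the new entry into the existing log via `np_config.merge`. Assuming that helper performs a recursive dict merge, as its usage here implies, the effect is equivalent to this plain-dict sketch (not np_config's actual implementation):

    def merge(data: dict, value: dict) -> dict:
        # recursively fold `value` into `data`, preserving sibling keys
        for k, v in value.items():
            if isinstance(v, dict) and isinstance(data.get(k), dict):
                merge(data[k], v)
            else:
                data[k] = v
        return data

    log = {"2024-01-01": {"noteA": 1}}
    merge(log, {"2024-01-01": {"noteB": 2}})
    assert log == {"2024-01-01": {"noteA": 1, "noteB": 2}}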
+class YamlRecorder(JsonRecorder):
+    @classmethod
+    def test(cls) -> None:
+        logger.debug("%s testing", __class__.__name__)
+        super().test()
+        try:
+            import yaml
+        except ImportError as exc:
+            raise TestError(f"{__class__.__name__} failed to import yaml") from exc
+
+    @classmethod
+    def finalize(cls) -> None:
+        logger.debug("Finalizing %s", __class__.__name__)
+        log = json.loads(cls.get_current_log().read_bytes())
+        with contextlib.suppress(
+            ImportError, AttributeError, OSError
+        ):  # if this fails we still have the json file
+            import yaml
+            with cls.get_current_log().with_suffix(".yaml").open("w") as f:
+                yaml.dump(log, f)
+
+
+class NewScaleCoordinateRecorder(JsonRecorder):
+    "Gets current manipulator coordinates and stores them in a file with a timestamp."
+
+    host: ClassVar[str] = np_config.Rig().Mon
+    data_root: ClassVar[pathlib.Path] = CONFIG['NewScaleCoordinateRecorder']['data']
+    data_name: ClassVar[str] = CONFIG['NewScaleCoordinateRecorder']['data_name']
+    data_fieldnames: ClassVar[Sequence[str]] = CONFIG['NewScaleCoordinateRecorder']['data_fieldnames']
+    data_files: ClassVar[list[pathlib.Path]] = []
+    "Files to be copied after exp"
+
+    max_z_travel: ClassVar[int] = CONFIG['NewScaleCoordinateRecorder']['max_z_travel']
+    num_probes: ClassVar[int] = 6
+    log_name: ClassVar[str] = "newscale_coords_{}.json"
+    log_root: ClassVar[pathlib.Path] = pathlib.Path(tempfile.gettempdir()).resolve()
+    label: ClassVar[str] = ""
+    "A label to tag each entry with"
+    latest_start: ClassVar[int] = 0
+    "`time.time()` when the service was last started via `start()`."
+    log_time_fmt: str = CONFIG['NewScaleCoordinateRecorder']['log_time_fmt']
+
+    @classmethod
+    def pretest(cls) -> None:
+        cls.label = 'pretest'
+        super().pretest()
+
+    @classmethod
+    def get_current_data(cls) -> pathlib.Path:
+        cls.ensure_config()
+        return cls.data_root / cls.data_name
+
+    @classmethod
+    def last_logged_coords_csv(cls) -> dict[str, dict]:
+        "Get the most recent coordinates from the log file using the csv parser in the stdlib."
+        with cls.get_current_data().open("r") as _:
+            reader = csv.DictReader(_, fieldnames=cls.data_fieldnames)
+            rows = list(reader)
+        last_moved_label = cls.data_fieldnames[0]
+        coords = {}
+        for row in reversed(rows):  # search for the most recent coordinates
+            if len(coords.keys()) == cls.num_probes:
+                break  # we have an entry for each probe
+            if (m := row.pop(cls.data_fieldnames[1]).strip()) not in coords:
+                coords[m] = {}
+                for k, v in row.items():
+                    if "virtual" in k:
+                        continue
+                    if k == last_moved_label:
+                        v = datetime.datetime.strptime(v, cls.log_time_fmt)
+                    else:
+                        v = v.strip()
+                        with contextlib.suppress(ValueError):
+                            v = float(v)
+                    coords[m].update({k: v})
+        return coords
+
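The CSV layout is dictated by `data_fieldnames` from config: the first field is the last-moved timestamp and the second the manipulator serial number, with coordinate columns after. A hypothetical row parsed the same way (column names and values invented):

    import csv, io

    fieldnames = ["last_moved", "serial_number", "x", "y", "z"]
    sample = "2024/01/01 12:00:00, SN12345, 1000.0, 2000.0, 3000.0\n"
    row = next(csv.DictReader(io.StringIO(sample), fieldnames=fieldnames))
    assert row["serial_number"].strip() == "SN12345"
    assert float(row["z"]) == 3000.0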
+    @classmethod
+    def last_logged_coords_pd(cls) -> dict[str, dict]:
+        "Get the most recent coordinates from the log file using pandas."
+        coords = {}
+        manipulator_label = cls.data_fieldnames[1]
+        last_moved_label = cls.data_fieldnames[0]
+        df = pd.read_csv(cls.get_current_data(), names=cls.data_fieldnames, parse_dates=[last_moved_label])
+        # group by manipulator_label and get the maximum value in last_moved_label for each group
+        # (i.e. the most recent entry for each manipulator)
+        last_moved = df.loc[
+            df.groupby(manipulator_label)[last_moved_label].idxmax()
+        ].set_index(manipulator_label).sort_values(last_moved_label, ascending=False)
+        for serial_number, row in last_moved.iloc[:cls.num_probes].iterrows():
+            new = {key: row[key] for key in cls.data_fieldnames if (key != manipulator_label and 'virtual' not in key)}
+            new[last_moved_label] = row[last_moved_label].to_pydatetime()
+            coords[str(serial_number).strip()] = new
+        return coords
+
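The groupby/idxmax step picks, for each serial number, the row with the newest timestamp. Self-contained illustration with made-up data:

    import pandas as pd

    df = pd.DataFrame({
        "serial_number": ["SN1", "SN2", "SN1"],
        "last_moved": pd.to_datetime(["2024-01-01", "2024-01-02", "2024-01-03"]),
        "z": [100.0, 200.0, 150.0],
    })
    latest = df.loc[df.groupby("serial_number")["last_moved"].idxmax()]
    # SN1 keeps only its 2024-01-03 row (z=150.0); its earlier row is dropped
    assert set(latest["z"]) == {150.0, 200.0}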
+    @classmethod
+    def convert_serial_numbers_to_probe_labels(cls, coords: dict[str, dict]) -> None:
+        for k, v in CONFIG[cls.__name__].get("probe_to_serial_number", {}).items():
+            if v in coords:
+                coords[k] = coords.pop(v)
+                coords[k]['serial_number'] = v
+
+    @classmethod
+    def get_coordinates(cls) -> dict[str, dict | str]:
+        try:
+            import pandas as pd
+        except ImportError:
+            coords = cls.last_logged_coords_csv()
+        else:
+            coords = cls.last_logged_coords_pd()
+
+        def adjust_z_travel(coords):
+            # report z relative to max_z_travel rather than the raw logged value
+            for v in coords.values():
+                if 'z' in v:
+                    v['z'] = cls.max_z_travel - v['z']
+
+        adjust_z_travel(coords)
+        cls.convert_serial_numbers_to_probe_labels(coords)
+        coords["label"] = cls.label
+        logger.debug("%s | Retrieved coordinates: %s", cls.__name__, coords)
+        return coords
+
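The z adjustment reports depth relative to the manipulator's full travel rather than the raw logged value. Worked with invented numbers (units depend on rig config):

    max_z_travel = 15000   # from CONFIG; value here is made up
    raw_z = 6000           # as read from the NewScale log
    assert max_z_travel - raw_z == 9000   # value stored in coords['z']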
+    @classmethod
+    def write_to_platform_json(cls):
+        coords = cls.get_coordinates()
+        for k, v in coords.items():
+            if not isinstance(v, Mapping):
+                continue
+            if last_moved := v.get('last_moved'):
+                del coords[k]['last_moved']
+                del coords[k]['serial_number']
+                continue
+            # if last_moved is kept, then normalize it depending on csv/pd method:
+            match last_moved:
+                case str():
+                    timestamp = datetime.datetime.strptime(last_moved, cls.log_time_fmt)
+                case datetime.datetime():
+                    timestamp = last_moved
+                case _:
+                    continue  # nothing to normalize
+            coords[k]['last_moved'] = np_config.normalize_time(timestamp)
+
+        # rearrange so `label` is the top-level key, or use capture-timestamp if no label
+        platform_json = np_session.PlatformJson(cls.get_current_log())
+        platform_json_entry = copy.deepcopy(platform_json.manipulator_coordinates)
+        coords = {str(coords.pop('label', np_config.normalize_time(cls.latest_start))): coords}
+        logger.debug("%s | Adding to platform json: %s", cls.__name__, coords)
+        platform_json.manipulator_coordinates = np_config.merge(platform_json_entry, coords)
+        if (current_csv := cls.get_current_data()) not in cls.data_files:
+            cls.data_files.append(current_csv)
+
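The restructuring at the end nests everything under the entry's label, or a normalized capture timestamp when no label is set, so repeated captures merge side by side in the platform json. Shape only, with invented values:

    coords = {"probeA": {"x": 1.0, "y": 2.0, "z": 3.0}, "label": "pretest"}
    entry = {str(coords.pop("label")): coords}
    assert entry == {"pretest": {"probeA": {"x": 1.0, "y": 2.0, "z": 3.0}}}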
+    @classmethod
+    def start(cls):
+        cls.latest_start = time.time()
+        if 'platformD1' in cls.log_name:
+            cls.write_to_platform_json()
+        else:
+            cls.write({str(datetime.datetime.now()): cls.get_coordinates()})
+
+    @classmethod
+    def test(cls) -> None:
+        super().test()
+        logger.debug("%s | Testing", __class__.__name__)
+        try:
+            cls.get_current_data().open("r").close()
+        except OSError as exc:
+            raise TestError(
+                f"{cls.__name__} failed to open {cls.get_current_data()}"
+            ) from exc
+        try:
+            _ = cls.get_coordinates()
+        except Exception as exc:
+            raise TestError(f"{cls.__name__} failed to get coordinates") from exc
+        else:
+            logger.info("%s | Test passed", cls.__name__)
+
+    @classmethod
+    def ensure_config(cls) -> None:
+        super().ensure_config()
+
+        if CONFIG.get("services", {}):
+            config = CONFIG["services"].get(__class__.__name__, {})
+            config.update(**CONFIG["services"].get(cls.__name__, {}))
+        else:
+            config = CONFIG.get(
+                __class__.__name__, {}
+            )  # class where this function is defined
+            config.update(
+                **CONFIG.get(cls.__name__, {})
+            )  # the calling class, if different
+
+        if not hasattr(cls, "host"):
+            cls.host = config["host"]
+
+        # for resulting data
+        if (
+            not hasattr(cls, "data_root")
+            or cls.host not in pathlib.Path(cls.data_root).parts
+        ):
+            relative_path = config["data"]
+            if relative_path:
+                root = pathlib.Path(f"//{cls.host}/{relative_path}")
+                try:
+                    _ = root.exists()
+                except OSError as exc:
+                    cls.exc = exc
+                    logger.exception(
+                        "Error accessing %s data path: %s", cls.__name__, root
+                    )
+                    raise FileNotFoundError(
+                        f"{cls.__name__} data path is not accessible: {root}"
+                    ) from exc
+                else:
+                    cls.data_root = root
+
+        if not hasattr(cls, "data_name"):
+            cls.data_name = config["data_name"]
+        if not hasattr(cls, "data_fieldnames"):
+            cls.data_fieldnames = config["data_fieldnames"]
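The config lookup layers the calling subclass's entries over those of the defining class, so rig-specific values win. Plain-dict illustration of the `config.update(**CONFIG.get(cls.__name__, {}))` pattern (keys invented):

    base = {"host": "base-host", "data_name": "log.csv"}   # defining class
    override = {"host": "rig-host"}                        # calling subclass
    config = dict(base)
    config.update(**override)
    assert config == {"host": "rig-host", "data_name": "log.csv"}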