np-services 0.1.59__py3-none-any.whl → 0.1.73__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (207) hide show
  1. np_services/__init__.py +8 -8
  2. np_services/open_ephys.py +377 -378
  3. np_services/protocols.py +185 -185
  4. np_services/proxies.py +1489 -1488
  5. np_services/resources/mvr_connector.py +260 -260
  6. np_services/resources/zro.py +325 -325
  7. np_services/scripts/pretest.py +170 -73
  8. np_services/stim_computer_theme_changer.py +41 -41
  9. np_services/utils.py +167 -167
  10. {np_services-0.1.59.dist-info → np_services-0.1.73.dist-info}/METADATA +7 -8
  11. np_services-0.1.73.dist-info/RECORD +15 -0
  12. {np_services-0.1.59.dist-info → np_services-0.1.73.dist-info}/WHEEL +2 -1
  13. {np_services-0.1.59.dist-info → np_services-0.1.73.dist-info}/entry_points.txt +1 -1
  14. np_services-0.1.73.dist-info/top_level.txt +1 -0
  15. np_services/.mypy_cache/.gitignore +0 -2
  16. np_services/.mypy_cache/3.9/@plugins_snapshot.json +0 -1
  17. np_services/.mypy_cache/3.9/__future__.data.json +0 -1
  18. np_services/.mypy_cache/3.9/__future__.meta.json +0 -1
  19. np_services/.mypy_cache/3.9/_ast.data.json +0 -1
  20. np_services/.mypy_cache/3.9/_ast.meta.json +0 -1
  21. np_services/.mypy_cache/3.9/_codecs.data.json +0 -1
  22. np_services/.mypy_cache/3.9/_codecs.meta.json +0 -1
  23. np_services/.mypy_cache/3.9/_collections_abc.data.json +0 -1
  24. np_services/.mypy_cache/3.9/_collections_abc.meta.json +0 -1
  25. np_services/.mypy_cache/3.9/_ctypes.data.json +0 -1
  26. np_services/.mypy_cache/3.9/_ctypes.meta.json +0 -1
  27. np_services/.mypy_cache/3.9/_decimal.data.json +0 -1
  28. np_services/.mypy_cache/3.9/_decimal.meta.json +0 -1
  29. np_services/.mypy_cache/3.9/_random.data.json +0 -1
  30. np_services/.mypy_cache/3.9/_random.meta.json +0 -1
  31. np_services/.mypy_cache/3.9/_socket.data.json +0 -1
  32. np_services/.mypy_cache/3.9/_socket.meta.json +0 -1
  33. np_services/.mypy_cache/3.9/_thread.data.json +0 -1
  34. np_services/.mypy_cache/3.9/_thread.meta.json +0 -1
  35. np_services/.mypy_cache/3.9/_typeshed/__init__.data.json +0 -1
  36. np_services/.mypy_cache/3.9/_typeshed/__init__.meta.json +0 -1
  37. np_services/.mypy_cache/3.9/_warnings.data.json +0 -1
  38. np_services/.mypy_cache/3.9/_warnings.meta.json +0 -1
  39. np_services/.mypy_cache/3.9/_weakref.data.json +0 -1
  40. np_services/.mypy_cache/3.9/_weakref.meta.json +0 -1
  41. np_services/.mypy_cache/3.9/_weakrefset.data.json +0 -1
  42. np_services/.mypy_cache/3.9/_weakrefset.meta.json +0 -1
  43. np_services/.mypy_cache/3.9/_winapi.data.json +0 -1
  44. np_services/.mypy_cache/3.9/_winapi.meta.json +0 -1
  45. np_services/.mypy_cache/3.9/abc.data.json +0 -1
  46. np_services/.mypy_cache/3.9/abc.meta.json +0 -1
  47. np_services/.mypy_cache/3.9/array.data.json +0 -1
  48. np_services/.mypy_cache/3.9/array.meta.json +0 -1
  49. np_services/.mypy_cache/3.9/atexit.data.json +0 -1
  50. np_services/.mypy_cache/3.9/atexit.meta.json +0 -1
  51. np_services/.mypy_cache/3.9/builtins.data.json +0 -1
  52. np_services/.mypy_cache/3.9/builtins.meta.json +0 -1
  53. np_services/.mypy_cache/3.9/codecs.data.json +0 -1
  54. np_services/.mypy_cache/3.9/codecs.meta.json +0 -1
  55. np_services/.mypy_cache/3.9/collections/__init__.data.json +0 -1
  56. np_services/.mypy_cache/3.9/collections/__init__.meta.json +0 -1
  57. np_services/.mypy_cache/3.9/collections/abc.data.json +0 -1
  58. np_services/.mypy_cache/3.9/collections/abc.meta.json +0 -1
  59. np_services/.mypy_cache/3.9/contextlib.data.json +0 -1
  60. np_services/.mypy_cache/3.9/contextlib.meta.json +0 -1
  61. np_services/.mypy_cache/3.9/ctypes/__init__.data.json +0 -1
  62. np_services/.mypy_cache/3.9/ctypes/__init__.meta.json +0 -1
  63. np_services/.mypy_cache/3.9/datetime.data.json +0 -1
  64. np_services/.mypy_cache/3.9/datetime.meta.json +0 -1
  65. np_services/.mypy_cache/3.9/decimal.data.json +0 -1
  66. np_services/.mypy_cache/3.9/decimal.meta.json +0 -1
  67. np_services/.mypy_cache/3.9/email/__init__.data.json +0 -1
  68. np_services/.mypy_cache/3.9/email/__init__.meta.json +0 -1
  69. np_services/.mypy_cache/3.9/email/charset.data.json +0 -1
  70. np_services/.mypy_cache/3.9/email/charset.meta.json +0 -1
  71. np_services/.mypy_cache/3.9/email/contentmanager.data.json +0 -1
  72. np_services/.mypy_cache/3.9/email/contentmanager.meta.json +0 -1
  73. np_services/.mypy_cache/3.9/email/errors.data.json +0 -1
  74. np_services/.mypy_cache/3.9/email/errors.meta.json +0 -1
  75. np_services/.mypy_cache/3.9/email/header.data.json +0 -1
  76. np_services/.mypy_cache/3.9/email/header.meta.json +0 -1
  77. np_services/.mypy_cache/3.9/email/message.data.json +0 -1
  78. np_services/.mypy_cache/3.9/email/message.meta.json +0 -1
  79. np_services/.mypy_cache/3.9/email/policy.data.json +0 -1
  80. np_services/.mypy_cache/3.9/email/policy.meta.json +0 -1
  81. np_services/.mypy_cache/3.9/enum.data.json +0 -1
  82. np_services/.mypy_cache/3.9/enum.meta.json +0 -1
  83. np_services/.mypy_cache/3.9/errno.data.json +0 -1
  84. np_services/.mypy_cache/3.9/errno.meta.json +0 -1
  85. np_services/.mypy_cache/3.9/fractions.data.json +0 -1
  86. np_services/.mypy_cache/3.9/fractions.meta.json +0 -1
  87. np_services/.mypy_cache/3.9/genericpath.data.json +0 -1
  88. np_services/.mypy_cache/3.9/genericpath.meta.json +0 -1
  89. np_services/.mypy_cache/3.9/importlib/__init__.data.json +0 -1
  90. np_services/.mypy_cache/3.9/importlib/__init__.meta.json +0 -1
  91. np_services/.mypy_cache/3.9/importlib/abc.data.json +0 -1
  92. np_services/.mypy_cache/3.9/importlib/abc.meta.json +0 -1
  93. np_services/.mypy_cache/3.9/importlib/machinery.data.json +0 -1
  94. np_services/.mypy_cache/3.9/importlib/machinery.meta.json +0 -1
  95. np_services/.mypy_cache/3.9/importlib/metadata/__init__.data.json +0 -1
  96. np_services/.mypy_cache/3.9/importlib/metadata/__init__.meta.json +0 -1
  97. np_services/.mypy_cache/3.9/io.data.json +0 -1
  98. np_services/.mypy_cache/3.9/io.meta.json +0 -1
  99. np_services/.mypy_cache/3.9/json/__init__.data.json +0 -1
  100. np_services/.mypy_cache/3.9/json/__init__.meta.json +0 -1
  101. np_services/.mypy_cache/3.9/json/decoder.data.json +0 -1
  102. np_services/.mypy_cache/3.9/json/decoder.meta.json +0 -1
  103. np_services/.mypy_cache/3.9/json/encoder.data.json +0 -1
  104. np_services/.mypy_cache/3.9/json/encoder.meta.json +0 -1
  105. np_services/.mypy_cache/3.9/logging/__init__.data.json +0 -1
  106. np_services/.mypy_cache/3.9/logging/__init__.meta.json +0 -1
  107. np_services/.mypy_cache/3.9/math.data.json +0 -1
  108. np_services/.mypy_cache/3.9/math.meta.json +0 -1
  109. np_services/.mypy_cache/3.9/mmap.data.json +0 -1
  110. np_services/.mypy_cache/3.9/mmap.meta.json +0 -1
  111. np_services/.mypy_cache/3.9/np_services/__init__.data.json +0 -1
  112. np_services/.mypy_cache/3.9/np_services/__init__.meta.json +0 -1
  113. np_services/.mypy_cache/3.9/np_services/config.data.json +0 -1
  114. np_services/.mypy_cache/3.9/np_services/config.meta.json +0 -1
  115. np_services/.mypy_cache/3.9/np_services/protocols.data.json +0 -1
  116. np_services/.mypy_cache/3.9/np_services/protocols.meta.json +0 -1
  117. np_services/.mypy_cache/3.9/np_services/zro.data.json +0 -1
  118. np_services/.mypy_cache/3.9/np_services/zro.meta.json +0 -1
  119. np_services/.mypy_cache/3.9/ntpath.data.json +0 -1
  120. np_services/.mypy_cache/3.9/ntpath.meta.json +0 -1
  121. np_services/.mypy_cache/3.9/numbers.data.json +0 -1
  122. np_services/.mypy_cache/3.9/numbers.meta.json +0 -1
  123. np_services/.mypy_cache/3.9/os/__init__.data.json +0 -1
  124. np_services/.mypy_cache/3.9/os/__init__.meta.json +0 -1
  125. np_services/.mypy_cache/3.9/os/path.data.json +0 -1
  126. np_services/.mypy_cache/3.9/os/path.meta.json +0 -1
  127. np_services/.mypy_cache/3.9/pathlib.data.json +0 -1
  128. np_services/.mypy_cache/3.9/pathlib.meta.json +0 -1
  129. np_services/.mypy_cache/3.9/pickle.data.json +0 -1
  130. np_services/.mypy_cache/3.9/pickle.meta.json +0 -1
  131. np_services/.mypy_cache/3.9/platform.data.json +0 -1
  132. np_services/.mypy_cache/3.9/platform.meta.json +0 -1
  133. np_services/.mypy_cache/3.9/posixpath.data.json +0 -1
  134. np_services/.mypy_cache/3.9/posixpath.meta.json +0 -1
  135. np_services/.mypy_cache/3.9/random.data.json +0 -1
  136. np_services/.mypy_cache/3.9/random.meta.json +0 -1
  137. np_services/.mypy_cache/3.9/re.data.json +0 -1
  138. np_services/.mypy_cache/3.9/re.meta.json +0 -1
  139. np_services/.mypy_cache/3.9/shutil.data.json +0 -1
  140. np_services/.mypy_cache/3.9/shutil.meta.json +0 -1
  141. np_services/.mypy_cache/3.9/socket.data.json +0 -1
  142. np_services/.mypy_cache/3.9/socket.meta.json +0 -1
  143. np_services/.mypy_cache/3.9/sre_compile.data.json +0 -1
  144. np_services/.mypy_cache/3.9/sre_compile.meta.json +0 -1
  145. np_services/.mypy_cache/3.9/sre_constants.data.json +0 -1
  146. np_services/.mypy_cache/3.9/sre_constants.meta.json +0 -1
  147. np_services/.mypy_cache/3.9/sre_parse.data.json +0 -1
  148. np_services/.mypy_cache/3.9/sre_parse.meta.json +0 -1
  149. np_services/.mypy_cache/3.9/string.data.json +0 -1
  150. np_services/.mypy_cache/3.9/string.meta.json +0 -1
  151. np_services/.mypy_cache/3.9/subprocess.data.json +0 -1
  152. np_services/.mypy_cache/3.9/subprocess.meta.json +0 -1
  153. np_services/.mypy_cache/3.9/sys.data.json +0 -1
  154. np_services/.mypy_cache/3.9/sys.meta.json +0 -1
  155. np_services/.mypy_cache/3.9/threading.data.json +0 -1
  156. np_services/.mypy_cache/3.9/threading.meta.json +0 -1
  157. np_services/.mypy_cache/3.9/time.data.json +0 -1
  158. np_services/.mypy_cache/3.9/time.meta.json +0 -1
  159. np_services/.mypy_cache/3.9/types.data.json +0 -1
  160. np_services/.mypy_cache/3.9/types.meta.json +0 -1
  161. np_services/.mypy_cache/3.9/typing.data.json +0 -1
  162. np_services/.mypy_cache/3.9/typing.meta.json +0 -1
  163. np_services/.mypy_cache/3.9/typing_extensions.data.json +0 -1
  164. np_services/.mypy_cache/3.9/typing_extensions.meta.json +0 -1
  165. np_services/.mypy_cache/3.9/warnings.data.json +0 -1
  166. np_services/.mypy_cache/3.9/warnings.meta.json +0 -1
  167. np_services/.mypy_cache/3.9/weakref.data.json +0 -1
  168. np_services/.mypy_cache/3.9/weakref.meta.json +0 -1
  169. np_services/.mypy_cache/3.9/zmq/__init__.data.json +0 -1
  170. np_services/.mypy_cache/3.9/zmq/__init__.meta.json +0 -1
  171. np_services/.mypy_cache/3.9/zmq/_typing.data.json +0 -1
  172. np_services/.mypy_cache/3.9/zmq/_typing.meta.json +0 -1
  173. np_services/.mypy_cache/3.9/zmq/backend/__init__.data.json +0 -1
  174. np_services/.mypy_cache/3.9/zmq/backend/__init__.meta.json +0 -1
  175. np_services/.mypy_cache/3.9/zmq/backend/select.data.json +0 -1
  176. np_services/.mypy_cache/3.9/zmq/backend/select.meta.json +0 -1
  177. np_services/.mypy_cache/3.9/zmq/constants.data.json +0 -1
  178. np_services/.mypy_cache/3.9/zmq/constants.meta.json +0 -1
  179. np_services/.mypy_cache/3.9/zmq/error.data.json +0 -1
  180. np_services/.mypy_cache/3.9/zmq/error.meta.json +0 -1
  181. np_services/.mypy_cache/3.9/zmq/sugar/__init__.data.json +0 -1
  182. np_services/.mypy_cache/3.9/zmq/sugar/__init__.meta.json +0 -1
  183. np_services/.mypy_cache/3.9/zmq/sugar/attrsettr.data.json +0 -1
  184. np_services/.mypy_cache/3.9/zmq/sugar/attrsettr.meta.json +0 -1
  185. np_services/.mypy_cache/3.9/zmq/sugar/context.data.json +0 -1
  186. np_services/.mypy_cache/3.9/zmq/sugar/context.meta.json +0 -1
  187. np_services/.mypy_cache/3.9/zmq/sugar/frame.data.json +0 -1
  188. np_services/.mypy_cache/3.9/zmq/sugar/frame.meta.json +0 -1
  189. np_services/.mypy_cache/3.9/zmq/sugar/poll.data.json +0 -1
  190. np_services/.mypy_cache/3.9/zmq/sugar/poll.meta.json +0 -1
  191. np_services/.mypy_cache/3.9/zmq/sugar/socket.data.json +0 -1
  192. np_services/.mypy_cache/3.9/zmq/sugar/socket.meta.json +0 -1
  193. np_services/.mypy_cache/3.9/zmq/sugar/tracker.data.json +0 -1
  194. np_services/.mypy_cache/3.9/zmq/sugar/tracker.meta.json +0 -1
  195. np_services/.mypy_cache/3.9/zmq/sugar/version.data.json +0 -1
  196. np_services/.mypy_cache/3.9/zmq/sugar/version.meta.json +0 -1
  197. np_services/.mypy_cache/3.9/zmq/utils/__init__.data.json +0 -1
  198. np_services/.mypy_cache/3.9/zmq/utils/__init__.meta.json +0 -1
  199. np_services/.mypy_cache/3.9/zmq/utils/interop.data.json +0 -1
  200. np_services/.mypy_cache/3.9/zmq/utils/interop.meta.json +0 -1
  201. np_services/.mypy_cache/3.9/zmq/utils/jsonapi.data.json +0 -1
  202. np_services/.mypy_cache/3.9/zmq/utils/jsonapi.meta.json +0 -1
  203. np_services/.mypy_cache/CACHEDIR.TAG +0 -3
  204. np_services/resources/black_desktop.ps1 +0 -66
  205. np_services/resources/grey_desktop.ps1 +0 -66
  206. np_services/resources/reset_desktop.ps1 +0 -66
  207. np_services-0.1.59.dist-info/RECORD +0 -206
np_services/proxies.py CHANGED
@@ -1,1488 +1,1489 @@
1
- """
2
- Proxy classes for interacting with devices via zro/zmq.
3
-
4
- Proxy class names must match the name of the proxy key in the config dict.
5
- """
6
- import abc
7
- import contextlib
8
- import copy
9
- import csv
10
- import datetime
11
- import functools
12
- import itertools
13
- import json # loading config from Sync proxy will instantiate datetime objects
14
- import logging
15
- import pathlib
16
- import re
17
- import tempfile
18
- import time
19
- from typing import Any, ClassVar, Literal, Mapping, Optional, Sequence
20
-
21
- import fabric
22
- import np_config
23
- import np_logging
24
- import np_session
25
- import npc_stim
26
- import npc_sync
27
- import npc_mvr
28
- import np_tools
29
- import yaml
30
- import pandas as pd
31
-
32
- import np_services.resources.mvr_connector as mvr_connector
33
- import np_services.utils as utils
34
- import np_services.resources.zro as zro
35
- from np_services.protocols import *
36
-
37
- logger = np_logging.getLogger(__name__)
38
-
39
- CONFIG = utils.config_from_zk()
40
-
41
- ProxyState = tuple[Literal["", "READY", "BUSY"], str]
42
-
43
-
44
- class Proxy(abc.ABC):
45
- # req proxy config - hardcode or overload ensure_config()
46
- host: ClassVar[str]
47
- port: ClassVar[int]
48
- timeout: ClassVar[float]
49
- serialization: ClassVar[Literal["json", "pickle"]]
50
-
51
- # if a program needs to be launched (e.g. via RSC):
52
- rsc_app_id: str
53
-
54
- # if device records:
55
- gb_per_hr: ClassVar[int | float]
56
- min_rec_hr: ClassVar[int | float]
57
- pretest_duration_sec: ClassVar[int | float]
58
-
59
- # for resulting data, if device records:
60
- data_root: ClassVar[Optional[pathlib.Path]] = None
61
- data_files: ClassVar[Optional[Sequence[pathlib.Path]]] = None
62
-
63
- # info
64
- exc: ClassVar[Optional[Exception]] = None
65
-
66
- latest_start: ClassVar[float | int] = 0
67
- "`time.time()` when the service was last started via `start()`."
68
-
69
- @classmethod
70
- def ensure_config(cls) -> None:
71
- """Updates any missing parameters for class proxy.
72
-
73
- Is called in `get_proxy()` so any time we need the proxy, we have a
74
- correct config, without remembering to run `initialize()` or some such.
75
- """
76
- config = CONFIG.get(
77
- __class__.__name__, {}
78
- ) # class where this function is defined
79
- config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
80
-
81
- # for proxy (reqd):
82
- if not hasattr(cls, "host"):
83
- cls.host = config["host"]
84
- if not hasattr(cls, "port"):
85
- cls.port = int(config["port"])
86
- if not hasattr(cls, "timeout"):
87
- cls.timeout = float(config.get("timeout", 10.0))
88
- if not hasattr(cls, "serialization"):
89
- cls.serialization = config.get("serialization", "json")
90
-
91
- # for pretest (reqd, not used if device doesn't record)
92
- if not hasattr(cls, "pretest_duration_sec"):
93
- cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
94
- if not hasattr(cls, "gb_per_hr"):
95
- cls.gb_per_hr = config.get("gb_per_hr", 2.0)
96
- if not hasattr(cls, "min_rec_hr"):
97
- cls.min_rec_hr = config.get("min_rec_hr", 3.0)
98
-
99
- # for resulting data (optional):
100
- if not cls.data_root or cls.host not in cls.data_root.parts:
101
- relative_path = config.get("data", None)
102
- if relative_path:
103
- root = pathlib.Path(f"//{cls.host}/{relative_path}")
104
- try:
105
- _ = root.exists()
106
- except OSError as exc:
107
- cls.exc = exc
108
- logger.exception(
109
- "Error accessing %s data path: %s", cls.__name__, root
110
- )
111
- raise FileNotFoundError(
112
- f"{cls.__name__} data path is not accessible: {root}"
113
- ) from exc
114
- else:
115
- cls.data_root = root
116
- if hasattr(cls, "data_root") and cls.data_root:
117
- cls.data_root.mkdir(parents=True, exist_ok=True)
118
-
119
- @classmethod
120
- def launch(cls) -> None:
121
- utils.start_rsc_app(cls.host, cls.rsc_app_id)
122
-
123
- @classmethod
124
- def kill(cls) -> None:
125
- utils.kill_rsc_app(cls.host, cls.rsc_app_id)
126
-
127
- @classmethod
128
- def initialize(cls) -> None:
129
- cls.launch()
130
- with contextlib.suppress(AttributeError):
131
- del cls.proxy
132
- cls.proxy = cls.get_proxy()
133
- if isinstance(cls, Startable) and not cls.is_ready_to_start():
134
- if isinstance(cls, Finalizable):
135
- cls.finalize()
136
- if not cls.is_ready_to_start():
137
- logger.warning(
138
- "%s not ready to start: %s", cls.__name__, cls.get_state()
139
- )
140
- return
141
- if cls.data_root:
142
- cls.data_files = []
143
- cls.sync_path = None
144
- cls.initialization = time.time()
145
- logger.info("%s(%s) initialized: ready for use", __class__.__name__, cls.__name__)
146
-
147
- @classmethod
148
- def test(cls) -> None:
149
- "Quickly verify service is working and ready for use, or raise `TestError`."
150
- logger.debug("Testing %s proxy", cls.__name__)
151
- if not cls.is_connected():
152
- raise TestError(
153
- f"{cls.__name__} not connected to {cls.host}:{cls.port}"
154
- ) from cls.exc
155
- logger.debug(
156
- "%s proxy connection to %s:%s confirmed", cls.__name__, cls.host, cls.port
157
- )
158
- gb = cls.get_required_disk_gb()
159
- if not cls.is_disk_space_ok():
160
- raise TestError(
161
- f"{cls.__name__} free disk space on {cls.data_root.drive} doesn't meet minimum of {gb} GB"
162
- ) from cls.exc
163
- logger.debug("%s(%s) tested successfully", __class__.__name__, cls.__name__)
164
-
165
- @classmethod
166
- def get_proxy(cls) -> zro.DeviceProxy:
167
- "Return a proxy to the service without re-creating unnecessarily."
168
- with contextlib.suppress(AttributeError):
169
- return cls.proxy
170
- cls.ensure_config()
171
- logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
172
- cls.proxy = zro.DeviceProxy(cls.host, cls.port, cls.timeout, cls.serialization)
173
- return cls.get_proxy()
174
-
175
- @classmethod
176
- def get_state(cls) -> ProxyState | dict:
177
- "Dict may be deprecated: is no longer returned by Sync or Camstim proxies."
178
- state = cls.get_proxy().get_state()
179
- logger.debug("%s state: %s", cls.__name__, state)
180
- return state
181
-
182
- @classmethod
183
- def get_latest_data(
184
- cls: Recorder, glob: Optional[str] = None, subfolders: str = ""
185
- ) -> list[pathlib.Path] | None:
186
- cls.ensure_config()
187
- if not cls.data_root:
188
- return None
189
- if subfolders == "/": # can alter path to drive root
190
- subfolders = ""
191
- if not glob:
192
- glob = f"*{cls.raw_suffix}" if hasattr(cls, "raw_suffix") else "*"
193
- if not hasattr(cls, "latest_start"):
194
- data_paths = utils.get_files_created_between(
195
- cls.data_root / subfolders, glob
196
- )
197
- if not data_paths:
198
- return None
199
- return [
200
- max(data_paths, key=lambda x: x.stat().st_mtime)
201
- ]
202
- return utils.get_files_created_between(
203
- cls.data_root / subfolders, glob, cls.latest_start
204
- )
205
-
206
- @classmethod
207
- def get_required_disk_gb(cls) -> float:
208
- "Return the minimum disk space required prior to start (to .1 GB). Returns `0.0` if service generates no data."
209
- cls.ensure_config()
210
- if not isinstance(cls, Startable):
211
- return 0.0
212
- return round(cls.min_rec_hr * cls.gb_per_hr, 1)
213
-
214
- @classmethod
215
- def is_disk_space_ok(cls) -> bool:
216
- required = cls.get_required_disk_gb()
217
- if required == 0.0:
218
- return True
219
- try:
220
- free = utils.free_gb(cls.data_root)
221
- except FileNotFoundError as exc:
222
- cls.exc = exc
223
- logger.exception(
224
- f"{cls.__name__} data path not accessible: {cls.data_root}"
225
- )
226
- return False
227
- else:
228
- logger.debug(
229
- "%s free disk space on %s: %s GB",
230
- cls.__name__,
231
- cls.data_root.drive,
232
- free,
233
- )
234
- return free > required
235
-
236
- @classmethod
237
- def is_connected(cls) -> bool:
238
- if not utils.is_online(cls.host):
239
- cls.exc = ConnectionError(
240
- f"No response from {cls.host}: may be offline or unreachable"
241
- )
242
- return False
243
- try:
244
- _ = cls.get_proxy().uptime
245
- except zro.ZroError as exc:
246
- cls.exc = exc
247
- logger.exception(
248
- f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
249
- )
250
- return False
251
- try:
252
- _ = cls.get_state()
253
- except zro.ZroError as exc:
254
- cls.exc = exc
255
- logger.exception(
256
- f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
257
- )
258
- return False
259
- return True
260
-
261
-
262
- class CamstimSyncShared(Proxy):
263
- started_state: ClassVar[Sequence[str]]
264
-
265
- @classmethod
266
- def is_ready_to_start(cls) -> bool:
267
- if cls.is_started():
268
- return False
269
- state = cls.get_state()
270
- if isinstance(state, Mapping) and state.get("message", "") == "READY":
271
- return True
272
- if isinstance(state, Sequence) and "READY" in state:
273
- return True
274
- return False
275
-
276
- @classmethod
277
- def is_started(cls) -> bool:
278
- return len(state := cls.get_state()) and all(
279
- msg in state for msg in cls.started_state
280
- )
281
-
282
- @classmethod
283
- def start(cls) -> None:
284
- logger.info("%s | Starting recording", cls.__name__)
285
- if cls.is_started():
286
- logger.warning(
287
- "%s already started - should be stopped manually", cls.__name__
288
- )
289
- return
290
- # otherwise, Sync - for example - would stop current recording and start another
291
- if not cls.is_ready_to_start():
292
- logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
293
- raise AssertionError(
294
- f"{cls.__name__} not ready to start: {cls.get_state()}"
295
- )
296
- cls.latest_start = time.time()
297
- cls.get_proxy().start()
298
-
299
- @classmethod
300
- def pretest(cls) -> None:
301
- "Test all critical functions"
302
- with np_logging.debug():
303
- logger.debug("Starting %s pretest", cls.__name__)
304
- cls.initialize() # calls test()
305
-
306
- with utils.stop_on_error(cls):
307
- cls.start()
308
- time.sleep(1)
309
- cls.verify()
310
- time.sleep(cls.pretest_duration_sec)
311
- # stop() called by context manager at exit, regardless
312
- cls.finalize()
313
- cls.validate()
314
- logger.info("%s pretest complete", cls.__name__)
315
-
316
- @classmethod
317
- def verify(cls) -> None:
318
- "Assert latest data file is currently increasing in size, or raise AssertionError."
319
- if not cls.is_started():
320
- logger.warning(
321
- "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
322
- )
323
- raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
324
-
325
- @classmethod
326
- def stop(cls) -> None:
327
- logger.debug("Stopping %s", cls.__name__)
328
- cls.get_proxy().stop()
329
- logger.info("%s | Stopped recording", cls.__name__)
330
-
331
- # --- End of possible Camstim/Sync shared methods ---
332
-
333
- # --- Sync-specific methods ---
334
-
335
-
336
- class Sync(CamstimSyncShared):
337
- host = np_config.Rig().Sync
338
- started_state = ("BUSY", "RECORDING")
339
- raw_suffix: str = ".sync"
340
- rsc_app_id: str = "sync_device"
341
-
342
- @classmethod
343
- def ensure_config(cls) -> None:
344
- """Updates any missing parameters for class proxy.
345
-
346
- Is called in `get_proxy()` so any time we need the proxy, we have a
347
- correct config, without remembering to run `initialize()` or some such.
348
- """
349
- config = CONFIG.get(
350
- __class__.__name__, {}
351
- ) # class where this function is defined
352
- config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
353
-
354
- # for proxy (reqd):
355
- if not hasattr(cls, "host"):
356
- cls.host = config["host"]
357
- if not hasattr(cls, "port"):
358
- cls.port = int(config["port"])
359
- if not hasattr(cls, "timeout"):
360
- cls.timeout = float(config.get("timeout", 10.0))
361
- if not hasattr(cls, "serialization"):
362
- cls.serialization = config.get("serialization", "json")
363
-
364
- # for pretest (reqd, not used if device doesn't record)
365
- if not hasattr(cls, "pretest_duration_sec"):
366
- cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
367
- if not hasattr(cls, "gb_per_hr"):
368
- cls.gb_per_hr = config.get("gb_per_hr", 2.0)
369
- if not hasattr(cls, "min_rec_hr"):
370
- cls.min_rec_hr = config.get("min_rec_hr", 3.0)
371
-
372
- # for resulting data (optional):
373
- if not cls.data_root or cls.host not in cls.data_root.parts:
374
- relative_path = config.get("data", None)
375
- if relative_path:
376
- root = pathlib.Path(f"//{cls.host}/{relative_path}")
377
- try:
378
- _ = root.exists()
379
- except OSError as exc:
380
- cls.exc = exc
381
- logger.exception(
382
- "Error accessing %s data path: %s", cls.__name__, root
383
- )
384
- raise FileNotFoundError(
385
- f"{cls.__name__} data path is not accessible: {root}"
386
- ) from exc
387
- else:
388
- cls.data_root = root
389
- if hasattr(cls, "data_root"):
390
- cls.data_root.mkdir(parents=True, exist_ok=True)
391
-
392
- @classmethod
393
- def finalize(cls) -> None:
394
- logger.debug("Finalizing %s", cls.__name__)
395
- if cls.is_started():
396
- cls.stop()
397
- while not cls.is_ready_to_start():
398
- logger.debug("Waiting for %s to finish processing", cls.__name__)
399
- time.sleep(1) # TODO add backoff module
400
- if not cls.data_files:
401
- cls.data_files = []
402
- cls.data_files.extend(new := cls.get_latest_data("*.h5"))
403
- logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
404
-
405
- @classmethod
406
- def shutdown(cls) -> None:
407
- logger.debug("Shutting down %s", cls.__name__)
408
- cls.stop()
409
- try:
410
- del cls.proxy
411
- except Exception as exc:
412
- logger.debug("Failed to delete %s proxy: %s", cls.__name__, exc)
413
- cls.exc = exc
414
-
415
- @classmethod
416
- def get_config(cls) -> dict[str, Any | datetime.datetime]:
417
- "Sync config, including `line_labels` and `frequency`"
418
- if cls.serialization in ("json", "j"):
419
- return eval(cls.get_proxy().config)
420
- if cls.serialization in ("pickle", "pkl", "p"):
421
- return cls.get_proxy().config
422
-
423
- @classmethod
424
- def validate(cls, data: Optional[pathlib.Path] = None) -> None:
425
- "Check that data file is valid, or raise AssertionError."
426
- logger.debug("Validating %s data", cls.__name__)
427
- if not data and bool(files := cls.get_latest_data("*.h5")):
428
- data = files[-1]
429
- logger.debug(
430
- "No data file provided: validating most-recent data in %s: %s",
431
- cls.data_root,
432
- data.name,
433
- )
434
- if cls.is_started():
435
- logger.warning(
436
- f"Attempted to validate current data file while recording"
437
- )
438
- return
439
- elif not cls.is_ready_to_start():
440
- cls.finalize()
441
- try:
442
- import h5py
443
- except ImportError:
444
- logger.warning("h5py not installed: cannot open Sync data")
445
- cls.min_validation(data)
446
- else:
447
- cls.full_validation(data)
448
-
449
- @classmethod
450
- def verify(cls) -> None:
451
- "Assert latest data file is currently increasing in size, or raise AssertionError."
452
- super().verify()
453
- if cls.data_root and not utils.is_file_growing(cls.get_latest_data()[-1]):
454
- raise AssertionError(
455
- f"{cls.__name__} latest data file is not increasing in size: {cls.get_latest_data()[-1]}"
456
- )
457
- logger.info("%s | Verified: file on disk is increasing in size", cls.__name__)
458
-
459
- @classmethod
460
- def full_validation(cls, data: pathlib.Path) -> None:
461
- npc_sync.get_sync_data(data).validate()
462
-
463
- @classmethod
464
- def min_validation(cls, data: pathlib.Path) -> None:
465
- if data.stat().st_size == 0:
466
- raise AssertionError(f"Empty file: {data}")
467
- if data.suffix != ".h5":
468
- raise FileNotFoundError(
469
- f"Expected .sync to be converted to .h5 immediately after recording stopped: {data}"
470
- )
471
- logger.debug("%s minimal validation passed for %s", cls.__name__, data.name)
472
-
473
-
474
- class Phidget(CamstimSyncShared):
475
- host = np_config.Rig().Stim
476
- rsc_app_id = "phidget_server"
477
-
478
-
479
- class Camstim(CamstimSyncShared):
480
- host = np_config.Rig().Stim
481
- started_state = ("BUSY", "Script in progress.")
482
- rsc_app_id = "camstim_agent"
483
- sync_path: Optional[pathlib.Path] = None
484
-
485
- @classmethod
486
- def launch(cls) -> None:
487
- super().launch()
488
- Phidget.launch()
489
-
490
- @classmethod
491
- def get_config(cls) -> dict[str, Any]:
492
- return cls.get_proxy().config
493
-
494
- @classmethod
495
- def ensure_config(cls) -> None:
496
- """Updates any missing parameters for class proxy.
497
-
498
- Is called in `get_proxy()` so any time we need the proxy, we have a
499
- correct config, without remembering to run `initialize()` or some such.
500
- """
501
- config = CONFIG.get(
502
- __class__.__name__, {}
503
- ) # class where this function is defined
504
- config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
505
-
506
- # for proxy (reqd):
507
- if not hasattr(cls, "host"):
508
- cls.host = config["host"]
509
- if not hasattr(cls, "port"):
510
- cls.port = int(config["port"])
511
- if not hasattr(cls, "timeout"):
512
- cls.timeout = float(config.get("timeout", 10.0))
513
- if not hasattr(cls, "serialization"):
514
- cls.serialization = config.get("serialization", "json")
515
-
516
- # for pretest (reqd, not used if device doesn't record)
517
- if not hasattr(cls, "pretest_duration_sec"):
518
- cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
519
- if not hasattr(cls, "gb_per_hr"):
520
- cls.gb_per_hr = config.get("gb_per_hr", 2.0)
521
- if not hasattr(cls, "min_rec_hr"):
522
- cls.min_rec_hr = config.get("min_rec_hr", 3.0)
523
-
524
- # for resulting data (optional):
525
- if not cls.data_root:
526
- relative_path = config.get("data", None)
527
- if relative_path:
528
- root = pathlib.Path(f"//{cls.host}/{relative_path}")
529
- try:
530
- _ = root.exists()
531
- except OSError as exc:
532
- cls.exc = exc
533
- logger.exception(
534
- "Error accessing %s data path: %s", cls.__name__, root
535
- )
536
- raise FileNotFoundError(
537
- f"{cls.__name__} data path is not accessible: {root}"
538
- ) from exc
539
- else:
540
- cls.data_root = root
541
- if hasattr(cls, "data_root") and cls.data_root is not None:
542
- cls.data_root.mkdir(parents=True, exist_ok=True)
543
-
544
- @classmethod
545
- def finalize(cls) -> None:
546
- logger.info("Finalizing %s", cls.__name__)
547
- if cls.is_started():
548
- cls.stop()
549
- count = 0
550
- while not cls.is_ready_to_start():
551
- if count % 120 == 0:
552
- logger.debug("Waiting for %s to finish processing", cls.__name__)
553
- time.sleep(1) # TODO add backoff module
554
- if not cls.data_files:
555
- cls.data_files = []
556
- cls.data_files.extend(new := itertools.chain(cls.get_latest_data("*pkl"), cls.get_latest_data("*hdf5")))
557
- logger.info("%s added new data: %s", cls.__name__, [_.name for _ in new])
558
-
559
- @classmethod
560
- def validate(cls) -> None:
561
- if not cls.sync_path:
562
- logger.warning("Cannot validate stim without sync file: assign `stim.sync_path`")
563
- return
564
- logger.info("Validating %s", cls.__name__)
565
- for file in cls.data_files:
566
- npc_stim.validate_stim(file, sync=cls.sync_path)
567
- logger.info(f"Validated {len(cls.data_files)} stim files with sync")
568
-
569
class ScriptCamstim(Camstim):
    """Camstim variant that runs an arbitrary script via the Camstim Agent."""

    script: ClassVar[str]
    # path to script on Stim computer
    params: ClassVar[dict[str, Any]] = {}

    @classmethod
    def pretest(cls) -> None:
        """Run a known-good dev script end-to-end as a smoke test."""
        pretest_mouse = "599657"
        cls.script = "C:/ProgramData/StimulusFiles/dev/bi_script_pretest_v2.py"

        # get params from MTrain, as if we were running `Agent.start_session`
        cls.params = np_session.mtrain.MTrain(pretest_mouse).stage["parameters"]
        cls.params.update(user_name="ben.hardcastle", mouse_id=pretest_mouse)

        logger.info(
            "%s | Pretest: running %s with MTrain stage params for mouse %s",
            cls.__name__,
            cls.script,
            pretest_mouse,
        )
        cls.initialize()
        cls.test()
        cls.start()
        # poll until the Agent reports it has finished the script
        while not cls.is_ready_to_start():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(10)
        cls.finalize()
        # cls.validate()
        cls.initialize()

    @classmethod
    def start(cls):
        """Record the start time and launch `cls.script` with `cls.params` on the Agent."""
        cls.latest_start = time.time()
        cls.get_proxy().start_script(cls.script, cls.params)
604
-
605
-
606
class SessionCamstim(Camstim):
    """Camstim variant that starts a full session for a mouse/user pair via the Agent."""

    lims_user_id: ClassVar[str]
    labtracks_mouse_id: ClassVar[int]

    @classmethod
    def start(cls):
        """Record the start time and launch a session for the configured mouse and user."""
        cls.latest_start = time.time()
        cls.get_proxy().start_session(
            cls.labtracks_mouse_id, cls.lims_user_id
        )  # , cls.params)

    @classmethod
    def pretest(cls) -> None:
        """Run the standard Camstim pretest with a fixed test mouse and user."""
        cls.labtracks_mouse_id = 598796
        cls.lims_user_id = "ben.hardcastle"
        logger.info(
            "%s | Pretest with mouse %s, user %s",
            cls.__name__,
            cls.labtracks_mouse_id,
            cls.lims_user_id,
        )
        super().pretest()
628
-
629
-
630
class NoCamstim(Camstim):
    "Run remote files (e.g. .bat) without sending directly to Camstim Agent"

    remote_file: ClassVar[str | pathlib.Path]
    extra_args: ClassVar[list[str]] = []
    ssh: ClassVar[fabric.Connection]
    user: ClassVar[str] = "svc_neuropix"
    password: ClassVar[str]

    @classmethod
    def pretest(cls) -> None:
        logger.warning("%s | Pretest not implemented", cls.__name__)

    @classmethod
    def get_ssh(cls) -> fabric.Connection:
        """Return the cached ssh connection, creating it via `initialize()` on first use."""
        with contextlib.suppress(AttributeError):
            return cls.ssh
        cls.initialize()
        return cls.ssh

    @classmethod
    def initialize(cls) -> None:
        """Prompt for credentials if needed, open the ssh connection, then run `test()`."""
        if not hasattr(cls, "password"):
            cls.password = input(f"{cls.__name__} | Enter password for {cls.host}: ")
        cls.remote_file = utils.unc_to_local(pathlib.Path(cls.remote_file))
        cls.ssh = fabric.Connection(
            cls.host, cls.user, connect_kwargs=dict(password=cls.password)
        )
        super().initialize()
        cls.test()

    @classmethod
    def test(cls) -> None:
        """Check the ssh connection works and that `cls.remote_file` exists on the host.

        Raises:
            TestError: if the connection fails or the remote file is inaccessible.
        """
        super().test()
        logger.debug(f"{cls.__name__} | Testing")
        try:
            result = cls.get_ssh().run("hostname", hide=True)
        except Exception as exc:
            # chain the cause, and don't echo the password itself into
            # logs/tracebacks (the original included it in the message)
            raise TestError(
                f"{cls.__name__} Error connecting to {cls.host} via ssh: {exc!r}. Is the password correct?"
            ) from exc
        else:
            if result.exited != 0:
                raise TestError(
                    f"{cls.__name__} Error connecting to {cls.host} via ssh: {result}"
                )
            logger.debug(f"{cls.__name__} | Connected to {cls.host} via ssh")

        try:
            # `type` (cmd.exe builtin) fails if the file doesn't exist
            result = cls.get_ssh().run(f"type {cls.remote_file}", hide=True)
        except Exception as exc:
            extra = (
                f" | '{exc.result.command}': {exc.result.stderr.strip()!r}"
                if hasattr(exc, "result")
                else ""
            )
            raise TestError(
                f"{cls.__name__} | Error calling ssh-executed command{extra}"
            ) from exc
        else:
            if result.exited != 0:
                raise TestError(
                    f"{cls.__name__} Error accessing {cls.remote_file} on {cls.host} - is filepath correct? {result}"
                )
            logger.debug(
                f"{cls.__name__} | {cls.remote_file} is accessible via ssh on {cls.host}"
            )

    @classmethod
    def start(cls):
        """Run `cls.remote_file` on the remote host via ssh (no-op if already started)."""
        if cls.is_started():
            logger.warning(f"{cls.__name__} already started")
            return
        logger.debug(f"{cls.__name__} | Starting {cls.remote_file} on {cls.host}")
        cls.latest_start = time.time()
        # join args so the remote shell sees individual tokens - the original
        # interpolated the list itself, producing e.g. `call file ['a', 'b']`
        args = " ".join(str(_) for _ in cls.extra_args)
        cls.get_ssh().run(f"call {cls.remote_file} {args}".rstrip())

    @classmethod
    def verify(cls):
        logger.warning(f"{cls.__name__} | No verification implemented")
710
-
711
-
712
class MouseDirector(Proxy):
    """Communicate with the ZMQ remote object specified here:
    http://aibspi.corp.alleninstitute.org/braintv/visual_behavior/mouse_director/-/blob/master/src/mousedirector.py

    ::
        MouseDirector.get_proxy().set_mouse_id(str(366122))
        MouseDirector.get_proxy().set_user_id("ben.hardcastle")
    """

    user: ClassVar[str | np_session.User]
    mouse: ClassVar[str | int | np_session.Mouse]

    rsc_app_id = CONFIG['MouseDirector']['rsc_app_id']
    host = np_config.Rig().Mon
    gb_per_hr = 0
    serialization = "json"
    started_state: ClassVar[ProxyState] = ("READY", "")
    not_connected_state: ClassVar[ProxyState] = ("", "NOT_CONNECTED")

    @classmethod
    def pretest(cls):
        """Exercise the lick-spout retract/extend cycle with a fixed test mouse/user."""
        with np_logging.debug():
            logger.debug(f"{cls.__name__} | Pretest")
            cls.user = "ben.hardcastle"
            cls.mouse = 366122
            cls.initialize()
            cls.test()
            # retract -> extend -> retract, pausing between each move
            for method_name in (
                "retract_lick_spout",
                "extend_lick_spout",
                "retract_lick_spout",
            ):
                getattr(cls.get_proxy(), method_name)()
                time.sleep(3)
            logger.info(f"{cls.__name__} | Pretest passed")

    @classmethod
    def initialize(cls):
        """Push the configured mouse and user IDs to the remote MouseDirector."""
        logger.debug(f"{cls.__name__} | Initializing")
        super().initialize()
        cls.get_proxy().set_mouse_id(str(cls.mouse))
        time.sleep(1)
        cls.get_proxy().set_user_id(str(cls.user))
        time.sleep(1)
        logger.debug(f"{cls.__name__} | Initialized with mouse {cls.mouse}, user {cls.user}")

    @classmethod
    def get_state(cls) -> ProxyState:
        """Report READY when the remote's rig dict mentions this rig, else NOT_CONNECTED."""
        result: str = cls.get_proxy().rig_dict
        return (
            cls.started_state
            if str(np_config.Rig()) in result
            else cls.not_connected_state
        )
763
-
764
class Cam3d(CamstimSyncShared):
    """Proxy for the Cam3d service on the Mon computer: captures a left/right image pair.

    `start()` requests both snapshots; `finalize()` collects the saved files into
    `data_files`; `validate()` asserts exactly two images were produced.
    """

    label: str  # inserted into generated image filenames (e.g. 'pretest')

    host = np_config.Rig().Mon
    serialization = "json"
    # NOTE(review): a list here, unlike the tuple ProxyState used elsewhere - the
    # equality check in is_started() compares get_state() against this directly
    started_state = ["READY", "CAMERAS_OPEN,CAMERAS_ACQUIRING"]
    rsc_app_id = CONFIG['Cam3d']['rsc_app_id']
    data_files: ClassVar[list[pathlib.Path]] = []

    @classmethod
    def is_started(cls) -> bool:
        # started == state exactly matches `started_state`
        return cls.get_state() == cls.started_state

    @classmethod
    def is_ready_to_start(cls) -> bool:
        """True when not currently capturing and the state reports READY."""
        if cls.is_started():
            return False
        time.sleep(1)  # allow state to settle before re-checking
        if (
            cls.get_state() == cls.started_state
            or 'READY' not in cls.get_state()
        ):
            return False
        return True

    @classmethod
    def initialize(cls) -> None:
        """Initialize the proxy, re-enabling cameras if the service isn't ready."""
        logger.debug(f"{cls.__name__} | Initializing")
        super().initialize()
        if not cls.is_ready_to_start():
            cls.reenable_cameras()

        time.sleep(1)

    @classmethod
    def reenable_cameras(cls) -> None:
        """Release/re-enable cameras and restart capture, pausing between each step."""
        cls.get_proxy().release_cameras()
        time.sleep(.2)
        cls.get_proxy().enable_cameras()
        time.sleep(.2)
        cls.get_proxy().stop_capture()
        time.sleep(.2)
        cls.get_proxy().start_capture()
        time.sleep(.2)

    @classmethod
    def generate_image_paths(cls) -> tuple[pathlib.Path, pathlib.Path]:
        """Return timestamped (left, right) png paths under `data_root`.

        Falls back to 'image' in the filename if `cls.label` isn't set.
        """
        if not hasattr(cls, 'label') or not cls.label:
            logger.warning(f"{cls.__name__} | `cls.label` not specified")
        def path(side: str) -> pathlib.Path:
            return cls.data_root / f"{datetime.datetime.now():%Y%m%d_%H%M%S}_{getattr(cls, 'label', 'image')}_{side}.png"
        return path('left'), path('right')

    @classmethod
    def start(cls) -> None:
        """Request left and right snapshots, then log whether each file appeared."""
        logger.debug(f"{cls.__name__} | Starting")
        cls.latest_start = time.time()
        left, right = cls.generate_image_paths()
        cls.get_proxy().save_left_image(str(left))
        cls.get_proxy().save_right_image(str(right))
        time.sleep(.5)  # give the service a moment to write the files
        for path, side in zip((left, right), ('Left', 'Right')):
            if path.exists():
                logger.debug(f"{cls.__name__} | {side} image saved to {path}")
            else:
                logger.debug(f"{cls.__name__} | {side} image capture request sent, but image not saved")

    @classmethod
    def finalize(cls) -> None:
        """Wait briefly for images to land on disk, re-enabling cameras after 3 tries.

        Whatever files are found are appended to `data_files`.
        """
        logger.debug(f"{cls.__name__} | Finalizing")
        counter = 0
        while (
            not (latest := cls.get_latest_data('*'))
            or cls.is_started()
        ):
            time.sleep(1)
            counter += 1
            if counter == 3:
                # give up waiting and kick the cameras - `latest` keeps the
                # result of the last poll (possibly empty)
                cls.reenable_cameras()
                break
        cls.data_files.extend(latest)
        logger.debug(f"{cls.__name__} | Images captured: {latest}")

    @classmethod
    def validate(cls):
        """Raise AssertionError unless exactly 2 images were captured since `latest_start`."""
        if not (latest := cls.get_latest_data('*')) or len(latest) != 2:
            raise AssertionError(f"{cls.__name__} | Expected 2 images, got {len(latest)}: {latest}")

    @classmethod
    def stop(cls):
        # snapshot capture has nothing to stop
        logger.debug("%s | `stop()` not implemented", cls.__name__)

    @classmethod
    def pretest(cls):
        """Full capture cycle with label 'pretest': initialize, capture, validate."""
        with np_logging.debug():
            logger.debug(f"{cls.__name__} | Pretest")
            cls.label = 'pretest'
            cls.initialize()
            cls.test()
            cls.start()
            cls.finalize()
            cls.validate()
            logger.info(f"{cls.__name__} | Pretest passed")
868
-
869
class MVR(CamstimSyncShared):
    """Base proxy for MultiVideoRecorder on the Mon computer.

    Talks to MVR over a raw socket connector (`mvr_connector.MVRConnector`);
    subclasses pick which cameras to use by overriding `get_cameras()`.
    """

    # req proxy config - hardcode or overload ensure_config()
    host: ClassVar[str] = np_config.Rig().Mon
    port: ClassVar[int] = CONFIG['MVR']['port']

    # matches camera labels that identify the auxiliary camera
    re_aux: re.Pattern = re.compile("aux|USB!|none", re.IGNORECASE)

    @classmethod
    def is_connected(cls) -> bool:
        """True if the host is online, MVR is running, and camera status is readable.

        On failure, stores the reason in `cls.exc` and returns False.
        """
        if not utils.is_online(cls.host):
            cls.exc = ConnectionError(
                f"No response from {cls.host}: may be offline or unreachable"
            )
            return False
        if not cls.get_proxy()._mvr_connected:
            cls.exc = ConnectionError(f"MVR likely not running on {cls.host}")
            return False
        try:
            _ = cls.get_camera_status()
        except ConnectionError as exc:
            cls.exc = exc
            return False
        return True

    @classmethod
    def initialize(cls) -> None:
        """Recreate the proxy, configure cameras, and get the service into a startable state."""
        with contextlib.suppress(AttributeError):
            del cls.proxy  # force a fresh connection
        cls.proxy = cls.get_proxy()
        cls.test()
        cls.configure_cameras()
        _ = cls.get_proxy().read()  # empty buffer
        # NOTE(review): isinstance() against a class object here - presumably these
        # are runtime-checkable Protocols from np_services.protocols; confirm
        if isinstance(cls, Startable) and not cls.is_ready_to_start():
            if cls.is_started() and isinstance(cls, Stoppable):
                cls.stop()
            if isinstance(cls, Finalizable):
                cls.finalize()
            if not cls.is_ready_to_start():
                logger.warning(
                    "%s not ready to start: %s", cls.__name__, cls.get_state()
                )
                return
        if cls.data_root:
            cls.data_files = []
        cls.initialization = time.time()
        logger.info("%s initialized: ready for use", cls.__name__)

    @classmethod
    def shutdown(cls) -> None:
        """Close the socket and drop the cached proxy."""
        cls.get_proxy()._mvr_sock.close()
        del cls.proxy

    @classmethod
    def get_proxy(cls) -> mvr_connector.MVRConnector:
        """Return the cached connector, creating and configuring one on first use."""
        with contextlib.suppress(AttributeError):
            return cls.proxy
        cls.ensure_config()
        logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
        cls.proxy = mvr_connector.MVRConnector({"host": cls.host, "port": cls.port})
        cls.proxy._mvr_sock.settimeout(cls.timeout)
        return cls.get_proxy()

    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        """Return all camera records reported by MVR (cached after first request)."""
        if not hasattr(cls, "all_cameras"):
            cls.get_proxy().read()  # drain stale messages first
            cls.all_cameras = cls.get_proxy().request_camera_ids()[0]["value"]
        return cls.all_cameras

    @classmethod
    def get_camera_status(cls) -> list[dict[str, str]]:
        """Request status and return entries for this class's cameras only.

        Raises:
            ConnectionError: if no `get_camera_status` response with a value arrives.
        """
        _ = cls.get_proxy().read()  # empty buffer
        _ = cls.get_proxy()._send({"mvr_request": "get_camera_status"})
        # scan newest-first for the matching response
        for msg in reversed(cls.get_proxy().read()):
            if msg.get("mvr_response", "") == "get_camera_status" and (
                cams := msg.get("value", [])
            ):
                break
        else:
            logger.error("Could not get camera status from %s", cls.host)
            raise ConnectionError(f"Could not get camera status from {cls.host}")
        # keep only the cameras this (sub)class cares about
        return [
            _
            for _ in cams
            if any(_["camera_id"] == __["id"] for __ in cls.get_cameras())
        ]

    @classmethod
    def get_state(cls) -> ProxyState:
        """Summarize connection + camera status as a ProxyState tuple."""
        if not cls.is_connected():
            return "", "MVR_CLOSED"
        status = cls.get_camera_status()
        # cam status could change between calls, so only get once
        if any(not _["is_open"] for _ in status):
            return "", "CAMERA_CLOSED"
        if any(not _["is_streaming"] for _ in status):
            return "", "CAMERA_NOT_STREAMING"
        if cls.get_cameras_recording(status):
            return "BUSY", "RECORDING"
        return "READY", ""

    @classmethod
    def get_cameras_recording(cls, status=None) -> list[dict[str, str]]:
        """Return status entries for cameras currently recording (fetches status if not given)."""
        return [_ for _ in status or cls.get_camera_status() if _["is_recording"]]

    @classmethod
    def is_ready_to_start(cls) -> bool:
        """True when every camera is open and streaming but not recording."""
        if cls.is_started():
            return False
        return all(
            _["is_open"] and _["is_streaming"] and not _["is_recording"]
            for _ in cls.get_camera_status()
        )

    @classmethod
    def configure_cameras(cls) -> None:
        "Set MVR to record video from subset of all cameras, via `get_cameras` (implemented by subclass)"
        cam_ids = [_["id"] for _ in cls.get_cameras()]
        cls.get_proxy().define_hosts(cam_ids)
        cls.get_proxy().start_display()
990
-
991
-
992
class ImageMVR(MVR):
    """MVR proxy that takes a single snapshot from the auxiliary camera only."""

    gb_per_hr: ClassVar[int | float] = CONFIG['ImageMVR']["gb_per_hr"]
    min_rec_hr: ClassVar[int | float] = CONFIG['ImageMVR']["min_rec_hr"]

    label: ClassVar[str]
    "Rename file after capture to include label"

    # TODO ready state is if Aux cam is_open
    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        "Aux cam only"
        cams = super().get_cameras()
        # keep only cameras whose label matches the aux-camera pattern
        return [_ for _ in cams if cls.re_aux.search(_["label"])]

    @classmethod
    def start(cls):
        """Request a snapshot; raises AssertionError if the service isn't ready."""
        if not cls.is_ready_to_start():
            # TODO display state, wait on user input to continue
            logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
            raise AssertionError(
                f"{cls.__name__} not ready to start: {cls.get_state()}"
            )
        cls.latest_start = time.time()
        cls.get_proxy().take_snapshot()

    @classmethod
    def stop(cls):
        "Overload parent method to do nothing"
        pass

    @classmethod
    def is_started(cls) -> bool:
        """Scan pending broadcasts: True once a snapshot conversion is reported."""
        for msg in cls.get_proxy().read():
            if msg.get("mvr_broadcast", "") == "snapshot_converted":
                return True
            if msg.get("mvr_broadcast", "") == "snapshot_failed":
                return False
        return False

    @classmethod
    def verify(cls):
        "Overload parent method to do nothing"
        pass

    # TODO
    @classmethod
    def validate(cls) -> None:
        logger.warning("%s.validate() not implemented", cls.__name__)

    @classmethod
    def finalize(cls) -> None:
        """Wait (up to 10 s) for the snapshot to finish converting, then collect it.

        If `cls.label` is set, the new file(s) are renamed to include it before
        being appended to `data_files`.
        """
        logger.debug("Finalizing %s", cls.__name__)
        t0 = time.time()
        timedout = lambda: time.time() > t0 + 10
        # keep waiting while: still busy, not yet idle, nothing on disk yet,
        # or a raw .bmp is still present (presumably awaiting conversion - TODO confirm)
        while (
            cls.is_started()
            or not cls.is_ready_to_start()
            or not cls.get_latest_data("*")
            or cls.get_latest_data(".bmp")
        ) and not timedout():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(1)  # TODO add backoff module
        if timedout():
            logger.warning(
                "Timed out waiting for %s to finish processing", cls.__name__
            )
            return
        if not hasattr(cls, "data_files") or not cls.data_files:
            cls.data_files = []
        new = cls.get_latest_data("*")
        if hasattr(cls, "label") and cls.label:
            # tag the captured file(s) with the label before collecting
            new = [_.rename(_.with_stem(f"{_.stem}_{cls.label}")) for _ in new]
        cls.data_files.extend(new)
        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
1067
-
1068
-
1069
class VideoMVR(MVR):
    """MVR proxy that records video from all non-auxiliary cameras."""

    pretest_duration_sec: ClassVar[int | float] = CONFIG['VideoMVR']["pretest_duration_sec"]
    gb_per_hr: ClassVar[int | float] = CONFIG['VideoMVR']["gb_per_hr"]
    min_rec_hr: ClassVar[int | float] = CONFIG['VideoMVR']["min_rec_hr"]

    raw_suffix: ClassVar[str] = ".mp4"

    started_state = ("BUSY", "RECORDING")
    sync_path: Optional[pathlib.Path] = None  # used by validate() if assigned

    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        "All available cams except Aux"
        cams = super().get_cameras()
        # check for camera labels with known Aux cam names
        return [_ for _ in cams if cls.re_aux.search(_["label"]) is None]

    @classmethod
    def start(cls) -> None:
        """Start recording on all configured cameras (24 h max record time)."""
        logger.info("%s | Starting recording", cls.__name__)
        cls.latest_start = time.time()
        cls.get_proxy().start_record(record_time=24 * 60 * 60,)  # sec

    @classmethod
    def verify(cls) -> None:
        "Assert data exists since latest start, or raise AssertionError."
        # files grow infrequently while MVR's recording - checking their size
        # is unreliable
        if not cls.is_started():
            logger.warning(
                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
            )
            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
        # if recording started only moments ago, wait long enough for files to appear
        if datetime.datetime.fromtimestamp(
            cls.latest_start
        ) < datetime.datetime.now() - datetime.timedelta(
            seconds=cls.pretest_duration_sec
        ):
            time.sleep(cls.pretest_duration_sec)
        if not (files := cls.get_latest_data()) or len(files) < len(
            cls.get_cameras_recording()
        ):
            raise AssertionError(
                f"{cls.__name__} files do not match the number of cameras: {files}"
            )
        logger.info(
            "%s | Verified: %s cameras recording to disk", cls.__name__, len(files)
        )

    @classmethod
    def stop(cls) -> None:
        """Stop recording on all cameras."""
        cls.get_proxy().stop_record()
        logger.info("%s | Stopped recording", cls.__name__)

    @classmethod
    def is_started(cls) -> bool:
        # started when the state contains every element of `started_state`
        if len(state := cls.get_state()) and all(
            msg in state for msg in cls.started_state
        ):
            return True
        return False

    @classmethod
    def finalize(cls) -> None:
        """Stop recording if needed, wait (up to 30 s) until idle, then collect mp4+json files."""
        logger.debug("Finalizing %s", cls.__name__)
        if cls.is_started():
            cls.stop()
        t0 = time.time()
        timedout = lambda: time.time() > t0 + 30
        while not cls.is_ready_to_start() and not timedout():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(1)  # TODO add backoff module
        if timedout():
            logger.warning(
                "Timed out waiting for %s to finish processing", cls.__name__
            )
            return
        if not hasattr(cls, "data_files"):
            cls.data_files = []
        cls.data_files.extend(
            new := (cls.get_latest_data("*.mp4") + cls.get_latest_data("*.json"))
        )
        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])

    @classmethod
    def validate(cls) -> None:
        """Copy the latest videos+info files to a temp dir and load them with npc_mvr.

        Construction of `MVRDataset` is the validation; temp copies are removed
        afterwards. Uses `sync_path` if assigned.
        """
        tempdir = pathlib.Path(tempfile.gettempdir())
        tempfiles: list[pathlib.Path] = []
        # currently can't pass individual files to mvrdataset - just a dir
        for file in itertools.chain(cls.get_latest_data("*.mp4"), cls.get_latest_data("*.json")):
            np_tools.copy(file, t := tempdir / file.name)
            tempfiles.append(t)
        npc_mvr.MVRDataset(
            tempdir,
            getattr(cls, "sync_path", None),
        )
        logger.info(f"Validated {len(tempfiles)} video/info files {'with' if getattr(cls, 'sync_path', None) else 'without'} sync")
        # NOTE(review): cleanup is skipped if MVRDataset raises - consider try/finally
        for file in tempfiles:
            file.unlink(missing_ok=True)
1168
class JsonRecorder:
    "Just needs a `start` method that calls `write()`."

    log_name: ClassVar[str]
    log_root: ClassVar[pathlib.Path]

    # fix: the original used the deprecated `@abc.abstractclassmethod` with a
    # zero-argument `def start()`, so any `cls.start()` call raised
    # `TypeError: start() takes 0 positional arguments but 1 was given`
    @classmethod
    @abc.abstractmethod
    def start(cls) -> None:
        """Record an entry - subclasses implement this and call `cls.write()`."""
        raise NotImplementedError

    @classmethod
    def pretest(cls) -> None:
        """Initialize, write one entry via `start()`, and validate the log."""
        with np_logging.debug():
            cls.initialize()
            cls.start()
            cls.validate()
            logger.info("%s | Pretest passed", cls.__name__)

    @classmethod
    def ensure_config(cls) -> None:
        """Fill in `log_name`/`log_root` from CONFIG if not already set on the class."""
        config = CONFIG.get(
            __class__.__name__, {}
        )  # class where this function is defined
        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different

        if not hasattr(cls, "log_name"):
            cls.log_name = config.get("log_name", "{}_.json")
            # insert a timestamp into the filename template
            cls.log_name = cls.log_name.format(
                datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
            )

        if not hasattr(cls, "log_root"):
            cls.log_root = config.get("log_root", ".")
            cls.log_root = pathlib.Path(cls.log_root).resolve()

    @classmethod
    def initialize(cls) -> None:
        """Create the log file (seeding it with `{}` if empty) and run `test()`."""
        logger.debug("%s initializing", __class__.__name__)
        cls.ensure_config()
        cls.initialization = time.time()
        log = (cls.log_root / cls.log_name).with_suffix(".json")
        log.parent.mkdir(parents=True, exist_ok=True)
        log.touch(exist_ok=True)
        if log.read_text().strip() == "":
            log.write_text("{}")  # keep the file valid json
        cls.all_files = [log]
        cls.test()

    @classmethod
    def test(cls) -> None:
        """Check the current log is readable.

        Raises:
            TestError: if the file cannot be opened.
        """
        logger.debug("%s testing", __class__.__name__)
        try:
            _ = cls.get_current_log().read_bytes()
        except OSError as exc:
            raise TestError(
                f"{__class__.__name__} failed to open {cls.get_current_log()}"
            ) from exc

    @classmethod
    def get_current_log(cls) -> pathlib.Path:
        """Return the active log path, initializing on first access."""
        if not hasattr(cls, "initialization"):
            cls.initialize()
        return cls.all_files[-1]

    @classmethod
    def read(cls) -> dict[str, str | float]:
        """Parse and return the current log contents; an empty file reads as `{}`.

        Raises:
            json.JSONDecodeError: if the file is non-empty but not valid json.
        """
        try:
            data = json.loads(cls.get_current_log().read_bytes())
        except json.JSONDecodeError as exc:
            if cls.get_current_log().stat().st_size:
                raise  # non-empty but corrupt: let the caller decide
            logger.debug("%s | Error encountered reading file %s: %r", cls.__name__, cls.get_current_log(), exc)
            data = {}  # file was empty
        else:
            logger.debug("%s | Read from %s", cls.__name__, cls.get_current_log())
        return data

    @classmethod
    def write(cls, value: dict) -> None:
        """Merge `value` into the current log and write it back as indented json.

        If the current log is corrupt, a fresh `.new.json` file is started instead.
        """
        try:
            data = cls.read()
        except json.JSONDecodeError:
            data = {}
            file = cls.get_current_log().with_suffix(".new.json")
            file.touch()
            cls.all_files.append(file)
        else:
            file = cls.get_current_log()
        np_config.merge(data, value)  # presumably merges in place - TODO confirm
        file.write_text(json.dumps(data, indent=4, sort_keys=False, default=str))
        logger.debug("%s wrote to %s", cls.__name__, file)

    @classmethod
    def validate(cls) -> None:
        """Log an error (stored in `cls.exc`) if the current log is empty."""
        if not (log := cls.read()):
            cls.exc = TestError(
                f"{cls.__name__} failed to validate because log is empty: {cls.get_current_log()}"
            )
            logger.error(
                "%s failed to validate: log is empty %s",
                cls.__name__,
                cls.get_current_log(),
                exc_info=cls.exc,
            )
        logger.debug("%s validated", __class__.__name__)
1273
-
1274
-
1275
class YamlRecorder(JsonRecorder):
    """JsonRecorder that additionally mirrors the log to a .yaml file on finalize."""

    @classmethod
    def test(cls) -> None:
        """Run the base test and confirm pyyaml is importable.

        Raises:
            TestError: if yaml cannot be imported.
        """
        logger.debug("%s testing", __class__.__name__)
        super().test()
        try:
            import yaml
        except ImportError as exc:
            raise TestError(f"{__class__.__name__} failed to import yaml") from exc

    @classmethod
    def finalize(cls) -> None:
        """Write a .yaml copy of the current json log alongside it."""
        logger.debug("Finalizing %s", __class__.__name__)
        # fix: `read_bytes()` returns bytes, so parse with `loads` - the original
        # `json.load(...)` expects a file object and raised AttributeError
        log = json.loads(cls.get_current_log().read_bytes())
        with contextlib.suppress(
            AttributeError, OSError
        ):  # if this fails we still have the json file
            # fix: dump to a writable file handle - the original passed the
            # result of `read_bytes()` (a bytes object) as the output stream,
            # so the yaml file could never be written
            with cls.get_current_log().with_suffix(".yaml").open("w") as f:
                yaml.dump(log, f)
1293
-
1294
-
1295
class NewScaleCoordinateRecorder(JsonRecorder):
    "Gets current manipulator coordinates and stores them in a file with a timestamp."

    host: ClassVar[str] = np_config.Rig().Mon
    data_root: ClassVar[pathlib.Path] = CONFIG['NewScaleCoordinateRecorder']['data']
    data_name: ClassVar[str] = CONFIG['NewScaleCoordinateRecorder']['data_name']
    data_fieldnames: ClassVar[Sequence[str]] = CONFIG['NewScaleCoordinateRecorder']['data_fieldnames']
    data_files: ClassVar[list[pathlib.Path]] = []
    "Files to be copied after exp"

    max_z_travel: ClassVar[int] = CONFIG['NewScaleCoordinateRecorder']['max_z_travel']
    num_probes: ClassVar[int] = 6
    log_name: ClassVar[str] = "newscale_coords_{}.json"
    log_root: ClassVar[pathlib.Path] = pathlib.Path(tempfile.gettempdir()).resolve()
    label: ClassVar[str] = ""
    "A label to tag each entry with"
    latest_start: ClassVar[int] = 0
    "`time.time()` when the service was last started via `start()`."
    log_time_fmt: str = CONFIG['NewScaleCoordinateRecorder']['log_time_fmt']

    @classmethod
    def pretest(cls) -> None:
        # run the standard JsonRecorder pretest, tagging entries 'pretest'
        cls.label = 'pretest'
        super().pretest()

    @classmethod
    def get_current_data(cls) -> pathlib.Path:
        """Return the path to the NewScale coordinate csv on the remote host."""
        cls.ensure_config()
        return cls.data_root / cls.data_name

    @classmethod
    def last_logged_coords_csv(cls) -> dict[str, float]:
        "Get the most recent coordinates from the log file using the csv parser in the stdlib."
        # data_fieldnames[0] is the last-moved timestamp column,
        # data_fieldnames[1] is the manipulator serial-number column
        with cls.get_current_data().open("r") as _:
            reader = csv.DictReader(_, fieldnames=cls.data_fieldnames)
            rows = list(reader)
        last_moved_label = cls.data_fieldnames[0]
        coords = {}
        for row in reversed(rows):  # search for the most recent coordinates
            if len(coords.keys()) == cls.num_probes:
                break  # we have an entry for each probe
            if (m := row.pop(cls.data_fieldnames[1]).strip()) not in coords:
                coords[m] = {}
            # NOTE(review): update() runs for every row, so older rows (later in
            # this reversed scan) overwrite the newest values for a manipulator
            # already seen - the pandas variant keeps only the newest row per
            # manipulator; confirm whether this fallback should skip repeats
            for k, v in row.items():
                if "virtual" in k:
                    continue  # skip virtual-axis columns
                if k == last_moved_label:
                    v = datetime.datetime.strptime(v, cls.log_time_fmt)
                else:
                    v = v.strip()
                    with contextlib.suppress(ValueError):
                        v = float(v)  # numeric columns; leave as str if not parseable
                coords[m].update({k: v})
        return coords

    @classmethod
    def last_logged_coords_pd(cls) -> dict[str, float]:
        "Get the most recent coordinates from the log file using pandas."
        coords = {}
        manipulator_label = cls.data_fieldnames[1]
        last_moved_label = cls.data_fieldnames[0]
        df = pd.read_csv(cls.get_current_data(), names=cls.data_fieldnames, parse_dates=[last_moved_label])
        # group by manipulator_label and get the maximum value in last_moved_label for each group
        # (i.e. the most recent entry for each manipulator)
        last_moved = df.loc[
            df.groupby(manipulator_label)[last_moved_label].idxmax()
        ].set_index(manipulator_label).sort_values(last_moved_label, ascending=False)
        for serial_number, row in last_moved.iloc[:cls.num_probes].iterrows():
            # drop the manipulator column itself and any virtual-axis columns
            new = {key: row[key] for key in cls.data_fieldnames if (key != manipulator_label and 'virtual' not in key)}
            new[last_moved_label] = row[last_moved_label].to_pydatetime()
            coords[str(serial_number).strip()] = new
        return coords

    @classmethod
    def convert_serial_numbers_to_probe_labels(cls, coords: dict[str, float]) -> None:
        """Re-key `coords` in place from serial numbers to probe labels (per CONFIG mapping)."""
        for k, v in CONFIG[cls.__name__].get("probe_to_serial_number", {}).items():
            if v in coords:
                coords[k] = coords.pop(v)
                coords[k]['serial_number'] = v  # keep the original serial for reference

    @classmethod
    def get_coordinates(cls) -> dict[str, float]:
        """Return the latest per-probe coordinates, z-adjusted and keyed by probe label.

        Also inserts the current `cls.label` under the top-level 'label' key.
        """
        try:
            import pandas as pd
        except ImportError:
            coords = cls.last_logged_coords_csv()  # stdlib fallback
        else:
            coords = cls.last_logged_coords_pd()

        def adjust_z_travel(coords):
            # convert recorded z into remaining travel relative to max_z_travel
            for v in coords.values():
                if 'z' in v:
                    v['z'] = cls.max_z_travel - v['z']
        adjust_z_travel(coords)
        cls.convert_serial_numbers_to_probe_labels(coords)
        coords["label"] = cls.label
        logger.debug("%s | Retrieved coordinates: %s", cls.__name__, coords)
        return coords

    @classmethod
    def write_to_platform_json(cls):
        """Merge the current coordinates into the platform json's manipulator_coordinates."""
        coords = cls.get_coordinates()
        for k, v in coords.items():
            if isinstance(v, Mapping) and (last_moved := v.get('last_moved')):
                del coords[k]['last_moved']
                del coords[k]['serial_number']
                continue
            # NOTE(review): for non-Mapping entries (e.g. the top-level 'label'
            # string) the walrus above is never evaluated, so `last_moved` here
            # is stale or unbound, and the item-assignment below would fail on a
            # str value - confirm this branch is ever reached / intended
            # if last_moved is kept, then normalize it depending on csv/pd method:
            match last_moved:
                case str():
                    timestamp = datetime.datetime.strptime(last_moved, cls.log_time_fmt)
                case datetime.datetime():
                    timestamp = last_moved
            coords[k]['last_moved'] = np_config.normalize_time(timestamp)

        # rearrange so `label`` is top-level key, or use capture-timestamp if no label
        platform_json = np_session.PlatformJson(cls.get_current_log())
        platform_json_entry = copy.deepcopy(platform_json.manipulator_coordinates)
        coords = {str(coords.pop('label', np_config.normalize_time(cls.latest_start))): coords}
        logger.debug("%s | Adding to platform json: %s", cls.__name__, coords)
        platform_json.manipulator_coordinates = np_config.merge(platform_json_entry, coords)
        # remember the source csv so it gets copied after the experiment
        if (csv := cls.get_current_data()) not in cls.data_files:
            cls.data_files.append(csv)

    @classmethod
    def start(cls):
        """Record a coordinate snapshot - to the platform json or to the own log file."""
        cls.latest_start = time.time()
        if 'platformD1' in cls.log_name:
            cls.write_to_platform_json()
        else:
            cls.write({str(datetime.datetime.now()): cls.get_coordinates()})

    @classmethod
    def test(cls) -> None:
        """Check the coordinate csv is readable and coordinates can be parsed.

        Raises:
            TestError: if the csv can't be opened or coordinates can't be retrieved.
        """
        super().test()
        logger.debug("%s | Testing", __class__.__name__)
        try:
            _ = cls.get_current_data().open("r")
        except OSError as exc:
            raise TestError(
                f"{cls.__name__} failed to open {cls.get_current_data()}"
            ) from exc
        try:
            _ = cls.get_coordinates()
        except Exception as exc:
            raise TestError(f"{cls.__name__} failed to get coordinates") from exc
        else:
            logger.info("%s | Test passed", cls.__name__)

    @classmethod
    def ensure_config(cls) -> None:
        """Fill in host/data settings from CONFIG (supporting both layouts) if unset."""
        super().ensure_config()

        # newer configs nest per-service settings under a 'services' key
        if CONFIG.get("services", {}):
            config = CONFIG["services"].get(__class__.__name__, {})
            config.update(**CONFIG["services"].get(cls.__name__, {}))
        else:
            config = CONFIG.get(
                __class__.__name__, {}
            )  # class where this function is defined
            config.update(
                **CONFIG.get(cls.__name__, {})
            )  # the calling class, if different

        if not hasattr(cls, "host"):
            cls.host = config["host"]

        # for resulting data
        if (
            not hasattr(cls, "data_root")
            or cls.host not in pathlib.Path(cls.data_root).parts
        ):
            relative_path = config["data"]
            if relative_path:
                # UNC path on the remote host
                root = pathlib.Path(f"//{cls.host}/{relative_path}")
                try:
                    _ = root.exists()
                except OSError as exc:
                    cls.exc = exc
                    logger.exception(
                        "Error accessing %s data path: %s", cls.__name__, root
                    )
                    raise FileNotFoundError(
                        f"{cls.__name__} data path is not accessible: {root}"
                    ) from exc
                else:
                    cls.data_root = root

        if not hasattr(cls, "data_name"):
            cls.data_name = config["data_name"]
        if not hasattr(cls, "data_fieldnames"):
            cls.data_fieldnames = config["data_fieldnames"]
1487
-
1488
-
1
+ """
2
+ Proxy classes for interacting with devices via zro/zmq.
3
+
4
+ Proxy class names must match the name of the proxy key in the config dict.
5
+ """
6
+ import abc
7
+ import contextlib
8
+ import copy
9
+ import csv
10
+ import datetime
11
+ import functools
12
+ import itertools
13
+ import json # loading config from Sync proxy will instantiate datetime objects
14
+ import logging
15
+ import pathlib
16
+ import re
17
+ import tempfile
18
+ import time
19
+ from typing import Any, ClassVar, Literal, Mapping, Optional, Sequence
20
+
21
+ import fabric
22
+ import np_config
23
+ import np_logging
24
+ import np_session
25
+ import npc_stim
26
+ import npc_sync
27
+ import npc_mvr
28
+ import np_tools
29
+ import yaml
30
+ import pandas as pd
31
+
32
+ import np_services.resources.mvr_connector as mvr_connector
33
+ import np_services.utils as utils
34
+ import np_services.resources.zro as zro
35
+ from np_services.protocols import *
36
+
37
+ logger = np_logging.getLogger(__name__)
38
+
39
+ CONFIG = utils.config_from_zk()
40
+
41
+ ProxyState = tuple[Literal["", "READY", "BUSY"], str]
42
+
43
+
44
class Proxy(abc.ABC):
    """Base for classmethod-style services that talk to a rig device via zro/zmq.

    Subclasses are never instantiated: all state lives on the class object, and
    `isinstance(cls, <Protocol>)` checks below test the class itself against
    the runtime-checkable protocols from `np_services.protocols`.
    """

    # req proxy config - hardcode or overload ensure_config()
    host: ClassVar[str]
    port: ClassVar[int]
    timeout: ClassVar[float]
    serialization: ClassVar[Literal["json", "pickle"]]

    # if a program needs to be launched (e.g. via RSC):
    rsc_app_id: str

    # if device records:
    gb_per_hr: ClassVar[int | float]
    min_rec_hr: ClassVar[int | float]
    pretest_duration_sec: ClassVar[int | float]

    # for resulting data, if device records:
    data_root: ClassVar[Optional[pathlib.Path]] = None
    data_files: ClassVar[Optional[Sequence[pathlib.Path]]] = None

    # info
    exc: ClassVar[Optional[Exception]] = None

    latest_start: ClassVar[float | int] = 0
    "`time.time()` when the service was last started via `start()`."

    @classmethod
    def ensure_config(cls) -> None:
        """Updates any missing parameters for class proxy.

        Is called in `get_proxy()` so any time we need the proxy, we have a
        correct config, without remembering to run `initialize()` or some such.
        """
        config = CONFIG.get(
            __class__.__name__, {}
        )  # class where this function is defined
        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different

        # for proxy (reqd):
        if not hasattr(cls, "host"):
            cls.host = config["host"]
        if not hasattr(cls, "port"):
            cls.port = int(config["port"])
        if not hasattr(cls, "timeout"):
            cls.timeout = float(config.get("timeout", 10.0))
        if not hasattr(cls, "serialization"):
            cls.serialization = config.get("serialization", "json")

        # for pretest (reqd, not used if device doesn't record)
        if not hasattr(cls, "pretest_duration_sec"):
            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
        if not hasattr(cls, "gb_per_hr"):
            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
        if not hasattr(cls, "min_rec_hr"):
            cls.min_rec_hr = config.get("min_rec_hr", 3.0)

        # for resulting data (optional):
        if not cls.data_root or cls.host not in cls.data_root.parts:
            relative_path = config.get("data", None)
            if relative_path:
                root = pathlib.Path(f"//{cls.host}/{relative_path}")
                # probe the UNC share - raises OSError if unreachable
                try:
                    _ = root.exists()
                except OSError as exc:
                    cls.exc = exc
                    logger.exception(
                        "Error accessing %s data path: %s", cls.__name__, root
                    )
                    raise FileNotFoundError(
                        f"{cls.__name__} data path is not accessible: {root}"
                    ) from exc
                else:
                    cls.data_root = root
        if hasattr(cls, "data_root") and cls.data_root:
            cls.data_root.mkdir(parents=True, exist_ok=True)

    @classmethod
    def launch(cls) -> None:
        # launch the device's program remotely via RSC
        utils.start_rsc_app(cls.host, cls.rsc_app_id)

    @classmethod
    def kill(cls) -> None:
        # kill the device's program remotely via RSC
        utils.kill_rsc_app(cls.host, cls.rsc_app_id)

    @classmethod
    def initialize(cls) -> None:
        """Launch the remote app, rebuild the proxy, and reset per-session state."""
        cls.launch()
        # force re-creation of the proxy so stale sockets aren't reused
        with contextlib.suppress(AttributeError):
            del cls.proxy
        cls.proxy = cls.get_proxy()
        if isinstance(cls, Startable) and not cls.is_ready_to_start():
            if isinstance(cls, Finalizable):
                cls.finalize()
            if not cls.is_ready_to_start():
                logger.warning(
                    "%s not ready to start: %s", cls.__name__, cls.get_state()
                )
                return
        if cls.data_root:
            cls.data_files = []
            cls.sync_path = None
        cls.initialization = time.time()
        logger.info("%s(%s) initialized: ready for use", __class__.__name__, cls.__name__)

    @classmethod
    def test(cls) -> None:
        "Quickly verify service is working and ready for use, or raise `TestError`."
        logger.debug("Testing %s proxy", cls.__name__)
        if not cls.is_connected():
            raise TestError(
                f"{cls.__name__} not connected to {cls.host}:{cls.port}"
            ) from cls.exc
        logger.debug(
            "%s proxy connection to %s:%s confirmed", cls.__name__, cls.host, cls.port
        )
        gb = cls.get_required_disk_gb()
        if not cls.is_disk_space_ok():
            raise TestError(
                f"{cls.__name__} free disk space on {cls.data_root.drive} doesn't meet minimum of {gb} GB"
            ) from cls.exc
        logger.debug("%s(%s) tested successfully", __class__.__name__, cls.__name__)

    @classmethod
    def get_proxy(cls) -> zro.DeviceProxy:
        "Return a proxy to the service without re-creating unnecessarily."
        with contextlib.suppress(AttributeError):
            return cls.proxy
        cls.ensure_config()
        logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
        cls.proxy = zro.DeviceProxy(cls.host, cls.port, cls.timeout, cls.serialization)
        return cls.get_proxy()

    @classmethod
    def get_state(cls) -> ProxyState | dict:
        "Dict may be deprecated: is no longer returned by Sync or Camstim proxies."
        state = cls.get_proxy().get_state()
        logger.debug("%s state: %s", cls.__name__, state)
        return state

    @classmethod
    def get_latest_data(
        cls: Recorder, glob: Optional[str] = None, subfolders: str = ""
    ) -> list[pathlib.Path] | None:
        """Return data files created since `latest_start`, or the single most
        recently modified match if the class has no `latest_start` attribute.
        Returns None when no data root is configured or nothing matches.
        """
        cls.ensure_config()
        if not cls.data_root:
            return None
        if subfolders == "/":  # can alter path to drive root
            subfolders = ""
        if not glob:
            glob = f"*{cls.raw_suffix}" if hasattr(cls, "raw_suffix") else "*"
        if not hasattr(cls, "latest_start"):
            data_paths = utils.get_files_created_between(
                cls.data_root / subfolders, glob
            )
            if not data_paths:
                return None
            return [
                max(data_paths, key=lambda x: x.stat().st_mtime)
            ]
        return utils.get_files_created_between(
            cls.data_root / subfolders, glob, cls.latest_start
        )

    @classmethod
    def get_required_disk_gb(cls) -> float:
        "Return the minimum disk space required prior to start (to .1 GB). Returns `0.0` if service generates no data."
        cls.ensure_config()
        if not isinstance(cls, Startable):
            return 0.0
        return round(cls.min_rec_hr * cls.gb_per_hr, 1)

    @classmethod
    def is_disk_space_ok(cls) -> bool:
        """True if the data drive has enough free space for a minimum recording."""
        required = cls.get_required_disk_gb()
        if required == 0.0:
            return True
        try:
            free = utils.free_gb(cls.data_root)
        except FileNotFoundError as exc:
            cls.exc = exc
            logger.exception(
                f"{cls.__name__} data path not accessible: {cls.data_root}"
            )
            return False
        else:
            logger.debug(
                "%s free disk space on %s: %s GB",
                cls.__name__,
                cls.data_root.drive,
                free,
            )
            return free > required

    @classmethod
    def is_connected(cls) -> bool:
        """True if the host is online and the proxy responds to basic queries.

        On failure, the causal exception is stored on `cls.exc` for callers
        (e.g. `test()`) to chain into their own errors.
        """
        if not utils.is_online(cls.host):
            cls.exc = ConnectionError(
                f"No response from {cls.host}: may be offline or unreachable"
            )
            return False
        try:
            _ = cls.get_proxy().uptime
        except zro.ZroError as exc:
            cls.exc = exc
            logger.exception(
                f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
            )
            return False
        try:
            _ = cls.get_state()
        except zro.ZroError as exc:
            cls.exc = exc
            logger.exception(
                f"{cls.__name__} proxy connection to {cls.host}:{cls.port} failed"
            )
            return False
        return True
260
+
261
+
262
class CamstimSyncShared(Proxy):
    """Start/stop/pretest behavior shared by the Camstim and Sync proxies."""

    # state messages that together indicate the service is currently recording
    started_state: ClassVar[Sequence[str]]

    @classmethod
    def is_ready_to_start(cls) -> bool:
        """True if not already started and the proxy reports READY.

        Handles both the legacy dict state (`{"message": "READY"}`) and the
        newer `ProxyState` tuple form.
        """
        if cls.is_started():
            return False
        state = cls.get_state()
        if isinstance(state, Mapping) and state.get("message", "") == "READY":
            return True
        if isinstance(state, Sequence) and "READY" in state:
            return True
        return False

    @classmethod
    def is_started(cls) -> bool:
        """True if every message in `started_state` appears in the current state."""
        # fix: wrap in bool() - the bare `len(...) and all(...)` expression
        # returned the int 0 for an empty state, violating the `-> bool` contract
        return bool(
            len(state := cls.get_state())
            and all(msg in state for msg in cls.started_state)
        )

    @classmethod
    def start(cls) -> None:
        """Start recording.

        Warns and returns if already started (stopping would interrupt a live
        recording); raises AssertionError if the service isn't READY.
        """
        logger.info("%s | Starting recording", cls.__name__)
        if cls.is_started():
            logger.warning(
                "%s already started - should be stopped manually", cls.__name__
            )
            return
        # otherwise, Sync - for example - would stop current recording and start another
        if not cls.is_ready_to_start():
            logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
            raise AssertionError(
                f"{cls.__name__} not ready to start: {cls.get_state()}"
            )
        cls.latest_start = time.time()
        cls.get_proxy().start()

    @classmethod
    def pretest(cls) -> None:
        "Test all critical functions"
        with np_logging.debug():
            logger.debug("Starting %s pretest", cls.__name__)
            cls.initialize()  # calls test()

            with utils.stop_on_error(cls):
                cls.start()
                time.sleep(1)
                cls.verify()
                time.sleep(cls.pretest_duration_sec)
                # stop() called by context manager at exit, regardless
            cls.finalize()
            cls.validate()
            logger.info("%s pretest complete", cls.__name__)

    @classmethod
    def verify(cls) -> None:
        "Assert latest data file is currently increasing in size, or raise AssertionError."
        if not cls.is_started():
            logger.warning(
                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
            )
            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")

    @classmethod
    def stop(cls) -> None:
        """Stop the remote recording via the proxy."""
        logger.debug("Stopping %s", cls.__name__)
        cls.get_proxy().stop()
        logger.info("%s | Stopped recording", cls.__name__)

    # --- End of possible Camstim/Sync shared methods ---

    # --- Sync-specific methods ---
335
+
336
class Sync(CamstimSyncShared):
    """Proxy for the Sync recording service on the rig's Sync computer."""

    host = np_config.Rig().Sync
    started_state = ("BUSY", "RECORDING")
    raw_suffix: str = ".sync"
    rsc_app_id: str = "sync_device"

    @classmethod
    def ensure_config(cls) -> None:
        """Updates any missing parameters for class proxy.

        Is called in `get_proxy()` so any time we need the proxy, we have a
        correct config, without remembering to run `initialize()` or some such.
        """
        config = CONFIG.get(
            __class__.__name__, {}
        )  # class where this function is defined
        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different

        # for proxy (reqd):
        if not hasattr(cls, "host"):
            cls.host = config["host"]
        if not hasattr(cls, "port"):
            cls.port = int(config["port"])
        if not hasattr(cls, "timeout"):
            cls.timeout = float(config.get("timeout", 10.0))
        if not hasattr(cls, "serialization"):
            cls.serialization = config.get("serialization", "json")

        # for pretest (reqd, not used if device doesn't record)
        if not hasattr(cls, "pretest_duration_sec"):
            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
        if not hasattr(cls, "gb_per_hr"):
            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
        if not hasattr(cls, "min_rec_hr"):
            cls.min_rec_hr = config.get("min_rec_hr", 3.0)

        # for resulting data (optional):
        if not cls.data_root or cls.host not in cls.data_root.parts:
            relative_path = config.get("data", None)
            if relative_path:
                root = pathlib.Path(f"//{cls.host}/{relative_path}")
                try:
                    _ = root.exists()
                except OSError as exc:
                    cls.exc = exc
                    logger.exception(
                        "Error accessing %s data path: %s", cls.__name__, root
                    )
                    raise FileNotFoundError(
                        f"{cls.__name__} data path is not accessible: {root}"
                    ) from exc
                else:
                    cls.data_root = root
        # fix: `data_root` defaults to None on `Proxy`, so `hasattr` is always
        # True - guard on the value instead, to avoid `None.mkdir(...)` when no
        # "data" path is configured (matches `Proxy.ensure_config`)
        if cls.data_root:
            cls.data_root.mkdir(parents=True, exist_ok=True)

    @classmethod
    def finalize(cls) -> None:
        """Stop if needed, wait for .sync -> .h5 conversion, then collect new files."""
        logger.debug("Finalizing %s", cls.__name__)
        if cls.is_started():
            cls.stop()
        while not cls.is_ready_to_start():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(1)  # TODO add backoff module
        if not cls.data_files:
            cls.data_files = []
        # fix: get_latest_data() may return None - guard before extending
        new = cls.get_latest_data("*.h5") or []
        cls.data_files.extend(new)
        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])

    @classmethod
    def shutdown(cls) -> None:
        """Stop recording and drop the cached proxy object."""
        logger.debug("Shutting down %s", cls.__name__)
        cls.stop()
        try:
            del cls.proxy
        except Exception as exc:
            logger.debug("Failed to delete %s proxy: %s", cls.__name__, exc)
            cls.exc = exc

    @classmethod
    def get_config(cls) -> dict[str, Any | datetime.datetime]:
        "Sync config, including `line_labels` and `frequency`"
        if cls.serialization in ("json", "j"):
            # NOTE: eval() rather than json.loads() because the returned string
            # may contain datetime reprs - only acceptable because the proxy
            # host is trusted rig infrastructure, never external input
            return eval(cls.get_proxy().config)
        if cls.serialization in ("pickle", "pkl", "p"):
            return cls.get_proxy().config
        # fix: previously fell through and silently returned None
        raise ValueError(f"{cls.__name__} | Unsupported serialization: {cls.serialization!r}")

    @classmethod
    def validate(cls, data: Optional[pathlib.Path] = None) -> None:
        "Check that data file is valid, or raise AssertionError."
        logger.debug("Validating %s data", cls.__name__)
        if not data and bool(files := cls.get_latest_data("*.h5")):
            data = files[-1]
            logger.debug(
                "No data file provided: validating most-recent data in %s: %s",
                cls.data_root,
                data.name,
            )
        if cls.is_started():
            logger.warning(
                f"Attempted to validate current data file while recording"
            )
            return
        elif not cls.is_ready_to_start():
            cls.finalize()
        # fix: fail clearly when there's nothing to validate (previously
        # crashed downstream with AttributeError on None)
        if data is None:
            raise AssertionError(f"{cls.__name__} | No data file found to validate")
        try:
            import h5py
        except ImportError:
            logger.warning("h5py not installed: cannot open Sync data")
            cls.min_validation(data)
        else:
            cls.full_validation(data)

    @classmethod
    def verify(cls) -> None:
        "Assert latest data file is currently increasing in size, or raise AssertionError."
        super().verify()
        if cls.data_root:
            latest = cls.get_latest_data()
            # fix: avoid TypeError subscripting None when no new file exists
            if not latest:
                raise AssertionError(f"{cls.__name__} | No new data file found on disk")
            if not utils.is_file_growing(latest[-1]):
                raise AssertionError(
                    f"{cls.__name__} latest data file is not increasing in size: {latest[-1]}"
                )
        logger.info("%s | Verified: file on disk is increasing in size", cls.__name__)

    @classmethod
    def full_validation(cls, data: pathlib.Path) -> None:
        """Open the h5 file with npc_sync and run its full validation suite."""
        npc_sync.get_sync_data(data).validate()

    @classmethod
    def min_validation(cls, data: pathlib.Path) -> None:
        """Cheap checks only: non-empty file with the expected .h5 suffix."""
        if data.stat().st_size == 0:
            raise AssertionError(f"Empty file: {data}")
        if data.suffix != ".h5":
            raise FileNotFoundError(
                f"Expected .sync to be converted to .h5 immediately after recording stopped: {data}"
            )
        logger.debug("%s minimal validation passed for %s", cls.__name__, data.name)
473
+
474
class Phidget(CamstimSyncShared):
    """Proxy for the phidget_server app on the rig's Stim computer."""
    host = np_config.Rig().Stim
    rsc_app_id = "phidget_server"
478
+
479
class Camstim(CamstimSyncShared):
    """Proxy for the Camstim Agent service on the rig's Stim computer."""

    host = np_config.Rig().Stim
    started_state = ("BUSY", "Script in progress.")
    rsc_app_id = "camstim_agent"
    # sync file (assigned externally) used by `validate()`
    sync_path: Optional[pathlib.Path] = None

    @classmethod
    def launch(cls) -> None:
        """Launch Camstim Agent plus the Phidget server alongside it."""
        super().launch()
        Phidget.launch()

    @classmethod
    def get_config(cls) -> dict[str, Any]:
        """Return the remote Camstim Agent's config dict."""
        return cls.get_proxy().config

    @classmethod
    def ensure_config(cls) -> None:
        """Updates any missing parameters for class proxy.

        Is called in `get_proxy()` so any time we need the proxy, we have a
        correct config, without remembering to run `initialize()` or some such.
        """
        config = CONFIG.get(
            __class__.__name__, {}
        )  # class where this function is defined
        config.update(**CONFIG.get(cls.__name__, {}))  # the calling class, if different

        # for proxy (reqd):
        if not hasattr(cls, "host"):
            cls.host = config["host"]
        if not hasattr(cls, "port"):
            cls.port = int(config["port"])
        if not hasattr(cls, "timeout"):
            cls.timeout = float(config.get("timeout", 10.0))
        if not hasattr(cls, "serialization"):
            cls.serialization = config.get("serialization", "json")

        # for pretest (reqd, not used if device doesn't record)
        if not hasattr(cls, "pretest_duration_sec"):
            cls.pretest_duration_sec = config.get("pretest_duration_sec", 5)
        if not hasattr(cls, "gb_per_hr"):
            cls.gb_per_hr = config.get("gb_per_hr", 2.0)
        if not hasattr(cls, "min_rec_hr"):
            cls.min_rec_hr = config.get("min_rec_hr", 3.0)

        # for resulting data (optional):
        if not cls.data_root:
            relative_path = config.get("data", None)
            if relative_path:
                root = pathlib.Path(f"//{cls.host}/{relative_path}")
                try:
                    _ = root.exists()
                except OSError as exc:
                    cls.exc = exc
                    logger.exception(
                        "Error accessing %s data path: %s", cls.__name__, root
                    )
                    raise FileNotFoundError(
                        f"{cls.__name__} data path is not accessible: {root}"
                    ) from exc
                else:
                    cls.data_root = root
        if hasattr(cls, "data_root") and cls.data_root is not None:
            cls.data_root.mkdir(parents=True, exist_ok=True)

    @classmethod
    def finalize(cls) -> None:
        """Stop if needed, wait for the agent to finish, then record new data files."""
        logger.info("Finalizing %s", cls.__name__)
        if cls.is_started():
            cls.stop()
        count = 0
        while not cls.is_ready_to_start():
            if count % 120 == 0:
                logger.debug("Waiting for %s to finish processing", cls.__name__)
            # fix: counter was never incremented, so the rate-limited message
            # above actually logged on every 1-second iteration
            count += 1
            time.sleep(1)  # TODO add backoff module
        if not cls.data_files:
            cls.data_files = []
        # fix: materialize before extending - the walrus-bound `itertools.chain`
        # iterator was exhausted by `extend()`, so the log below always showed [].
        # `get_latest_data()` may also return None, which `chain` can't consume.
        new = list(
            itertools.chain(
                cls.get_latest_data("*pkl") or (),
                cls.get_latest_data("*hdf5") or (),
            )
        )
        cls.data_files.extend(new)
        logger.info("%s added new data: %s", cls.__name__, [_.name for _ in new])

    @classmethod
    def validate(cls) -> None:
        """Validate all recorded stim files against `sync_path`, if assigned."""
        if not cls.sync_path:
            logger.warning("Cannot validate stim without sync file: assign `stim.sync_path`")
            return
        logger.info("Validating %s", cls.__name__)
        files = cls.data_files or []  # fix: data_files may still be None
        for file in files:
            npc_stim.validate_stim(file, sync=cls.sync_path)
        logger.info(f"Validated {len(files)} stim files with sync")
568
+
569
class ScriptCamstim(Camstim):
    """Camstim proxy that runs an arbitrary script via `Agent.start_script`."""

    script: ClassVar[str]
    "path to script on Stim computer"
    params: ClassVar[dict[str, Any]] = {}

    @classmethod
    def pretest(cls) -> None:
        """Run the dev pretest script with real MTrain stage params for a test mouse."""
        pretest_mouse = "599657"

        cls.script = "C:/ProgramData/StimulusFiles/dev/bi_script_pretest_v2.py"

        # get params from MTrain, as if we were running `Agent.start_session`
        cls.params = np_session.mtrain.MTrain(pretest_mouse).stage["parameters"]
        cls.params.update(dict(user_name="ben.hardcastle", mouse_id=pretest_mouse))

        logger.info(
            "%s | Pretest: running %s with MTrain stage params for mouse %s",
            cls.__name__,
            cls.script,
            pretest_mouse,
        )
        cls.initialize()
        cls.test()
        cls.start()
        # poll until the script finishes and the agent is READY again
        while not cls.is_ready_to_start():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(10)
        cls.finalize()
        # cls.validate()
        cls.initialize()

    @classmethod
    def start(cls):
        """Record the start time and send `script` + `params` to the agent."""
        cls.latest_start = time.time()
        cls.get_proxy().start_script(cls.script, cls.params)
605
+
606
class SessionCamstim(Camstim):
    """Camstim proxy that starts a session via `Agent.start_session` for a
    configured mouse and LIMS user."""

    lims_user_id: ClassVar[str]
    labtracks_mouse_id: ClassVar[int]
    override_params: ClassVar[dict[str, Any] | None] = None

    @classmethod
    def start(cls):
        """Record the start time, then launch a session for the configured mouse/user."""
        cls.latest_start = time.time()
        agent = cls.get_proxy()
        agent.start_session(
            cls.labtracks_mouse_id,
            cls.lims_user_id,
            override_params=cls.override_params,
        )

    @classmethod
    def pretest(cls) -> None:
        """Run the standard pretest with a known test mouse and user."""
        cls.labtracks_mouse_id, cls.lims_user_id = 598796, "ben.hardcastle"
        logger.info(
            "%s | Pretest with mouse %s, user %s",
            cls.__name__,
            cls.labtracks_mouse_id,
            cls.lims_user_id,
        )
        super().pretest()
630
+
631
class NoCamstim(Camstim):
    "Run remote files (e.g. .bat) without sending directly to Camstim Agent"

    remote_file: ClassVar[str | pathlib.Path]
    extra_args: ClassVar[list[str]] = []
    ssh: ClassVar[fabric.Connection]
    user: ClassVar[str] = "svc_neuropix"
    password: ClassVar[str]

    @classmethod
    def pretest(cls) -> None:
        logger.warning("%s | Pretest not implemented", cls.__name__)

    @classmethod
    def get_ssh(cls) -> fabric.Connection:
        """Return the cached ssh connection, initializing it on first use."""
        with contextlib.suppress(AttributeError):
            return cls.ssh
        cls.initialize()
        return cls.ssh

    @classmethod
    def initialize(cls) -> None:
        """Prompt for credentials if needed, open the ssh connection, then test."""
        if not hasattr(cls, "password"):
            cls.password = input(f"{cls.__name__} | Enter password for {cls.host}: ")
        cls.remote_file = utils.unc_to_local(pathlib.Path(cls.remote_file))
        cls.ssh = fabric.Connection(
            cls.host, cls.user, connect_kwargs=dict(password=cls.password)
        )
        super().initialize()
        cls.test()

    @classmethod
    def test(cls) -> None:
        """Verify ssh connectivity and that `remote_file` exists on the host."""
        super().test()
        logger.debug(f"{cls.__name__} | Testing")
        try:
            result = cls.get_ssh().run("hostname", hide=True)
        except Exception as exc:
            # fix: chain the causal exception for easier debugging
            # NOTE(review): this message includes the password in plain text -
            # consider removing it if TestErrors are persisted anywhere
            raise TestError(
                f"{cls.__name__} Error connecting to {cls.host} via ssh: {exc!r}. Is this password correct? {cls.password}"
            ) from exc
        else:
            if result.exited != 0:
                raise TestError(
                    f"{cls.__name__} Error connecting to {cls.host} via ssh: {result}"
                )
            logger.debug(f"{cls.__name__} | Connected to {cls.host} via ssh")

        try:
            result = cls.get_ssh().run(f"type {cls.remote_file}", hide=True)
        except Exception as exc:
            extra = (
                f" | '{exc.result.command}': {exc.result.stderr.strip()!r}"
                if hasattr(exc, "result")
                else ""
            )
            # fix: chain the causal exception for easier debugging
            raise TestError(
                f"{cls.__name__} | Error calling ssh-executed command{extra}"
            ) from exc
        else:
            if result.exited != 0:
                raise TestError(
                    f"{cls.__name__} Error accessing {cls.remote_file} on {cls.host} - is filepath correct? {result}"
                )
            logger.debug(
                f"{cls.__name__} | {cls.remote_file} is accessible via ssh on {cls.host}"
            )

    @classmethod
    def start(cls):
        """Run `remote_file` on the host via ssh, passing `extra_args`."""
        if cls.is_started():
            logger.warning(f"{cls.__name__} already started")
            return
        logger.debug(f"{cls.__name__} | Starting {cls.remote_file} on {cls.host}")
        cls.latest_start = time.time()
        # fix: join args with spaces - interpolating the list directly produced
        # a literal "['a', 'b']" in the shell command
        args = " ".join(str(_) for _ in cls.extra_args)
        cls.get_ssh().run(f"call {cls.remote_file} {args}")

    @classmethod
    def verify(cls):
        logger.warning(f"{cls.__name__} | No verification implemented")
712
+
713
class MouseDirector(Proxy):
    """Communicate with the ZMQ remote object specified here:
    http://aibspi.corp.alleninstitute.org/braintv/visual_behavior/mouse_director/-/blob/master/src/mousedirector.py

    ::
        MouseDirector.get_proxy().set_mouse_id(str(366122))
        MouseDirector.get_proxy().set_user_id("ben.hardcastle")
    """

    user: ClassVar[str | np_session.User]
    mouse: ClassVar[str | int | np_session.Mouse]

    rsc_app_id = CONFIG['MouseDirector']['rsc_app_id']
    host = np_config.Rig().Mon
    gb_per_hr = 0
    serialization = "json"
    started_state: ClassVar[ProxyState] = ("READY", "")
    not_connected_state: ClassVar[ProxyState] = ("", "NOT_CONNECTED")

    @classmethod
    def pretest(cls):
        """Exercise lick-spout retraction/extension with a known test mouse/user."""
        with np_logging.debug():
            logger.debug(f"{cls.__name__} | Pretest")
            cls.user = "ben.hardcastle"
            cls.mouse = 366122
            cls.initialize()
            cls.test()
            # cycle the lick spout: retract -> extend -> retract
            cls.get_proxy().retract_lick_spout()
            time.sleep(3)
            cls.get_proxy().extend_lick_spout()
            time.sleep(3)
            cls.get_proxy().retract_lick_spout()
            time.sleep(3)
            logger.info(f"{cls.__name__} | Pretest passed")

    @classmethod
    def initialize(cls):
        """Initialize the proxy, then push the configured mouse and user IDs."""
        logger.debug(f"{cls.__name__} | Initializing")
        super().initialize()
        cls.get_proxy().set_mouse_id(str(cls.mouse))
        time.sleep(1)
        cls.get_proxy().set_user_id(str(cls.user))
        time.sleep(1)
        logger.debug(f"{cls.__name__} | Initialized with mouse {cls.mouse}, user {cls.user}")

    @classmethod
    def get_state(cls) -> ProxyState:
        """READY if the proxy's rig_dict mentions this rig, else NOT_CONNECTED."""
        result: str = cls.get_proxy().rig_dict
        if str(np_config.Rig()) in result:
            return cls.started_state
        return cls.not_connected_state
764
+
765
class Cam3d(CamstimSyncShared):
    """Proxy for the Cam3d stereo-image capture app on the rig's Mon computer.

    `start()` captures one left and one right still image into `data_root`.
    """

    # prefix inserted into saved image filenames (e.g. 'pretest')
    label: str

    host = np_config.Rig().Mon
    serialization = "json"
    started_state = ["READY", "CAMERAS_OPEN,CAMERAS_ACQUIRING"]
    rsc_app_id = CONFIG['Cam3d']['rsc_app_id']
    data_files: ClassVar[list[pathlib.Path]] = []

    @classmethod
    def is_started(cls) -> bool:
        # unlike the base class, Cam3d requires an exact state match
        return cls.get_state() == cls.started_state

    @classmethod
    def is_ready_to_start(cls) -> bool:
        if cls.is_started():
            return False
        time.sleep(1)
        if (
            cls.get_state() == cls.started_state
            or 'READY' not in cls.get_state()
        ):
            return False
        return True

    @classmethod
    def initialize(cls) -> None:
        logger.debug(f"{cls.__name__} | Initializing")
        super().initialize()
        # power-cycle the cameras if the app isn't in a usable state
        if not cls.is_ready_to_start():
            cls.reenable_cameras()

        time.sleep(1)

    @classmethod
    def reenable_cameras(cls) -> None:
        """Release/re-enable the cameras and restart capture (recovery sequence)."""
        cls.get_proxy().release_cameras()
        time.sleep(.2)
        cls.get_proxy().enable_cameras()
        time.sleep(.2)
        cls.get_proxy().stop_capture()
        time.sleep(.2)
        cls.get_proxy().start_capture()
        time.sleep(.2)

    @classmethod
    def generate_image_paths(cls) -> tuple[pathlib.Path, pathlib.Path]:
        """Return timestamped (left, right) image paths under `data_root`."""
        if not hasattr(cls, 'label') or not cls.label:
            logger.warning(f"{cls.__name__} | `cls.label` not specified")
        def path(side: str) -> pathlib.Path:
            return cls.data_root / f"{datetime.datetime.now():%Y%m%d_%H%M%S}_{getattr(cls, 'label', 'image')}_{side}.png"
        return path('left'), path('right')

    @classmethod
    def start(cls) -> None:
        """Request capture of one left and one right image."""
        logger.debug(f"{cls.__name__} | Starting")
        cls.latest_start = time.time()
        left, right = cls.generate_image_paths()
        cls.get_proxy().save_left_image(str(left))
        cls.get_proxy().save_right_image(str(right))
        time.sleep(.5)
        # saving is asynchronous: report whether files have appeared yet
        for path, side in zip((left, right), ('Left', 'Right')):
            if path.exists():
                logger.debug(f"{cls.__name__} | {side} image saved to {path}")
            else:
                logger.debug(f"{cls.__name__} | {side} image capture request sent, but image not saved")

    @classmethod
    def finalize(cls) -> None:
        """Wait briefly for images to land on disk, recovering cameras if stuck."""
        logger.debug(f"{cls.__name__} | Finalizing")
        counter = 0
        while (
            not (latest := cls.get_latest_data('*'))
            or cls.is_started()
        ):
            time.sleep(1)
            counter += 1
            if counter == 3:
                # give up waiting and power-cycle the cameras
                cls.reenable_cameras()
                break
        cls.data_files.extend(latest)
        logger.debug(f"{cls.__name__} | Images captured: {latest}")

    @classmethod
    def validate(cls):
        # NOTE(review): if `latest` is None, `len(latest)` in the message below
        # raises TypeError instead of AssertionError - confirm intended
        if not (latest := cls.get_latest_data('*')) or len(latest) != 2:
            raise AssertionError(f"{cls.__name__} | Expected 2 images, got {len(latest)}: {latest}")

    @classmethod
    def stop(cls):
        logger.debug("%s | `stop()` not implemented", cls.__name__)

    @classmethod
    def pretest(cls):
        """Capture and validate a pair of test images end-to-end."""
        with np_logging.debug():
            logger.debug(f"{cls.__name__} | Pretest")
            cls.label = 'pretest'
            cls.initialize()
            cls.test()
            cls.start()
            cls.finalize()
            cls.validate()
            logger.info(f"{cls.__name__} | Pretest passed")
869
+
870
class MVR(CamstimSyncShared):
    """Proxy for MultiVideoRecorder on the rig's Mon computer.

    Talks over a raw socket via `mvr_connector.MVRConnector` rather than zro.
    """

    # req proxy config - hardcode or overload ensure_config()
    host: ClassVar[str] = np_config.Rig().Mon
    port: ClassVar[int] = CONFIG['MVR']['port']

    # matches camera names that are auxiliary/unused
    re_aux: re.Pattern = re.compile("aux|USB!|none", re.IGNORECASE)

    @classmethod
    def is_connected(cls) -> bool:
        """True if the host is online, MVR's socket is up, and cameras respond."""
        if not utils.is_online(cls.host):
            cls.exc = ConnectionError(
                f"No response from {cls.host}: may be offline or unreachable"
            )
            return False
        if not cls.get_proxy()._mvr_connected:
            cls.exc = ConnectionError(f"MVR likely not running on {cls.host}")
            return False
        try:
            _ = cls.get_camera_status()
        except ConnectionError as exc:
            cls.exc = exc
            return False
        return True

    @classmethod
    def initialize(cls) -> None:
        """Rebuild the connector, configure cameras, and reset per-session state."""
        with contextlib.suppress(AttributeError):
            del cls.proxy
        cls.proxy = cls.get_proxy()
        cls.test()
        cls.configure_cameras()
        _ = cls.get_proxy().read()  # empty buffer
        if isinstance(cls, Startable) and not cls.is_ready_to_start():
            if cls.is_started() and isinstance(cls, Stoppable):
                cls.stop()
            if isinstance(cls, Finalizable):
                cls.finalize()
            if not cls.is_ready_to_start():
                logger.warning(
                    "%s not ready to start: %s", cls.__name__, cls.get_state()
                )
                return
        if cls.data_root:
            cls.data_files = []
        cls.initialization = time.time()
        logger.info("%s initialized: ready for use", cls.__name__)

    @classmethod
    def shutdown(cls) -> None:
        """Close the MVR socket and drop the cached connector."""
        cls.get_proxy()._mvr_sock.close()
        del cls.proxy

    @classmethod
    def get_proxy(cls) -> mvr_connector.MVRConnector:
        """Return the cached MVR connector, creating it on first use."""
        with contextlib.suppress(AttributeError):
            return cls.proxy
        cls.ensure_config()
        logger.debug("Creating %s proxy to %s:%s", cls.__name__, cls.host, cls.port)
        cls.proxy = mvr_connector.MVRConnector({"host": cls.host, "port": cls.port})
        cls.proxy._mvr_sock.settimeout(cls.timeout)
        return cls.get_proxy()

    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        """Return (and cache) the camera id dicts reported by MVR."""
        if not hasattr(cls, "all_cameras"):
            cls.get_proxy().read()
            cls.all_cameras = cls.get_proxy().request_camera_ids()[0]["value"]
        return cls.all_cameras

    @classmethod
    def get_camera_status(cls) -> list[dict[str, str]]:
        """Request and return status dicts for the cameras in `get_cameras()`.

        Raises ConnectionError if no status response arrives.
        """
        _ = cls.get_proxy().read()  # empty buffer
        _ = cls.get_proxy()._send({"mvr_request": "get_camera_status"})
        # scan responses newest-first for the matching reply
        for msg in reversed(cls.get_proxy().read()):
            if msg.get("mvr_response", "") == "get_camera_status" and (
                cams := msg.get("value", [])
            ):
                break
        else:
            logger.error("Could not get camera status from %s", cls.host)
            raise ConnectionError(f"Could not get camera status from {cls.host}")
        return [
            _
            for _ in cams
            if any(_["camera_id"] == __["id"] for __ in cls.get_cameras())
        ]

    @classmethod
    def get_state(cls) -> ProxyState:
        if not cls.is_connected():
            return "", "MVR_CLOSED"
        status = cls.get_camera_status()
        # cam status could change between calls, so only get once
        if any(not _["is_open"] for _ in status):
            return "", "CAMERA_CLOSED"
        if any(not _["is_streaming"] for _ in status):
            return "", "CAMERA_NOT_STREAMING"
        if cls.get_cameras_recording(status):
            return "BUSY", "RECORDING"
        return "READY", ""

    @classmethod
    def get_cameras_recording(cls, status=None) -> list[dict[str, str]]:
        """Return status dicts for cameras currently recording."""
        return [_ for _ in status or cls.get_camera_status() if _["is_recording"]]

    @classmethod
    def is_ready_to_start(cls) -> bool:
        if cls.is_started():
            return False
        # ready only when every camera is open + streaming and none is recording
        return all(
            _["is_open"] and _["is_streaming"] and not _["is_recording"]
            for _ in cls.get_camera_status()
        )

    @classmethod
    def configure_cameras(cls) -> None:
        "Set MVR to record video from subset of all cameras, via `get_cameras` (implemented by subclass)"
        cam_ids = [_["id"] for _ in cls.get_cameras()]
        cls.get_proxy().define_hosts(cam_ids)
        cls.get_proxy().start_display()
992
+
993
class ImageMVR(MVR):
    """Capture single snapshots from the Aux camera via MVR."""

    gb_per_hr: ClassVar[int | float] = CONFIG['ImageMVR']["gb_per_hr"]
    min_rec_hr: ClassVar[int | float] = CONFIG['ImageMVR']["min_rec_hr"]

    label: ClassVar[str]
    "Rename file after capture to include label"

    # TODO ready state is if Aux cam is_open
    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        "Aux cam only"
        cams = super().get_cameras()
        return [_ for _ in cams if cls.re_aux.search(_["label"])]

    @classmethod
    def start(cls):
        """Take a single snapshot (rather than starting a recording)."""
        if not cls.is_ready_to_start():
            # TODO display state, wait on user input to continue
            logger.error("%s not ready to start: %s", cls.__name__, cls.get_state())
            raise AssertionError(
                f"{cls.__name__} not ready to start: {cls.get_state()}"
            )
        cls.latest_start = time.time()
        cls.get_proxy().take_snapshot()

    @classmethod
    def stop(cls):
        "Overload parent method to do nothing"
        pass

    @classmethod
    def is_started(cls) -> bool:
        # A snapshot counts as started once MVR broadcasts its conversion; a
        # failure broadcast (or no broadcast at all) counts as not started.
        for msg in cls.get_proxy().read():
            if msg.get("mvr_broadcast", "") == "snapshot_converted":
                return True
            if msg.get("mvr_broadcast", "") == "snapshot_failed":
                return False
        return False

    @classmethod
    def verify(cls):
        "Overload parent method to do nothing"
        pass

    # TODO
    @classmethod
    def validate(cls) -> None:
        logger.warning("%s.validate() not implemented", cls.__name__)

    @classmethod
    def finalize(cls) -> None:
        """Wait (up to 10 s) for the snapshot to finish converting, then rename
        new files to include `label` (if set) and record them in `data_files`."""
        logger.debug("Finalizing %s", cls.__name__)
        t0 = time.time()
        timedout = lambda: time.time() > t0 + 10
        # keep polling while: snapshot still in progress, cameras not idle yet,
        # no new data on disk, or an unconverted bitmap remains.
        # NOTE(review): `get_latest_data(".bmp")` lacks the leading "*" used by
        # the other patterns - confirm that pattern matches in-progress files.
        while (
            cls.is_started()
            or not cls.is_ready_to_start()
            or not cls.get_latest_data("*")
            or cls.get_latest_data(".bmp")
        ) and not timedout():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(1)  # TODO add backoff module
        if timedout():
            logger.warning(
                "Timed out waiting for %s to finish processing", cls.__name__
            )
            return
        if not hasattr(cls, "data_files") or not cls.data_files:
            cls.data_files = []
        new = cls.get_latest_data("*")
        if hasattr(cls, "label") and cls.label:
            # tag the captured filenames with the label (e.g. 'pretest')
            new = [_.rename(_.with_stem(f"{_.stem}_{cls.label}")) for _ in new]
        cls.data_files.extend(new)
        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])
1068
+
1069
+
1070
class VideoMVR(MVR):
    """Record video from all non-Aux cameras via MVR."""

    # config-driven limits (used for pretest timing and disk-space estimates)
    pretest_duration_sec: ClassVar[int | float] = CONFIG['VideoMVR']["pretest_duration_sec"]
    gb_per_hr: ClassVar[int | float] = CONFIG['VideoMVR']["gb_per_hr"]
    min_rec_hr: ClassVar[int | float] = CONFIG['VideoMVR']["min_rec_hr"]

    raw_suffix: ClassVar[str] = ".mp4"

    # the (state, message) pair `get_state()` reports while recording
    started_state = ("BUSY", "RECORDING")
    # optional sync file passed to `npc_mvr.MVRDataset` during `validate()`
    sync_path: Optional[pathlib.Path] = None

    @classmethod
    def get_cameras(cls) -> list[dict[str, str]]:
        "All available cams except Aux"
        cams = super().get_cameras()
        # check for camera labels with known Aux cam names
        return [_ for _ in cams if cls.re_aux.search(_["label"]) is None]

    @classmethod
    def start(cls) -> None:
        """Start a (nominally 24 h) recording on the configured cameras."""
        logger.info("%s | Starting recording", cls.__name__)
        cls.latest_start = time.time()
        cls.get_proxy().start_record(record_time=24 * 60 * 60,)  # sec

    @classmethod
    def verify(cls) -> None:
        "Assert data exists since latest start, or raise AssertionError."
        # files grow infrequently while MVR's recording - checking their size
        # is unreliable
        if not cls.is_started():
            logger.warning(
                "Cannot verify %s if not started: %s", cls.__name__, cls.get_state()
            )
            raise AssertionError(f"{cls.__name__} not started: {cls.get_state()}")
        # NOTE(review): this sleeps when the recording started *more* than
        # `pretest_duration_sec` ago - one would expect the opposite (wait only
        # while the recording is too young for files to exist). Confirm the
        # direction of this comparison is intended.
        if datetime.datetime.fromtimestamp(
            cls.latest_start
        ) < datetime.datetime.now() - datetime.timedelta(
            seconds=cls.pretest_duration_sec
        ):
            time.sleep(cls.pretest_duration_sec)
        # expect at least one new file per camera that is currently recording
        if not (files := cls.get_latest_data()) or len(files) < len(
            cls.get_cameras_recording()
        ):
            raise AssertionError(
                f"{cls.__name__} files do not match the number of cameras: {files}"
            )
        logger.info(
            "%s | Verified: %s cameras recording to disk", cls.__name__, len(files)
        )

    @classmethod
    def stop(cls) -> None:
        """Stop the current recording."""
        cls.get_proxy().stop_record()
        logger.info("%s | Stopped recording", cls.__name__)

    @classmethod
    def is_started(cls) -> bool:
        # started == the current state tuple contains both "BUSY" and "RECORDING"
        if len(state := cls.get_state()) and all(
            msg in state for msg in cls.started_state
        ):
            return True
        return False

    @classmethod
    def finalize(cls) -> None:
        """Stop recording if needed, wait (up to 30 s) for MVR to finish writing,
        then record the new .mp4/.json files in `data_files`."""
        logger.debug("Finalizing %s", cls.__name__)
        if cls.is_started():
            cls.stop()
        t0 = time.time()
        timedout = lambda: time.time() > t0 + 30
        while not cls.is_ready_to_start() and not timedout():
            logger.debug("Waiting for %s to finish processing", cls.__name__)
            time.sleep(1)  # TODO add backoff module
        if timedout():
            logger.warning(
                "Timed out waiting for %s to finish processing", cls.__name__
            )
            return
        if not hasattr(cls, "data_files"):
            cls.data_files = []
        cls.data_files.extend(
            new := (cls.get_latest_data("*.mp4") + cls.get_latest_data("*.json"))
        )
        logger.debug("%s processing finished: %s", cls.__name__, [_.name for _ in new])

    @classmethod
    def validate(cls) -> None:
        """Check the recorded video/info files open cleanly in `npc_mvr.MVRDataset`
        (with sync, if `sync_path` is set); copies go to a temp dir and are
        removed afterwards."""
        tempdir = pathlib.Path(tempfile.gettempdir())
        tempfiles: list[pathlib.Path] = []
        # currently can't pass individual files to mvrdataset - just a dir
        for file in itertools.chain(cls.get_latest_data("*.mp4"), cls.get_latest_data("*.json")):
            np_tools.copy(file, t := tempdir / file.name)
            tempfiles.append(t)
        npc_mvr.MVRDataset(
            tempdir,
            getattr(cls, "sync_path", None),
        )
        logger.info(f"Validated {len(tempfiles)} video/info files {'with' if getattr(cls, 'sync_path', None) else 'without'} sync")
        for file in tempfiles:
            file.unlink(missing_ok=True)
1169
+ class JsonRecorder:
1170
+ "Just needs a `start` method that calls `write()`."
1171
+
1172
+ log_name: ClassVar[str]
1173
+ log_root: ClassVar[pathlib.Path]
1174
+
1175
+ @abc.abstractclassmethod
1176
+ def start() -> None:
1177
+ pass
1178
+
1179
+ @classmethod
1180
+ def pretest(cls) -> None:
1181
+ with np_logging.debug():
1182
+ cls.initialize()
1183
+ cls.start()
1184
+ cls.validate()
1185
+ logger.info("%s | Pretest passed", cls.__name__)
1186
+
1187
+ @classmethod
1188
+ def ensure_config(cls) -> None:
1189
+ config = CONFIG.get(
1190
+ __class__.__name__, {}
1191
+ ) # class where this function is defined
1192
+ config.update(**CONFIG.get(cls.__name__, {})) # the calling class, if different
1193
+
1194
+ if not hasattr(cls, "log_name"):
1195
+ cls.log_name = config.get("log_name", "{}_.json")
1196
+ cls.log_name = cls.log_name.format(
1197
+ datetime.datetime.now().strftime("%Y-%m-%d_%H%M%S")
1198
+ )
1199
+
1200
+ if not hasattr(cls, "log_root"):
1201
+ cls.log_root = config.get("log_root", ".")
1202
+ cls.log_root = pathlib.Path(cls.log_root).resolve()
1203
+
1204
+ @classmethod
1205
+ def initialize(cls) -> None:
1206
+ logger.debug("%s initializing", __class__.__name__)
1207
+ cls.ensure_config()
1208
+ cls.initialization = time.time()
1209
+ log = (cls.log_root / cls.log_name).with_suffix(".json")
1210
+ log.parent.mkdir(parents=True, exist_ok=True)
1211
+ log.touch(exist_ok=True)
1212
+ if log.read_text().strip() == "":
1213
+ log.write_text("{}")
1214
+ cls.all_files = [log]
1215
+ cls.test()
1216
+
1217
+ @classmethod
1218
+ def test(cls) -> None:
1219
+ logger.debug("%s testing", __class__.__name__)
1220
+ try:
1221
+ _ = cls.get_current_log().read_bytes()
1222
+ except OSError as exc:
1223
+ raise TestError(
1224
+ f"{__class__.__name__} failed to open {cls.get_current_log()}"
1225
+ ) from exc
1226
+
1227
+ @classmethod
1228
+ def get_current_log(cls) -> pathlib.Path:
1229
+ if not hasattr(cls, "initialization"):
1230
+ cls.initialize()
1231
+ return cls.all_files[-1]
1232
+
1233
+ @classmethod
1234
+ def read(cls) -> dict[str, str | float]:
1235
+ try:
1236
+ data = json.loads(cls.get_current_log().read_bytes())
1237
+ except json.JSONDecodeError as exc:
1238
+ if cls.get_current_log().stat().st_size:
1239
+ raise
1240
+ logger.debug("%s | Error encountered reading file %s: %r", cls.__name__, cls.get_current_log(), exc)
1241
+ data = {} # file was empty
1242
+ else:
1243
+ logger.debug("%s | Read from %s", cls.__name__, cls.get_current_log())
1244
+ return data
1245
+
1246
+ @classmethod
1247
+ def write(cls, value: dict) -> None:
1248
+ try:
1249
+ data = cls.read()
1250
+ except json.JSONDecodeError:
1251
+ data = {}
1252
+ file = cls.get_current_log().with_suffix(".new.json")
1253
+ file.touch()
1254
+ cls.all_files.append(file)
1255
+ else:
1256
+ file = cls.get_current_log()
1257
+ np_config.merge(data, value)
1258
+ file.write_text(json.dumps(data, indent=4, sort_keys=False, default=str))
1259
+ logger.debug("%s wrote to %s", cls.__name__, file)
1260
+
1261
+ @classmethod
1262
+ def validate(cls) -> None:
1263
+ if not (log := cls.read()):
1264
+ cls.exc = TestError(
1265
+ f"{cls.__name__} failed to validate because log is empty: {cls.get_current_log()}"
1266
+ )
1267
+ logger.error(
1268
+ "%s failed to validate: log is empty %s",
1269
+ cls.__name__,
1270
+ cls.get_current_log(),
1271
+ exc_info=cls.exc,
1272
+ )
1273
+ logger.debug("%s validated", __class__.__name__)
1274
+
1275
+
1276
class YamlRecorder(JsonRecorder):
    """JsonRecorder that additionally exports the json log as yaml on finalize."""

    @classmethod
    def test(cls) -> None:
        """Check the log is readable and that PyYAML is importable."""
        logger.debug("%s testing", __class__.__name__)
        super().test()
        try:
            import yaml
        except ImportError as exc:
            raise TestError(f"{__class__.__name__} failed to import yaml") from exc

    @classmethod
    def finalize(cls) -> None:
        """Best-effort dump of the current json log to a sibling `.yaml` file.

        Fixes three bugs in the original: `json.load` was called on bytes
        (needs `json.loads`), `yaml` was not in scope here (it was only
        imported locally inside `test`, so this raised NameError), and
        `yaml.dump` was handed `.read_bytes()` instead of a writable stream.
        """
        logger.debug("Finalizing %s", __class__.__name__)
        log = json.loads(cls.get_current_log().read_bytes())
        with contextlib.suppress(
            ImportError, AttributeError, OSError
        ):  # if this fails we still have the json file
            import yaml
            with cls.get_current_log().with_suffix(".yaml").open("w") as f:
                yaml.dump(log, f)
1294
+
1295
+
1296
class NewScaleCoordinateRecorder(JsonRecorder):
    "Gets current manipulator coordinates and stores them in a file with a timestamp."

    host: ClassVar[str] = np_config.Rig().Mon  # computer hosting the NewScale csv log
    data_root: ClassVar[pathlib.Path] = CONFIG['NewScaleCoordinateRecorder']['data']
    data_name: ClassVar[str] = CONFIG['NewScaleCoordinateRecorder']['data_name']
    data_fieldnames: ClassVar[Sequence[str]] = CONFIG['NewScaleCoordinateRecorder']['data_fieldnames']
    data_files: ClassVar[list[pathlib.Path]] = []
    "Files to be copied after exp"

    max_z_travel: ClassVar[int] = CONFIG['NewScaleCoordinateRecorder']['max_z_travel']
    num_probes: ClassVar[int] = 6
    log_name: ClassVar[str] = "newscale_coords_{}.json"
    log_root: ClassVar[pathlib.Path] = pathlib.Path(tempfile.gettempdir()).resolve()
    label: ClassVar[str] = ""
    "A label to tag each entry with"
    latest_start: ClassVar[int] = 0
    "`time.time()` when the service was last started via `start()`."
    log_time_fmt: str = CONFIG['NewScaleCoordinateRecorder']['log_time_fmt']

    @classmethod
    def pretest(cls) -> None:
        # tag pretest entries so they're distinguishable from real ones
        cls.label = 'pretest'
        super().pretest()

    @classmethod
    def get_current_data(cls) -> pathlib.Path:
        """Path to the NewScale csv log (under `data_root` on `host`)."""
        cls.ensure_config()
        return cls.data_root / cls.data_name

    @classmethod
    def last_logged_coords_csv(cls) -> dict[str, float]:
        "Get the most recent coordinates from the log file using the csv parser in the stdlib."
        with cls.get_current_data().open("r") as _:
            reader = csv.DictReader(_, fieldnames=cls.data_fieldnames)
            rows = list(reader)
        # data_fieldnames[0] is the last-moved timestamp column,
        # data_fieldnames[1] the manipulator serial-number column
        last_moved_label = cls.data_fieldnames[0]
        coords = {}
        for row in reversed(rows):  # search for the most recent coordinates
            if len(coords.keys()) == cls.num_probes:
                break  # we have an entry for each probe
            # first (i.e. newest) row seen per manipulator wins
            if (m := row.pop(cls.data_fieldnames[1]).strip()) not in coords:
                coords[m] = {}
                for k, v in row.items():
                    if "virtual" in k:
                        continue  # skip virtual-coordinate columns
                    if k == last_moved_label:
                        v = datetime.datetime.strptime(v, cls.log_time_fmt)
                    else:
                        v = v.strip()
                        with contextlib.suppress(ValueError):
                            v = float(v)  # keep as str if not numeric
                    coords[m].update({k: v})
        return coords

    @classmethod
    def last_logged_coords_pd(cls) -> dict[str, float]:
        "Get the most recent coordinates from the log file using pandas."
        # NOTE(review): `pd` is only imported locally inside `get_coordinates()`;
        # this method needs a module-level `import pandas as pd` to run -
        # confirm that import exists at the top of this file.
        coords = {}
        manipulator_label = cls.data_fieldnames[1]
        last_moved_label = cls.data_fieldnames[0]
        df = pd.read_csv(cls.get_current_data(), names=cls.data_fieldnames, parse_dates=[last_moved_label])
        # group by manipulator_label and get the maximum value in last_moved_label for each group
        # (i.e. the most recent entry for each manipulator)
        last_moved = df.loc[
            df.groupby(manipulator_label)[last_moved_label].idxmax()
        ].set_index(manipulator_label).sort_values(last_moved_label, ascending=False)
        for serial_number, row in last_moved.iloc[:cls.num_probes].iterrows():
            new = {key: row[key] for key in cls.data_fieldnames if (key != manipulator_label and 'virtual' not in key)}
            new[last_moved_label] = row[last_moved_label].to_pydatetime()
            coords[str(serial_number).strip()] = new
        return coords

    @classmethod
    def convert_serial_numbers_to_probe_labels(cls, coords: dict[str, float]) -> None:
        """Re-key `coords` in place from serial numbers to probe labels (per the
        `probe_to_serial_number` config map), keeping the serial number as a value."""
        for k, v in CONFIG[cls.__name__].get("probe_to_serial_number", {}).items():
            if v in coords:
                coords[k] = coords.pop(v)
                coords[k]['serial_number'] = v

    @classmethod
    def get_coordinates(cls) -> dict[str, float]:
        """Return the latest coordinates per probe, z-adjusted and tagged with `cls.label`."""
        try:
            import pandas as pd
        except ImportError:
            coords = cls.last_logged_coords_csv()  # stdlib fallback
        else:
            coords = cls.last_logged_coords_pd()

        def adjust_z_travel(coords):
            # flip z relative to max_z_travel - presumably converting logged
            # travel-remaining into depth (or vice versa); confirm convention
            for v in coords.values():
                if 'z' in v:
                    v['z'] = cls.max_z_travel - v['z']
        adjust_z_travel(coords)
        cls.convert_serial_numbers_to_probe_labels(coords)
        coords["label"] = cls.label
        logger.debug("%s | Retrieved coordinates: %s", cls.__name__, coords)
        return coords

    @classmethod
    def write_to_platform_json(cls):
        """Merge the current coordinates into the session's platform json, keyed
        by `label` (or the start timestamp), and queue the source csv for copying."""
        coords = cls.get_coordinates()
        for k, v in coords.items():
            if isinstance(v, Mapping) and (last_moved := v.get('last_moved')):
                del coords[k]['last_moved']
                del coords[k]['serial_number']
                continue
            # if last_moved is kept, then normalize it depending on csv/pd method:
            # NOTE(review): this fall-through path looks unsafe - when `v` is
            # not a Mapping (e.g. the str `label` entry) or lacks 'last_moved',
            # `last_moved`/`timestamp` may be unbound here (NameError), and
            # `coords[k]['last_moved'] = ...` fails on a str value. Confirm the
            # intended behavior of this branch.
            match last_moved:
                case str():
                    timestamp = datetime.datetime.strptime(last_moved, cls.log_time_fmt)
                case datetime.datetime():
                    timestamp = last_moved
            coords[k]['last_moved'] = np_config.normalize_time(timestamp)

        # rearrange so `label`` is top-level key, or use capture-timestamp if no label
        platform_json = np_session.PlatformJson(cls.get_current_log())
        platform_json_entry = copy.deepcopy(platform_json.manipulator_coordinates)
        coords = {str(coords.pop('label', np_config.normalize_time(cls.latest_start))): coords}
        logger.debug("%s | Adding to platform json: %s", cls.__name__, coords)
        platform_json.manipulator_coordinates = np_config.merge(platform_json_entry, coords)
        # queue the source csv for copying after the experiment (once only)
        if (csv := cls.get_current_data()) not in cls.data_files:
            cls.data_files.append(csv)

    @classmethod
    def start(cls):
        """Capture coordinates now: into the platform json for D1 sessions,
        otherwise into the recorder's own json log."""
        cls.latest_start = time.time()
        if 'platformD1' in cls.log_name:
            cls.write_to_platform_json()
        else:
            cls.write({str(datetime.datetime.now()): cls.get_coordinates()})

    @classmethod
    def test(cls) -> None:
        """Check the csv is openable and coordinates parse, or raise TestError."""
        super().test()
        logger.debug("%s | Testing", __class__.__name__)
        try:
            _ = cls.get_current_data().open("r")
        except OSError as exc:
            raise TestError(
                f"{cls.__name__} failed to open {cls.get_current_data()}"
            ) from exc
        try:
            _ = cls.get_coordinates()
        except Exception as exc:
            raise TestError(f"{cls.__name__} failed to get coordinates") from exc
        else:
            logger.info("%s | Test passed", cls.__name__)

    @classmethod
    def ensure_config(cls) -> None:
        """Populate `host` and data-file attributes from CONFIG, verifying the
        UNC data path is accessible."""
        super().ensure_config()

        if CONFIG.get("services", {}):
            config = CONFIG["services"].get(__class__.__name__, {})
            config.update(**CONFIG["services"].get(cls.__name__, {}))
        else:
            config = CONFIG.get(
                __class__.__name__, {}
            )  # class where this function is defined
            config.update(
                **CONFIG.get(cls.__name__, {})
            )  # the calling class, if different

        if not hasattr(cls, "host"):
            cls.host = config["host"]

        # for resulting data
        if (
            not hasattr(cls, "data_root")
            or cls.host not in pathlib.Path(cls.data_root).parts
        ):
            relative_path = config["data"]
            if relative_path:
                # build a UNC path to the csv on the host computer
                root = pathlib.Path(f"//{cls.host}/{relative_path}")
                try:
                    _ = root.exists()
                except OSError as exc:
                    cls.exc = exc
                    logger.exception(
                        "Error accessing %s data path: %s", cls.__name__, root
                    )
                    raise FileNotFoundError(
                        f"{cls.__name__} data path is not accessible: {root}"
                    ) from exc
                else:
                    cls.data_root = root

        if not hasattr(cls, "data_name"):
            cls.data_name = config["data_name"]
        if not hasattr(cls, "data_fieldnames"):
            cls.data_fieldnames = config["data_fieldnames"]
1488
+
1489
+