sl-shared-assets 1.0.0rc22__py3-none-any.whl → 1.0.0rc24__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

sl_shared_assets/cli.py CHANGED
@@ -25,7 +25,32 @@ from .data_classes import (
  required=True,
  help="The absolute path to the session whose raw data needs to be verified for potential corruption.",
  )
- def verify_session_integrity(session_path: str) -> None:
+ @click.option(
+ "-c",
+ "--create_processed_directories",
+ is_flag=True,
+ show_default=True,
+ default=False,
+ help=(
+ "Determines whether to create the processed data hierarchy. This flag should be disabled for most runtimes. "
+ "Primarily, it is used by lab acquisition system code to generate processed data directories on the remote "
+ "compute servers as part of the data preprocessing pipeline."
+ ),
+ )
+ @click.option(
+ "-pdr",
+ "--processed_data_root",
+ type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+ required=False,
+ help=(
+ "The absolute path to the directory where processed data from all projects is stored on the machine that runs "
+ "this command. This argument is used when calling the CLI on the BioHPC server, which uses different data "
+ "volumes for raw and processed data. Note, the input path must point to the root directory, as it will be "
+ "automatically modified to include the project name, the animal id, and the session ID. This argument is only "
+ "used if the 'create_processed_directories' flag is True."
+ ),
+ )
+ def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
  """Checks the integrity of the target session's raw data (contents of the raw_data directory).

  This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -33,9 +58,14 @@ def verify_session_integrity(session_path: str) -> None:
  always verified the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
  'incomplete' and automatically excluding it from all further automated processing runtimes.
+
+ The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
+ processed session. This use case is fully automated and should not be triggered manually by the user.
  """
  session = Path(session_path)
- if verify_session_checksum(session):
+ if verify_session_checksum(
+ session, create_processed_data_directory=create_processed_directories, processed_data_root=processed_data_root
+ ):
  console.echo(message=f"Session {session.stem} raw data integrity: verified.", level=LogLevel.SUCCESS)
  else:
  console.echo(message=f"Session {session.stem} raw data integrity: compromised!", level=LogLevel.ERROR)
@@ -288,6 +318,15 @@ def generate_experiment_configuration_file(project: str, experiment: str, state_
  acquisition_system = get_system_configuration_data()
  file_path = acquisition_system.paths.root_directory.joinpath(project, "configuration", f"{experiment}.yaml")

+ if not acquisition_system.paths.root_directory.joinpath(project).exists():
+ message = (
+ f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
+ f"The target project does not exist on the local machine (PC). Use the "
+ f"'sl-create-project' CLI command to create the project before creating new experiment configuration(s). "
+ )
+ console.error(message=message, error=ValueError)
+ raise ValueError(message) # Fall-back to appease mypy, should not be reachable
+
  # Loops over the number of requested states and, for each, generates a precursor experiment state field inside the
  # 'states' dictionary.
  states = {}
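
For reference, the integrity check driven by this command can also be invoked directly from Python through verify_session_checksum, whose updated signature appears later in this diff. A minimal sketch, assuming hypothetical session and processed-data root paths:

from pathlib import Path

from sl_shared_assets.tools import verify_session_checksum

# Hypothetical locations; substitute real session and processed-data roots.
session = Path("/storage/sun_data/my_project/animal_01/2024-01-01-120000")
processed_root = Path("/workdir/sun_data")

# Verify raw data integrity and, optionally, create the processed data hierarchy,
# mirroring what the updated 'verify_session_integrity' CLI command does internally.
if verify_session_checksum(
    session,
    create_processed_data_directory=True,
    processed_data_root=processed_root,
):
    print("raw data integrity: verified")
else:
    print("raw data integrity: compromised")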
sl_shared_assets/cli.pyi CHANGED
@@ -1,3 +1,5 @@
+ from pathlib import Path
+
  from .tools import (
  ascend_tyche_data as ascend_tyche_data,
  verify_session_checksum as verify_session_checksum,
@@ -13,7 +15,7 @@ from .data_classes import (
  set_system_configuration_file as set_system_configuration_file,
  )

- def verify_session_integrity(session_path: str) -> None:
+ def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
  """Checks the integrity of the target session's raw data (contents of the raw_data directory).

  This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -21,6 +23,9 @@ def verify_session_integrity(session_path: str) -> None:
  always verified the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any other
  directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the session as
  'incomplete' and automatically excluding it from all further automated processing runtimes.
+
+ The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
+ processed session. This use case is fully automated and should not be triggered manually by the user.
  """

  def generate_project_manifest_file(
sl_shared_assets/data_classes/configuration_data.py CHANGED
@@ -111,6 +111,12 @@ class MesoscopePaths:
  sharing protocol, such as SMB."""
  harvesters_cti_path: Path = Path("/opt/mvIMPACT_Acquire/lib/x86_64/mvGenTLProducer.cti")
  """The path to the GeniCam CTI file used to connect to Harvesters-managed cameras."""
+ server_processed_data_root: Path = Path("/workdir/sun_data")
+ """The absolute path to the BioHPC server directory used to store the processed data from all Sun lab projects.
+ This path is relative to the server root and is only used when submitting remote jobs to the server."""
+ server_raw_data_root: Path = Path("/storage/sun_data")
+ """The absolute path to the BioHPC server directory used to store the raw data from all Sun lab projects.
+ This path is relative to the server root and is only used when submitting remote jobs to the server."""


  @dataclass()
@@ -125,10 +131,17 @@ class MesoscopeCameras:
  right_camera_index: int = 2
  """The index of the right body camera (from animal's perspective) in the list of all available OpenCV-managed
  cameras."""
- quantization_parameter: int = 15
- """The quantization parameter used by all cameras to encode acquired frames as video files. This controls how much
- data is discarded when encoding each video frame, directly contributing to the encoding speed, resultant video file
- size and video quality."""
+ face_camera_quantization_parameter: int = 15
+ """The quantization parameter used by the face camera to encode acquired frames as video files. This controls how
+ much data is discarded when encoding each video frame, directly contributing to the encoding speed, resultant video
+ file size and video quality."""
+ body_camera_quantization_parameter: int = 15
+ """The quantization parameter used by the left and right body cameras to encode acquired frames as video files.
+ See 'face_camera_quantization_parameter' field for more information on what this parameter does."""
+ display_face_camera_frames: bool = True
+ """Determines whether to display the frames grabbed from the face camera during runtime."""
+ display_body_camera_frames: bool = True
+ """Determines whether to display the frames grabbed from the left and right body cameras during runtime."""


  @dataclass()
@@ -141,14 +154,13 @@ class MesoscopeMicroControllers:
  """The USB port used by the Sensor Microcontroller."""
  encoder_port: str = "/dev/ttyACM2"
  """The USB port used by the Encoder Microcontroller."""
- mesoscope_start_ttl_module_id: int = 1
- """The unique byte-code ID of the TTL module instance used to send mesoscope frame acquisition start trigger
- signals to the ScanImagePC."""
- mesoscope_stop_ttl_module_id: int = 2
- """The unique byte-code ID of the TTL module instance used to send mesoscope frame acquisition stop trigger
- signals to the ScanImagePC."""
+ debug: bool = False
+ """Determines whether to run the managed acquisition system in the 'debug mode'. This mode should be disabled
+ during most runtimes. It is used during initial system calibration and testing and prints a lot of generally
+ redundant information into the terminal."""
  mesoscope_ttl_pulse_duration_ms: int = 10
- """The duration of the HIGH phase of all outgoing mesoscope TTL pulses, in milliseconds."""
+ """The duration of the HIGH phase of all outgoing TTL pulses that target the Mesoscope (enable or disable mesoscope
+ frame acquisition), in milliseconds."""
  minimum_break_strength_g_cm: float = 43.2047
  """The minimum torque applied by the running wheel break in gram centimeter. This is the torque the break delivers
  at minimum voltage (break is disabled)."""
@@ -292,6 +304,8 @@ class MesoscopeSystemConfiguration(YamlConfig):
  self.paths.nas_directory = Path(self.paths.nas_directory)
  self.paths.mesoscope_directory = Path(self.paths.mesoscope_directory)
  self.paths.harvesters_cti_path = Path(self.paths.harvesters_cti_path)
+ self.paths.server_processed_data_root = Path(self.paths.server_processed_data_root)
+ self.paths.server_raw_data_root = Path(self.paths.server_raw_data_root)

  # Converts valve_calibration data from dictionary to a tuple of tuples format
  if not isinstance(self.microcontrollers.valve_calibration_data, tuple):
@@ -299,6 +313,23 @@ class MesoscopeSystemConfiguration(YamlConfig):
  (k, v) for k, v in self.microcontrollers.valve_calibration_data.items()
  )

+ # Verifies the contents of the valve calibration data loaded from the config file.
+ valve_calibration_data = self.microcontrollers.valve_calibration_data
+ if not all(
+ isinstance(item, tuple)
+ and len(item) == 2
+ and isinstance(item[0], (int, float))
+ and isinstance(item[1], (int, float))
+ for item in valve_calibration_data
+ ):
+ message = (
+ f"Unable to initialize the MesoscopeSystemConfiguration class. Expected each item under the "
+ f"'valve_calibration_data' field of the Mesoscope-VR acquisition system configuration .yaml file to be "
+ f"a tuple of two integer or float values, but instead encountered {valve_calibration_data} with at "
+ f"least one incompatible element."
+ )
+ console.error(message=message, error=TypeError)
+
  def save(self, path: Path) -> None:
  """Saves class instance data to disk as a 'mesoscope_system_configuration.yaml' file.

@@ -323,6 +354,8 @@ class MesoscopeSystemConfiguration(YamlConfig):
  original.paths.nas_directory = str(original.paths.nas_directory) # type: ignore
  original.paths.mesoscope_directory = str(original.paths.mesoscope_directory) # type: ignore
  original.paths.harvesters_cti_path = str(original.paths.harvesters_cti_path) # type: ignore
+ original.paths.server_processed_data_root = str(original.paths.server_processed_data_root) # type: ignore
+ original.paths.server_raw_data_root = str(original.paths.server_raw_data_root) # type: ignore

  # Converts valve calibration data into dictionary format
  if isinstance(original.microcontrollers.valve_calibration_data, tuple):
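
The new __post_init__ check rejects malformed valve calibration entries. A standalone sketch of the format it enforces, using hypothetical calibration values:

# Each entry must be a two-element tuple of numbers; a dict value or a three-element
# entry would fail the same check that MesoscopeSystemConfiguration now performs.
valve_calibration_data = ((15000, 1.8556), (30000, 3.4844), (45000, 7.1846))  # hypothetical values

valid = all(
    isinstance(item, tuple)
    and len(item) == 2
    and isinstance(item[0], (int, float))
    and isinstance(item[1], (int, float))
    for item in valve_calibration_data
)
print(valid)  # True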
sl_shared_assets/data_classes/session_data.py CHANGED
@@ -438,6 +438,16 @@ class SessionData(YamlConfig):
  # Constructs the root session directory path
  session_path = acquisition_system.paths.root_directory.joinpath(project_name, animal_id, session_name)

+ # Prevents creating new sessions for non-existent projects.
+ if not acquisition_system.paths.root_directory.joinpath(project_name).exists():
+ message = (
+ f"Unable to create the session directory hierarchy for the session {session_name} of the animal "
+ f"'{animal_id}' and project '{project_name}'. The project does not exist on the local machine (PC). "
+ f"Use the 'sl-create-project' CLI command to create the project on the local machine before creating "
+ f"new sessions."
+ )
+ console.error(message=message, error=FileNotFoundError)
+
  # Handles potential session name conflicts
  counter = 0
  while session_path.exists():
sl_shared_assets/tools/project_management_tools.py CHANGED
@@ -5,6 +5,7 @@ up a given project."""
  from pathlib import Path

  import polars as pl
+ from ataraxis_base_utilities import console

  from ..data_classes import SessionData
  from .packaging_tools import calculate_directory_checksum
@@ -33,9 +34,25 @@ def generate_project_manifest(
  is different from the 'raw_project_directory'. Typically, this would be the case on remote compute server(s)
  and not on local machines.
  """
+
+ if not raw_project_directory.exists():
+ message = (
+ f"Unable to generate the project manifest file for the requested project {raw_project_directory.stem}. The "
+ f"specified project directory does not exist."
+ )
+ console.error(message=message, error=FileNotFoundError)
+
  # Finds all raw data directories
  session_directories = [directory.parent for directory in raw_project_directory.rglob("raw_data")]

+ if len(session_directories) == 0:
+ message = (
+ f"Unable to generate the project manifest file for the requested project {raw_project_directory.stem}. The "
+ f"project does not contain any raw session data. To generate the manifest file, the project must contain "
+ f"at least one valid experiment or training session."
+ )
+ console.error(message=message, error=FileNotFoundError)
+
  # Precreates the 'manifest' dictionary structure
  manifest: dict[str, list[str | bool]] = {
  "animal": [], # Animal IDs.
@@ -120,7 +137,9 @@ def generate_project_manifest(
  )


- def verify_session_checksum(session_path: Path) -> bool:
+ def verify_session_checksum(
+ session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
+ ) -> bool:
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
  comparing it against the checksum stored in the ax_checksum.txt file.

@@ -132,16 +151,27 @@ def verify_session_checksum(session_path: Path) -> bool:
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
  it from all further automatic processing.

+ This function is also used to create the processed data hierarchy on the BioHPC server, when it is called as
+ part of the data preprocessing runtime performed by a data acquisition system.
+
  Args:
  session_path: The path to the session directory to be verified. Note, the input session directory must contain
  the 'raw_data' subdirectory.
+ create_processed_data_directory: Determines whether to create the processed data hierarchy during runtime.
+ processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
+ the root directory where to store the processed data from all projects, and it will be automatically
+ modified to include the project name, the animal name, and the session ID.

  Returns:
  True if the checksum matches, False otherwise.
  """

- # Loads session data layout
- session_data = SessionData.load(session_path=session_path)
+ # Loads session data layout. If configured to do so, also creates the processed data hierarchy
+ session_data = SessionData.load(
+ session_path=session_path,
+ processed_data_root=processed_data_root,
+ make_processed_data_directory=create_processed_data_directory,
+ )

  # Re-calculates the checksum for the raw_data directory
  calculated_checksum = calculate_directory_checksum(
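
As the diff above shows, verify_session_checksum now delegates processed-directory creation to SessionData.load. A minimal sketch of that call shape, with hypothetical paths:

from pathlib import Path

from sl_shared_assets.data_classes import SessionData

# Hypothetical locations; on the BioHPC server, raw and processed data live on separate volumes.
session = Path("/storage/sun_data/my_project/animal_01/2024-01-01-120000")
processed_root = Path("/workdir/sun_data")

# Loads the session layout and, when requested, creates the processed data hierarchy
# under the supplied root, mirroring the call inside verify_session_checksum().
session_data = SessionData.load(
    session_path=session,
    processed_data_root=processed_root,
    make_processed_data_directory=True,
)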
sl_shared_assets/tools/project_management_tools.pyi CHANGED
@@ -27,7 +27,9 @@ def generate_project_manifest(
  and not on local machines.
  """

- def verify_session_checksum(session_path: Path) -> bool:
+ def verify_session_checksum(
+ session_path: Path, create_processed_data_directory: bool = True, processed_data_root: None | Path = None
+ ) -> bool:
  """Verifies the integrity of the session's raw data by generating the checksum of the raw_data directory and
  comparing it against the checksum stored in the ax_checksum.txt file.

@@ -39,9 +41,16 @@ def verify_session_checksum(session_path: Path) -> bool:
  Removing the telomere.bin marker file from session's raw_data folder marks the session as incomplete, excluding
  it from all further automatic processing.

+ This function is also used to create the processed data hierarchy on the BioHPC server, when it is called as
+ part of the data preprocessing runtime performed by a data acquisition system.
+
  Args:
  session_path: The path to the session directory to be verified. Note, the input session directory must contain
  the 'raw_data' subdirectory.
+ create_processed_data_directory: Determines whether to create the processed data hierarchy during runtime.
+ processed_data_root: The root directory where to store the processed data hierarchy. This path has to point to
+ the root directory where to store the processed data from all projects, and it will be automatically
+ modified to include the project name, the animal name, and the session ID.

  Returns:
  True if the checksum matches, False otherwise.
sl_shared_assets-1.0.0rc22.dist-info/METADATA → sl_shared_assets-1.0.0rc24.dist-info/METADATA CHANGED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 1.0.0rc22
+ Version: 1.0.0rc24
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
sl_shared_assets-1.0.0rc22.dist-info/RECORD → sl_shared_assets-1.0.0rc24.dist-info/RECORD CHANGED
@@ -1,15 +1,14 @@
  sl_shared_assets/__init__.py,sha256=rscR353jiyUQ-wpguTLOM-A5Lqr1ftQtuTan1D0AtR0,2196
  sl_shared_assets/__init__.pyi,sha256=Ye6eY_y_l9CTqwbCf-OxXOphxXKfn9UJnuw09DdLEtc,2405
- sl_shared_assets/cli.py,sha256=FRc452bUQCDMtMLVxIRr178pbuCVTYWplJAzumy0Ruo,14971
- sl_shared_assets/cli.pyi,sha256=dmkg30arGgA3pLipiuQFb8jq2IPy53srE6CdFAaCdwc,5620
+ sl_shared_assets/cli.py,sha256=-w9RAbHEcMspp6UDiXHM0fYSUwPSjJpZZcX3T9hLzUc,17103
+ sl_shared_assets/cli.pyi,sha256=DuhJhqAXB7e5uOdzCI2c29FwsVwpQefR2eLUfW0gWyQ,5931
  sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  sl_shared_assets/data_classes/__init__.py,sha256=b0BwTAJCD1zbwjd2UdpXheq61q1sgBuYhtAL-GHA2h4,1835
  sl_shared_assets/data_classes/__init__.pyi,sha256=e2082sm5pSw3bxNZGqwcRhuKLH8T7hcEo6kWtOZNDBU,1968
- sl_shared_assets/data_classes/configuration_data.py,sha256=NL5dMQkd_wigUiik8Yvzp2FQb_PQF3C0utvUcSo6b08,27903
- sl_shared_assets/data_classes/configuration_data.pyi,sha256=ePWbPROL_bWuFuQKMhBVbZal0taTY7twSibKbrd-_Qw,9439
+ sl_shared_assets/data_classes/configuration_data.py,sha256=wuPFz3zx1DN12KimPhH-fU1geU3pFzRuRB53JEJ2T6I,30343
  sl_shared_assets/data_classes/runtime_data.py,sha256=Q7Ykf9hgrw1jYKXa53mn_LW8G2cPmLLuxgGkP6qQcc4,15483
  sl_shared_assets/data_classes/runtime_data.pyi,sha256=PxaCbeF9COR4ri91pdzh7zVrqaz2KEDYB1EoLhZQC_c,6618
- sl_shared_assets/data_classes/session_data.py,sha256=tQMt1i9Bloj5K9kYS1gkCdOtksd_8_nHFMZQHLaKCoE,36754
+ sl_shared_assets/data_classes/session_data.py,sha256=Jf-WPX1f7sPxWOrU2oq79YZhwZZlLOSVjN7YntZdO80,37401
  sl_shared_assets/data_classes/session_data.pyi,sha256=a3nPC42mQniUPk2HsAM1_DWUa-BfhzDljQfDQh2rSus,13071
  sl_shared_assets/data_classes/surgery_data.py,sha256=qsMj3NkjhylAT9b_wHBY-1XwTu2xsZcZatdECmkA7Bs,7437
  sl_shared_assets/data_classes/surgery_data.pyi,sha256=rf59lJ3tGSYKHQlEGXg75MnjajBwl0DYhL4TClAO4SM,2605
@@ -25,12 +24,12 @@ sl_shared_assets/tools/ascension_tools.py,sha256=kIqYGX9F8lRao_LaVOacIiT8J9SypTv
  sl_shared_assets/tools/ascension_tools.pyi,sha256=tQCDdWZ20ZjUjpMs8aGIN0yBg5ff3j6spi62b3Han4o,3755
  sl_shared_assets/tools/packaging_tools.py,sha256=oY-EWuTiMfWobYllVZy0piGlVnTHCpPY1GF-WmqQdj4,7269
  sl_shared_assets/tools/packaging_tools.pyi,sha256=vgGbAQCExwg-0A5F72MzEhzHxu97Nqg1yuz-5P89ycU,3118
- sl_shared_assets/tools/project_management_tools.py,sha256=IV4xAfy3_SpV50Xvt7EdVPVLojp9di1rEmO4KoAgFtY,8740
- sl_shared_assets/tools/project_management_tools.pyi,sha256=pQY2--Kn3pKSADsArDRmbwGR7JqHD_2qdB0LJBbW_xo,2735
+ sl_shared_assets/tools/project_management_tools.py,sha256=E50xhjJZlq6zQXnj8DHm1KkVcTDK7lr61m1ge-u25TY,10476
+ sl_shared_assets/tools/project_management_tools.pyi,sha256=bRwohpGa98LDIy1ntLAPSKEGRB4S7ZmnIfyDy097c94,3467
  sl_shared_assets/tools/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
  sl_shared_assets/tools/transfer_tools.pyi,sha256=FoH7eYZe7guGHfPr0MK5ggO62uXKwD2aJ7h1Bu7PaEE,3294
- sl_shared_assets-1.0.0rc22.dist-info/METADATA,sha256=ZOXR6uB8yVsMjAntWJBTbyV0qjNsS1Q2mzA04JQ6j9E,48613
- sl_shared_assets-1.0.0rc22.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
- sl_shared_assets-1.0.0rc22.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
- sl_shared_assets-1.0.0rc22.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
- sl_shared_assets-1.0.0rc22.dist-info/RECORD,,
+ sl_shared_assets-1.0.0rc24.dist-info/METADATA,sha256=NwEZ5XdK0iaApWqMvAkrPsEA7kMdEvO6uw2XOpAAf3k,48613
+ sl_shared_assets-1.0.0rc24.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+ sl_shared_assets-1.0.0rc24.dist-info/entry_points.txt,sha256=76c00fRS4IuXBP2xOBdvycT15Zen-lHiDg2FaSt-HB4,547
+ sl_shared_assets-1.0.0rc24.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+ sl_shared_assets-1.0.0rc24.dist-info/RECORD,,
sl_shared_assets/data_classes/configuration_data.pyi DELETED
@@ -1,195 +0,0 @@
- from pathlib import Path
- from dataclasses import field, dataclass
-
- from _typeshed import Incomplete
- from ataraxis_data_structures import YamlConfig
-
- @dataclass()
- class ExperimentState:
- """Encapsulates the information used to set and maintain the desired experiment and system state.
-
- Broadly, each experiment runtime can be conceptualized as a two state-system. The first state is that of the
- experimental task, which reflects the behavior goal, the rules for achieving the goal, and the reward for
- achieving the goal. The second state is that of the data acquisition and experiment control system, which is a
- snapshot of all hardware module states that make up the system that acquires the data and controls the task
- environment. Overall, experiment state is about 'what the animal is doing', while the system state is about
- 'what the hardware is doing'.
-
- Note:
- This class is acquisition-system-agnostic. It can be used to define the ExperimentConfiguration class for any
- valid data acquisition system.
- """
-
- experiment_state_code: int
- system_state_code: int
- state_duration_s: float
-
- @dataclass()
- class MesoscopeExperimentConfiguration(YamlConfig):
- """Stores the configuration of a single experiment runtime that uses the Mesoscope_VR data acquisition system.
-
- Primarily, this includes the sequence of experiment and system states that defines the flow of the experiment
- runtime. During runtime, the main runtime control function traverses the sequence of states stored in this class
- instance start-to-end in the exact order specified by the user. Together with custom Unity projects that define
- the task logic (how the system responds to animal interactions with the VR system) this class allows flexibly
- implementing a wide range of experiments using the Mesoscope-VR system.
-
- Each project should define one or more experiment configurations and save them as .yaml files inside the project
- 'configuration' folder. The name for each configuration file is defined by the user and is used to identify and load
- the experiment configuration when 'sl-experiment' CLI command exposed by the sl-experiment library is executed.
-
- Notes:
- This class is designed exclusively for the Mesoscope-VR system. Any other system needs to define a separate
- ExperimentConfiguration class to specify its experiment runtimes and additional data.
- """
-
- cue_map: dict[int, float] = field(default_factory=Incomplete)
- experiment_states: dict[str, ExperimentState] = field(default_factory=Incomplete)
-
- @dataclass()
- class MesoscopePaths:
- """Stores the filesystem configuration parameters for the Mesoscope-VR data acquisition system."""
-
- server_credentials_path: Path = ...
- google_credentials_path: Path = ...
- root_directory: Path = ...
- server_storage_directory: Path = ...
- server_working_directory: Path = ...
- nas_directory: Path = ...
- mesoscope_directory: Path = ...
- harvesters_cti_path: Path = ...
-
- @dataclass()
- class MesoscopeCameras:
- """Stores the configuration parameters for the cameras used by the Mesoscope-VR system to record behavior videos."""
-
- face_camera_index: int = ...
- left_camera_index: int = ...
- right_camera_index: int = ...
- quantization_parameter: int = ...
-
- @dataclass()
- class MesoscopeMicroControllers:
- """Stores the configuration parameters for the microcontrollers used by the Mesoscope-VR system."""
-
- actor_port: str = ...
- sensor_port: str = ...
- encoder_port: str = ...
- mesoscope_start_ttl_module_id: int = ...
- mesoscope_stop_ttl_module_id: int = ...
- mesoscope_ttl_pulse_duration_ms: int = ...
- minimum_break_strength_g_cm: float = ...
- maximum_break_strength_g_cm: float = ...
- wheel_diameter_cm: float = ...
- lick_threshold_adc: int = ...
- lick_signal_threshold_adc: int = ...
- lick_delta_threshold_adc: int = ...
- lick_averaging_pool_size: int = ...
- torque_baseline_voltage_adc: int = ...
- torque_maximum_voltage_adc: int = ...
- torque_sensor_capacity_g_cm: float = ...
- torque_report_cw: bool = ...
- torque_report_ccw: bool = ...
- torque_signal_threshold_adc: int = ...
- torque_delta_threshold_adc: int = ...
- torque_averaging_pool_size: int = ...
- wheel_encoder_ppr = ...
- wheel_encoder_report_cw: bool = ...
- wheel_encoder_report_ccw: bool = ...
- wheel_encoder_delta_threshold_pulse: int = ...
- wheel_encoder_polling_delay_us = ...
- cm_per_unity_unit = ...
- screen_trigger_pulse_duration_ms: int = ...
- auditory_tone_duration_ms: int = ...
- valve_calibration_pulse_count: int = ...
- sensor_polling_delay_ms: int = ...
- valve_calibration_data: dict[int | float, int | float] | tuple[tuple[int | float, int | float], ...] = ...
-
- @dataclass()
- class MesoscopeAdditionalFirmware:
- """Stores the configuration parameters for all firmware and hardware components not assembled in the Sun lab."""
-
- headbar_port: str = ...
- lickport_port: str = ...
- wheel_port: str = ...
- unity_ip: str = ...
- unity_port: int = ...
-
- @dataclass()
- class MesoscopeSystemConfiguration(YamlConfig):
- """Stores the hardware and filesystem configuration parameters for the Mesoscope-VR data acquisition system used in
- the Sun lab.
-
- This class is specifically designed to encapsulate the configuration parameters for the Mesoscope-VR system. It
- expects the system to be configured according to the specifications available from the sl_experiment repository
- (https://github.com/Sun-Lab-NBB/sl-experiment) and should be used exclusively by the VRPC machine
- (main Mesoscope-VR PC).
-
- Notes:
- Each SystemConfiguration class is uniquely tied to a specific hardware configuration used in the lab. This
- class will only work with the Mesoscope-VR system. Any other data acquisition and runtime management system in
- the lab should define its own SystemConfiguration class to specify its own hardware and filesystem configuration
- parameters.
- """
-
- name: str = ...
- paths: MesoscopePaths = field(default_factory=MesoscopePaths)
- cameras: MesoscopeCameras = field(default_factory=MesoscopeCameras)
- microcontrollers: MesoscopeMicroControllers = field(default_factory=MesoscopeMicroControllers)
- additional_firmware: MesoscopeAdditionalFirmware = field(default_factory=MesoscopeAdditionalFirmware)
- def __post_init__(self) -> None:
- """Ensures that variables converted to different types for storage purposes are always set to expected types
- upon class instantiation."""
- def save(self, path: Path) -> None:
- """Saves class instance data to disk as a 'mesoscope_system_configuration.yaml' file.
-
- This method converts certain class variables to yaml-safe types (for example, Path objects -> strings) and
- saves class data to disk as a .yaml file. The method is intended to be used solely by the
- set_system_configuration_file() function and should not be called from any other context.
-
- Args:
- path: The path to the .yaml file to save the data to.
- """
-
- _supported_configuration_files: Incomplete
-
- def set_system_configuration_file(path: Path) -> None:
- """Sets the system configuration .yaml file specified by the input path as the default system configuration file for
- the managed machine (PC).
-
- This function is used to initially configure or override the existing configuration of any data acquisition system
- used in the lab. The path to the configuration file is stored inside the user's data directory, so that all
- Sun lab libraries can automatically access that information during every runtime. Since the storage directory is
- typically hidden and varies between OSes and machines, this function provides a convenient way for setting that
- path without manually editing the storage cache.
-
- Notes:
- If the input path does not point to an existing file, but the file name and extension are correct, the function
- will automatically generate a default SystemConfiguration class instance and save it under the specified path.
-
- A data acquisition system can include multiple machines (PCs). However, the configuration file is typically
- only present on the 'main' machine that manages all runtimes.
-
- Args:
- path: The path to the new system configuration file to be used by the local data acquisition system (PC).
-
- Raises:
- ValueError: If the input path is not a valid system configuration file or does not use a supported data
- acquisition system name.
- """
-
- def get_system_configuration_data() -> MesoscopeSystemConfiguration:
- """Resolves the path to the local system configuration file and loads the system configuration data.
-
- This service function is used by all Sun lab data acquisition runtimes to load the system configuration data from
- the shared configuration file. It supports resolving and returning the data for all data acquisition systems used
- in the lab.
-
- Returns:
- The initialized SystemConfiguration class instance for the local acquisition system that stores the loaded
- configuration parameters.
-
- Raises:
- FileNotFoundError: If the local machine does not have the Sun lab data directory, or the system configuration
- file does not exist.
- """