sl-shared-assets 1.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of sl-shared-assets might be problematic. Click here for more details.

Files changed (36)
  1. sl_shared_assets/__init__.py +80 -0
  2. sl_shared_assets/__init__.pyi +73 -0
  3. sl_shared_assets/cli.py +384 -0
  4. sl_shared_assets/cli.pyi +94 -0
  5. sl_shared_assets/data_classes/__init__.py +66 -0
  6. sl_shared_assets/data_classes/__init__.pyi +61 -0
  7. sl_shared_assets/data_classes/configuration_data.py +479 -0
  8. sl_shared_assets/data_classes/configuration_data.pyi +199 -0
  9. sl_shared_assets/data_classes/runtime_data.py +251 -0
  10. sl_shared_assets/data_classes/runtime_data.pyi +145 -0
  11. sl_shared_assets/data_classes/session_data.py +625 -0
  12. sl_shared_assets/data_classes/session_data.pyi +252 -0
  13. sl_shared_assets/data_classes/surgery_data.py +152 -0
  14. sl_shared_assets/data_classes/surgery_data.pyi +89 -0
  15. sl_shared_assets/py.typed +0 -0
  16. sl_shared_assets/server/__init__.py +8 -0
  17. sl_shared_assets/server/__init__.pyi +8 -0
  18. sl_shared_assets/server/job.py +140 -0
  19. sl_shared_assets/server/job.pyi +94 -0
  20. sl_shared_assets/server/server.py +214 -0
  21. sl_shared_assets/server/server.pyi +95 -0
  22. sl_shared_assets/tools/__init__.py +15 -0
  23. sl_shared_assets/tools/__init__.pyi +15 -0
  24. sl_shared_assets/tools/ascension_tools.py +277 -0
  25. sl_shared_assets/tools/ascension_tools.pyi +68 -0
  26. sl_shared_assets/tools/packaging_tools.py +148 -0
  27. sl_shared_assets/tools/packaging_tools.pyi +56 -0
  28. sl_shared_assets/tools/project_management_tools.py +201 -0
  29. sl_shared_assets/tools/project_management_tools.pyi +54 -0
  30. sl_shared_assets/tools/transfer_tools.py +119 -0
  31. sl_shared_assets/tools/transfer_tools.pyi +53 -0
  32. sl_shared_assets-1.0.0.dist-info/METADATA +869 -0
  33. sl_shared_assets-1.0.0.dist-info/RECORD +36 -0
  34. sl_shared_assets-1.0.0.dist-info/WHEEL +4 -0
  35. sl_shared_assets-1.0.0.dist-info/entry_points.txt +8 -0
  36. sl_shared_assets-1.0.0.dist-info/licenses/LICENSE +674 -0
@@ -0,0 +1,80 @@
"""A Python library that stores assets shared between multiple Sun (NeuroAI) lab data pipelines.

See https://github.com/Sun-Lab-NBB/sl-shared-assets for more details.
API documentation: https://sl-shared-assets-api-docs.netlify.app/
Authors: Ivan Kondratyev (Inkaros), Kushaan Gupta, Yuantao Deng, Natalie Yeung
"""

from ataraxis_base_utilities import console

from .tools import transfer_directory, verify_session_checksum, generate_project_manifest, calculate_directory_checksum
from .server import Job, Server, ServerCredentials
from .data_classes import (
    RawData,
    DrugData,
    ImplantData,
    SessionData,
    SubjectData,
    SurgeryData,
    InjectionData,
    ProcedureData,
    ProcessedData,
    MesoscopePaths,
    ZaberPositions,
    ExperimentState,
    MesoscopeCameras,
    MesoscopePositions,
    ProjectConfiguration,
    RunTrainingDescriptor,
    LickTrainingDescriptor,
    MesoscopeHardwareState,
    MesoscopeMicroControllers,
    MesoscopeAdditionalFirmware,
    MesoscopeSystemConfiguration,
    MesoscopeExperimentDescriptor,
    MesoscopeExperimentConfiguration,
    get_system_configuration_data,
    set_system_configuration_file,
)

# Ensures console is enabled when this library is imported, so that downstream message / error reporting works
# without requiring the caller to enable it explicitly.
if not console.enabled:
    console.enable()

__all__ = [
    # Server package
    "Server",
    "ServerCredentials",
    "Job",
    # Data classes package
    "DrugData",
    "ImplantData",
    "SessionData",
    "RawData",
    "ProcessedData",
    "SubjectData",
    "SurgeryData",
    "InjectionData",
    "ProcedureData",
    "ZaberPositions",
    "ExperimentState",
    "MesoscopePositions",
    "ProjectConfiguration",
    "MesoscopeHardwareState",
    "RunTrainingDescriptor",
    "LickTrainingDescriptor",
    "MesoscopeExperimentConfiguration",
    "MesoscopeExperimentDescriptor",
    "MesoscopeSystemConfiguration",
    "MesoscopePaths",
    "MesoscopeCameras",
    "MesoscopeMicroControllers",
    "MesoscopeAdditionalFirmware",
    "get_system_configuration_data",
    "set_system_configuration_file",
    # Tools package
    "transfer_directory",
    "generate_project_manifest",
    "verify_session_checksum",
    "calculate_directory_checksum",
]
@@ -0,0 +1,73 @@
# Type stub for the package's top-level namespace. Uses the 'X as X' re-export form so type
# checkers treat every imported name as an explicit public re-export (PEP 484 stub convention).
from .tools import (
    transfer_directory as transfer_directory,
    verify_session_checksum as verify_session_checksum,
    generate_project_manifest as generate_project_manifest,
    calculate_directory_checksum as calculate_directory_checksum,
)
from .server import (
    Job as Job,
    Server as Server,
    ServerCredentials as ServerCredentials,
)
from .data_classes import (
    RawData as RawData,
    DrugData as DrugData,
    ImplantData as ImplantData,
    SessionData as SessionData,
    SubjectData as SubjectData,
    SurgeryData as SurgeryData,
    InjectionData as InjectionData,
    ProcedureData as ProcedureData,
    ProcessedData as ProcessedData,
    MesoscopePaths as MesoscopePaths,
    ZaberPositions as ZaberPositions,
    ExperimentState as ExperimentState,
    MesoscopeCameras as MesoscopeCameras,
    MesoscopePositions as MesoscopePositions,
    ProjectConfiguration as ProjectConfiguration,
    RunTrainingDescriptor as RunTrainingDescriptor,
    LickTrainingDescriptor as LickTrainingDescriptor,
    MesoscopeHardwareState as MesoscopeHardwareState,
    MesoscopeMicroControllers as MesoscopeMicroControllers,
    MesoscopeAdditionalFirmware as MesoscopeAdditionalFirmware,
    MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
    MesoscopeExperimentDescriptor as MesoscopeExperimentDescriptor,
    MesoscopeExperimentConfiguration as MesoscopeExperimentConfiguration,
    get_system_configuration_data as get_system_configuration_data,
    set_system_configuration_file as set_system_configuration_file,
)

__all__ = [
    "Server",
    "ServerCredentials",
    "Job",
    "DrugData",
    "ImplantData",
    "SessionData",
    "RawData",
    "ProcessedData",
    "SubjectData",
    "SurgeryData",
    "InjectionData",
    "ProcedureData",
    "ZaberPositions",
    "ExperimentState",
    "MesoscopePositions",
    "ProjectConfiguration",
    "MesoscopeHardwareState",
    "RunTrainingDescriptor",
    "LickTrainingDescriptor",
    "MesoscopeExperimentConfiguration",
    "MesoscopeExperimentDescriptor",
    "MesoscopeSystemConfiguration",
    "MesoscopePaths",
    "MesoscopeCameras",
    "MesoscopeMicroControllers",
    "MesoscopeAdditionalFirmware",
    "get_system_configuration_data",
    "set_system_configuration_file",
    "transfer_directory",
    "generate_project_manifest",
    "verify_session_checksum",
    "calculate_directory_checksum",
]
@@ -0,0 +1,384 @@
"""This module stores the Command-Line Interfaces (CLIs) exposed by the library as part of the installation process."""

from pathlib import Path

import click
from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists

from .tools import ascend_tyche_data, verify_session_checksum, generate_project_manifest
from .server import generate_server_credentials
from .data_classes import (
    SessionData,
    ExperimentState,
    ProjectConfiguration,
    MesoscopeSystemConfiguration,
    MesoscopeExperimentConfiguration,
    get_system_configuration_data,
    set_system_configuration_file,
)
19
+
20
+
21
@click.command()
@click.option(
    "-sp",
    "--session_path",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the session whose raw data needs to be verified for potential corruption.",
)
@click.option(
    "-c",
    "--create_processed_directories",
    is_flag=True,
    show_default=True,
    default=False,
    help=(
        "Determines whether to create the processed data hierarchy. This flag should be disabled for most runtimes. "
        "Primarily, it is used by lab acquisition system code to generate processed data directories on the remote "
        "compute servers as part of the data preprocessing pipeline."
    ),
)
@click.option(
    "-pdr",
    "--processed_data_root",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=False,
    help=(
        "The absolute path to the directory where processed data from all projects is stored on the machine that runs "
        "this command. This argument is used when calling the CLI on the BioHPC server, which uses different data "
        "volumes for raw and processed data. Note, the input path must point to the root directory, as it will be "
        "automatically modified to include the project name, the animal id, and the session ID. This argument is only "
        "used if 'create_processed_directories' flag is True."
    ),
)
def verify_session_integrity(
    session_path: Path, create_processed_directories: bool, processed_data_root: Path | None
) -> None:
    """Checks the integrity of the target session's raw data (contents of the raw_data directory).

    This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
    that stores the data checksum generated before transferring the data to long-term storage destination. This
    command always verifies the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any
    other directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the
    session as 'incomplete' and automatically excluding it from all further automated processing runtimes. If the
    session data is intact, generates a 'verified.bin' marker file inside the session's raw_data folder.

    The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
    processed session. This use case is fully automated and should not be triggered manually by the user.
    """
    # Click already resolves the option to a Path instance (path_type=Path); the conversion is kept as a cheap
    # safeguard for direct (non-CLI) invocations that pass a string.
    session = Path(session_path)

    # Runs the verification process. The checksum verifier communicates its outcome via marker files inside the
    # session's raw_data directory, rather than via a return value.
    verify_session_checksum(
        session, create_processed_data_directory=create_processed_directories, processed_data_root=processed_data_root
    )

    # Checks the outcome of the verification process and reports it to the user via the console.
    session_data = SessionData.load(session_path=session)
    if session_data.raw_data.verified_bin_path.exists():
        console.echo(message=f"Session {session.stem} raw data integrity: Verified.", level=LogLevel.SUCCESS)
    else:
        console.echo(message=f"Session {session.stem} raw data integrity: Compromised!", level=LogLevel.ERROR)
80
+
81
+
82
@click.command()
@click.option(
    "-pp",
    "--project_path",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the project directory where raw session data is stored.",
)
@click.option(
    "-od",
    "--output_directory",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the directory where to store the generated project manifest file.",
)
@click.option(
    "-ppp",
    "--project_processed_path",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=False,
    help=(
        "The absolute path to the project directory where processed session data is stored, if different from the "
        "directory used to store raw session data. Typically, this extra argument is only used when processing data "
        "stored on remote compute server(s)."
    ),
)
def generate_project_manifest_file(
    project_path: Path, output_directory: Path, project_processed_path: Path | None
) -> None:
    """Generates the manifest .feather file that provides information about the data-processing state of all available
    project sessions.

    The manifest file is typically used when batch-processing session data on the remote compute server. It contains the
    comprehensive snapshot of the available project's data in a table-compatible format that can also be transferred
    between machines (as it is cached in a file).
    """
    # Click already resolves the options to Path instances (path_type=Path); the conversions below additionally
    # guard direct (non-CLI) invocations that pass strings. The optional processed-path argument is forwarded as
    # None when the caller did not provide it.
    generate_project_manifest(
        raw_project_directory=Path(project_path),
        output_directory=Path(output_directory),
        processed_project_directory=Path(project_processed_path) if project_processed_path else None,
    )
    console.echo(message=f"Project {Path(project_path).stem} data manifest file: generated.", level=LogLevel.SUCCESS)
124
+
125
+
126
@click.command()
@click.option(
    "-od",
    "--output_directory",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the directory where to store the generated system configuration file.",
)
@click.option(
    "-as",
    "--acquisition_system",
    type=str,
    show_default=True,
    required=True,
    default="mesoscope-vr",
    help=(
        "The type (name) of the data acquisition system for which to generate the configuration file. Note, currently, "
        "only the following types are supported: mesoscope-vr."
    ),
)
def generate_system_configuration_file(output_directory: Path, acquisition_system: str) -> None:
    """Generates a precursor system configuration file for the target acquisition system and configures all local
    Sun lab libraries to use that file to load the acquisition system configuration data.

    This command is typically used when setting up a new data acquisition system in the lab. The system configuration
    only needs to be specified on the machine (PC) that runs the sl-experiment library and manages the acquisition
    runtime if the system uses multiple machines (PCs). Once the system configuration .yaml file is created via this
    command, editing the configuration parameters in the file will automatically take effect during all following
    runtimes.
    """

    # Verifies that the input path is a valid directory path and, if necessary, creates the directory specified by the
    # path. Guard clause: console.error() raises the supplied exception, so execution does not continue past it.
    path = Path(output_directory)
    if not path.is_dir():
        message = (
            f"Unable to generate the system configuration file for the system '{acquisition_system}'. The path to "
            f"the output directory ({path}) is not a valid directory path."
        )
        console.error(message=message, error=ValueError)
        raise ValueError(message)  # Fall-back to appease mypy, should not be reachable
    ensure_directory_exists(path)

    # Mesoscope
    if acquisition_system.lower() == "mesoscope-vr":
        file_name = "mesoscope_system_configuration.yaml"
        file_path = path.joinpath(file_name)
        # Saves a default (precursor) configuration and registers it as the active system configuration file, so
        # that all local Sun lab libraries resolve it automatically during later runtimes.
        system_configuration = MesoscopeSystemConfiguration()
        system_configuration.save(file_path)
        set_system_configuration_file(file_path)
        message = (
            f"Mesoscope-VR system configuration file: generated. Edit the configuration parameters stored inside the "
            f"{file_name} file to match the state of the acquisition system and use context."
        )
        console.echo(message=message, level=LogLevel.SUCCESS)

    # For unsupported system types, raises an error message
    else:
        message = (
            f"Unable to generate the system configuration file for the system '{acquisition_system}'. The input "
            f"acquisition system is not supported (not recognized). Currently, only the following acquisition "
            f"systems are supported: mesoscope-vr."
        )
        console.error(message=message, error=ValueError)
        raise ValueError(message)  # Fall-back to appease mypy, should not be reachable
190
+
191
+
192
@click.command()
@click.option(
    "-od",
    "--output_directory",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the directory where to store the generated server credentials file.",
)
@click.option(
    "-h",
    "--host",
    type=str,
    required=True,
    show_default=True,
    default="cbsuwsun.biohpc.cornell.edu",
    help="The host name or IP address of the server to connect to.",
)
@click.option(
    "-u",
    "--username",
    type=str,
    required=True,
    help="The username to use for server authentication.",
)
@click.option(
    "-p",
    "--password",
    type=str,
    required=True,
    help="The password to use for server authentication.",
)
def generate_server_credentials_file(output_directory: Path, host: str, username: str, password: str) -> None:
    """Generates a new server_credentials.yaml file under the specified directory, using input information.

    This command is used to set up access to compute servers and clusters on new machines (PCs). The data stored inside
    the server_credentials.yaml file generated by this command is used by the Server and Job classes used in many Sun
    lab data processing libraries.
    """
    # Delegates the credential file generation to the server package helper.
    generate_server_credentials(
        output_directory=Path(output_directory), username=username, password=password, host=host
    )
    message = (
        f"Server access credentials file: generated. If necessary, remember to edit the data acquisition system "
        f"configuration file to include the path to the credentials file generated via this CLI."
    )
    console.echo(message=message, level=LogLevel.SUCCESS)
238
+
239
+
240
@click.command()
@click.option(
    "-p",
    "--project",
    type=str,
    required=True,
    help="The name of the project to be created.",
)
@click.option(
    "-sli",
    "--surgery_log_id",
    type=str,
    required=True,
    help="The 44-symbol alpha-numeric ID code used by the project's surgery log Google sheet.",
)
@click.option(
    "-wli",
    "--water_restriction_log_id",
    type=str,
    required=True,
    help="The 44-symbol alpha-numeric ID code used by the project's water restriction log Google sheet.",
)
def generate_project_configuration_file(project: str, surgery_log_id: str, water_restriction_log_id: str) -> None:
    """Generates a new project directory hierarchy and writes its configuration as a project_configuration.yaml file.

    This command creates new Sun lab projects. Until a project is created in this fashion, all data-acquisition and
    data-processing commands from sl-experiment and sl-forgery libraries targeting the project will not work. This
    command is intended to be called on the main computer of the data-acquisition system(s) used by the project. Note,
    this command assumes that the local machine (PC) is the main PC of the data acquisition system and has a valid
    acquisition system configuration .yaml file.
    """

    # Queries the data acquisition configuration data. Specifically, this is used to get the path to the root
    # directory where all projects are stored on the local machine.
    system_configuration = get_system_configuration_data()
    file_path = system_configuration.paths.root_directory.joinpath(
        project, "configuration", "project_configuration.yaml"
    )

    # Generates the initial project directory hierarchy
    ensure_directory_exists(file_path)

    # Saves project configuration data as a .yaml file to the 'configuration' directory of the created project.
    # Note: the original code called 'file_path.joinpath()' with no arguments here, which is a no-op.
    configuration = ProjectConfiguration(
        project_name=project, surgery_sheet_id=surgery_log_id, water_log_sheet_id=water_restriction_log_id
    )
    configuration.save(path=file_path)
    console.echo(message=f"Project {project} data structure and configuration file: generated.", level=LogLevel.SUCCESS)
288
+
289
+
290
@click.command()
@click.option(
    "-p",
    "--project",
    type=str,
    required=True,
    help="The name of the project for which to generate the new experiment configuration file.",
)
@click.option(
    "-e",
    "--experiment",
    type=str,
    required=True,
    help="The name of the experiment. Note, the generated experiment configuration file will also use this name.",
)
@click.option(
    "-sc",
    "--state_count",
    type=int,
    required=True,
    help="The total number of experiment and acquisition system state combinations in the experiment.",
)
def generate_experiment_configuration_file(project: str, experiment: str, state_count: int) -> None:
    """Generates a precursor experiment configuration .yaml file for the target experiment inside the project's
    configuration folder.

    This command assists users in creating new experiment configurations, by statically resolving the structure (layout)
    of the appropriate experiment configuration file for the acquisition system of the local machine (PC). Specifically,
    the generated precursor will contain the correct number of experiment state entries initialized to nonsensical
    default value. The user needs to manually edit the configuration file to properly specify their experiment runtime
    parameters and state transitions before running the experiment. In a sense, this command acts as an 'experiment
    template' generator.
    """

    # Resolves the acquisition system configuration. Uses the path to the local project directory and the project name
    # to determine where to save the experiment configuration file
    acquisition_system = get_system_configuration_data()
    file_path = acquisition_system.paths.root_directory.joinpath(project, "configuration", f"{experiment}.yaml")

    # Aborts early if the target project has not been created on this machine yet.
    if not acquisition_system.paths.root_directory.joinpath(project).exists():
        message = (
            f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
            f"The target project does not exist on the local machine (PC). Use the "
            f"'sl-create-project' CLI command to create the project before creating new experiment configuration(s). "
        )
        console.error(message=message, error=ValueError)
        raise ValueError(message)  # Fall-back to appease mypy, should not be reachable

    # Loops over the number of requested states and, for each, generates a precursor experiment state field inside the
    # 'states' dictionary.
    states: dict[str, ExperimentState] = {}
    for state in range(state_count):
        states[f"state_{state + 1}"] = ExperimentState(
            experiment_state_code=state + 1,  # Assumes experiment state sequences are 1-based
            system_state_code=0,
            state_duration_s=60,
        )

    # Depending on the acquisition system, packs state data into the appropriate experiment configuration class and
    # saves it to the project's configuration folder as a .yaml file.
    if acquisition_system.name == "mesoscope-vr":
        experiment_configuration = MesoscopeExperimentConfiguration(experiment_states=states)

    else:
        message = (
            f"Unable to generate the experiment {experiment} configuration file for the project {project}. "
            f"The data acquisition system of the local machine (PC) is not supported (not recognized). Currently, only "
            f"the following acquisition systems are supported: mesoscope-vr."
        )
        console.error(message=message, error=ValueError)
        raise ValueError(message)  # Fall-back to appease mypy, should not be reachable

    experiment_configuration.to_yaml(file_path=file_path)
    console.echo(message=f"Experiment {experiment} configuration file: generated.", level=LogLevel.SUCCESS)
364
+
365
+
366
@click.command()
@click.option(
    "-id",
    "--input_directory",
    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
    required=True,
    help="The absolute path to the directory that stores original Tyche animal folders.",
)
def ascend_tyche_directory(input_directory: Path) -> None:
    """Restructures old Tyche project data to use the modern Sun lab data structure and uploads them to the processing
    server.

    This command is used to convert ('ascend') the old Tyche project data to the modern Sun lab structure. After
    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries. Note, this
    process expects the input data to be preprocessed using an old Sun lab mesoscope data preprocessing pipeline. It
    will not work for any other project or data. Also, this command will only work on a machine (PC) that belongs to a
    valid Sun lab data acquisition system, such as VRPC of the Mesoscope-VR system.
    """
    # Delegates the entire conversion-and-upload workflow to the tools package helper.
    ascend_tyche_data(root_directory=Path(input_directory))
@@ -0,0 +1,94 @@
# Type stub for the CLI module. Documents the signatures of the undecorated command callbacks.
from pathlib import Path

from .tools import (
    ascend_tyche_data as ascend_tyche_data,
    verify_session_checksum as verify_session_checksum,
    generate_project_manifest as generate_project_manifest,
)
from .server import generate_server_credentials as generate_server_credentials
from .data_classes import (
    SessionData as SessionData,
    ExperimentState as ExperimentState,
    ProjectConfiguration as ProjectConfiguration,
    MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
    MesoscopeExperimentConfiguration as MesoscopeExperimentConfiguration,
    get_system_configuration_data as get_system_configuration_data,
    set_system_configuration_file as set_system_configuration_file,
)

def verify_session_integrity(
    session_path: Path, create_processed_directories: bool, processed_data_root: Path | None
) -> None:
    """Checks the integrity of the target session's raw data (contents of the raw_data directory).

    This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
    that stores the data checksum generated before transferring the data to long-term storage destination. This
    command always verifies the integrity of the 'raw_data' directory. It does not work with 'processed_data' or any
    other directories. If the session data was corrupted, the command removes the 'telomere.bin' file, marking the
    session as 'incomplete' and automatically excluding it from all further automated processing runtimes. If the
    session data is intact, generates a 'verified.bin' marker file inside the session's raw_data folder.

    The command is also used by Sun lab data acquisition systems to generate the processed data hierarchy for each
    processed session. This use case is fully automated and should not be triggered manually by the user.
    """

def generate_project_manifest_file(
    project_path: Path, output_directory: Path, project_processed_path: Path | None
) -> None:
    """Generates the manifest .feather file that provides information about the data-processing state of all available
    project sessions.

    The manifest file is typically used when batch-processing session data on the remote compute server. It contains the
    comprehensive snapshot of the available project's data in a table-compatible format that can also be transferred
    between machines (as it is cached in a file).
    """

def generate_system_configuration_file(output_directory: Path, acquisition_system: str) -> None:
    """Generates a precursor system configuration file for the target acquisition system and configures all local
    Sun lab libraries to use that file to load the acquisition system configuration data.

    This command is typically used when setting up a new data acquisition system in the lab. The system configuration
    only needs to be specified on the machine (PC) that runs the sl-experiment library and manages the acquisition
    runtime if the system uses multiple machines (PCs). Once the system configuration .yaml file is created via this
    command, editing the configuration parameters in the file will automatically take effect during all following
    runtimes.
    """

def generate_server_credentials_file(output_directory: Path, host: str, username: str, password: str) -> None:
    """Generates a new server_credentials.yaml file under the specified directory, using input information.

    This command is used to set up access to compute servers and clusters on new machines (PCs). The data stored inside
    the server_credentials.yaml file generated by this command is used by the Server and Job classes used in many Sun
    lab data processing libraries.
    """

def generate_project_configuration_file(project: str, surgery_log_id: str, water_restriction_log_id: str) -> None:
    """Generates a new project directory hierarchy and writes its configuration as a project_configuration.yaml file.

    This command creates new Sun lab projects. Until a project is created in this fashion, all data-acquisition and
    data-processing commands from sl-experiment and sl-forgery libraries targeting the project will not work. This
    command is intended to be called on the main computer of the data-acquisition system(s) used by the project. Note,
    this command assumes that the local machine (PC) is the main PC of the data acquisition system and has a valid
    acquisition system configuration .yaml file.
    """

def generate_experiment_configuration_file(project: str, experiment: str, state_count: int) -> None:
    """Generates a precursor experiment configuration .yaml file for the target experiment inside the project's
    configuration folder.

    This command assists users in creating new experiment configurations, by statically resolving the structure (layout)
    of the appropriate experiment configuration file for the acquisition system of the local machine (PC). Specifically,
    the generated precursor will contain the correct number of experiment state entries initialized to nonsensical
    default value. The user needs to manually edit the configuration file to properly specify their experiment runtime
    parameters and state transitions before running the experiment. In a sense, this command acts as an 'experiment
    template' generator.
    """

def ascend_tyche_directory(input_directory: Path) -> None:
    """Restructures old Tyche project data to use the modern Sun lab data structure and uploads them to the processing
    server.

    This command is used to convert ('ascend') the old Tyche project data to the modern Sun lab structure. After
    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries. Note, this
    process expects the input data to be preprocessed using an old Sun lab mesoscope data preprocessing pipeline. It
    will not work for any other project or data. Also, this command will only work on a machine (PC) that belongs to a
    valid Sun lab data acquisition system, such as VRPC of the Mesoscope-VR system.
    """