sl-shared-assets 4.0.1__py3-none-any.whl → 5.0.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of sl-shared-assets might be problematic.
- sl_shared_assets/__init__.py +45 -42
- sl_shared_assets/command_line_interfaces/__init__.py +3 -0
- sl_shared_assets/command_line_interfaces/configure.py +173 -0
- sl_shared_assets/command_line_interfaces/manage.py +226 -0
- sl_shared_assets/data_classes/__init__.py +33 -32
- sl_shared_assets/data_classes/configuration_data.py +267 -79
- sl_shared_assets/data_classes/session_data.py +226 -289
- sl_shared_assets/server/__init__.py +24 -4
- sl_shared_assets/server/job.py +6 -7
- sl_shared_assets/server/pipeline.py +570 -0
- sl_shared_assets/server/server.py +57 -25
- sl_shared_assets/tools/__init__.py +9 -8
- sl_shared_assets/tools/packaging_tools.py +14 -25
- sl_shared_assets/tools/project_management_tools.py +602 -523
- sl_shared_assets/tools/transfer_tools.py +88 -23
- {sl_shared_assets-4.0.1.dist-info → sl_shared_assets-5.0.0.dist-info}/METADATA +46 -203
- sl_shared_assets-5.0.0.dist-info/RECORD +23 -0
- sl_shared_assets-5.0.0.dist-info/entry_points.txt +3 -0
- sl_shared_assets/__init__.pyi +0 -91
- sl_shared_assets/cli.py +0 -501
- sl_shared_assets/cli.pyi +0 -106
- sl_shared_assets/data_classes/__init__.pyi +0 -75
- sl_shared_assets/data_classes/configuration_data.pyi +0 -235
- sl_shared_assets/data_classes/runtime_data.pyi +0 -157
- sl_shared_assets/data_classes/session_data.pyi +0 -379
- sl_shared_assets/data_classes/surgery_data.pyi +0 -89
- sl_shared_assets/server/__init__.pyi +0 -11
- sl_shared_assets/server/job.pyi +0 -205
- sl_shared_assets/server/server.pyi +0 -298
- sl_shared_assets/tools/__init__.pyi +0 -19
- sl_shared_assets/tools/ascension_tools.py +0 -265
- sl_shared_assets/tools/ascension_tools.pyi +0 -68
- sl_shared_assets/tools/packaging_tools.pyi +0 -58
- sl_shared_assets/tools/project_management_tools.pyi +0 -239
- sl_shared_assets/tools/transfer_tools.pyi +0 -53
- sl_shared_assets-4.0.1.dist-info/RECORD +0 -36
- sl_shared_assets-4.0.1.dist-info/entry_points.txt +0 -7
- {sl_shared_assets-4.0.1.dist-info → sl_shared_assets-5.0.0.dist-info}/WHEEL +0 -0
- {sl_shared_assets-4.0.1.dist-info → sl_shared_assets-5.0.0.dist-info}/licenses/LICENSE +0 -0
sl_shared_assets/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-"""A Python library that
+"""A Python library that provides data acquisition and processing assets shared between Sun (NeuroAI) lab libraries.
 
 See https://github.com/Sun-Lab-NBB/sl-shared-assets for more details.
 API documentation: https://sl-shared-assets-api-docs.netlify.app/
@@ -9,12 +9,22 @@ from ataraxis_base_utilities import console
 
 from .tools import (
     ProjectManifest,
-
+    delete_directory,
     transfer_directory,
-    generate_project_manifest,
     calculate_directory_checksum,
 )
-from .server import
+from .server import (
+    Job,
+    Server,
+    JupyterJob,
+    ProcessingStatus,
+    TrackerFileNames,
+    ProcessingTracker,
+    ServerCredentials,
+    ProcessingPipeline,
+    ProcessingPipelines,
+    generate_manager_id,
+)
 from .data_classes import (
     RawData,
     DrugData,
@@ -31,8 +41,6 @@ from .data_classes import (
     ExperimentState,
     ExperimentTrial,
     MesoscopeCameras,
-    TrackerFileNames,
-    ProcessingTracker,
     AcquisitionSystems,
     MesoscopePositions,
     RunTrainingDescriptor,
@@ -44,10 +52,9 @@ from .data_classes import (
     MesoscopeSystemConfiguration,
     MesoscopeExperimentDescriptor,
     MesoscopeExperimentConfiguration,
-
-
+    get_working_directory,
+    get_credentials_file_path,
     get_system_configuration_data,
-    set_system_configuration_file,
 )
 
 # Ensures console is enabled when this library is imported
@@ -55,48 +62,44 @@ if not console.enabled:
     console.enable()
 
 __all__ = [
-
-    "Server",
-    "ServerCredentials",
-    "Job",
-    "JupyterJob",
-    # Data classes package
+    "AcquisitionSystems",
     "DrugData",
+    "ExperimentState",
+    "ExperimentTrial",
     "ImplantData",
-    "SessionData",
-    "RawData",
-    "ProcessedData",
-    "SubjectData",
-    "SurgeryData",
     "InjectionData",
-    "
-    "
-    "ZaberPositions",
-    "ExperimentState",
-    "MesoscopePositions",
-    "MesoscopeHardwareState",
-    "RunTrainingDescriptor",
+    "Job",
+    "JupyterJob",
     "LickTrainingDescriptor",
+    "MesoscopeAdditionalFirmware",
+    "MesoscopeCameras",
     "MesoscopeExperimentConfiguration",
     "MesoscopeExperimentDescriptor",
-    "
-    "MesoscopePaths",
-    "MesoscopeCameras",
+    "MesoscopeHardwareState",
     "MesoscopeMicroControllers",
-    "
-    "
-    "
-    "
+    "MesoscopePaths",
+    "MesoscopePositions",
+    "MesoscopeSystemConfiguration",
+    "ProcedureData",
+    "ProcessedData",
+    "ProcessingTracker",
+    "ProjectManifest",
+    "RawData",
+    "RunTrainingDescriptor",
+    "Server",
+    "ServerCredentials",
+    "SessionData",
     "SessionTypes",
-    "
+    "SubjectData",
+    "SurgeryData",
+    "TrackerFileNames",
     "WindowCheckingDescriptor",
-    "
+    "ZaberPositions",
+    "calculate_directory_checksum",
+    "delete_directory",
     "generate_manager_id",
-    "
-
-    "
-    "resolve_p53_marker",
+    "get_credentials_file_path",
+    "get_system_configuration_data",
+    "get_working_directory",
     "transfer_directory",
-    "calculate_directory_checksum",
-    "generate_project_manifest",
 ]
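The reorganized __all__ list above flattens the server, data-class, and tool exports into a single alphabetized top-level namespace. Below is a minimal import sketch based only on the names listed in this diff; the role comments are inferred from the diff and are not verified against the released wheel.

# Sketch: importing part of the 5.0.0 top-level API. Names come from the __all__ list
# in the diff above; the role comments are inferences, not verified documentation.
from sl_shared_assets import (
    Server,               # connects to and runs remote jobs on the lab compute server(s)
    ServerCredentials,    # credential data loaded from the generated credentials .yaml file
    ProcessingTracker,    # pipeline-state tracker, now re-exported from .server rather than .data_classes
    generate_manager_id,  # produces the manager identifier consumed by the management CLIs
)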
sl_shared_assets/command_line_interfaces/configure.py
ADDED
@@ -0,0 +1,173 @@
+"""This module provides the Command-Line Interface (CLI) for configuring major components of the Sun lab data
+workflow."""
+
+from pathlib import Path
+
+import click
+from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
+
+from ..server import generate_server_credentials
+from ..data_classes import (
+    AcquisitionSystems,
+    get_working_directory,
+    set_working_directory,
+    create_system_configuration_file,
+)
+
+# Ensures that displayed CLICK help messages are formatted according to the lab standard.
+CONTEXT_SETTINGS = dict(max_content_width=120) # or any width you want
+
+
+@click.group("manage", context_settings=CONTEXT_SETTINGS)
+def configure() -> None:
+    """This Command-Line Interface (CLI) allows configuring major components of the Sun lab data acquisition,
+    processing, and analysis workflow, such as acquisition systems and compute server(s)."""
+
+
+@configure.command("directory")
+@click.option(
+    "-d",
+    "--directory",
+    type=click.Path(exists=False, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    help="The absolute path to the directory where to cache Sun lab configuration and local runtime data.",
+)
+def configure_directory(directory: Path) -> None:
+    """Sets the input directory as the Sun lab working directory, creating any missing path components.
+
+    This command as the initial entry-point for setting up any machine (PC) to work with Sun lab libraries and data.
+    After the working directory is configured, all calls to this and all other Sun lab libraries automatically use this
+    directory to store the configuration and runtime data required to perform any requested task. This allows all Sun
+    lab libraries to behave consistently across different user machines and runtime contexts.
+    """
+    # Creates the directory if it does not exist
+    ensure_directory_exists(directory)
+
+    # Sets the directory as the local working directory
+    set_working_directory(path=directory)
+
+    console.echo(message=f"Sun lab working directory set to: {directory}.", level=LogLevel.SUCCESS)
+
+
+@configure.command("server")
+@click.option(
+    "-u",
+    "--username",
+    type=str,
+    required=True,
+    help="The username to use for server authentication.",
+)
+@click.option(
+    "-p",
+    "--password",
+    type=str,
+    required=True,
+    help="The password to use for server authentication.",
+)
+@click.option(
+    "-s",
+    "--service",
+    is_flag=True,
+    default=False,
+    help=(
+        "Determines whether the credentials' file is created for a service account. This determines the name of the "
+        "generated file. Do not provide this flag unless creating a service credentials file."
+    ),
+)
+@click.option(
+    "-h",
+    "--host",
+    type=str,
+    required=True,
+    show_default=True,
+    default="cbsuwsun.biohpc.cornell.edu",
+    help="The host name or IP address of the server.",
+)
+@click.option(
+    "-sr",
+    "--storage-root",
+    type=str,
+    required=True,
+    show_default=True,
+    default="/local/storage",
+    help=(
+        "The absolute path to to the root storage server directory. Typically, this is the path to the "
+        "top-level (root) directory of the HDD RAID volume."
+    ),
+)
+@click.option(
+    "-wr",
+    "--working-root",
+    type=str,
+    required=True,
+    show_default=True,
+    default="/local/workdir",
+    help=(
+        "The absolute path to the root working server directory. Typically, this is the path to the top-level "
+        "(root) directory of the NVME RAID volume. If the server uses the same volume for both storing and working "
+        "with data, set this to the same path as the 'storage-root' argument."
+    ),
+)
+@click.option(
+    "-sd",
+    "--shared-directory",
+    type=str,
+    required=True,
+    show_default=True,
+    default="sun_data",
+    help="The name of the shared directory used to store all Sun lab project data on all server volumes.",
+)
+def generate_server_credentials_file(
+    username: str,
+    password: str,
+    service: bool,
+    host: str,
+    storage_root: str,
+    working_root: str,
+    shared_directory: str,
+) -> None:
+    """Generates a service or user server access credentials' file.
+
+    This command is used to set up access to the lab's remote compute server(s). The Server class uses the data stored
+    inside the generated credentials .yaml file to connect to and execute remote jobs on the target compute server(s).
+    Depending on the configuration, this command generates either the 'user_credentials.yaml' or
+    'service_credentials.yaml' file.
+    """
+
+    # Resolves the path to the local Sun lab working directory.
+    output_directory = get_working_directory()
+
+    # Generates the requested credentials' file.
+    generate_server_credentials(
+        output_directory=output_directory,
+        username=username,
+        password=password,
+        service=service,
+        host=host,
+        storage_root=storage_root,
+        working_root=working_root,
+        shared_directory_name=shared_directory,
+    )
+
+
+@configure.command("system")
+@click.option(
+    "-s",
+    "--system",
+    type=click.Choice(AcquisitionSystems, case_sensitive=False),
+    show_default=True,
+    required=True,
+    default=AcquisitionSystems.MESOSCOPE_VR,
+    help="The type (name) of the data acquisition system for which to generate the configuration file.",
+)
+def generate_system_configuration_file(system: AcquisitionSystems) -> None:
+    """Generates the configuration file for the specified data acquisition system.
+
+    This command is typically used when setting up new data acquisition systems in the lab. The sl-experiment library
+    uses the created file to load the acquisition system configuration data during data acquisition runtimes. The
+    system configuration only needs to be created on the machine (PC) that runs the sl-experiment library and manages
+    the acquisition runtime if the system uses multiple machines (PCs). Once the system configuration .yaml file is
+    created via this command, edit the file to modify the acquisition system configuration at any time.
+    """
+
+    create_system_configuration_file(system=system)
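Together, the three commands above form the machine-setup entry point: set the working directory first, then write the server credentials and acquisition-system configuration into it. A minimal sketch of driving the group with click's CliRunner follows; the installed console-script name comes from entry_points.txt, which this diff does not show, so the group object is invoked directly (note that the group is registered under the name "manage" in its decorator).

# Sketch: exercising the configure group defined above with click's test runner.
# The target path is illustrative; the real console-script name is not shown in this diff.
from click.testing import CliRunner

from sl_shared_assets.command_line_interfaces.configure import configure

runner = CliRunner()
result = runner.invoke(configure, ["directory", "--directory", "/tmp/sl_working_dir"])
print(result.exit_code, result.output)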
sl_shared_assets/command_line_interfaces/manage.py
ADDED
@@ -0,0 +1,226 @@
+"""This module provides the Command-Line Interfaces (CLIs) for managing Sun lab sessions and projects. Most of these
+CLIs are intended to run on the remote compute server and should not be used by end-users directly."""
+
+from typing import Any
+from pathlib import Path
+
+import click
+from ataraxis_base_utilities import LogLevel, console
+
+from ..tools import (
+    archive_session,
+    prepare_session,
+    resolve_checksum,
+    generate_project_manifest,
+)
+
+# Ensures that displayed CLICK help messages are formatted according to the lab standard.
+CONTEXT_SETTINGS = dict(max_content_width=120) # or any width you want
+
+
+@click.group("manage", context_settings=CONTEXT_SETTINGS)
+def manage() -> None:
+    """This Command-Line Interface (CLI) allows managing session and project data acquired in the Sun lab.
+
+    This CLI is intended to run on the Sun lab remote compute server(s) and should not be called by the end-user
+    directly. Instead, commands from this CLI are designed to be accessed through the bindings in the sl-experiment and
+    sl-forgery libraries.
+    """
+
+
+# Session data management commands
+@manage.group("session")
+@click.option(
+    "-sp",
+    "--session-path",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    help="The absolute path to the root session directory to process. This directory must contain the 'raw_data' "
+    "subdirectory.",
+)
+@click.option(
+    "-pdr",
+    "--processed-data-root",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=False,
+    help=(
+        "The absolute path to the directory that stores the processed data from all Sun lab projects, if it is "
+        "different from the root directory included in the 'session-path' argument value."
+    ),
+)
+@click.option(
+    "-id",
+    "--manager-id",
+    type=int,
+    required=True,
+    default=0,
+    show_default=True,
+    help="The unique identifier of the process that manages this runtime.",
+)
+@click.option(
+    "-r",
+    "--reset-tracker",
+    is_flag=True,
+    required=False,
+    help=(
+        "Determines whether to forcibly reset the tracker file for the target session management pipeline before "
+        "processing runtime. This flag should only be used in exceptional cases to recover from improper runtime "
+        "terminations."
+    ),
+)
+@click.pass_context
+def manage_session(
+    ctx: Any, session_path: Path, processed_data_root: Path | None, manager_id: int, reset_tracker: bool
+) -> None:
+    """This group provides commands for managing the data of a Sun lab data acquisition session.
+
+    Commands from this group are used to support data processing and dataset-formation (forging) on remote compute
+    servers."""
+    ctx.ensure_object(dict)
+    ctx.obj["session_path"] = session_path
+    ctx.obj["processed_data_root"] = processed_data_root
+    ctx.obj["manager_id"] = manager_id
+    ctx.obj["reset_tracker"] = reset_tracker
+
+
+# noinspection PyUnresolvedReferences
+@manage_session.command("checksum")
+@click.pass_context
+@click.option(
+    "-rc",
+    "--recalculate-checksum",
+    is_flag=True,
+    help=(
+        "Determines whether to recalculate and overwrite the cached checksum value for the processed session. When "
+        "the command is called with this flag, it effectively re-checksums the data instead of verifying its integrity."
+    ),
+)
+def resolve_session_checksum(ctx: Any, recalculate_checksum: bool) -> None:
+    """Resolves the data integrity checksum for the target session's 'raw_data' directory.
+
+    This command can be used to verify the integrity of the session's 'raw_data' directory using an existing
+    checksum or to re-generate the checksum to reflect the current state of the directory. It only works with the
+    'raw_data' session directory and ignores all other directories. Primarily, this command is used to verify the
+    integrity of the session's data as it is transferred from data acquisition systems to long-term storage
+    destinations.
+    """
+
+    # Extracts shared parameters from context
+    session_path = ctx.obj["session_path"]
+    processed_data_root = ctx.obj["processed_data_root"]
+    manager_id = ctx.obj["manager_id"]
+    reset_tracker = ctx.obj["reset_tracker"]
+
+    resolve_checksum(
+        session_path=session_path,
+        manager_id=manager_id,
+        processed_data_root=processed_data_root,
+        regenerate_checksum=recalculate_checksum,
+        reset_tracker=reset_tracker,
+    )
+
+
+# noinspection PyUnresolvedReferences
+@manage_session.command("prepare")
+@click.pass_context
+def prepare_session_for_processing(
+    ctx: Any,
+) -> None:
+    """Prepares the target session for data processing by moving all session data to the working volume.
+
+    This command is intended to run on remote compute servers that use slow HDD volumes to maximize data integrity and
+    fast NVME volumes to maximize data processing speed. For such systems, moving the data to the fast volume before
+    processing results in a measurable processing time decrease.
+    """
+    # Extracts shared parameters from context
+    session_path = ctx.obj["session_path"]
+    processed_data_root = ctx.obj["processed_data_root"]
+    manager_id = ctx.obj["manager_id"]
+    reset_tracker = ctx.obj["reset_tracker"]
+
+    prepare_session(
+        session_path=session_path,
+        manager_id=manager_id,
+        processed_data_root=processed_data_root,
+        reset_tracker=reset_tracker,
+    )
+
+
+# noinspection PyUnresolvedReferences
+@manage_session.command("archive")
+@click.pass_context
+def archive_session_for_storage(
+    ctx: Any,
+) -> None:
+    """Prepares the target session for long-term storage by moving all session data to the storage volume.
+
+    This command is primarily intended to run on remote compute servers that use slow HDD volumes to maximize data
+    integrity and fast NVME volumes to maximize data processing speed. For such systems, moving all sessions that are no
+    longer actively processed or analyzed to the slow drive volume frees up the processing volume space and ensures
+    long-term data integrity.
+    """
+    # Extracts shared parameters from context
+    session_path = ctx.obj["session_path"]
+    processed_data_root = ctx.obj["processed_data_root"]
+    manager_id = ctx.obj["manager_id"]
+    reset_tracker = ctx.obj["reset_tracker"]
+
+    archive_session(
+        session_path=session_path,
+        manager_id=manager_id,
+        processed_data_root=processed_data_root,
+        reset_tracker=reset_tracker,
+    )
+
+
+@manage.group("project")
+@click.pass_context
+@click.option(
+    "-pp",
+    "--project-path",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    help="The absolute path to the project-specific directory where raw session data is stored.",
+)
+@click.option(
+    "-pdr",
+    "--processed-data-root",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=False,
+    help=(
+        "The absolute path to the directory that stores the processed data from all Sun lab projects, if it is "
+        "different from the root directory included in the 'session-path' argument value."
+    ),
+)
+def manage_project(ctx: Any, project_path: Path, processed_data_root: Path | None) -> None:
+    """This group provides commands for managing the data of a Sun lab project.
+
+    Commands from this group are used to support all interactions with the data stored on the Sun lab remote compute
+    server(s)."""
+    ctx.ensure_object(dict)
+    ctx.obj["project_path"] = project_path
+    ctx.obj["processed_data_root"] = processed_data_root
+
+
+# noinspection PyUnresolvedReferences
+@manage_project.command("manifest")
+@click.pass_context
+def generate_project_manifest_file(ctx: Any) -> None:
+    """Generates the manifest .feather file that captures the current state of the target project's data.
+
+    The manifest file contains the comprehensive snapshot of the available project's data. It includes the information
+    about the management and processing pipelines that have been applied to each session's data, as well as the
+    descriptive information about each session. The manifest file is used as an entry-point for all interactions with
+    the Sun lab data stored on the remote compute server(s).
+    """
+    # Extracts shared parameters from context
+    project_path = ctx.obj["project_path"]
+    processed_data_root = ctx.obj["processed_data_root"]
+
+    generate_project_manifest(
+        raw_project_directory=project_path,
+        processed_data_root=processed_data_root,
+        manager_id=1,
+    )
+
+    console.echo(message=f"Project {Path(project_path).stem} data manifest file: generated.", level=LogLevel.SUCCESS)
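The session group above stores its shared options in ctx.obj, and each subcommand (checksum, prepare, archive) reads them back before delegating to the matching tools function. A hedged usage sketch via click's test runner follows; the session path and manager id are illustrative, and per the docstrings these commands are normally driven through the sl-experiment and sl-forgery bindings on the compute server.

# Sketch: chaining the shared 'session' group options with the 'checksum' subcommand.
# The session path is illustrative; click.Path(exists=True) requires it to exist on disk.
from click.testing import CliRunner

from sl_shared_assets.command_line_interfaces.manage import manage

runner = CliRunner()
result = runner.invoke(
    manage,
    [
        "session",
        "--session-path", "/local/workdir/sun_data/example_project/example_session",
        "--manager-id", "0",
        "checksum",  # verifies the existing raw_data checksum (no --recalculate-checksum flag)
    ],
)
print(result.exit_code, result.output)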
sl_shared_assets/data_classes/__init__.py
CHANGED
@@ -1,7 +1,6 @@
-"""This package provides the classes used to store data acquired at
-configure various pipelines
-
-and restored from the .yaml files as needed."""
+"""This package provides the classes used to store data acquired at all stages of the Sun lab data workflow and to
+configure various elements and pipelines making up the overall workflow. Many classes in this package are designed to
+be saved to disk as .yaml files and restored from the .yaml files as needed."""
 
 from .runtime_data import (
     ZaberPositions,
@@ -15,12 +14,10 @@ from .runtime_data import (
 from .session_data import (
     RawData,
     SessionData,
+    SessionLock,
     SessionTypes,
+    TrackingData,
     ProcessedData,
-    TrackerFileNames,
-    ProcessingTracker,
-    generate_manager_id,
-    get_processing_tracker,
 )
 from .surgery_data import (
     DrugData,
@@ -40,41 +37,45 @@ from .configuration_data import (
     MesoscopeAdditionalFirmware,
     MesoscopeSystemConfiguration,
     MesoscopeExperimentConfiguration,
+    get_working_directory,
+    set_working_directory,
+    get_credentials_file_path,
     get_system_configuration_data,
-
+    create_system_configuration_file,
 )
 
 __all__ = [
+    "AcquisitionSystems",
     "DrugData",
+    "ExperimentState",
+    "ExperimentTrial",
     "ImplantData",
-    "SessionData",
-    "RawData",
-    "ProcessedData",
-    "SubjectData",
-    "SurgeryData",
     "InjectionData",
-    "ProcedureData",
-    "ZaberPositions",
-    "ExperimentState",
-    "MesoscopePositions",
-    "MesoscopeHardwareState",
-    "RunTrainingDescriptor",
     "LickTrainingDescriptor",
+    "MesoscopeAdditionalFirmware",
+    "MesoscopeCameras",
     "MesoscopeExperimentConfiguration",
     "MesoscopeExperimentDescriptor",
-    "
-    "set_system_configuration_file",
-    "get_system_configuration_data",
-    "MesoscopePaths",
-    "MesoscopeCameras",
+    "MesoscopeHardwareState",
     "MesoscopeMicroControllers",
-    "
-    "
-    "
-    "
+    "MesoscopePaths",
+    "MesoscopePositions",
+    "MesoscopeSystemConfiguration",
+    "ProcedureData",
+    "ProcessedData",
+    "RawData",
+    "RunTrainingDescriptor",
+    "SessionData",
+    "SessionLock",
     "SessionTypes",
+    "SubjectData",
+    "SurgeryData",
+    "TrackingData",
     "WindowCheckingDescriptor",
-    "
-    "
-    "
+    "ZaberPositions",
+    "create_system_configuration_file",
+    "get_credentials_file_path",
+    "get_system_configuration_data",
+    "get_working_directory",
+    "set_working_directory",
 ]
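The configuration helpers newly re-exported here (get_working_directory, set_working_directory, get_credentials_file_path, create_system_configuration_file) back the configure CLI shown earlier. A small sketch of the working-directory pair follows; the keyword signature of set_working_directory is assumed from its call in configure.py, and get_working_directory is assumed to return the cached path.

# Sketch: setting and later resolving the Sun lab working directory via the data_classes
# re-exports. Signatures are assumed from their use in configure.py above.
from pathlib import Path

from sl_shared_assets.data_classes import get_working_directory, set_working_directory

set_working_directory(path=Path("/tmp/sl_working_dir"))  # cache location for configuration data
print(get_working_directory())  # later library calls are expected to resolve the same directory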