sl-shared-assets 1.0.0rc13__py3-none-any.whl → 1.0.0rc14__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

@@ -5,6 +5,8 @@ API documentation: https://sl-shared-assets-api-docs.netlify.app/
 Authors: Ivan Kondratyev (Inkaros), Kushaan Gupta, Yuantao Deng
 """
 
+from ataraxis_base_utilities import console
+
 from .server import Server, ServerCredentials
 from .suite2p import (
     Suite2PConfiguration,
@@ -31,6 +33,10 @@ from .data_classes import (
 from .transfer_tools import transfer_directory
 from .packaging_tools import calculate_directory_checksum
 
+# Ensures console is enabled when this library is imported
+if not console.enabled:
+    console.enable()
+
 __all__ = [
     # Server module
     "Server",
@@ -54,7 +60,7 @@ __all__ = [
     "LickTrainingDescriptor",
     "ExperimentConfiguration",
     "MesoscopeExperimentDescriptor",
-    "ProcessingTracker"
+    "ProcessingTracker",
     # Transfer tools module
     "transfer_directory",
     # Packaging tools module
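
The net effect of the changes above is that importing the package now enables the shared ataraxis console as an import side effect. A minimal sketch of what a downstream consumer would observe (assuming ataraxis-base-utilities is installed, which the new import requires):

```python
# Importing sl_shared_assets 1.0.0rc14 now enables the shared ataraxis console
# if it is not already enabled.
from ataraxis_base_utilities import console

import sl_shared_assets  # noqa: F401  # the import itself runs console.enable()

print(console.enabled)  # expected to print True after the import
```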
@@ -0,0 +1,51 @@
+from .server import (
+    Server as Server,
+    ServerCredentials as ServerCredentials,
+)
+from .suite2p import Suite2PConfiguration as Suite2PConfiguration
+from .data_classes import (
+    DrugData as DrugData,
+    ImplantData as ImplantData,
+    SessionData as SessionData,
+    SubjectData as SubjectData,
+    SurgeryData as SurgeryData,
+    InjectionData as InjectionData,
+    ProcedureData as ProcedureData,
+    ZaberPositions as ZaberPositions,
+    ExperimentState as ExperimentState,
+    ProcessingTracker as ProcessingTracker,
+    MesoscopePositions as MesoscopePositions,
+    ProjectConfiguration as ProjectConfiguration,
+    HardwareConfiguration as HardwareConfiguration,
+    RunTrainingDescriptor as RunTrainingDescriptor,
+    LickTrainingDescriptor as LickTrainingDescriptor,
+    ExperimentConfiguration as ExperimentConfiguration,
+    MesoscopeExperimentDescriptor as MesoscopeExperimentDescriptor,
+)
+from .transfer_tools import transfer_directory as transfer_directory
+from .packaging_tools import calculate_directory_checksum as calculate_directory_checksum
+
+__all__ = [
+    "Server",
+    "ServerCredentials",
+    "Suite2PConfiguration",
+    "DrugData",
+    "ImplantData",
+    "SessionData",
+    "SubjectData",
+    "SurgeryData",
+    "InjectionData",
+    "ProcedureData",
+    "ZaberPositions",
+    "ExperimentState",
+    "MesoscopePositions",
+    "ProjectConfiguration",
+    "HardwareConfiguration",
+    "RunTrainingDescriptor",
+    "LickTrainingDescriptor",
+    "ExperimentConfiguration",
+    "MesoscopeExperimentDescriptor",
+    "ProcessingTracker",
+    "transfer_directory",
+    "calculate_directory_checksum",
+]
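
The "Name as Name" aliases in this new stub follow the standard typing convention for marking explicit re-exports, so type checkers resolve the public API from the package root just as the runtime __all__ does. A minimal sketch, using names taken from the __all__ list above:

```python
# Public names re-exported by the package root, per the stub and __all__ above.
from sl_shared_assets import ProcessingTracker, Server, SessionData, transfer_directory
```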
@@ -14,10 +14,6 @@ from .data_classes import SessionData, ProjectConfiguration
 from .transfer_tools import transfer_directory
 from .packaging_tools import calculate_directory_checksum
 
-# Ensures the console is enabled when this file is imported
-if not console.enabled:
-    console.enable()
-
 
 def _generate_session_name(acquisition_path: Path) -> str:
     """Generates a session name using the last modification time of a zstack.mat or MotionEstimator.me file.
@@ -0,0 +1,68 @@
+from pathlib import Path
+
+from .data_classes import (
+    SessionData as SessionData,
+    ProjectConfiguration as ProjectConfiguration,
+)
+from .transfer_tools import transfer_directory as transfer_directory
+from .packaging_tools import calculate_directory_checksum as calculate_directory_checksum
+
+def _generate_session_name(acquisition_path: Path) -> str:
+    """Generates a session name using the last modification time of a zstack.mat or MotionEstimator.me file.
+
+    This worker function uses one of the motion estimation files stored in each Tyche 'acquisition' subfolder to
+    generate a modern Sun lab timestamp-based session name. This is used to translate the original Tyche session naming
+    pattern into the pattern used by all modern Sun lab projects and pipelines.
+
+    Args:
+        acquisition_path: The absolute path to the target acquisition folder. These folders are found under the 'day'
+            folders for each animal, e.g.: Tyche-A7/2022_01_03/1.
+
+    Returns:
+        The modernized session name.
+    """
+
+def _reorganize_data(session_data: SessionData, source_root: Path) -> bool:
+    """Reorganizes and moves the session's data from the source folder in the old Tyche data hierarchy to the raw_data
+    folder in the newly created modern hierarchy.
+
+    This worker function is used to physically rearrange the data from the original Tyche data structure to the
+    new data structure. It both moves the existing files to their new destinations and renames certain files to match
+    the modern naming convention used in the Sun lab.
+
+    Args:
+        session_data: The initialized SessionData instance managing the 'ascended' (modernized) session data hierarchy.
+        source_root: The absolute path to the old Tyche data hierarchy folder that stores session's data.
+
+    Returns:
+        True if the ascension process was successfully completed. False if the process encountered missing data or
+        otherwise did not go as expected. When the method returns False, the runtime function requests user intervention
+        to finalize the process manually.
+    """
+
+def ascend_tyche_data(root_directory: Path, output_root_directory: Path, server_root_directory: Path) -> None:
+    """Reformats the old Tyche data to use the modern Sun lab layout and metadata files.
+
+    This function is used to convert old Tyche data to the modern data management standard. This is used to make the
+    data compatible with the modern Sun lab data workflows.
+
+    Notes:
+        This function is statically written to work with the raw Tyche dataset featured in the OSM manuscript:
+        https://www.nature.com/articles/s41586-024-08548-w. Additionally, it assumes that the dataset has been
+        preprocessed with the early Sun lab mesoscope compression pipeline. The function will not work for any other
+        project or data hierarchy.
+
+        As part of its runtime, the function automatically transfers the ascended session data to the BioHPC server.
+        Since transferring the data over the network is the bottleneck of this pipeline, it runs in a single-threaded
+        mode and is constrained by the communication channel between the local machine and the BioHPC server. Calling
+        this function for a large number of sessions will result in a long processing time due to the network data
+        transfer.
+
+    Args:
+        root_directory: The directory that stores one or more Tyche animal folders. This can be conceptualized as the
+            root directory for the Tyche project.
+        output_root_directory: The path to the local directory where to generate the converted Tyche project hierarchy.
+            Typically, this is the 'root' directory where all other Sun lab projects are stored.
+        server_root_directory: The path to the local filesystem-mounted BioHPC server storage directory. Note, this
+            directory has to be mapped to the local filesystem via the SMB or equivalent protocol.
+    """
sl_shared_assets/cli.py CHANGED
@@ -22,9 +22,9 @@ def replace_local_root_directory(path: str) -> None:
     """Replaces the root directory used to store all lab projects on the local PC with the specified directory.
 
     To ensure all projects are saved in the same location, this library resolves and saves the absolute path to the
-    project directory when it is used for the first time. All future projects reuse the same 'root' path. Since this
-    information is stored in a typically hidden user directory, this CLI can be used to replace the local directory
-    path, if necessary.
+    project directory the first time a ProjectConfiguration class instance is created on a new PC. All future projects
+    automatically reuse the same 'root' directory path. Since this information is stored in a typically hidden user
+    directory, this CLI can be used to replace the local directory path, if necessary.
     """
     replace_root_path(path=Path(path))
 
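
The same root-path replacement can be driven from Python by calling the helper this CLI wraps; a sketch with a placeholder path (the import location follows the CLI stub at the end of this diff, which pulls replace_root_path from the data_classes module):

```python
from pathlib import Path

from sl_shared_assets.data_classes import replace_root_path

# Point all future projects at a new local 'root' directory (placeholder path).
replace_root_path(path=Path("/data/sun_lab_projects"))
```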
@@ -35,8 +35,7 @@ def replace_local_root_directory(path: str) -> None:
     "--output_directory",
     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
-    prompt="Enter the path to the directory where to create the credentials file: ",
-    help="The path to the directory where to create the credentials file.",
+    help="The absolute path to the directory where to create the credentials file.",
 )
 @click.option(
     "-h",
@@ -64,7 +63,8 @@ def replace_local_root_directory(path: str) -> None:
 def generate_server_credentials_file(output_directory: str, host: str, username: str, password: str) -> None:
     """Generates a new server_credentials.yaml file under the specified directory, using input information.
 
-    This CLI is used during the initial PC setup (typically, VRPC) to allow it to access the lab BioHPC server.
+    This CLI is used to set up new PCs to work with the lab BioHPC server. While this is primarily intended for the
+    VRPC, any machine that interacts with the BioHPC server can use this CLI to build the access credentials file.
     """
     generate_server_credentials(
         output_directory=Path(output_directory), username=username, password=password, host=host
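
Similarly, the credentials file can be generated without the CLI by calling the library helper invoked above; a sketch with placeholder values (the import location follows the CLI stub at the end of this diff, which pulls generate_server_credentials from the server module):

```python
from pathlib import Path

from sl_shared_assets.server import generate_server_credentials

# Writes a server_credentials.yaml file into the target directory (all values are placeholders).
generate_server_credentials(
    output_directory=Path("/home/labuser/credentials"),
    username="lab_user",
    password="replace-me",
    host="biohpc.example.org",
)
```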
@@ -77,7 +77,6 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
     "--path",
     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
-    prompt="Enter the absolute path to the directory that stores original Tyche animal folders: ",
     help="The absolute path to the directory that stores original Tyche animal folders.",
 )
 @click.option(
@@ -85,7 +84,6 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
     "--output_directory",
     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
-    prompt="Enter the absolute path to the local directory where to create the ascended Tyche project hierarchy: ",
     help="The absolute path to the local directory where to create the ascended Tyche project hierarchy.",
 )
 @click.option(
@@ -93,17 +91,18 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
     "--server_directory",
     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
-    prompt=(
-        "Enter the path to the SMB-mounted BioHPC server directory where to create the ascended Tyche project "
-        "hierarchy: "
+    help=(
+        "The path to the SMB-mounted BioHPC server directory where to transfer the ascended Tyche project "
+        "hierarchy after it is created."
     ),
-    help="The path to the SMB-mounted BioHPC server directory where to create the ascended Tyche project hierarchy.",
 )
 def ascend_tyche_directory(path: str, output_directory: str, server_directory: str) -> None:
     """Restructures old Tyche project data to use the modern Sun lab data structure.
 
     This CLI is used to convert ('ascend') the old Tyche project data to the modern Sun lab structure. After
-    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries.
+    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries. Note, this
+    process expects the input data to be preprocessed using an old Sun lab mesoscope data preprocessing pipeline. It
+    will not work for any other project or data.
     """
     ascend_tyche_data(
         root_directory=Path(path),
@@ -0,0 +1,28 @@
+from .server import generate_server_credentials as generate_server_credentials
+from .data_classes import replace_root_path as replace_root_path
+from .ascension_tools import ascend_tyche_data as ascend_tyche_data
+
+def replace_local_root_directory(path: str) -> None:
+    """Replaces the root directory used to store all lab projects on the local PC with the specified directory.
+
+    To ensure all projects are saved in the same location, this library resolves and saves the absolute path to the
+    project directory the first time a ProjectConfiguration class instance is created on a new PC. All future projects
+    automatically reuse the same 'root' directory path. Since this information is stored in a typically hidden user
+    directory, this CLI can be used to replace the local directory path, if necessary.
+    """
+
+def generate_server_credentials_file(output_directory: str, host: str, username: str, password: str) -> None:
+    """Generates a new server_credentials.yaml file under the specified directory, using input information.
+
+    This CLI is used to set up new PCs to work with the lab BioHPC server. While this is primarily intended for the
+    VRPC, any machine that interacts with the BioHPC server can use this CLI to build the access credentials file.
+    """
+
+def ascend_tyche_directory(path: str, output_directory: str, server_directory: str) -> None:
+    """Restructures old Tyche project data to use the modern Sun lab data structure.
+
+    This CLI is used to convert ('ascend') the old Tyche project data to the modern Sun lab structure. After
+    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries. Note, this
+    process expects the input data to be preprocessed using an old Sun lab mesoscope data preprocessing pipeline. It
+    will not work for any other project or data.
+    """