sl-shared-assets 1.0.0rc8__py3-none-any.whl → 1.0.0rc9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

sl_shared_assets/cli.py CHANGED
@@ -8,6 +8,7 @@ import click
 
 from .server import generate_server_credentials
 from .data_classes import replace_root_path
+from .legacy_tools import ascend_tyche_data
 
 
 @click.command()
@@ -70,3 +71,45 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
     generate_server_credentials(
         output_directory=Path(output_directory), username=username, password=password, host=host
     )
+
+
+@click.command()
+@click.option(
+    "-p",
+    "--path",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    prompt="Enter the absolute path to the root directory storing Tyche animal folders to ascend (modernize): ",
+    help="The path to the root directory storing Tyche animal folders to ascend (modernize).",
+)
+@click.option(
+    "-o",
+    "--output_directory",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    prompt="Enter the path to the local directory where to create the ascended Tyche project hierarchy: ",
+    help="The path to the local directory where to create the ascended Tyche project hierarchy.",
+)
+@click.option(
+    "-s",
+    "--server_directory",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    prompt="Enter the path to the SMB-mounted BioHPC server directory that will be used to store the ascended data: ",
+    help="The path to the SMB-mounted BioHPC server directory that will be used to store the ascended data.",
+)
+def ascend_tyche_directory(path: str, output_directory: str, server_directory: str) -> None:
+    """Restructures all original Tyche folders to use the modern Sun lab data structure.
+
+    This CLI is used to convert the old Tyche data to make it compatible with modern Sun lab processing pipelines and
+    data management workflows. This process is commonly referred to as 'ascension' amongst lab engineers. After
+    ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries.
+
+    Note! This CLI does NOT move the data to the BioHPC server. The data has to be manually transferred to the server
+    before it can be processed using our server-side pipelines.
+    """
+    ascend_tyche_data(
+        root_directory=Path(path),
+        output_root_directory=Path(output_directory),
+        server_root_directory=Path(server_directory),
+    )
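For reference, a minimal sketch of driving the new ascend_tyche_directory command through click's built-in test runner. The console-script name registered for this command lives in entry_points.txt, which is not shown in this diff, so the command object is imported and invoked directly; all paths below are hypothetical placeholders (click.Path(exists=True) requires them to exist on the machine running this):

    from click.testing import CliRunner

    from sl_shared_assets.cli import ascend_tyche_directory

    runner = CliRunner()
    result = runner.invoke(
        ascend_tyche_directory,
        [
            "--path", "/data/tyche_raw",  # root holding Tyche-* animal folders (hypothetical)
            "--output_directory", "/data/tyche_ascended",  # local output root (hypothetical)
            "--server_directory", "/mnt/biohpc/storage",  # SMB-mounted server root (hypothetical)
        ],
    )
    print(result.exit_code, result.output)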
sl_shared_assets/data_classes.py CHANGED
@@ -13,7 +13,6 @@ import appdirs
 from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
 from ataraxis_data_structures import YamlConfig
 from ataraxis_time.time_helpers import get_timestamp
-import copy
 
 
 def replace_root_path(path: Path) -> None:
@@ -230,7 +229,8 @@ class ProjectConfiguration(YamlConfig):
             # this process, the class generates the correct 'local_root_path' based on the path provided by the
             # user.
             precursor = ProjectConfiguration(local_root_directory=Path(str(configuration_path.parents[2])))
-            precursor._to_path(path=configuration_path)
+            precursor.project_name = project_name
+            precursor.to_path(path=configuration_path)
 
             # Waits for the user to manually configure the newly created file.
             input(f"Enter anything to continue: ")
@@ -263,7 +263,7 @@ class ProjectConfiguration(YamlConfig):
         # Returns the initialized class instance to caller
         return instance
 
-    def _to_path(self, path: Path) -> None:
+    def to_path(self, path: Path) -> None:
         """Saves the instance data to disk as a project_configuration.yaml file.
 
         This method is automatically called when the project is created. All future runtimes should use the load()
@@ -797,6 +797,7 @@ class SessionData(YamlConfig):
         session_type: str,
         project_configuration: ProjectConfiguration,
         experiment_name: str | None = None,
+        session_name: str | None = None,
     ) -> "SessionData":
         """Creates a new SessionData object and uses it to generate the session's data structure.
 
@@ -821,13 +822,17 @@ class SessionData(YamlConfig):
                 copy it into the session's raw_data directory.
             project_configuration: The initialized ProjectConfiguration instance that stores the data for the session's
                 project. This is used to determine the root directory paths for all PCs used in the data workflow.
+            session_name: An optional session_name override. Generally, this argument should not be provided for most
+                use cases. When provided, the method uses this name instead of generating a new timestamp-based name.
+                This is only used when reformatting other data structures to follow the Sun lab structure.
 
         Returns:
             An initialized SessionData instance for the newly created session.
         """
 
         # Acquires the UTC timestamp to use as the session name
-        session_name = str(get_timestamp(time_separator="-"))
+        if session_name is None:
+            session_name = str(get_timestamp(time_separator="-"))
 
         # Extracts the root directory paths stored inside the project configuration file. All roots are expected to be
         # mounted on the local (VRPC) via SMB or equivalent protocol and be relative to the VRPC root.
sl_shared_assets/legacy_tools.py ADDED
@@ -0,0 +1,262 @@
+"""This module provides tools for working with old (legacy) data and data formats. Primarily, they are used to reformat
+old Tyche project data to make it compatible with modern Sun lab pipelines."""
+
+from pathlib import Path
+import datetime
+import tempfile
+
+import numpy as np
+from ataraxis_base_utilities import LogLevel, console
+from ataraxis_time.time_helpers import extract_timestamp_from_bytes
+
+from .data_classes import SessionData, ProjectConfiguration
+from .packaging_tools import calculate_directory_checksum
+
+
+def _generate_session_name(acquisition_path: Path) -> str:
+    """Generates a session name using the last modification time of a zstack.mat or MotionEstimator.me file.
+
+    This worker function uses one of the motion estimation files stored in each 'acquisition' subfolder of the original
+    Tyche session data structure to generate a compatible timestamp-based session name. This is used to translate the
+    original Tyche data hierarchy into the hierarchy used by all modern Sun lab projects and pipelines.
+
+    Args:
+        acquisition_path: The absolute path to the target acquisition folder. These folders are found under the 'day'
+            folders for each animal, e.g.: Tyche-A7/2022_01_03/1.
+
+    Returns:
+        The generated session name for that acquisition.
+    """
+
+    # All well-formed sessions are expected to contain both the zstack.mat and the MotionEstimator.me file.
+    # We use the last modification time from one of these files to infer when the session was carried out. This allows
+    # us to gather the time information, which is missing from the original session naming pattern.
+    source: Path
+    if acquisition_path.joinpath("zstack.mat").exists():
+        source = acquisition_path.joinpath("zstack.mat")
+    elif acquisition_path.joinpath("MotionEstimator.me").exists():
+        source = acquisition_path.joinpath("MotionEstimator.me")
+    else:
+        message = (
+            f"Unable to find zstack.mat or MotionEstimator.me file in the target acquisition subfolder "
+            f"{acquisition_path} of the session {acquisition_path.parent}. Manual intervention is required to ascend "
+            f"the target session folder to the latest Sun lab data format."
+        )
+        console.error(message=message, error=FileNotFoundError)
+        raise FileNotFoundError(message)  # Fallback to appease mypy
+
+    # Gets the last modified time (available on all platforms) and converts it to a UTC timestamp object.
+    mod_time = source.stat().st_mtime
+    mod_datetime = datetime.datetime.fromtimestamp(mod_time)
+
+    # Converts the timestamp to microseconds as uint64, then to an array of 8 uint8 bytes. The array is then reformatted
+    # to match the session name pattern used in the modern Sun lab data pipelines.
+    timestamp_microseconds = np.uint64(int(mod_datetime.timestamp() * 1_000_000))
+    timestamp_bytes = np.array([(timestamp_microseconds >> (8 * i)) & 0xFF for i in range(8)], dtype=np.uint8)
+    stamp = extract_timestamp_from_bytes(timestamp_bytes=timestamp_bytes)
+
+    # Returns the generated session name to the caller.
+    return stamp
+
+
+def _reorganize_data(session_data: SessionData, source_root: Path) -> bool:
+    """Reorganizes and moves the session data from the source acquisition folder to the newly generated session
+    hierarchy.
+
+    This worker function is used to physically rearrange the data from the original Tyche acquisition folder to the
+    newly created session hierarchy. It both moves the existing files to their new destinations and renames certain
+    files to match the latest naming convention used in the Sun lab.
+
+    Args:
+        session_data: The initialized SessionData instance managing the newly created session data hierarchy.
+        source_root: The absolute path to the source Tyche acquisition folder.
+
+    Returns:
+        True if all expected data was found and moved. False, if any expected file was not found inside the folder, or
+        the reorganization process otherwise behaved unexpectedly. If this function returns False, the folder is
+        statically flagged for manual user intervention to investigate the issue.
+    """
+
+    # Resolves expected data targets:
+
+    # These files should be present in all well-formed session data folders. While not all session folders are
+    # well-formed, we will likely exclude any non-well-formed folders from processing.
+    zstack_path = source_root.joinpath("zstack.mat")
+    motion_estimator_path = source_root.joinpath("MotionEstimator.me")
+    ops_path = source_root.joinpath("ops.json")
+    mesoscope_frames_path = source_root.joinpath("mesoscope_frames")
+    ax_checksum_path = source_root.joinpath("ax_checksum.txt")
+
+    # These two file types are present for some, but not all folders. They are not as important as the group of files
+    # above though, as, currently, the data stored in these files is not used during processing.
+    frame_metadata_path = source_root.joinpath("frame_metadata.npz")
+    metadata_path = source_root.joinpath("metadata.json")
+
+    # This tracker is used to mark the session for manual intervention if any expected data is missing from the source
+    # session folder. At the end of this function's runtime, it determines whether the function returns True or False.
+    data_missing = False
+
+    # First, moves the mesoscope TIFF stacks to the newly created session data hierarchy as the mesoscope_data subfolder
+    if mesoscope_frames_path.exists():
+        mesoscope_frames_path.rename(session_data.raw_data.mesoscope_data_path)
+    else:
+        data_missing = True
+
+    # Then, moves 'loose' mesoscope-related data files to the mesoscope_data folder.
+    if zstack_path.exists():
+        zstack_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("zstack.mat"))
+    else:
+        data_missing = True
+
+    if motion_estimator_path.exists():
+        motion_estimator_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("MotionEstimator.me"))
+    else:
+        data_missing = True
+
+    if ops_path.exists():
+        ops_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("ops.json"))
+    else:
+        data_missing = True
+
+    # If variant and invariant metadata files exist, also moves them to the mesoscope data folder and renames the
+    # files to use the latest naming convention. Missing any of these files is not considered a user-intervention-worthy
+    # situation.
+    if frame_metadata_path.exists():
+        frame_metadata_path.rename(
+            Path(session_data.raw_data.mesoscope_data_path).joinpath("frame_variant_metadata.npz")
+        )
+    if metadata_path.exists():
+        metadata_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("frame_invariant_metadata.json"))
+
+    # Loops over all camera video files (using the .avi extension) and moves them to the camera_data folder.
+    videos_found = 0
+    for video in source_root.glob("*.avi"):
+        videos_found += 1
+        video.rename(Path(session_data.raw_data.camera_data_path).joinpath(video.name))
+    if videos_found == 0:
+        data_missing = True
+
+    # Loops over all behavior log files (old GIMBL format) and moves them to the behavior_data folder.
+    logs_found = 0
+    for log in source_root.glob("Log Tyche-* ????-??-?? session *.json"):
+        logs_found += 1
+        log.rename(Path(session_data.raw_data.behavior_data_path).joinpath(log.name))
+    if logs_found == 0:
+        data_missing = True
+
+    # Removes the checksum file if it exists. Due to file name and location changes, the session data folder has to
+    # be re-checksummed after the reorganization anyway, so there is no need to keep the original file.
+    ax_checksum_path.unlink(missing_ok=True)
+
+    # Loops over all remaining contents of the directory.
+    for path in source_root.glob("*"):
+        # At this point, there should be no more subfolders left inside the root directory. If there are more
+        # subfolders, this case requires user intervention.
+        if path.is_dir():
+            data_missing = True
+
+        # All non-subfolder files are moved to the root raw_data directory of the newly created session.
+        else:
+            path.rename(Path(session_data.raw_data.raw_data_path).joinpath(path.name))
+
+    # Session data has been fully reorganized. Depending on whether there was any missing data during processing,
+    # returns the boolean flag for whether user intervention is required.
+    if data_missing:
+        return False
+    else:
+        return True
+
+
+def ascend_tyche_data(root_directory: Path, output_root_directory: Path, server_root_directory: Path) -> None:
+    """Converts raw data from the Tyche project to use the modern Sun lab layout.
+
+    This function is used to convert old data to the modern data management standard. In turn, this allows using all
+    modern data processing pipelines on this data.
+
+    Notes:
+        This function is statically written to work with the raw Tyche dataset featured in the OSM manuscript:
+        https://www.nature.com/articles/s41586-024-08548-w. Additionally, it assumes that the dataset has been
+        preprocessed with the early Sun lab mesoscope compression pipeline. The function will not work for any other
+        project or data hierarchy.
+
+        This function does not automatically transfer the data to the server. It only creates the necessary root
+        hierarchy on the server and writes the necessary configuration to process the data on the server, once it is
+        manually transferred.
+
+    Args:
+        root_directory: The root 'project' directory that stores individual Tyche animal folders to process.
+        output_root_directory: The path to the root directory where to generate the converted Tyche project hierarchy.
+        server_root_directory: The path to the SMB-mounted BioHPC server storage root directory. The Tyche project
+            hierarchy is generated both locally and on the server.
+    """
+    # Generates a (shared) project configuration file.
+    project_configuration = ProjectConfiguration()
+
+    # Generates temporary directories for the NAS and Mesoscope paths. Since Tyche data is already backed up on the NAS
+    # and we are not generating new data, these root paths are not needed, but have to be created as part of the
+    # pipeline. Redirecting them to local temporary directories allows avoiding extra steps to manually remove these
+    # redundant directories after runtime.
+    temp_nas_dir = Path(tempfile.mkdtemp(prefix="nas_temp_"))
+    temp_mesoscope_dir = Path(tempfile.mkdtemp(prefix="mesoscope_temp_"))
+
+    # Statically defines the project name and local root paths.
+    project_configuration.project_name = "Tyche"
+    project_configuration.local_root_directory = output_root_directory
+    project_configuration.local_server_directory = server_root_directory
+    project_configuration.local_nas_directory = temp_nas_dir
+    project_configuration.local_mesoscope_directory = temp_mesoscope_dir
+
+    # Uses nonsensical Google Sheet IDs. The Tyche project did not use Google Sheet processing like our modern
+    # projects do.
+    project_configuration.water_log_sheet_id = "1xFh9Q2zT7pL3mVkJdR8bN6yXoE4wS5aG0cHu2Kf7D3v"
+    project_configuration.surgery_sheet_id = "1xFh9Q2zT7pL3mVkJdR8bN6yXoE4wS5aG0cHu2Kf7D3v"
+
+    # Dumps the project configuration into the 'configuration' subfolder of the Tyche project.
+    configuration_path = output_root_directory.joinpath("Tyche", "configuration", "project_configuration.yaml")
+    project_configuration.to_path(path=configuration_path)
+
+    # Assumes that the root directory stores all animal folders to be processed.
+    for animal_folder in root_directory.iterdir():
+        # Each animal folder is named to include the project name and a static animal ID, e.g.: Tyche-A7. This extracts
+        # each animal ID.
+        animal_name = animal_folder.name.split(sep="-")[1]
+
+        # Under each animal root folder, there are day folders that use YYYY-MM-DD timestamps.
+        for session_folder in animal_folder.iterdir():
+            # Inside each day folder, there are one or more acquisitions (sessions).
+            for acquisition_folder in session_folder.iterdir():
+                # For each session, we extract the modification time from either (preferentially) the zstack.mat or the
+                # MotionEstimator.me file. Any session without these files is flagged for additional user intervention.
+                # This procedure generates timestamp-based session names, analogous to how our modern pipeline does it.
+                session_name = _generate_session_name(acquisition_path=acquisition_folder)
+
+                # Uses the derived session name and the statically created project configuration file to create the
+                # session data hierarchy using the output root. This generates a 'standard' Sun lab directory structure
+                # for the Tyche data.
+                session_data = SessionData.create_session(
+                    session_name=session_name,
+                    animal_id=animal_name,
+                    project_configuration=project_configuration,
+                    session_type="Experiment",
+                    experiment_name=None,  # Has to be None, otherwise the system tries to copy a configuration file.
+                )
+
+                # Moves the data from the old hierarchy to the new hierarchy. If the process runs as expected and
+                # fully empties the source acquisition folder, destroys the folder. Otherwise, notifies the user that
+                # the runtime did not fully process the session data and requests intervention.
+                success = _reorganize_data(session_data, acquisition_folder)
+                if not success:
+                    message = (
+                        f"Encountered issues when reorganizing {animal_name} session {session_name}. "
+                        f"User intervention is required to finish the data reorganization process for this session."
+                    )
+                    console.echo(message=message, level=LogLevel.WARNING)
+                else:
+                    # If the transfer process was successful, generates a new checksum for the moved data and removes
+                    # the now-empty acquisition folder.
+                    calculate_directory_checksum(directory=Path(session_data.raw_data.raw_data_path))
+                    acquisition_folder.rmdir()
+
+            # If the loop above removed all acquisition folders, all data for that day has been successfully converted
+            # to use the new session format. Removes the now-empty 'day' folder from the target animal.
+            if len([folder for folder in session_folder.iterdir()]) == 0:
+                session_folder.rmdir()
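The least obvious step in _generate_session_name is the timestamp conversion: the file's mtime is expressed as an integer count of microseconds, split into eight little-endian bytes, and handed to ataraxis-time for formatting. A standalone illustration of that packing, using only numpy and the standard library (the round-trip check below is an assumption about the byte layout extract_timestamp_from_bytes consumes, inferred from the shift-and-mask loop in the diff):

    import datetime

    import numpy as np

    # A fixed mtime stand-in; in legacy_tools this comes from source.stat().st_mtime.
    mod_datetime = datetime.datetime(2022, 1, 3, 14, 30, 5)

    # Microseconds since the epoch, packed into eight little-endian uint8 values.
    timestamp_microseconds = int(mod_datetime.timestamp() * 1_000_000)
    timestamp_bytes = np.array(
        [(timestamp_microseconds >> (8 * i)) & 0xFF for i in range(8)], dtype=np.uint8
    )

    # Reassembling the bytes recovers the original value, confirming the layout.
    assert int.from_bytes(timestamp_bytes.tobytes(), byteorder="little") == timestamp_microseconds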
sl_shared_assets-1.0.0rc8.dist-info/METADATA → sl_shared_assets-1.0.0rc9.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: sl-shared-assets
-Version: 1.0.0rc8
+Version: 1.0.0rc9
 Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
 Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
 Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
sl_shared_assets-1.0.0rc9.dist-info/RECORD ADDED
@@ -0,0 +1,14 @@
+sl_shared_assets/__init__.py,sha256=V7EvTTSB_GhetCbyYPg2RoiG1etDVeML5EBWgGvUo7E,2227
+sl_shared_assets/cli.py,sha256=1OzQUYZS741ca2x8LUCTOOTsC1leZGuwqEt0Q2qUNUQ,4676
+sl_shared_assets/data_classes.py,sha256=wRh493DHBfd2GuuWtHT0vKC7AVWhsE-wyQosZEpHvg0,87529
+sl_shared_assets/legacy_tools.py,sha256=x9Vl_BHUQf_-E6t2xx1r2autzqnmyWY3CNuKzMH97TE,14763
+sl_shared_assets/packaging_tools.py,sha256=3kAXFK37Lv4JA1YhjcoBz1x2Ell8ObCqe9pwxAts4m4,6709
+sl_shared_assets/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+sl_shared_assets/server.py,sha256=VtFwS4PEy24n_pGz9W56zufkZEf_PKxIllP2ZnF5Zgc,13269
+sl_shared_assets/suite2p.py,sha256=sQ5Zj0TJFD-gUHqtWnRvapBpr8QgmaiVil123cWxGxc,20511
+sl_shared_assets/transfer_tools.py,sha256=J26kwOp_NpPSY0-xu5FTw9udte-rm_mW1FJyaTNoqQI,6606
+sl_shared_assets-1.0.0rc9.dist-info/METADATA,sha256=TVcnp1yuSKs3by_NONUTGY9XncBQRtLVleYviPtrjcg,47806
+sl_shared_assets-1.0.0rc9.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+sl_shared_assets-1.0.0rc9.dist-info/entry_points.txt,sha256=3VPr5RkWBkusNN9OhWXtC-DN0utu7uMrUulazIK2VNA,166
+sl_shared_assets-1.0.0rc9.dist-info/licenses/LICENSE,sha256=OXLcl0T2SZ8Pmy2_dmlvKuetivmyPd5m1q-Gyd-zaYY,35149
+sl_shared_assets-1.0.0rc9.dist-info/RECORD,,
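Each RECORD row follows the wheel specification: the archive-relative path, the file's sha256 digest in URL-safe base64 with padding stripped, and the size in bytes (the RECORD file itself is listed with both fields empty). A short sketch of how one row is derived; record_entry is a hypothetical helper, not part of this package:

    import base64
    import hashlib
    from pathlib import Path

    def record_entry(path: Path) -> str:
        data = path.read_bytes()
        digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=").decode()
        return f"{path.as_posix()},sha256={digest},{len(data)}"

    # The zero-byte py.typed row above doubles as a sanity check: the digest shown is
    # exactly the sha256 of empty input, encoded this way.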
sl_shared_assets/__init__.pyi DELETED
@@ -1,87 +0,0 @@
-from .server import (
-    Server as Server,
-    ServerCredentials as ServerCredentials,
-)
-from .suite2p import (
-    Main as Main,
-    FileIO as FileIO,
-    Output as Output,
-    Channel2 as Channel2,
-    NonRigid as NonRigid,
-    ROIDetection as ROIDetection,
-    Registration as Registration,
-    Classification as Classification,
-    OnePRegistration as OnePRegistration,
-    SignalExtraction as SignalExtraction,
-    CellposeDetection as CellposeDetection,
-    SpikeDeconvolution as SpikeDeconvolution,
-    Suite2PConfiguration as Suite2PConfiguration,
-)
-from .data_classes import (
-    RawData as RawData,
-    DrugData as DrugData,
-    ImplantData as ImplantData,
-    SessionData as SessionData,
-    SubjectData as SubjectData,
-    SurgeryData as SurgeryData,
-    Destinations as Destinations,
-    InjectionData as InjectionData,
-    MesoscopeData as MesoscopeData,
-    ProcedureData as ProcedureData,
-    ProcessedData as ProcessedData,
-    PersistentData as PersistentData,
-    ZaberPositions as ZaberPositions,
-    ExperimentState as ExperimentState,
-    MesoscopePositions as MesoscopePositions,
-    ProjectConfiguration as ProjectConfiguration,
-    HardwareConfiguration as HardwareConfiguration,
-    RunTrainingDescriptor as RunTrainingDescriptor,
-    LickTrainingDescriptor as LickTrainingDescriptor,
-    ExperimentConfiguration as ExperimentConfiguration,
-    MesoscopeExperimentDescriptor as MesoscopeExperimentDescriptor,
-    replace_root_path as replace_root_path,
-)
-from .transfer_tools import transfer_directory as transfer_directory
-from .packaging_tools import calculate_directory_checksum as calculate_directory_checksum
-
-__all__ = [
-    "Server",
-    "ServerCredentials",
-    "Main",
-    "FileIO",
-    "Output",
-    "Channel2",
-    "NonRigid",
-    "ROIDetection",
-    "Registration",
-    "Classification",
-    "OnePRegistration",
-    "SignalExtraction",
-    "CellposeDetection",
-    "SpikeDeconvolution",
-    "Suite2PConfiguration",
-    "RawData",
-    "DrugData",
-    "ImplantData",
-    "SessionData",
-    "SubjectData",
-    "SurgeryData",
-    "Destinations",
-    "InjectionData",
-    "MesoscopeData",
-    "ProcedureData",
-    "ProcessedData",
-    "PersistentData",
-    "ZaberPositions",
-    "ExperimentState",
-    "MesoscopePositions",
-    "ProjectConfiguration",
-    "HardwareConfiguration",
-    "RunTrainingDescriptor",
-    "LickTrainingDescriptor",
-    "ExperimentConfiguration",
-    "MesoscopeExperimentDescriptor",
-    "replace_root_path",
-    "transfer_directory",
-    "calculate_directory_checksum",
-]
sl_shared_assets/cli.pyi DELETED
@@ -1,17 +0,0 @@
-from .server import generate_server_credentials as generate_server_credentials
-from .data_classes import replace_root_path as replace_root_path
-
-def replace_local_root_directory(path: str) -> None:
-    """Replaces the root directory used to store all lab projects on the local PC with the specified directory.
-
-    To ensure all projects are saved in the same location, this library resolves and saves the absolute path to the
-    project directory when it is used for the first time. All future projects reuse the same 'root' path. Since this
-    information is stored in a typically hidden user directory, this CLI can be used to replace the local directory
-    path, if necessary.
-    """
-
-def generate_server_credentials_file(output_directory: str, host: str, username: str, password: str) -> None:
-    """Generates a new server_credentials.yaml file under the specified directory, using input information.
-
-    This CLI is used during the initial PC setup (typically, VRPC) to allow it to access the lab BioHPC server.
-    """