sl-shared-assets 1.0.0rc8.tar.gz → 1.0.0rc10.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

Files changed (31)
  1. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/PKG-INFO +1 -1
  2. sl_shared_assets-1.0.0rc10/WIP.py +13 -0
  3. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/source/api.rst +11 -0
  4. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/pyproject.toml +2 -1
  5. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/__init__.py +0 -36
  6. sl_shared_assets-1.0.0rc10/src/sl_shared_assets/ascension_tools.py +279 -0
  7. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/cli.py +43 -3
  8. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/data_classes.py +178 -144
  9. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/packaging_tools.py +1 -0
  10. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/__init__.pyi +0 -87
  11. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/cli.pyi +0 -17
  12. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/data_classes.pyi +0 -667
  13. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/packaging_tools.pyi +0 -52
  14. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/server.pyi +0 -112
  15. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/suite2p.pyi +0 -188
  16. sl_shared_assets-1.0.0rc8/src/sl_shared_assets/transfer_tools.pyi +0 -53
  17. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/.gitignore +0 -0
  18. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/LICENSE +0 -0
  19. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/README.md +0 -0
  20. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/Makefile +0 -0
  21. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/make.bat +0 -0
  22. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/source/conf.py +0 -0
  23. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/source/index.rst +0 -0
  24. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/docs/source/welcome.rst +0 -0
  25. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/envs/slsa_dev_lin.yml +0 -0
  26. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/envs/slsa_dev_lin_spec.txt +0 -0
  27. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/py.typed +0 -0
  28. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/server.py +0 -0
  29. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/suite2p.py +0 -0
  30. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/src/sl_shared_assets/transfer_tools.py +0 -0
  31. {sl_shared_assets-1.0.0rc8 → sl_shared_assets-1.0.0rc10}/tox.ini +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 1.0.0rc8
+ Version: 1.0.0rc10
  Summary: Stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
WIP.py (new file)
@@ -0,0 +1,13 @@
+ from pathlib import Path
+
+ from sl_shared_assets import SessionData, transfer_directory
+
+ root_folder = Path("/media/Data/Experiments/Tyche")
+
+ raw_data_paths = [folder for folder in root_folder.rglob("session_data.yaml")]
+
+ for source in raw_data_paths:
+     sd = SessionData.load(session_path=source.parents[1], on_server=False)
+     transfer_directory(
+         source=sd.raw_data.raw_data_path, destination=sd.destinations.server_raw_data_path, verify_integrity=False
+     )
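For reference, WIP.py pushes each session's raw data to the server without integrity verification. Below is a minimal sketch of the same loop with an explicit checksum pass, mirroring the calculate_directory_checksum → transfer_directory order used by ascend_tyche_data later in this diff; the root path is a placeholder, and treating verify_integrity=True as re-verifying the copy is an assumption.

# Sketch only; not part of the release. The root path is a placeholder.
from pathlib import Path

from sl_shared_assets import SessionData, calculate_directory_checksum, transfer_directory

root_folder = Path("/media/Data/Experiments/Tyche")  # placeholder root

for source in root_folder.rglob("session_data.yaml"):
    sd = SessionData.load(session_path=source.parents[1], on_server=False)
    # Re-hash the session folder before moving it, as ascend_tyche_data does.
    calculate_directory_checksum(directory=Path(sd.raw_data.raw_data_path))
    transfer_directory(
        source=Path(sd.raw_data.raw_data_path),
        destination=Path(sd.destinations.server_raw_data_path),
        verify_integrity=True,  # assumption: verifies the copy against the checksum
    )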
docs/source/api.rst
@@ -17,6 +17,10 @@ Command Line Interfaces
     :prog: sl-generate-credentials
     :nested: full

+ .. click:: sl_shared_assets.cli:ascend_tyche_directory
+    :prog: sl-ascend
+    :nested: full
+
  Packaging Tools
  ===============
  .. automodule:: sl_shared_assets.packaging_tools
@@ -48,6 +52,13 @@ General Configuration and Data Storage Classes
  Compute Server Tools
  ====================
  .. automodule:: sl_shared_assets.server
+    :members:
+    :undoc-members:
+    :show-inheritance:
+
+ Ascension Tools
+ ===============
+ .. automodule:: sl_shared_assets.ascension_tools
     :members:
     :undoc-members:
     :show-inheritance:
pyproject.toml
@@ -8,7 +8,7 @@ build-backend = "hatchling.build"
  # Project metdata section. Provides the genral ID information about the project.
  [project]
  name = "sl-shared-assets"
- version = "1.0.0rc8"
+ version = "1.0.0rc10"
  description = "Stores assets shared between multiple Sun (NeuroAI) lab data pipelines."
  readme = "README.md"
  license = { file = "LICENSE" }
@@ -126,6 +126,7 @@ dev = [
  [project.scripts]
  sl-replace-root = "sl_shared_assets.cli:replace_local_root_directory"
  sl-generate-credentials = "sl_shared_assets.cli:generate_server_credentials_file"
+ sl-ascend = "sl_shared_assets.cli:ascend_tyche_directory"

  # Specifies files that should not be included in the source-code distribution but are also not part of gitignore.
  [tool.hatch.build.targets.sdist]
src/sl_shared_assets/__init__.py
@@ -7,33 +7,16 @@ Authors: Ivan Kondratyev (Inkaros), Kushaan Gupta, Yuantao Deng

  from .server import Server, ServerCredentials
  from .suite2p import (
-     Main,
-     FileIO,
-     Output,
-     Channel2,
-     NonRigid,
-     ROIDetection,
-     Registration,
-     Classification,
-     OnePRegistration,
-     SignalExtraction,
-     CellposeDetection,
-     SpikeDeconvolution,
      Suite2PConfiguration,
  )
  from .data_classes import (
-     RawData,
      DrugData,
      ImplantData,
      SessionData,
      SubjectData,
      SurgeryData,
-     Destinations,
      InjectionData,
-     MesoscopeData,
      ProcedureData,
-     ProcessedData,
-     PersistentData,
      ZaberPositions,
      ExperimentState,
      MesoscopePositions,
@@ -43,7 +26,6 @@ from .data_classes import (
      LickTrainingDescriptor,
      ExperimentConfiguration,
      MesoscopeExperimentDescriptor,
-     replace_root_path,
  )
  from .transfer_tools import transfer_directory
  from .packaging_tools import calculate_directory_checksum
@@ -53,32 +35,15 @@ __all__ = [
      "Server",
      "ServerCredentials",
      # Suite2p module
-     "Main",
-     "FileIO",
-     "Output",
-     "Channel2",
-     "NonRigid",
-     "ROIDetection",
-     "Registration",
-     "Classification",
-     "OnePRegistration",
-     "SignalExtraction",
-     "CellposeDetection",
-     "SpikeDeconvolution",
      "Suite2PConfiguration",
      # Data classes module
-     "RawData",
      "DrugData",
      "ImplantData",
      "SessionData",
      "SubjectData",
      "SurgeryData",
-     "Destinations",
      "InjectionData",
-     "MesoscopeData",
      "ProcedureData",
-     "ProcessedData",
-     "PersistentData",
      "ZaberPositions",
      "ExperimentState",
      "MesoscopePositions",
@@ -88,7 +53,6 @@ __all__ = [
      "LickTrainingDescriptor",
      "ExperimentConfiguration",
      "MesoscopeExperimentDescriptor",
-     "replace_root_path",
      # Transfer tools module
      "transfer_directory",
      # Packaging tools module
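Downstream code that imported the removed names from the package root must be updated for rc10. A hedged sketch of the import-level impact; the continued availability of replace_root_path inside the data_classes submodule is inferred from cli.py later in this diff, which still imports it from there.

# Still exported from the package root in rc10:
from sl_shared_assets import SessionData, Suite2PConfiguration, transfer_directory

# Removed from the root namespace in rc10 (RawData, Destinations, MesoscopeData,
# ProcessedData, PersistentData, replace_root_path, and the per-section suite2p
# classes). Submodule imports are assumed to keep working:
from sl_shared_assets.data_classes import replace_root_path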
src/sl_shared_assets/ascension_tools.py (new file)
@@ -0,0 +1,279 @@
+ """This module provides tools for translating ('ascending') old Tyche data to use the modern data structure used in the
+ Sun lab. The tools from this module will not work for any other data and also assume that the Tyche data has been
+ preprocessed with an early version of the Sun lab mesoscope processing pipeline."""
+
+ from pathlib import Path
+ import datetime
+ import tempfile
+
+ import numpy as np
+ from ataraxis_base_utilities import LogLevel, console
+ from ataraxis_time.time_helpers import extract_timestamp_from_bytes
+
+ from .data_classes import SessionData, ProjectConfiguration
+ from .transfer_tools import transfer_directory
+ from .packaging_tools import calculate_directory_checksum
+
+ # Ensures the console is enabled when this file is imported
+ if not console.enabled:
+     console.enable()
+
+
+ def _generate_session_name(acquisition_path: Path) -> str:
+     """Generates a session name using the last modification time of a zstack.mat or MotionEstimator.me file.
+
+     This worker function uses one of the motion estimation files stored in each Tyche 'acquisition' subfolder to
+     generate a modern Sun lab timestamp-based session name. This is used to translate the original Tyche session naming
+     pattern into the pattern used by all modern Sun lab projects and pipelines.
+
+     Args:
+         acquisition_path: The absolute path to the target acquisition folder. These folders are found under the 'day'
+             folders for each animal, e.g.: Tyche-A7/2022_01_03/1.
+
+     Returns:
+         The modernized session name.
+     """
+
+     # All well-formed sessions are expected to contain both the zstack.mat and the MotionEstimator.me file.
+     # We use the last modification time from one of these files to infer when the session was carried out. This allows
+     # us to gather the time information, which is missing from the original session naming pattern.
+     source: Path
+     if acquisition_path.joinpath("zstack.mat").exists():
+         source = acquisition_path.joinpath("zstack.mat")
+     elif acquisition_path.joinpath("MotionEstimator.me").exists():
+         source = acquisition_path.joinpath("MotionEstimator.me")
+     else:
+         message = (
+             f"Unable to find zstack.mat or MotionEstimator.me file in the target acquisition subfolder "
+             f"{acquisition_path} of the session {acquisition_path.parent}. Manual intervention is required to ascend "
+             f"the target session folder to the latest Sun lab data format."
+         )
+         console.error(message=message, error=FileNotFoundError)
+         raise FileNotFoundError(message)  # Fall-back to appease mypy
+
+     # Gets last modified time (available on all platforms) and converts it to a UTC timestamp object.
+     mod_time = source.stat().st_mtime
+     mod_datetime = datetime.datetime.fromtimestamp(mod_time)
+
+     # Converts the timestamp to microseconds as uint64, then to an array of 8 uint8 bytes. The array is then reformatted
+     # to match the session name pattern used in the modern Sun lab data pipelines.
+     timestamp_microseconds = np.uint64(int(mod_datetime.timestamp() * 1_000_000))
+     timestamp_bytes = np.array([(timestamp_microseconds >> (8 * i)) & 0xFF for i in range(8)], dtype=np.uint8)
+     stamp = extract_timestamp_from_bytes(timestamp_bytes=timestamp_bytes)
+
+     # Returns the generated session name to caller.
+     return stamp
+
+
+ def _reorganize_data(session_data: SessionData, source_root: Path) -> bool:
+     """Reorganizes and moves the session's data from the source folder in the old Tyche data hierarchy to the raw_data
+     folder in the newly created modern hierarchy.
+
+     This worker function is used to physically rearrange the data from the original Tyche data structure to the
+     new data structure. It both moves the existing files to their new destinations and renames certain files to match
+     the modern naming convention used in the Sun lab.
+
+     Args:
+         session_data: The initialized SessionData instance managing the 'ascended' (modernized) session data hierarchy.
+         source_root: The absolute path to the old Tyche data hierarchy folder that stores the session's data.
+
+     Returns:
+         True if the ascension process was successfully completed. False if the process encountered missing data or
+         otherwise did not go as expected. When the method returns False, the runtime function requests user intervention
+         to finalize the process manually.
+     """
+
+     # Resolves expected data targets:
+
+     # These files should be present in all well-formed session data folders. While not all session folders are
+     # well-formed, we will likely exclude any non-well-formed folders from processing.
+     zstack_path = source_root.joinpath("zstack.mat")
+     motion_estimator_path = source_root.joinpath("MotionEstimator.me")
+     ops_path = source_root.joinpath("ops.json")
+     mesoscope_frames_path = source_root.joinpath("mesoscope_frames")
+     ax_checksum_path = source_root.joinpath("ax_checksum.txt")
+
+     # These two file types are present for some, but not all folders. They are not as important as the group of files
+     # above though, as, currently, the data stored in these files is not used during processing.
+     frame_metadata_path = source_root.joinpath("frame_metadata.npz")
+     metadata_path = source_root.joinpath("metadata.json")
+
+     # This tracker is used to mark the session for manual intervention if any expected data is missing from the source
+     # session folder. At the end of this function's runtime, it determines whether the function returns True or False.
+     data_missing = False
+
+     # First, moves the mesoscope TIFF stacks to the newly created session data hierarchy as the mesoscope_data subfolder
+     if mesoscope_frames_path.exists():
+         mesoscope_frames_path.rename(session_data.raw_data.mesoscope_data_path)
+     else:
+         data_missing = True
+
+     # Then, moves 'loose' mesoscope-related data files to the mesoscope_data folder.
+     if zstack_path.exists():
+         zstack_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("zstack.mat"))
+     else:
+         data_missing = True
+
+     if motion_estimator_path.exists():
+         motion_estimator_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("MotionEstimator.me"))
+     else:
+         data_missing = True
+
+     if ops_path.exists():
+         ops_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("ops.json"))
+     else:
+         data_missing = True
+
+     # If variant and invariant metadata files exist, also moves them to the mesoscope data folder and renames the
+     # files to use the latest naming convention. Missing any of these files is not considered a user-intervention-worthy
+     # situation.
+     if frame_metadata_path.exists():
+         frame_metadata_path.rename(
+             Path(session_data.raw_data.mesoscope_data_path).joinpath("frame_variant_metadata.npz")
+         )
+     if metadata_path.exists():
+         metadata_path.rename(Path(session_data.raw_data.mesoscope_data_path).joinpath("frame_invariant_metadata.json"))
+
+     # Loops over all camera video files (using the .avi extension) and moves them to the camera_data folder.
+     videos_found = 0
+     for video in source_root.glob("*.avi"):
+         videos_found += 1
+         video.rename(Path(session_data.raw_data.camera_data_path).joinpath(video.name))
+     if videos_found == 0:
+         data_missing = True
+
+     # Loops over all behavior log files (old GIMBL format) and moves them to the behavior_data folder.
+     logs_found = 0
+     for log in source_root.glob("Log Tyche-* ????-??-?? session *.json"):
+         logs_found += 1
+         log.rename(Path(session_data.raw_data.behavior_data_path).joinpath(log.name))
+     if logs_found == 0:
+         data_missing = True
+
+     # Removes the checksum file if it exists. Due to file name and location changes, the session data folder has to
+     # be re-checksummed after the reorganization anyway, so there is no need to keep the original file.
+     ax_checksum_path.unlink(missing_ok=True)
+
+     # Loops over all remaining contents of the directory.
+     for path in source_root.glob("*"):
+         # At this point, there should be no more subfolders left inside the root directory. If there are more
+         # subfolders, this case requires user intervention
+         if path.is_dir():
+             data_missing = True
+
+         # All non-subfolder files are moved to the root raw_data directory of the newly created session.
+         else:
+             path.rename(Path(session_data.raw_data.raw_data_path).joinpath(path.name))
+
+     # Session data has been fully reorganized. Depending on whether there was any missing data during processing,
+     # returns the boolean flag for whether user intervention is required
+     if data_missing:
+         return False
+     else:
+         return True
+
+
+ def ascend_tyche_data(root_directory: Path, output_root_directory: Path, server_root_directory: Path) -> None:
+     """Reformats the old Tyche data to use the modern Sun lab layout and metadata files.
+
+     This function is used to convert old Tyche data to the modern data management standard. This is used to make the
+     data compatible with the modern Sun lab data workflows.
+
+     Notes:
+         This function is statically written to work with the raw Tyche dataset featured in the OSM manuscript:
+         https://www.nature.com/articles/s41586-024-08548-w. Additionally, it assumes that the dataset has been
+         preprocessed with the early Sun lab mesoscope compression pipeline. The function will not work for any other
+         project or data hierarchy.
+
+         As part of its runtime, the function automatically transfers the ascended session data to the BioHPC server.
+         Since transferring the data over the network is the bottleneck of this pipeline, it runs in a single-threaded
+         mode and is constrained by the communication channel between the local machine and the BioHPC server. Calling
+         this function for a large number of sessions will result in a long processing time due to the network data
+         transfer.
+
+     Args:
+         root_directory: The directory that stores one or more Tyche animal folders. This can be conceptualized as the
+             root directory for the Tyche project.
+         output_root_directory: The path to the local directory where to generate the converted Tyche project hierarchy.
+             Typically, this is the 'root' directory where all other Sun lab projects are stored.
+         server_root_directory: The path to the local filesystem-mounted BioHPC server storage directory. Note, this
+             directory has to be mapped to the local filesystem via the SMB or equivalent protocol.
+     """
+     # Generates a (shared) project configuration file.
+     project_configuration = ProjectConfiguration()
+
+     # Generates a temporary directory for NAS and Mesoscope paths. Since Tyche data is already backed up on the NAS and
+     # we are not generating new data, these root paths are not needed, but have to be created as part of the pipeline.
+     # Redirecting them to local temporary directories allows avoiding extra steps to manually remove these redundant
+     # directories after runtime.
+     temp_nas_dir = Path(tempfile.mkdtemp(prefix="nas_temp_"))
+     temp_mesoscope_dir = Path(tempfile.mkdtemp(prefix="mesoscope_temp_"))
+
+     # Statically defines project name and local root paths
+     project_configuration.project_name = "Tyche"
+     project_configuration.local_root_directory = output_root_directory
+     project_configuration.local_server_directory = server_root_directory
+     project_configuration.local_nas_directory = temp_nas_dir
+     project_configuration.local_mesoscope_directory = temp_mesoscope_dir
+
+     # Uses nonsensical google sheet IDs. The Tyche project did not use Google Sheet processing like our modern projects do.
+     project_configuration.water_log_sheet_id = "1xFh9Q2zT7pL3mVkJdR8bN6yXoE4wS5aG0cHu2Kf7D3v"
+     project_configuration.surgery_sheet_id = "1xFh9Q2zT7pL3mVkJdR8bN6yXoE4wS5aG0cHu2Kf7D3v"
+
+     # Dumps project configuration into the 'configuration' subfolder of the Tyche project.
+     configuration_path = output_root_directory.joinpath("Tyche", "configuration", "project_configuration.yaml")
+     project_configuration.save(path=configuration_path)
+
+     # Assumes that the root directory stores all animal folders to be processed
+     for animal_folder in root_directory.iterdir():
+         # Each animal folder is named to include the project name and a static animal ID, e.g.: Tyche-A7. This extracts
+         # each animal ID.
+         animal_name = animal_folder.name.split(sep="-")[1]
+
+         # Under each animal root folder, there are day folders that use YYYY-MM-DD timestamps
+         for session_folder in animal_folder.iterdir():
+             # Inside each day folder, there are one or more acquisitions (sessions)
+             for acquisition_folder in session_folder.iterdir():
+                 # For each session, we extract the modification time from either (preferentially) the zstack.mat or
+                 # MotionEstimator.me file. Any session without these files is flagged for additional user intervention.
+                 # This procedure generates timestamp-based session names, analogous to how our modern pipeline does it.
+                 session_name = _generate_session_name(acquisition_path=acquisition_folder)
+
+                 # Uses the derived session name and the statically created project configuration file to create the
+                 # session data hierarchy using the output root. This generates a 'standard' Sun lab directory structure
+                 # for the Tyche data.
+                 session_data = SessionData.create(
+                     session_name=session_name,
+                     animal_id=animal_name,
+                     project_configuration=project_configuration,
+                     session_type="Experiment",
+                     experiment_name=None,  # Has to be None, otherwise the system tries to copy a configuration file.
+                 )
+
+                 # Moves the data from the old hierarchy to the new hierarchy. If the process runs as expected, and
+                 # fully empties the source acquisition folder, destroys the folder. Otherwise, notifies the user that
+                 # the runtime did not fully process the session data and requests intervention.
+                 success = _reorganize_data(session_data, acquisition_folder)
+                 if not success:
+                     message = (
+                         f"Encountered issues when reorganizing {animal_name} session {session_name}. "
+                         f"User intervention is required to finish the data reorganization process for this session."
+                     )
+                     # noinspection PyTypeChecker
+                     console.echo(message=message, level=LogLevel.WARNING)
+                 else:
+                     # If the transfer process was successful, generates a new checksum for the moved data
+                     calculate_directory_checksum(directory=Path(session_data.raw_data.raw_data_path))
+                     # Next, copies the data to the BioHPC server for further processing
+                     transfer_directory(
+                         source=Path(session_data.raw_data.raw_data_path),
+                         destination=Path(session_data.destinations.server_raw_data_path),
+                         verify_integrity=False,
+                     )
+                     # Finally, removes the now-empty old session data directory.
+                     acquisition_folder.rmdir()
+
+             # If the loop above removed all acquisition folders, all data for that day has been successfully converted
+             # to use the new session format. Removes the now-empty 'day' folder from the target animal
+             if len([folder for folder in session_folder.iterdir()]) == 0:
+                 session_folder.rmdir()
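_generate_session_name packs the file modification time, in microseconds since the Unix epoch, into eight little-endian bytes before handing them to extract_timestamp_from_bytes. A minimal worked sketch of that packing using only the standard library; the sample datetime is arbitrary, and the exact string format returned by extract_timestamp_from_bytes is not shown in this diff.

import datetime

# Arbitrary sample acquisition time.
mod_datetime = datetime.datetime(2022, 1, 3, 14, 30, 15, 123456)

# The quantity the module computes with numpy: microseconds since the Unix epoch.
timestamp_microseconds = int(mod_datetime.timestamp() * 1_000_000)

# (value >> (8 * i)) & 0xFF yields byte i of the little-endian encoding, so
# int.to_bytes reproduces the module's 8-byte payload exactly.
timestamp_bytes = timestamp_microseconds.to_bytes(8, byteorder="little")

# Round-trip check: reassembling the bytes recovers the microsecond count.
assert int.from_bytes(timestamp_bytes, byteorder="little") == timestamp_microseconds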
src/sl_shared_assets/cli.py
@@ -1,6 +1,4 @@
- """This module stores the Command-Line Interfaces (CLIs) exposes by the library as part of the installation process.
- Primarily, these CLIs are used when setting up or reconfiguring the VRPC and other machines in the lab to work with
- sl-experiment and sl-forgery libraries."""
+ """This module stores the Command-Line Interfaces (CLIs) exposed by the library as part of the installation process."""

  from pathlib import Path

@@ -8,6 +6,7 @@ import click

  from .server import generate_server_credentials
  from .data_classes import replace_root_path
+ from .ascension_tools import ascend_tyche_data


  @click.command()
@@ -70,3 +69,44 @@ def generate_server_credentials_file(output_directory: str, host: str, username:
      generate_server_credentials(
          output_directory=Path(output_directory), username=username, password=password, host=host
      )
+
+
+ @click.command()
+ @click.option(
+     "-p",
+     "--path",
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+     required=True,
+     prompt="Enter the absolute path to the directory that stores original Tyche animal folders: ",
+     help="The absolute path to the directory that stores original Tyche animal folders.",
+ )
+ @click.option(
+     "-o",
+     "--output_directory",
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+     required=True,
+     prompt="Enter the absolute path to the local directory where to create the ascended Tyche project hierarchy: ",
+     help="The absolute path to the local directory where to create the ascended Tyche project hierarchy.",
+ )
+ @click.option(
+     "-s",
+     "--server_directory",
+     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+     required=True,
+     prompt=(
+         "Enter the path to the SMB-mounted BioHPC server directory where to create the ascended Tyche project "
+         "hierarchy: "
+     ),
+     help="The path to the SMB-mounted BioHPC server directory where to create the ascended Tyche project hierarchy.",
+ )
+ def ascend_tyche_directory(path: str, output_directory: str, server_directory: str) -> None:
+     """Restructures old Tyche project data to use the modern Sun lab data structure.
+
+     This CLI is used to convert ('ascend') the old Tyche project data to the modern Sun lab structure. After
+     ascension, the data can be processed and analyzed using all modern Sun lab (sl-) tools and libraries.
+     """
+     ascend_tyche_data(
+         root_directory=Path(path),
+         output_root_directory=Path(output_directory),
+         server_root_directory=Path(server_directory),
+     )
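Once the package is installed, the [project.scripts] entry added in pyproject.toml exposes this command as sl-ascend. The command simply forwards its three path options to ascend_tyche_data, so the equivalent direct call looks like the sketch below; all three paths are hypothetical placeholders.

# Sketch only; the paths are placeholders for an actual deployment.
from pathlib import Path

from sl_shared_assets.ascension_tools import ascend_tyche_data

ascend_tyche_data(
    root_directory=Path("/media/Data/TycheOriginal"),       # old Tyche animal folders
    output_root_directory=Path("/media/Data/Experiments"),  # local Sun lab projects root
    server_root_directory=Path("/mnt/biohpc"),              # SMB-mounted BioHPC storage
)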