sl-shared-assets 2.0.1__py3-none-any.whl → 3.0.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

Files changed (32)
  1. sl_shared_assets/__init__.py +17 -9
  2. sl_shared_assets/__init__.pyi +12 -8
  3. sl_shared_assets/cli.py +258 -21
  4. sl_shared_assets/cli.pyi +44 -5
  5. sl_shared_assets/data_classes/__init__.py +8 -3
  6. sl_shared_assets/data_classes/__init__.pyi +8 -4
  7. sl_shared_assets/data_classes/configuration_data.py +149 -30
  8. sl_shared_assets/data_classes/configuration_data.pyi +49 -11
  9. sl_shared_assets/data_classes/runtime_data.py +70 -49
  10. sl_shared_assets/data_classes/runtime_data.pyi +41 -33
  11. sl_shared_assets/data_classes/session_data.py +193 -253
  12. sl_shared_assets/data_classes/session_data.pyi +99 -116
  13. sl_shared_assets/data_classes/surgery_data.py +1 -1
  14. sl_shared_assets/server/__init__.py +2 -2
  15. sl_shared_assets/server/__init__.pyi +5 -2
  16. sl_shared_assets/server/job.py +229 -1
  17. sl_shared_assets/server/job.pyi +111 -0
  18. sl_shared_assets/server/server.py +431 -31
  19. sl_shared_assets/server/server.pyi +158 -15
  20. sl_shared_assets/tools/__init__.py +2 -1
  21. sl_shared_assets/tools/__init__.pyi +2 -0
  22. sl_shared_assets/tools/ascension_tools.py +9 -21
  23. sl_shared_assets/tools/ascension_tools.pyi +1 -1
  24. sl_shared_assets/tools/packaging_tools.py +2 -2
  25. sl_shared_assets/tools/project_management_tools.py +147 -41
  26. sl_shared_assets/tools/project_management_tools.pyi +45 -6
  27. {sl_shared_assets-2.0.1.dist-info → sl_shared_assets-3.0.0.dist-info}/METADATA +127 -13
  28. sl_shared_assets-3.0.0.dist-info/RECORD +36 -0
  29. {sl_shared_assets-2.0.1.dist-info → sl_shared_assets-3.0.0.dist-info}/entry_points.txt +2 -0
  30. sl_shared_assets-2.0.1.dist-info/RECORD +0 -36
  31. {sl_shared_assets-2.0.1.dist-info → sl_shared_assets-3.0.0.dist-info}/WHEEL +0 -0
  32. {sl_shared_assets-2.0.1.dist-info → sl_shared_assets-3.0.0.dist-info}/licenses/LICENSE +0 -0
sl_shared_assets/__init__.py CHANGED
@@ -2,13 +2,17 @@
 
 See https://github.com/Sun-Lab-NBB/sl-shared-assets for more details.
 API documentation: https://sl-shared-assets-api-docs.netlify.app/
-Authors: Ivan Kondratyev (Inkaros), Kushaan Gupta, Yuantao Deng, Natalie Yeung
+Authors: Ivan Kondratyev (Inkaros), Kushaan Gupta, Natalie Yeung
 """
 
 from ataraxis_base_utilities import console
 
-from .tools import transfer_directory, verify_session_checksum, generate_project_manifest, calculate_directory_checksum
-from .server import Job, Server, ServerCredentials
+from .tools import (
+    resolve_p53_marker,
+    transfer_directory,
+    calculate_directory_checksum,
+)
+from .server import Job, Server, JupyterJob, ServerCredentials
 from .data_classes import (
     RawData,
     DrugData,
@@ -16,20 +20,22 @@ from .data_classes import (
     SessionData,
     SubjectData,
     SurgeryData,
-    VersionData,
+    SessionTypes,
     InjectionData,
     ProcedureData,
     ProcessedData,
     MesoscopePaths,
     ZaberPositions,
     ExperimentState,
+    ExperimentTrial,
     MesoscopeCameras,
     ProcessingTracker,
+    AcquisitionSystems,
     MesoscopePositions,
-    ProjectConfiguration,
     RunTrainingDescriptor,
     LickTrainingDescriptor,
     MesoscopeHardwareState,
+    WindowCheckingDescriptor,
     MesoscopeMicroControllers,
     MesoscopeAdditionalFirmware,
     MesoscopeSystemConfiguration,
@@ -48,12 +54,12 @@ __all__ = [
     "Server",
     "ServerCredentials",
     "Job",
+    "JupyterJob",
     # Data classes package
     "DrugData",
     "ImplantData",
     "SessionData",
     "RawData",
-    "VersionData",
     "ProcessedData",
     "SubjectData",
     "SurgeryData",
@@ -63,7 +69,6 @@ __all__ = [
     "ZaberPositions",
     "ExperimentState",
    "MesoscopePositions",
-    "ProjectConfiguration",
     "MesoscopeHardwareState",
     "RunTrainingDescriptor",
     "LickTrainingDescriptor",
@@ -76,9 +81,12 @@ __all__ = [
     "MesoscopeAdditionalFirmware",
     "get_system_configuration_data",
     "set_system_configuration_file",
+    "ExperimentTrial",
+    "SessionTypes",
+    "AcquisitionSystems",
+    "WindowCheckingDescriptor",
     # Tools package
+    "resolve_p53_marker",
     "transfer_directory",
-    "generate_project_manifest",
-    "verify_session_checksum",
     "calculate_directory_checksum",
 ]
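
Taken together, these hunks change the package's top-level import surface. Below is a minimal migration sketch for downstream code, using only names visible in the removed and added lines above; note that `verify_session_checksum` and `generate_project_manifest` are still importable from `sl_shared_assets.tools` (cli.py below still imports them from there), they are just no longer re-exported at the package root. Whether `SessionTypes` is a functional replacement for `VersionData` is not stated in this diff.

    # sl-shared-assets 2.0.1: these root-level imports no longer work in 3.0.0.
    # from sl_shared_assets import VersionData, ProjectConfiguration
    # from sl_shared_assets import verify_session_checksum, generate_project_manifest

    # sl-shared-assets 3.0.0: new root-level exports added by this diff.
    from sl_shared_assets import (
        JupyterJob,                # new server export
        SessionTypes,              # new data-classes export
        ExperimentTrial,           # new data-classes export
        AcquisitionSystems,        # new data-classes export
        WindowCheckingDescriptor,  # new data-classes export
        resolve_p53_marker,        # new tools export
    )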
sl_shared_assets/__init__.pyi CHANGED
@@ -1,12 +1,12 @@
 from .tools import (
+    resolve_p53_marker as resolve_p53_marker,
     transfer_directory as transfer_directory,
-    verify_session_checksum as verify_session_checksum,
-    generate_project_manifest as generate_project_manifest,
     calculate_directory_checksum as calculate_directory_checksum,
 )
 from .server import (
     Job as Job,
     Server as Server,
+    JupyterJob as JupyterJob,
     ServerCredentials as ServerCredentials,
 )
 from .data_classes import (
@@ -16,20 +16,22 @@ from .data_classes import (
     SessionData as SessionData,
     SubjectData as SubjectData,
     SurgeryData as SurgeryData,
-    VersionData as VersionData,
+    SessionTypes as SessionTypes,
     InjectionData as InjectionData,
     ProcedureData as ProcedureData,
     ProcessedData as ProcessedData,
     MesoscopePaths as MesoscopePaths,
     ZaberPositions as ZaberPositions,
     ExperimentState as ExperimentState,
+    ExperimentTrial as ExperimentTrial,
     MesoscopeCameras as MesoscopeCameras,
     ProcessingTracker as ProcessingTracker,
+    AcquisitionSystems as AcquisitionSystems,
     MesoscopePositions as MesoscopePositions,
-    ProjectConfiguration as ProjectConfiguration,
     RunTrainingDescriptor as RunTrainingDescriptor,
     LickTrainingDescriptor as LickTrainingDescriptor,
     MesoscopeHardwareState as MesoscopeHardwareState,
+    WindowCheckingDescriptor as WindowCheckingDescriptor,
     MesoscopeMicroControllers as MesoscopeMicroControllers,
     MesoscopeAdditionalFirmware as MesoscopeAdditionalFirmware,
     MesoscopeSystemConfiguration as MesoscopeSystemConfiguration,
@@ -43,11 +45,11 @@ __all__ = [
     "Server",
     "ServerCredentials",
     "Job",
+    "JupyterJob",
     "DrugData",
     "ImplantData",
     "SessionData",
     "RawData",
-    "VersionData",
     "ProcessedData",
     "SubjectData",
     "SurgeryData",
@@ -57,7 +59,6 @@ __all__ = [
     "ZaberPositions",
     "ExperimentState",
     "MesoscopePositions",
-    "ProjectConfiguration",
     "MesoscopeHardwareState",
     "RunTrainingDescriptor",
     "LickTrainingDescriptor",
@@ -70,8 +71,11 @@ __all__ = [
     "MesoscopeAdditionalFirmware",
     "get_system_configuration_data",
     "set_system_configuration_file",
+    "ExperimentTrial",
+    "SessionTypes",
+    "AcquisitionSystems",
+    "WindowCheckingDescriptor",
+    "resolve_p53_marker",
     "transfer_directory",
-    "generate_project_manifest",
-    "verify_session_checksum",
     "calculate_directory_checksum",
 ]
sl_shared_assets/cli.py CHANGED
@@ -3,10 +3,10 @@
 from pathlib import Path
 
 import click
-from ataraxis_base_utilities import LogLevel, console
+from ataraxis_base_utilities import LogLevel, console, ensure_directory_exists
 
-from .tools import ascend_tyche_data, verify_session_checksum, generate_project_manifest
-from .server import generate_server_credentials
+from .tools import ascend_tyche_data, resolve_p53_marker, verify_session_checksum, generate_project_manifest
+from .server import Server, JupyterJob, generate_server_credentials
 from .data_classes import SessionData, ProcessingTracker
 
 
@@ -16,7 +16,7 @@ from .data_classes import SessionData, ProcessingTracker
     "--session_path",
     type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
-    help="The absolute path to the session whose raw data needs to be verified for potential corruption.",
+    help="The absolute path to the session directory whose raw data needs to be verified for potential corruption.",
 )
 @click.option(
     "-c",
@@ -43,7 +43,9 @@ from .data_classes import SessionData, ProcessingTracker
         "used if 'create_processed_directories' flag is True."
     ),
 )
-def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
+def verify_session_integrity(
+    session_path: Path, create_processed_directories: bool, processed_data_root: Path | None
+) -> None:
     """Checks the integrity of the target session's raw data (contents of the raw_data directory).
 
     This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -101,7 +103,7 @@
     ),
 )
 def generate_project_manifest_file(
-    project_path: str, output_directory: str, project_processed_path: str | None
+    project_path: Path, output_directory: Path, project_processed_path: Path | None
 ) -> None:
     """Generates the manifest .feather file that provides information about the data-processing state of all available
     project sessions.
@@ -123,7 +125,7 @@
 @click.option(
     "-od",
     "--output_directory",
-    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    type=click.Path(exists=False, file_okay=False, dir_okay=True, path_type=Path),
     required=True,
     help="The absolute path to the directory where to store the generated server credentials file.",
 )
@@ -151,29 +153,50 @@ def generate_project_manifest_file(
     help="The password to use for server authentication.",
 )
 @click.option(
-    "-rdp",
-    "--raw_data_path",
+    "-sr",
+    "--storage_root",
     type=str,
     required=True,
-    default="/storage/sun_data",
+    show_default=True,
+    default="/local/storage",
     help=(
-        "The absolute path to the directory used to store raw data from all Sun lab projects, relative to the server "
-        "root."
+        "The absolute path to the root storage (slow) server directory. Typically, this is the path to the "
+        "top-level (root) directory of the HDD RAID volume."
     ),
 )
 @click.option(
-    "-pdp",
-    "--processed_data_path",
+    "-wr",
+    "--working_root",
     type=str,
     required=True,
-    default="/workdir/sun_data",
+    show_default=True,
+    default="/local/workdir",
     help=(
-        "The absolute path to the directory used to store processed data from all Sun lab projects, relative to the "
-        "server root."
+        "The absolute path to the root working (fast) server directory. Typically, this is the path to the top-level "
+        "(root) directory of the NVME RAID volume. If the server uses the same volume for both storage and working "
+        "directories, enter the same path under both 'storage_root' and 'working_root'."
+    ),
+)
+@click.option(
+    "-sdn",
+    "--shared_directory_name",
+    type=str,
+    required=True,
+    show_default=True,
+    default="sun_data",
+    help=(
+        "The name of the shared directory used to store all Sun lab project data on the storage and working server "
+        "volumes."
     ),
 )
 def generate_server_credentials_file(
-    output_directory: str, host: str, username: str, password: str, raw_data_path: str, processed_data_path: str
+    output_directory: Path,
+    host: str,
+    username: str,
+    password: str,
+    storage_root: str,
+    working_root: str,
+    shared_directory_name: str,
 ) -> None:
     """Generates a new server_credentials.yaml file under the specified directory, using input information.
@@ -181,13 +204,19 @@ def generate_server_credentials_file(
     the server_credentials.yaml file generated by this command is used by the Server and Job classes used in many Sun
     lab data processing libraries.
     """
+
+    # If necessary, generates the output directory hierarchy before creating the credentials file.
+    ensure_directory_exists(output_directory)
+
+    # Generates the credentials file
     generate_server_credentials(
         output_directory=Path(output_directory),
         username=username,
         password=password,
         host=host,
-        raw_data_root=raw_data_path,
-        processed_data_root=processed_data_path,
+        storage_root=storage_root,
+        working_root=working_root,
+        shared_directory_name=shared_directory_name,
     )
     message = (
         f"Server access credentials file: generated. If necessary, remember to edit the data acquisition system "
@@ -195,6 +224,9 @@ def generate_server_credentials_file(
     )
     # noinspection PyTypeChecker
     console.echo(message=message, level=LogLevel.SUCCESS)
+    message = f"File location: {output_directory}"
+    # noinspection PyTypeChecker
+    console.echo(message=message, level=LogLevel.SUCCESS)
 
 
 @click.command()
@@ -205,7 +237,7 @@ def generate_server_credentials_file(
     required=True,
     help="The absolute path to the directory that stores original Tyche animal folders.",
 )
-def ascend_tyche_directory(input_directory: str) -> None:
+def ascend_tyche_directory(input_directory: Path) -> None:
     """Restructures old Tyche project data to use the modern Sun lab data structure and uploads them to the processing
     server.
 
@@ -216,3 +248,208 @@ def ascend_tyche_directory(input_directory: str) -> None:
     valid Sun lab data acquisition system, such as VRPC of the Mesoscope-VR system.
     """
     ascend_tyche_data(root_directory=Path(input_directory))
+
+
+@click.command()
+@click.option(
+    "-cp",
+    "--credentials_path",
+    type=click.Path(exists=True, file_okay=True, dir_okay=False, path_type=Path),
+    required=True,
+    help=(
+        "The absolute path to the server_credentials.yaml file that stores access credentials for the target Sun lab "
+        "server. If necessary, use the 'sl-create-server-credentials' command to generate the file."
+    ),
+)
+@click.option(
+    "-n",
+    "--name",
+    type=str,
+    required=True,
+    show_default=True,
+    default="jupyter_server",
+    help=(
+        "The descriptive name to be given to the remote Jupyter server job. Primarily, this is used to identify the "
+        "job inside the log files."
+    ),
+)
+@click.option(
+    "-e",
+    "--environment",
+    type=str,
+    required=True,
+    help=(
+        "The name of the conda environment to use for running the Jupyter server. At a minimum, the target environment "
+        "must contain the 'jupyterlab' and the 'notebook' Python packages. Note, the user whose credentials are used "
+        "to connect to the server must have a configured conda / mamba shell that exposes the target environment for "
+        "the job to run as expected."
+    ),
+)
+@click.option(
+    "-d",
+    "--directory",
+    type=click.Path(exists=False, file_okay=True, dir_okay=True, path_type=Path),
+    required=False,
+    help=(
+        "The absolute path to the server directory to use as the root directory for the jupyter session. If not "
+        "provided, this is automatically resolved to the user's working directory. Note, during runtime, Jupyter "
+        "will only have access to files stored in or under that root directory."
+    ),
+)
+@click.option(
+    "-c",
+    "--cores",
+    type=int,
+    required=True,
+    show_default=True,
+    default=2,
+    help=(
+        "The number of CPU cores to allocate to the Jupyter server. Note, during the interactive Jupyter runtime, it "
+        "will be impossible to use more than this number of CPU cores."
+    ),
+)
+@click.option(
+    "-m",
+    "--memory",
+    type=int,
+    required=True,
+    show_default=True,
+    default=32,
+    help=(
+        "The RAM, in Gigabytes, to allocate to the Jupyter server. Note, during the interactive Jupyter runtime, it "
+        "will be impossible to use more than this amount of RAM."
+    ),
+)
+@click.option(
+    "-t",
+    "--time",
+    type=int,
+    required=True,
+    show_default=True,
+    default=240,
+    help=(
+        "The maximum runtime duration for this Jupyter server instance, in minutes. If the server job is still running "
+        "at the end of this time limit, the job will be forcibly terminated by SLURM. Note, to prevent hogging the "
+        "server, make sure this parameter is always set to the smallest feasible period of time you intend to interact "
+        "with the server."
+    ),
+)
+@click.option(
+    "-p",
+    "--port",
+    type=int,
+    required=True,
+    show_default=True,
+    default=0,
+    help=(
+        "The port to use for the Jupyter server communication on the remote server. Valid port values are from 8888 "
+        "to 9999. Most runtimes should leave this set to the default value (0), which will randomly select one of the "
+        "valid ports. Using random selection minimizes the chances of colliding with other interactive jupyter "
+        "sessions."
+    ),
+)
+def start_jupyter_server(
+    credentials_path: Path, name: str, environment: str, directory: Path, cores: int, memory: int, time: int, port: int
+) -> None:
+    """Starts an interactive Jupyter session on the remote Sun lab server.
+
+    This command should be used to run Jupyter Lab and notebook sessions on the remote Sun lab server. Since all lab
+    data is stored on the server, this allows running light interactive analysis sessions on the same node as the data,
+    while leveraging the considerable compute resources of the server.
+
+    Calling this command initializes a SLURM session that runs the interactive Jupyter server. Since this server
+    directly competes for resources with all other headless jobs running on the server, it is imperative that each
+    jupyter runtime uses only the minimum necessary amount of resources and run-time. Do not use this command to run
+    heavy data processing pipelines! Instead, consult the library documentation and use the headless Job class.
+    """
+    # Initializes server connection
+    server = Server(credentials_path)
+    job: JupyterJob | None = None
+    try:
+        # If the caller did not provide an explicit notebook directory, defaults to the user's working directory
+        if directory is None:
+            directory = server.user_working_root
+
+        # Launches the specified Jupyter server
+        job = server.launch_jupyter_server(
+            job_name=name,
+            conda_environment=environment,
+            notebook_directory=directory,
+            cpus_to_use=cores,
+            ram_gb=memory,
+            port=port,
+            time_limit=time,
+        )
+
+        # Displays the server connection details to the user via terminal
+        job.print_connection_info()
+
+        # Blocks in-place until the user shuts down the server. This allows terminating the jupyter job early if the
+        # user is done working with the server
+        input("Enter anything to shut down the server: ")
+
+    # Ensures that the server created as part of this CLI is always terminated when the CLI terminates
+    finally:
+        # Terminates the server job
+        if job is not None:
+            server.abort_job(job)
+
+        # Closes server connection if it is still open
+        server.close()
+
+
+@click.command()
+@click.option(
+    "-sp",
+    "--session_path",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=True,
+    help="The absolute path to the session directory for which to resolve the dataset integration readiness marker.",
+)
+@click.option(
+    "-c",
+    "--create_processed_directories",
+    is_flag=True,
+    show_default=True,
+    default=False,
+    help="Determines whether to create the processed data hierarchy. This flag should be disabled for most runtimes.",
+)
+@click.option(
+    "-ppp",
+    "--project_processed_path",
+    type=click.Path(exists=True, file_okay=False, dir_okay=True, path_type=Path),
+    required=False,
+    help=(
+        "The absolute path to the project directory where processed session data is stored, if different from the "
+        "directory used to store raw session data. Typically, this extra argument is only used when processing data "
+        "stored on remote compute server(s)."
+    ),
+)
+@click.option(
+    "-r",
+    "--remove",
+    is_flag=True,
+    show_default=True,
+    default=False,
+    help=(
+        "Determines whether the command should create or remove the dataset integration marker. Do not enable this "
+        "flag unless you know what you are doing. It is only safe to enable this flag if the session is not currently "
+        "being integrated into any datasets."
+    ),
+)
+def resolve_dataset_marker(
+    session_path: Path, create_processed_directories: bool, project_processed_path: Path | None, remove: bool
+) -> None:
+    """Depending on configuration, either creates or removes the p53.bin marker from the target session.
+
+    The p53.bin marker determines whether the session is ready for dataset integration. When the marker exists,
+    processing pipelines are not allowed to work with the session data, ensuring that all processed data remains
+    unchanged. If the marker does not exist, dataset integration pipelines are not allowed to work with the session
+    data, enabling processing pipelines to safely modify the data at any time.
+    """
+    resolve_p53_marker(
+        session_path=session_path,
+        create_processed_data_directory=create_processed_directories,
+        processed_data_root=project_processed_path,
+        remove=remove,
+    )
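
The launch-and-teardown flow in `start_jupyter_server` can also be driven from Python. Below is a minimal sketch using only the calls visible in this hunk; the credentials path and conda environment name are placeholders, and the resource values are the CLI defaults.

    from pathlib import Path
    from sl_shared_assets.server import Server, JupyterJob

    server = Server(Path("/home/labuser/server_credentials.yaml"))  # placeholder path
    job: JupyterJob | None = None
    try:
        # Launches a Jupyter job with the CLI's default resource envelope.
        job = server.launch_jupyter_server(
            job_name="jupyter_server",
            conda_environment="analysis",  # placeholder environment name
            notebook_directory=server.user_working_root,
            cpus_to_use=2,
            ram_gb=32,
            port=0,          # 0 selects a random valid port (8888-9999)
            time_limit=240,  # minutes; SLURM terminates the job afterwards
        )
        job.print_connection_info()
        input("Enter anything to shut down the server: ")
    finally:
        # Always tears the job down, even if the launch or the wait fails.
        if job is not None:
            server.abort_job(job)
        server.close()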
sl_shared_assets/cli.pyi CHANGED
@@ -2,16 +2,23 @@ from pathlib import Path
 
 from .tools import (
     ascend_tyche_data as ascend_tyche_data,
+    resolve_p53_marker as resolve_p53_marker,
     verify_session_checksum as verify_session_checksum,
     generate_project_manifest as generate_project_manifest,
 )
-from .server import generate_server_credentials as generate_server_credentials
+from .server import (
+    Server as Server,
+    JupyterJob as JupyterJob,
+    generate_server_credentials as generate_server_credentials,
+)
 from .data_classes import (
     SessionData as SessionData,
     ProcessingTracker as ProcessingTracker,
 )
 
-def verify_session_integrity(session_path: str, create_processed_directories: bool, processed_data_root: Path) -> None:
+def verify_session_integrity(
+    session_path: Path, create_processed_directories: bool, processed_data_root: Path | None
+) -> None:
     """Checks the integrity of the target session's raw data (contents of the raw_data directory).
     This command assumes that the data has been checksummed during acquisition and contains an ax_checksum.txt file
@@ -26,7 +33,7 @@ def verify_session_integrity(session_path: str, create_processed_directories: bo
     """
 
 def generate_project_manifest_file(
-    project_path: str, output_directory: str, project_processed_path: str | None
+    project_path: Path, output_directory: Path, project_processed_path: Path | None
 ) -> None:
     """Generates the manifest .feather file that provides information about the data-processing state of all available
     project sessions.
@@ -37,7 +44,13 @@ def generate_project_manifest_file(
     """
 
 def generate_server_credentials_file(
-    output_directory: str, host: str, username: str, password: str, raw_data_path: str, processed_data_path: str
+    output_directory: Path,
+    host: str,
+    username: str,
+    password: str,
+    storage_root: str,
+    working_root: str,
+    shared_directory_name: str,
 ) -> None:
     """Generates a new server_credentials.yaml file under the specified directory, using input information.
 
@@ -46,7 +59,7 @@ def generate_server_credentials_file(
     lab data processing libraries.
     """
 
-def ascend_tyche_directory(input_directory: str) -> None:
+def ascend_tyche_directory(input_directory: Path) -> None:
     """Restructures old Tyche project data to use the modern Sun lab data structure and uploads them to the processing
     server.
 
@@ -56,3 +69,29 @@ def ascend_tyche_directory(input_directory: str) -> None:
     will not work for any other project or data. Also, this command will only work on a machine (PC) that belongs to a
     valid Sun lab data acquisition system, such as VRPC of the Mesoscope-VR system.
     """
+
+def start_jupyter_server(
+    credentials_path: Path, name: str, environment: str, directory: Path, cores: int, memory: int, time: int, port: int
+) -> None:
+    """Starts an interactive Jupyter session on the remote Sun lab server.
+
+    This command should be used to run Jupyter Lab and notebook sessions on the remote Sun lab server. Since all lab
+    data is stored on the server, this allows running light interactive analysis sessions on the same node as the data,
+    while leveraging the considerable compute resources of the server.
+
+    Calling this command initializes a SLURM session that runs the interactive Jupyter server. Since this server
+    directly competes for resources with all other headless jobs running on the server, it is imperative that each
+    jupyter runtime uses only the minimum necessary amount of resources and run-time. Do not use this command to run
+    heavy data processing pipelines! Instead, consult the library documentation and use the headless Job class.
+    """
+
+def resolve_dataset_marker(
+    session_path: Path, create_processed_directories: bool, project_processed_path: Path | None, remove: bool
+) -> None:
+    """Depending on configuration, either creates or removes the p53.bin marker from the target session.
+
+    The p53.bin marker determines whether the session is ready for dataset integration. When the marker exists,
+    processing pipelines are not allowed to work with the session data, ensuring that all processed data remains
+    unchanged. If the marker does not exist, dataset integration pipelines are not allowed to work with the session
+    data, enabling processing pipelines to safely modify the data at any time.
+    """
sl_shared_assets/data_classes/__init__.py CHANGED
@@ -9,9 +9,10 @@ from .runtime_data import (
     RunTrainingDescriptor,
     LickTrainingDescriptor,
     MesoscopeHardwareState,
+    WindowCheckingDescriptor,
     MesoscopeExperimentDescriptor,
 )
-from .session_data import RawData, SessionData, VersionData, ProcessedData, ProcessingTracker, ProjectConfiguration
+from .session_data import RawData, SessionData, SessionTypes, ProcessedData, ProcessingTracker
 from .surgery_data import (
     DrugData,
     ImplantData,
@@ -23,7 +24,9 @@ from .surgery_data import (
 from .configuration_data import (
     MesoscopePaths,
     ExperimentState,
+    ExperimentTrial,
     MesoscopeCameras,
+    AcquisitionSystems,
     MesoscopeMicroControllers,
     MesoscopeAdditionalFirmware,
     MesoscopeSystemConfiguration,
@@ -37,7 +40,6 @@ __all__ = [
     "ImplantData",
     "SessionData",
     "RawData",
-    "VersionData",
     "ProcessedData",
     "SubjectData",
     "SurgeryData",
@@ -46,7 +48,6 @@ __all__ = [
     "ZaberPositions",
     "ExperimentState",
     "MesoscopePositions",
-    "ProjectConfiguration",
     "MesoscopeHardwareState",
     "RunTrainingDescriptor",
     "LickTrainingDescriptor",
@@ -60,4 +61,8 @@ __all__ = [
     "MesoscopeMicroControllers",
     "MesoscopeAdditionalFirmware",
     "ProcessingTracker",
+    "ExperimentTrial",
+    "AcquisitionSystems",
+    "SessionTypes",
+    "WindowCheckingDescriptor",
 ]
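
Mirroring the top-level changes earlier in this diff, the `data_classes` subpackage itself now exports the new names and drops `VersionData` and `ProjectConfiguration`. A one-line sketch of the updated subpackage import:

    # New in 3.0.0; VersionData and ProjectConfiguration are no longer exported here.
    from sl_shared_assets.data_classes import (
        SessionTypes,
        ExperimentTrial,
        AcquisitionSystems,
        WindowCheckingDescriptor,
    )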