sl-shared-assets 3.0.0rc14__tar.gz → 3.1.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of sl-shared-assets might be problematic.

Files changed (46)
  1. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/PKG-INFO +122 -5
  2. sl_shared_assets-3.1.0/README.md +230 -0
  3. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/envs/slsa_dev_lin.yml +51 -66
  4. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/envs/slsa_dev_lin_spec.txt +48 -63
  5. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/pyproject.toml +1 -1
  6. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/__init__.py +2 -0
  7. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/__init__.pyi +2 -0
  8. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/cli.py +38 -14
  9. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/cli.pyi +11 -11
  10. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/__init__.py +2 -2
  11. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/configuration_data.py +11 -8
  12. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/configuration_data.pyi +8 -7
  13. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/runtime_data.py +2 -2
  14. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/runtime_data.pyi +2 -2
  15. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/session_data.py +43 -29
  16. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/session_data.pyi +11 -11
  17. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/__init__.py +1 -1
  18. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/job.py +10 -10
  19. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/job.pyi +5 -5
  20. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/server.py +15 -15
  21. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/server.pyi +7 -7
  22. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/__init__.py +7 -1
  23. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/__init__.pyi +2 -0
  24. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/ascension_tools.py +8 -8
  25. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/packaging_tools.py +2 -1
  26. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/project_management_tools.py +87 -41
  27. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/project_management_tools.pyi +23 -11
  28. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/transfer_tools.py +1 -1
  29. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/transfer_tools.pyi +1 -1
  30. sl_shared_assets-3.0.0rc14/README.md +0 -113
  31. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/.gitignore +0 -0
  32. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/LICENSE +0 -0
  33. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/Makefile +0 -0
  34. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/make.bat +0 -0
  35. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/source/api.rst +0 -0
  36. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/source/conf.py +0 -0
  37. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/source/index.rst +0 -0
  38. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/docs/source/welcome.rst +0 -0
  39. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/__init__.pyi +0 -0
  40. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/surgery_data.py +0 -0
  41. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/data_classes/surgery_data.pyi +0 -0
  42. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/py.typed +0 -0
  43. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/server/__init__.pyi +0 -0
  44. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/ascension_tools.pyi +0 -0
  45. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/src/sl_shared_assets/tools/packaging_tools.pyi +0 -0
  46. {sl_shared_assets-3.0.0rc14 → sl_shared_assets-3.1.0}/tox.ini +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: sl-shared-assets
- Version: 3.0.0rc14
+ Version: 3.1.0
  Summary: Provides data acquisition and processing assets shared between Sun (NeuroAI) lab libraries.
  Project-URL: Homepage, https://github.com/Sun-Lab-NBB/sl-shared-assets
  Project-URL: Documentation, https://sl-shared-assets-api-docs.netlify.app/
@@ -772,6 +772,7 @@ A Python library that stores assets shared between multiple Sun (NeuroAI) lab da
  ![PyPI - License](https://img.shields.io/pypi/l/sl-shared-assets)
  ![PyPI - Status](https://img.shields.io/pypi/status/sl-shared-assets)
  ![PyPI - Wheel](https://img.shields.io/pypi/wheel/sl-shared-assets)
+
  ___

  ## Detailed Description
@@ -813,7 +814,7 @@ All software library dependencies are installed automatically as part of library

  Note, installation from source is ***highly discouraged*** for everyone who is not an active project developer.

- 1. Download this repository to your local machine using your preferred method, such as Git-cloning. Use one
+ 1. Download this repository to your local machine using any method, such as Git-cloning. Use one
  of the stable releases from [GitHub](https://github.com/Sun-Lab-NBB/sl-shared-assets/releases).
  2. Unpack the downloaded zip and note the path to the binary wheel (`.whl`) file contained in the archive.
  3. Run ```python -m pip install WHEEL_PATH```, replacing 'WHEEL_PATH' with the path to the wheel file, to install the
@@ -826,12 +827,128 @@ Use the following command to install the library using pip: ```pip install sl-sh

  ## Usage

- All library components are intended to be used via other Sun lab libraries. Developers should study the API and CLI
+ Most library components are intended to be used via other Sun lab libraries. Developers should study the API and CLI
  documentation below to learn how to use library components in other Sun lab libraries. For notes on using shared
  assets for data acquisition, see the [sl-experiment](https://github.com/Sun-Lab-NBB/sl-experiment) library ReadMe.
  For notes on using shared assets for data processing, see the [sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery)
  library ReadMe.

+ The only exception to the note above is the **server.py** package exposed by this library. This package exposes an API
+ for running headless jobs and a CLI for running interactive Simple Linux Utility for Resource Management
+ (SLURM)-managed jobs on remote compute clusters.
+
+ ### Generating Access Credentials
+
+ To access any remote server, the user must first generate access credentials. The credentials are stored
+ inside the 'server_credentials.yaml' file, which is generated by the `sl-create-server-credentials` command.
+ **Note!** Users are advised to generate this file in a secure (non-shared) location on their local machine.
+
+ ### Running Headless Jobs
+
+ A headless job is a job that does not require any user interaction during runtime. Currently, all headless jobs in the
+ lab rely on pip-installable packages that expose a callable Command-Line Interface (CLI) to carry out some type of
+ data processing. In this regard, **running a headless job is equivalent to calling a CLI command on your local
+ machine**, except that the command is executed on a remote compute server. Therefore, the primary purpose of the API
+ exposed by this library is to transfer the target command request to the remote server, execute it, and monitor the
+ runtime status until it is complete.
+
+ For example, the [sl-suite2p package](https://github.com/Sun-Lab-NBB/suite2p) maintained in the lab exposes a CLI to
+ process 2-Photon data from experiment sessions. During data processing by the
+ [sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery) library, a remote job is sent to the server that uses the CLI
+ exposed by the sl-suite2p package to process the target session(s).
+
+ ### Creating Jobs
+
+ All remote jobs are sent to the server in the form of an executable *shell* (.sh) script. The script is composed on the
+ local machine that uses this library and transferred to a temporary server directory using the SSH File
+ Transfer Protocol (SFTP). The server is then instructed to evaluate (run) the script using the SLURM job manager, via a
+ Secure Shell (SSH) session.
+
+ Broadly, each job consists of three major steps, which correspond to three major sections of the job shell script
+ (see the sketch after this list):
+ 1. **Setting up the job environment**. Each job script starts with a SLURM job parameter block, which tells SLURM
+ what resources (CPUs, GPUs, RAM, etc.) the job requires. When resources become available, SLURM generates a virtual
+ environment and runs the rest of the job script in that environment. This forms the basis for using the shared
+ compute resources fairly, as SLURM balances resource allocation and the order of job execution for all users.
+ 2. **Activating the target conda environment**. Currently, all jobs are assumed to use Python libraries to execute the
+ intended data processing. Similar to processing data locally, each job expects the remote server to provide a
+ Conda environment preconfigured with the assets (packages) necessary to run the job. Therefore, each job contains a
+ section that activates the user-defined conda environment before running the rest of the job.
+ 3. **Executing processing**. The final section is typically unique to each job and calls specific CLI commands or runs
+ specific Python modules. Since each job is submitted as a shell script, it can do anything a server shell can
+ do. Therefore, despite the Python-centric approach to data processing in the lab, a remote job composed via this
+ library can execute ***any*** command available to the user on the remote server.
+
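To make the three sections concrete, the sketch below assembles an equivalent script by hand for the example job configured in the snippet that follows. This is an illustration, not the literal output of the Job class: the exact `#SBATCH` directives and the environment-activation line it emits are assumptions based on standard SLURM and Conda usage.

```
# A hand-written sketch of the three job-script sections described above. The
# Job class composes a similar script automatically on the local machine.
script = "\n".join(
    [
        "#!/bin/bash",
        # Section 1: the SLURM parameter block describing the requested resources.
        "#SBATCH --job-name=data_integrity_verification",
        "#SBATCH --cpus-per-task=20",
        "#SBATCH --mem=50G",
        "#SBATCH --time=20",  # Interpreted by SLURM as minutes.
        "#SBATCH --output=/temp/output.txt",
        "#SBATCH --error=/temp/errors.txt",
        # Section 2: activating the preconfigured conda environment (assumes
        # conda's shell hooks are initialized on the server).
        "conda activate test_environment",
        # Section 3: the actual processing command, added via add_command().
        "sl-verify-session -sp /temp/test_session",
    ]
)
print(script)
```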
+ Use the *Job* class exposed by this library to compose remote jobs. **Steps 1 and 2** of each job are configured when
+ initializing the Job instance, while **step 3** is added via the `add_command()` method of the Job class:
+ ```
+ # First, import the required classes
+ from pathlib import Path
+ from sl_shared_assets import Job
+
+ # Next, instantiate a new Job object. For example, this job is used to verify the integrity of raw experiment data as
+ # it is transferred to the long-term storage destination (server) by the sl-experiment library.
+ job = Job(
+     job_name="data_integrity_verification",
+     output_log=Path("/temp/output.txt"),
+     error_log=Path("/temp/errors.txt"),
+     working_directory=Path("/temp/test_job"),
+     conda_environment="test_environment",
+     cpus_to_use=20,
+     ram_gb=50,
+     time_limit=20,
+ )
+
+ # Finally, add a CLI command call (the actual work to be done by the job). Here, the job calls the
+ # 'sl-verify-session' command exposed by the sl-shared-assets library installed in the target environment on the server.
+ # Use this method to add commands as you would type them in your local terminal / shell / command line.
+ job.add_command("sl-verify-session -sp /temp/test_session")
+ ```
+
+ ### Submitting and Monitoring Jobs
+ To submit the job to the remote server, use a **Server** class instance. This class encapsulates access to the target
+ remote compute server and uses the server_credentials.yaml file to determine the server access credentials (see above):
+ ```
+ from pathlib import Path
+ from sl_shared_assets import Server
+ from ataraxis_time import PrecisionTimer  # provided by the ataraxis-time dependency
+
+ # Initialize the Server class using the precreated server credentials file
+ server = Server(credentials_path=Path("/temp/server_credentials.yaml"))
+
+ # Submit the job (generated in the previous code snippet) to the server
+ job = server.submit_job(job)
+
+ # Wait for the server to complete the job
+ delay_timer = PrecisionTimer("s")
+ while not server.job_complete(job=job):
+     delay_timer.delay_noblock(delay=5, allow_sleep=True)
+ ```
+
+ **Note!** The Server class only checks whether the job is running on the server, not the outcome of the job. For
+ that, you can either manually check the output and error logs for the job or come up with a programmatic way of
+ checking the outcome (see the sketch below). All developers are highly advised to study the API documentation for the
+ Job and Server classes to use them most effectively.
+
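For instance, here is a minimal sketch of one programmatic outcome check, assuming the job only writes to its error log on failure. The hostname, credentials, and log path are illustrative placeholders; the sketch uses paramiko (a dependency of this library) directly, rather than any Server class method:

```
# Check a job's outcome by inspecting the size of its error log over SFTP.
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname="cluster.example.edu", username="user", password="password")
sftp = client.open_sftp()

# A non-empty error log suggests the job emitted errors during runtime.
job_failed = sftp.stat("/temp/errors.txt").st_size > 0

sftp.close()
client.close()
print("Job failed." if job_failed else "Job likely succeeded.")
```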
+ **Critical!** Since running remote jobs is largely equivalent to executing them locally, all users are highly encouraged
+ to test their job scripts locally before deploying them server-side. If a script works on a local machine, it is likely
+ that the script would behave similarly and work on the server.
+
+ ### Interactive Jobs
+
+ Interactive jobs are a special extension of the headless job type discussed above. Specifically, an interactive job is
+ a headless job whose only purpose is to **create and maintain a Jupyter Lab server** under SLURM control. The job
+ requests SLURM to set up an isolated environment, starts a Jupyter server in that environment, and
+ sends the credentials for the started server back to the user.
+
+ In essence, this allocates a set of resources the user can use interactively by running various Jupyter notebooks.
+ While convenient for certain data analysis cases, this type of job has the potential to inefficiently hog server
+ resources for prolonged periods of time. Therefore, users are encouraged to resort to such jobs only when
+ strictly necessary and to minimize the resources and time allocated to running them.
+
+ To run an interactive job, call the `sl-start-jupyter` CLI command exposed by this library and follow the instructions
+ printed to the terminal by the command during runtime.
+
+ **Critical!** While this command tries to minimize collisions with other users, an access port
+ collision may occur when multiple users try to instantiate a Jupyter server at the same time. If you cannot authenticate
+ with the Jupyter server, this likely indicates that the target port was in use and Jupyter automatically incremented the
+ port number by 1. In this case, add 1 to your port number and try connecting to that port using the Jupyter credentials
+ provided by the command. For example, if your target port was '8888,' try port '8889.'
+
  ---

  ## API Documentation
@@ -847,7 +964,7 @@ ___

  ## Versioning

- We use [semantic versioning](https://semver.org/) for this project. For the versions available, see the
+ This project uses [semantic versioning](https://semver.org/). For the versions available, see the
  [tags on this repository](https://github.com/Sun-Lab-NBB/sl-shared-assets/tags).

  ---
@@ -870,7 +987,7 @@ ___

  - All Sun lab [members](https://neuroai.github.io/sunlab/people) for providing the inspiration and comments during the
  development of this library.
- - The creators of all other projects used in our development automation pipelines and source code
+ - The creators of all other projects used in the development automation pipelines and source code of this project
  [see pyproject.toml](pyproject.toml).

  ---
@@ -0,0 +1,230 @@
+ # sl-shared-assets
+ A Python library that stores assets shared between multiple Sun (NeuroAI) lab data pipelines.
+
+ ![PyPI - Version](https://img.shields.io/pypi/v/sl-shared-assets)
+ ![PyPI - Python Version](https://img.shields.io/pypi/pyversions/sl-shared-assets)
+ [![uv](https://tinyurl.com/uvbadge)](https://github.com/astral-sh/uv)
+ [![Ruff](https://tinyurl.com/ruffbadge)](https://github.com/astral-sh/ruff)
+ ![type-checked: mypy](https://img.shields.io/badge/type--checked-mypy-blue?style=flat-square&logo=python)
+ ![PyPI - License](https://img.shields.io/pypi/l/sl-shared-assets)
+ ![PyPI - Status](https://img.shields.io/pypi/status/sl-shared-assets)
+ ![PyPI - Wheel](https://img.shields.io/pypi/wheel/sl-shared-assets)
+
+ ___
+
+ ## Detailed Description
+
+ Primarily, this library is designed to make the two main Sun lab libraries used for data acquisition
+ ([sl-experiment](https://github.com/Sun-Lab-NBB/sl-experiment)) and processing
+ ([sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery)) independent of each other. This is beneficial, as both
+ libraries feature an extensive and largely incompatible set of runtime dependencies. Moreover, having a shared
+ repository of classes and tools reused across Sun lab pipelines streamlines the maintenance of these tools.
+
+ The library broadly stores two types of assets. First, it stores various dataclasses used to save the data acquired
+ during experiments in the lab and the dataclasses used to configure data acquisition and processing runtimes. Second,
+ it stores the tools used to safely move the data between the machines (computers) used in data acquisition and
+ processing, and provides the API for running various data processing jobs on remote compute servers.
+
+ ---
+
+ ## Table of Contents
+
+ - [Dependencies](#dependencies)
+ - [Installation](#installation)
+ - [Usage](#usage)
+ - [API Documentation](#api-documentation)
+ - [Versioning](#versioning)
+ - [Authors](#authors)
+ - [License](#license)
+ - [Acknowledgments](#acknowledgments)
+ ___
+
+ ## Dependencies
+
+ All software library dependencies are installed automatically as part of library installation.
+
+ ---
+
+ ## Installation
+
+ ### Source
+
+ Note, installation from source is ***highly discouraged*** for everyone who is not an active project developer.
+
+ 1. Download this repository to your local machine using any method, such as Git-cloning. Use one
+ of the stable releases from [GitHub](https://github.com/Sun-Lab-NBB/sl-shared-assets/releases).
+ 2. Unpack the downloaded zip and note the path to the binary wheel (`.whl`) file contained in the archive.
+ 3. Run ```python -m pip install WHEEL_PATH```, replacing 'WHEEL_PATH' with the path to the wheel file, to install the
+ wheel into the active Python environment.
+
+ ### pip
+ Use the following command to install the library using pip: ```pip install sl-shared-assets```.
+
+ ---
+
+ ## Usage
+
+ Most library components are intended to be used via other Sun lab libraries. Developers should study the API and CLI
+ documentation below to learn how to use library components in other Sun lab libraries. For notes on using shared
+ assets for data acquisition, see the [sl-experiment](https://github.com/Sun-Lab-NBB/sl-experiment) library ReadMe.
+ For notes on using shared assets for data processing, see the [sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery)
+ library ReadMe.
+
+ The only exception to the note above is the **server.py** package exposed by this library. This package exposes an API
+ for running headless jobs and a CLI for running interactive Simple Linux Utility for Resource Management
+ (SLURM)-managed jobs on remote compute clusters.
+
+ ### Generating Access Credentials
+
+ To access any remote server, the user must first generate access credentials. The credentials are stored
+ inside the 'server_credentials.yaml' file, which is generated by the `sl-create-server-credentials` command.
+ **Note!** Users are advised to generate this file in a secure (non-shared) location on their local machine.
+
+ ### Running Headless Jobs
+
+ A headless job is a job that does not require any user interaction during runtime. Currently, all headless jobs in the
+ lab rely on pip-installable packages that expose a callable Command-Line Interface (CLI) to carry out some type of
+ data processing. In this regard, **running a headless job is equivalent to calling a CLI command on your local
+ machine**, except that the command is executed on a remote compute server. Therefore, the primary purpose of the API
+ exposed by this library is to transfer the target command request to the remote server, execute it, and monitor the
+ runtime status until it is complete.
+
+ For example, the [sl-suite2p package](https://github.com/Sun-Lab-NBB/suite2p) maintained in the lab exposes a CLI to
+ process 2-Photon data from experiment sessions. During data processing by the
+ [sl-forgery](https://github.com/Sun-Lab-NBB/sl-forgery) library, a remote job is sent to the server that uses the CLI
+ exposed by the sl-suite2p package to process the target session(s).
+
+ ### Creating Jobs
+
+ All remote jobs are sent to the server in the form of an executable *shell* (.sh) script. The script is composed on the
+ local machine that uses this library and transferred to a temporary server directory using the SSH File
+ Transfer Protocol (SFTP). The server is then instructed to evaluate (run) the script using the SLURM job manager, via a
+ Secure Shell (SSH) session.
+
+ Broadly, each job consists of three major steps, which correspond to three major sections of the job shell script
+ (see the sketch after this list):
+ 1. **Setting up the job environment**. Each job script starts with a SLURM job parameter block, which tells SLURM
+ what resources (CPUs, GPUs, RAM, etc.) the job requires. When resources become available, SLURM generates a virtual
+ environment and runs the rest of the job script in that environment. This forms the basis for using the shared
+ compute resources fairly, as SLURM balances resource allocation and the order of job execution for all users.
+ 2. **Activating the target conda environment**. Currently, all jobs are assumed to use Python libraries to execute the
+ intended data processing. Similar to processing data locally, each job expects the remote server to provide a
+ Conda environment preconfigured with the assets (packages) necessary to run the job. Therefore, each job contains a
+ section that activates the user-defined conda environment before running the rest of the job.
+ 3. **Executing processing**. The final section is typically unique to each job and calls specific CLI commands or runs
+ specific Python modules. Since each job is submitted as a shell script, it can do anything a server shell can
+ do. Therefore, despite the Python-centric approach to data processing in the lab, a remote job composed via this
+ library can execute ***any*** command available to the user on the remote server.
+
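To make the three sections concrete, the sketch below assembles an equivalent script by hand for the example job configured in the snippet that follows. This is an illustration, not the literal output of the Job class: the exact `#SBATCH` directives and the environment-activation line it emits are assumptions based on standard SLURM and Conda usage.

```
# A hand-written sketch of the three job-script sections described above. The
# Job class composes a similar script automatically on the local machine.
script = "\n".join(
    [
        "#!/bin/bash",
        # Section 1: the SLURM parameter block describing the requested resources.
        "#SBATCH --job-name=data_integrity_verification",
        "#SBATCH --cpus-per-task=20",
        "#SBATCH --mem=50G",
        "#SBATCH --time=20",  # Interpreted by SLURM as minutes.
        "#SBATCH --output=/temp/output.txt",
        "#SBATCH --error=/temp/errors.txt",
        # Section 2: activating the preconfigured conda environment (assumes
        # conda's shell hooks are initialized on the server).
        "conda activate test_environment",
        # Section 3: the actual processing command, added via add_command().
        "sl-verify-session -sp /temp/test_session",
    ]
)
print(script)
```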
+ Use the *Job* class exposed by this library to compose remote jobs. **Steps 1 and 2** of each job are configured when
+ initializing the Job instance, while **step 3** is added via the `add_command()` method of the Job class:
+ ```
+ # First, import the required classes
+ from pathlib import Path
+ from sl_shared_assets import Job
+
+ # Next, instantiate a new Job object. For example, this job is used to verify the integrity of raw experiment data as
+ # it is transferred to the long-term storage destination (server) by the sl-experiment library.
+ job = Job(
+     job_name="data_integrity_verification",
+     output_log=Path("/temp/output.txt"),
+     error_log=Path("/temp/errors.txt"),
+     working_directory=Path("/temp/test_job"),
+     conda_environment="test_environment",
+     cpus_to_use=20,
+     ram_gb=50,
+     time_limit=20,
+ )
+
+ # Finally, add a CLI command call (the actual work to be done by the job). Here, the job calls the
+ # 'sl-verify-session' command exposed by the sl-shared-assets library installed in the target environment on the server.
+ # Use this method to add commands as you would type them in your local terminal / shell / command line.
+ job.add_command("sl-verify-session -sp /temp/test_session")
+ ```
+
+ ### Submitting and Monitoring Jobs
+ To submit the job to the remote server, use a **Server** class instance. This class encapsulates access to the target
+ remote compute server and uses the server_credentials.yaml file to determine the server access credentials (see above):
+ ```
+ from pathlib import Path
+ from sl_shared_assets import Server
+ from ataraxis_time import PrecisionTimer  # provided by the ataraxis-time dependency
+
+ # Initialize the Server class using the precreated server credentials file
+ server = Server(credentials_path=Path("/temp/server_credentials.yaml"))
+
+ # Submit the job (generated in the previous code snippet) to the server
+ job = server.submit_job(job)
+
+ # Wait for the server to complete the job
+ delay_timer = PrecisionTimer("s")
+ while not server.job_complete(job=job):
+     delay_timer.delay_noblock(delay=5, allow_sleep=True)
+ ```
+
+ **Note!** The Server class only checks whether the job is running on the server, not the outcome of the job. For
+ that, you can either manually check the output and error logs for the job or come up with a programmatic way of
+ checking the outcome (see the sketch below). All developers are highly advised to study the API documentation for the
+ Job and Server classes to use them most effectively.
+
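For instance, here is a minimal sketch of one programmatic outcome check, assuming the job only writes to its error log on failure. The hostname, credentials, and log path are illustrative placeholders; the sketch uses paramiko (a dependency of this library) directly, rather than any Server class method:

```
# Check a job's outcome by inspecting the size of its error log over SFTP.
import paramiko

client = paramiko.SSHClient()
client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
client.connect(hostname="cluster.example.edu", username="user", password="password")
sftp = client.open_sftp()

# A non-empty error log suggests the job emitted errors during runtime.
job_failed = sftp.stat("/temp/errors.txt").st_size > 0

sftp.close()
client.close()
print("Job failed." if job_failed else "Job likely succeeded.")
```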
+ **Critical!** Since running remote jobs is largely equivalent to executing them locally, all users are highly encouraged
+ to test their job scripts locally before deploying them server-side. If a script works on a local machine, it is likely
+ that the script would behave similarly and work on the server.
+
+ ### Interactive Jobs
+
+ Interactive jobs are a special extension of the headless job type discussed above. Specifically, an interactive job is
+ a headless job whose only purpose is to **create and maintain a Jupyter Lab server** under SLURM control. The job
+ requests SLURM to set up an isolated environment, starts a Jupyter server in that environment, and
+ sends the credentials for the started server back to the user.
+
+ In essence, this allocates a set of resources the user can use interactively by running various Jupyter notebooks.
+ While convenient for certain data analysis cases, this type of job has the potential to inefficiently hog server
+ resources for prolonged periods of time. Therefore, users are encouraged to resort to such jobs only when
+ strictly necessary and to minimize the resources and time allocated to running them.
+
+ To run an interactive job, call the `sl-start-jupyter` CLI command exposed by this library and follow the instructions
+ printed to the terminal by the command during runtime.
+
+ **Critical!** While this command tries to minimize collisions with other users, an access port
+ collision may occur when multiple users try to instantiate a Jupyter server at the same time. If you cannot authenticate
+ with the Jupyter server, this likely indicates that the target port was in use and Jupyter automatically incremented the
+ port number by 1. In this case, add 1 to your port number and try connecting to that port using the Jupyter credentials
+ provided by the command. For example, if your target port was '8888,' try port '8889.'
+
+ ---
+
+ ## API Documentation
+
+ See the [API documentation](https://sl-shared-assets-api-docs.netlify.app/) for the
+ detailed description of the methods and classes exposed by components of this library.
+
+ **Note!** The API documentation includes important information about Command-Line-Interfaces (CLIs) exposed by this
+ library as part of installation into a Python environment. All users are highly encouraged to study the CLI
+ documentation to learn how to use library components via the terminal.
+
+ ___
+
+ ## Versioning
+
+ This project uses [semantic versioning](https://semver.org/). For the versions available, see the
+ [tags on this repository](https://github.com/Sun-Lab-NBB/sl-shared-assets/tags).
+
+ ---
+
+ ## Authors
+
+ - Ivan Kondratyev ([Inkaros](https://github.com/Inkaros))
+ - Kushaan Gupta ([kushaangupta](https://github.com/kushaangupta))
+ - Natalie Yeung
+
+ ___
+
+ ## License
+
+ This project is licensed under the GPL3 License: see the [LICENSE](LICENSE) file for details.
+
+ ___
+
+ ## Acknowledgments
+
+ - All Sun lab [members](https://neuroai.github.io/sunlab/people) for providing the inspiration and comments during the
+ development of this library.
+ - The creators of all other projects used in the development automation pipelines and source code of this project
+ [see pyproject.toml](pyproject.toml).
+
+ ---
@@ -7,44 +7,42 @@ dependencies:
  - _python_abi3_support=1.0=hd8ed1ab_2
  - alabaster=1.0.0=pyhd8ed1ab_1
  - appdirs=1.4.4=pyhd8ed1ab_1
- - aws-c-auth=0.9.0=hbfa7f16_15
- - aws-c-cal=0.9.2=h5e3027f_0
- - aws-c-common=0.12.3=hb9d3cd8_0
- - aws-c-compression=0.3.1=hafb2847_5
- - aws-c-event-stream=0.5.4=h76f0014_12
- - aws-c-http=0.10.2=h015de20_2
- - aws-c-io=0.20.1=hdfce8c9_0
- - aws-c-mqtt=0.13.1=h1e5e6c0_3
- - aws-c-s3=0.8.3=h5e174a9_0
- - aws-c-sdkutils=0.2.4=hafb2847_0
- - aws-checksums=0.2.7=hafb2847_1
- - aws-crt-cpp=0.32.10=hff780f1_1
- - aws-sdk-cpp=1.11.510=h937e755_11
- - azure-core-cpp=1.14.0=h5cfcd09_0
- - azure-identity-cpp=1.10.0=h113e628_0
- - azure-storage-blobs-cpp=12.13.0=h3cf044e_1
- - azure-storage-common-cpp=12.8.0=h736e048_1
- - azure-storage-files-datalake-cpp=12.12.0=ha633028_1
+ - aws-c-auth=0.9.0=h92a005d_16
+ - aws-c-cal=0.9.2=he7b75e1_1
+ - aws-c-common=0.12.4=hb03c661_0
+ - aws-c-compression=0.3.1=h92c474e_6
+ - aws-c-event-stream=0.5.5=h0c2b49e_1
+ - aws-c-http=0.10.2=hee85082_3
+ - aws-c-io=0.21.0=h1d8da38_1
+ - aws-c-mqtt=0.13.1=h46c1de9_4
+ - aws-c-s3=0.8.3=h9cdc349_1
+ - aws-c-sdkutils=0.2.4=h92c474e_1
+ - aws-checksums=0.2.7=h92c474e_2
+ - aws-crt-cpp=0.32.10=h186f887_3
+ - aws-sdk-cpp=1.11.510=h379b65b_14
+ - azure-core-cpp=1.15.0=h5cfcd09_0
+ - azure-identity-cpp=1.11.0=hb5324b0_1
+ - azure-storage-blobs-cpp=12.13.0=hf182047_2
+ - azure-storage-common-cpp=12.10.0=h40e822a_1
+ - azure-storage-files-datalake-cpp=12.12.0=h141ff2a_2
  - babel=2.17.0=pyhd8ed1ab_0
  - backports=1.0=pyhd8ed1ab_5
  - backports.tarfile=1.2.0=pyhd8ed1ab_1
  - bcrypt=4.3.0=py313h4b2b08d_1
- - beautifulsoup4=4.13.4=pyha770c72_0
  - brotli-python=1.1.0=py313h46c70d0_3
  - bzip2=1.0.8=h4bc722e_7
  - c-ares=1.34.5=hb9d3cd8_0
- - ca-certificates=2025.6.15=hbd8a1cb_0
+ - ca-certificates=2025.7.9=hbd8a1cb_0
  - cachetools=6.1.0=pyhd8ed1ab_0
- - certifi=2025.6.15=pyhd8ed1ab_0
+ - certifi=2025.7.9=pyhd8ed1ab_0
  - cffi=1.17.1=py313hfab6e84_0
  - chardet=5.2.0=pyhd8ed1ab_3
  - charset-normalizer=3.4.2=pyhd8ed1ab_0
  - click=8.2.1=pyh707e725_0
  - cmarkgfm=2024.11.20=py313h536fd9c_0
  - colorama=0.4.6=pyhd8ed1ab_1
- - conda-souschef=2.2.3=pyhd8ed1ab_0
  - cpython=3.13.5=py313hd8ed1ab_102
- - cryptography=45.0.4=py313h6556f6e_0
+ - cryptography=45.0.5=py313h6556f6e_0
  - dbus=1.16.2=h3c4dab8_0
  - distlib=0.3.9=pyhd8ed1ab_1
  - docutils=0.21.2=pyhd8ed1ab_1
@@ -52,7 +50,6 @@ dependencies:
  - filelock=3.18.0=pyhd8ed1ab_0
  - gflags=2.2.2=h5888daf_1005
  - glog=0.7.1=hbabe93e_0
- - grayskull=2.9.1=pyhd8ed1ab_0
  - h2=4.2.0=pyhd8ed1ab_0
  - hatchling=1.27.0=pypyhd8ed1ab_0
  - hpack=4.1.0=pyhd8ed1ab_0
@@ -66,18 +63,18 @@ dependencies:
  - importlib_resources=6.5.2=pyhd8ed1ab_0
  - jaraco.classes=3.4.0=pyhd8ed1ab_2
  - jaraco.context=6.0.1=pyhd8ed1ab_0
- - jaraco.functools=4.1.0=pyhd8ed1ab_0
+ - jaraco.functools=4.2.1=pyhd8ed1ab_0
  - jeepney=0.9.0=pyhd8ed1ab_0
  - jinja2=3.1.6=pyhd8ed1ab_0
  - keyring=25.6.0=pyha804496_0
  - keyutils=1.6.1=h166bdaf_0
  - krb5=1.21.3=h659f571_0
- - ld_impl_linux-64=2.43=h1423503_5
- - libabseil=20250127.1=cxx17_hbbce691_0
- - libarrow=20.0.0=h019e7cd_8_cuda
- - libarrow-acero=20.0.0=hb826db4_8_cuda
- - libarrow-dataset=20.0.0=hb826db4_8_cuda
- - libarrow-substrait=20.0.0=h69308b4_8_cuda
+ - ld_impl_linux-64=2.44=h1423503_1
+ - libabseil=20250512.1=cxx17_hba17884_0
+ - libarrow=20.0.0=hf967735_13_cpu
+ - libarrow-acero=20.0.0=h635bf11_13_cpu
+ - libarrow-dataset=20.0.0=h635bf11_13_cpu
+ - libarrow-substrait=20.0.0=h3f74fd7_13_cpu
  - libblas=3.9.0=32_h59b9bed_openblas
  - libbrotlicommon=1.1.0=hb9d3cd8_3
  - libbrotlidec=1.1.0=hb9d3cd8_3
@@ -96,26 +93,26 @@ dependencies:
  - libgfortran5=15.1.0=hcea5267_3
  - libglib=2.84.2=h3618099_0
  - libgomp=15.1.0=h767d61c_3
- - libgoogle-cloud=2.36.0=hc4361e1_1
- - libgoogle-cloud-storage=2.36.0=h0121fbd_1
- - libgrpc=1.71.0=h8e591d7_1
+ - libgoogle-cloud=2.39.0=hdb79228_0
+ - libgoogle-cloud-storage=2.39.0=hdbdcf42_0
+ - libgrpc=1.73.1=h1e535eb_0
  - libiconv=1.18=h4ce23a2_1
  - liblapack=3.9.0=32_h7ac8fdf_openblas
  - liblzma=5.8.1=hb9d3cd8_2
  - libmpdec=4.0.0=hb9d3cd8_0
  - libnghttp2=1.64.0=h161d5f1_0
  - libopenblas=0.3.30=pthreads_h94d23a6_0
- - libopentelemetry-cpp=1.21.0=hd1b1c89_0
- - libopentelemetry-cpp-headers=1.21.0=ha770c72_0
- - libparquet=20.0.0=h3f30f2e_8_cuda
- - libprotobuf=5.29.3=h501fc15_1
- - libre2-11=2024.07.02=hba17884_3
+ - libopentelemetry-cpp=1.21.0=hb9b0907_1
+ - libopentelemetry-cpp-headers=1.21.0=ha770c72_1
+ - libparquet=20.0.0=h790f06f_13_cpu
+ - libprotobuf=6.31.1=h9ef548d_1
+ - libre2-11=2025.06.26=h7064273_1
  - libsodium=1.0.20=h4ab18f5_0
- - libsqlite=3.50.1=h6cd9bfd_7
+ - libsqlite=3.50.2=hee844dc_2
  - libssh2=1.11.1=hcf80075_0
  - libstdcxx=15.1.0=h8f9b012_3
  - libstdcxx-ng=15.1.0=h4852527_3
- - libthrift=0.21.0=h0e7cc3e_0
+ - libthrift=0.22.0=h093b73b_0
  - libutf8proc=2.10.0=h202a827_0
  - libuuid=2.38.1=h0b41bf4_0
  - libxml2=2.13.8=h4bc477f_0
@@ -132,23 +129,21 @@ dependencies:
  - nh3=0.2.21=py39h7c48542_2
  - nlohmann_json=3.12.0=h3f2d84a_0
  - numpy=2.2.6=py313h17eae1a_0
- - openssl=3.5.0=h7b32b05_1
- - orc=2.1.2=h17f744e_0
+ - openssl=3.5.1=h7b32b05_0
+ - orc=2.1.3=h61e0c1e_0
  - packaging=25.0=pyh29332c3_1
  - paramiko=3.5.1=pyhd8ed1ab_0
  - pathspec=0.12.1=pyhd8ed1ab_1
  - pcre2=10.45=hc749103_0
  - pip=25.1.1=pyh145f28c_0
- - pkginfo=1.12.1.2=pyhd8ed1ab_0
  - platformdirs=4.3.8=pyhe01879c_0
  - pluggy=1.6.0=pyhd8ed1ab_0
  - polars=1.31.0=default_h1650462_0
  - polars-default=1.31.0=py39hfac2b71_0
- - progressbar2=4.5.0=pyhd8ed1ab_1
  - prometheus-cpp=1.3.0=ha5d0236_0
  - psutil=7.0.0=py313h536fd9c_0
  - pyarrow=20.0.0=py313h78bf25f_0
- - pyarrow-core=20.0.0=py313hc6b0d6e_0_cuda
+ - pyarrow-core=20.0.0=py313he5f92c8_0_cpu
  - pycparser=2.22=pyh29332c3_1
  - pygments=2.19.2=pyhd8ed1ab_0
  - pynacl=1.5.0=py313h536fd9c_4
@@ -156,11 +151,9 @@ dependencies:
  - pysocks=1.7.1=pyha55dd90_7
  - python=3.13.5=hec9711d_102_cp313
  - python-gil=3.13.5=h4df99d1_102
- - python-utils=3.9.1=pyhff2d567_1
  - python_abi=3.13=7_cp313
  - pytz=2025.2=pyhd8ed1ab_0
- - rapidfuzz=3.13.0=py313h46c70d0_0
- - re2=2024.07.02=h9925aae_3
+ - re2=2025.06.26=h9925aae_1
  - readline=8.2=h8c095d6_2
  - readme_renderer=44.0=pyhd8ed1ab_1
  - requests=2.32.4=pyhd8ed1ab_0
@@ -168,17 +161,12 @@ dependencies:
  - rfc3986=2.0.0=pyhd8ed1ab_1
  - rich=14.0.0=pyh29332c3_0
  - roman-numerals-py=3.1.0=pyhd8ed1ab_0
- - ruamel.yaml=0.18.14=py313h536fd9c_0
- - ruamel.yaml.clib=0.2.8=py313h536fd9c_1
- - ruamel.yaml.jinja2=0.2.7=pyhd8ed1ab_1
- - ruff=0.12.0=py313h67f39b2_0
- - s2n=1.5.21=h7ab7c64_0
+ - ruff=0.12.3=hf9daec2_0
+ - s2n=1.5.22=h96f233e_0
  - secretstorage=3.3.3=py313h78bf25f_3
- - setuptools=80.9.0=pyhff2d567_0
  - six=1.17.0=pyhd8ed1ab_0
  - snappy=1.2.1=h8bd8927_1
  - snowballstemmer=3.0.1=pyhd8ed1ab_0
- - soupsieve=2.7=pyhd8ed1ab_0
  - sphinx=8.2.3=pyhd8ed1ab_0
  - sphinx-autodoc-typehints=3.2.0=pyhd8ed1ab_0
  - sphinx-click=6.0.0=pyhd8ed1ab_1
@@ -191,24 +179,21 @@ dependencies:
  - sphinxcontrib-jsmath=1.0.1=pyhd8ed1ab_1
  - sphinxcontrib-qthelp=2.0.0=pyhd8ed1ab_1
  - sphinxcontrib-serializinghtml=1.1.10=pyhd8ed1ab_1
- - stdlib-list=0.11.1=pyhd8ed1ab_0
  - tk=8.6.13=noxft_hd72426e_102
  - tomli=2.2.1=pyhd8ed1ab_1
- - tomli-w=1.2.0=pyhd8ed1ab_0
  - tox=4.27.0=pyhe01879c_0
  - tqdm=4.67.1=pyhd8ed1ab_1
  - trove-classifiers=2025.5.9.12=pyhd8ed1ab_0
  - twine=6.1.0=pyh29332c3_0
  - types-appdirs=1.4.3.5=pyhd8ed1ab_1
  - types-filelock=3.2.7=pyhd8ed1ab_1
- - types-paramiko=3.5.0.20250516=pyhd8ed1ab_0
+ - types-paramiko=3.5.0.20250708=pyhd8ed1ab_0
  - types-requests=2.32.4.20250611=pyhd8ed1ab_0
  - types-tqdm=4.67.0.20250516=pyhd8ed1ab_0
- - typing-extensions=4.14.0=h32cad80_0
- - typing_extensions=4.14.0=pyhe01879c_0
+ - typing_extensions=4.14.1=pyhe01879c_0
  - tzdata=2025b=h78e105d_0
  - urllib3=2.5.0=pyhd8ed1ab_0
- - uv=0.7.15=h2f11bb8_0
+ - uv=0.7.20=heb9285d_0
  - virtualenv=20.31.2=pyhd8ed1ab_0
  - zipp=3.23.0=pyhd8ed1ab_0
  - zlib=1.3.1=hb9d3cd8_2
@@ -217,7 +202,7 @@ dependencies:
  - pip:
  - PyYAML==6.0.2
  - ataraxis-automation==5.0.0
- - ataraxis-base-utilities==3.0.1
+ - ataraxis-base-utilities==3.1.0
  - ataraxis-data-structures==3.1.1
  - ataraxis-time==3.0.0
  - build==1.2.2.post1
@@ -225,10 +210,10 @@ dependencies:
  - loguru==0.7.3
  - pyproject_hooks==1.2.0
  - simple_slurm==0.3.6
- - sl-shared-assets==1.2.0rc4
+ - sl-shared-assets==3.0.0
  - sphinx-rtd-dark-mode==1.3.0
- - tox-uv==1.26.0
+ - tox-uv==1.26.1
  - types-pytz==2025.2.0.20250516
- - uv==0.7.12
+ - uv==0.7.19
  - xxhash==3.5.0