scientiflow-cli 0.1.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- scientiflow_cli-0.1.0/LICENSE.md +7 -0
- scientiflow_cli-0.1.0/PKG-INFO +62 -0
- scientiflow_cli-0.1.0/README.md +45 -0
- scientiflow_cli-0.1.0/pyproject.toml +30 -0
- scientiflow_cli-0.1.0/scientiflow_cli/__init__.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/__main__.py +4 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/__init__.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/auth_utils.py +50 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/login.py +38 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/logout.py +26 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/runjobs.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/cli/status.py +26 -0
- scientiflow_cli-0.1.0/scientiflow_cli/config.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/main.py +66 -0
- scientiflow_cli-0.1.0/scientiflow_cli/pipeline/__init__.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/pipeline/container_manager.py +62 -0
- scientiflow_cli-0.1.0/scientiflow_cli/pipeline/decode_and_execute.py +111 -0
- scientiflow_cli-0.1.0/scientiflow_cli/pipeline/get_jobs.py +34 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/__init__.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/auth.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/executor.py +105 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/job_manager.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/request_handler.py +36 -0
- scientiflow_cli-0.1.0/scientiflow_cli/services/status_updater.py +10 -0
- scientiflow_cli-0.1.0/scientiflow_cli/utils/__init__.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/utils/encryption.py +36 -0
- scientiflow_cli-0.1.0/scientiflow_cli/utils/file_manager.py +80 -0
- scientiflow_cli-0.1.0/scientiflow_cli/utils/logger.py +0 -0
- scientiflow_cli-0.1.0/scientiflow_cli/utils/mock.py +472 -0
|
@@ -0,0 +1,7 @@
|
|
|
1
|
+
ScientiFlow CLI Proprietary License
|
|
2
|
+
|
|
3
|
+
This software is the proprietary property of ScientiFlow. You may not copy, modify, or distribute this software or any part of it without express written permission from ScientiFlow.
|
|
4
|
+
|
|
5
|
+
Usage of the software is governed by this license and subject to the terms of any commercial agreements with ScientiFlow.
|
|
6
|
+
|
|
7
|
+
For commercial licensing requests, contact ScientiFlow at scientiflow@gmail.com.
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
Metadata-Version: 2.1
|
|
2
|
+
Name: scientiflow-cli
|
|
3
|
+
Version: 0.1.0
|
|
4
|
+
Summary: CLI tool for scientiflow. This application runs on the client side, decodes pipelines, and executes them in the configured order!
|
|
5
|
+
License: Proprietary
|
|
6
|
+
Author: ScientiFlow
|
|
7
|
+
Author-email: scientiflow@gmail.com
|
|
8
|
+
Requires-Python: >=3.12,<4.0
|
|
9
|
+
Classifier: License :: Other/Proprietary License
|
|
10
|
+
Classifier: Programming Language :: Python :: 3
|
|
11
|
+
Classifier: Programming Language :: Python :: 3.12
|
|
12
|
+
Requires-Dist: cryptography (>=43.0.1,<44.0.0)
|
|
13
|
+
Requires-Dist: pwinput (>=1.0.3,<2.0.0)
|
|
14
|
+
Requires-Dist: requests (>=2.32.3,<3.0.0)
|
|
15
|
+
Description-Content-Type: text/markdown
|
|
16
|
+
|
|
17
|
+
# Scientiflow
|
|
18
|
+
|
|
19
|
+
## Setting the ENV variables
|
|
20
|
+
|
|
21
|
+
```
|
|
22
|
+
# When developing on Windows, run:
|
|
23
|
+
$env:API_BASE="SCIENTIFLOW_BACKEND_URL" # Set the base URL of the Scientiflow backend
|
|
24
|
+
$env:AUTH_TOKEN="SOME_AUTH_TOKEN" # Set the AUTH token that you get after logging in
|
|
25
|
+
$env:SCFLOW_DEBUG=1 # Run in debug mode, using some dummy data
|
|
26
|
+
|
|
27
|
+
# on Linux, run:
|
|
28
|
+
export API_BASE="SCIENTIFLOW_BACKEND_URL" # Set the base URL of the Scientiflow backend
|
|
29
|
+
export AUTH_TOKEN="SOME_AUTH_TOKEN" # Set the AUTH token that you get after logging in
|
|
30
|
+
export SCFLOW_DEBUG=1 # Run in debug mode, using some dummy data
|
|
31
|
+
```
|
|
32
|
+
|
|
33
|
+
## Building and installing the python package
|
|
34
|
+
|
|
35
|
+
```bash
|
|
36
|
+
poetry build
|
|
37
|
+
pip install dist/scientiflow_cli-0.1.0-py3-none-any.whl --force-reinstall
|
|
38
|
+
|
|
39
|
+
# Now you can run it as:
|
|
40
|
+
python -m scientiflow_cli --help
|
|
41
|
+
```
|
|
42
|
+
|
|
43
|
+
> Note: The `--force-reinstall` flag is used as a safeguard, in case an already existing version of the package is installed.
|
|
44
|
+
|
|
45
|
+
## For the devs
|
|
46
|
+
|
|
47
|
+
> Note
|
|
48
|
+
> If you need to add a dependency, which this project depends on, use the command `poetry add` instead of doing a pip install. This will ensure that the `pyproject.toml` file is updated with the new dependency, and all the other devs have the same dependencies and dependencies versions.
|
|
49
|
+
|
|
50
|
+
```bash
|
|
51
|
+
# Activate a poetry shell, and install dependencies
|
|
52
|
+
poetry shell
|
|
53
|
+
|
|
54
|
+
# Install the dependencies if it's your first time working on the project, using:
|
|
55
|
+
# poetry install
|
|
56
|
+
```
|
|
57
|
+
|
|
58
|
+
```bash
|
|
59
|
+
# In order to run the package without building, you can do:
|
|
60
|
+
poetry run python -m scientiflow_cli.main --help
|
|
61
|
+
```
|
|
62
|
+
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
# Scientiflow
|
|
2
|
+
|
|
3
|
+
## Setting the ENV variables
|
|
4
|
+
|
|
5
|
+
```
|
|
6
|
+
# When developing on Windows, run:
|
|
7
|
+
$env:API_BASE="SCIENTIFLOW_BACKEND_URL" # Set the base URL of the Scientiflow backend
|
|
8
|
+
$env:AUTH_TOKEN="SOME_AUTH_TOKEN" # Set the AUTH token that you get after logging in
|
|
9
|
+
$env:SCFLOW_DEBUG=1 # Run in debug mode, using some dummy data
|
|
10
|
+
|
|
11
|
+
# on Linux, run:
|
|
12
|
+
export API_BASE="SCIENTIFLOW_BACKEND_URL" # Set the base URL of the Scientiflow backend
|
|
13
|
+
export AUTH_TOKEN="SOME_AUTH_TOKEN" # Set the AUTH token that you get after logging in
|
|
14
|
+
export SCFLOW_DEBUG=1 # Run in debug mode, using some dummy data
|
|
15
|
+
```
|
|
16
|
+
|
|
17
|
+
## Building and installing the python package
|
|
18
|
+
|
|
19
|
+
```bash
|
|
20
|
+
poetry build
|
|
21
|
+
pip install dist/scientiflow_cli-0.1.0-py3-none-any.whl --force-reinstall
|
|
22
|
+
|
|
23
|
+
# Now you can run it as:
|
|
24
|
+
python -m scientiflow_cli --help
|
|
25
|
+
```
|
|
26
|
+
|
|
27
|
+
> Note: The `--force-reinstall` flag is used as a safeguard, in case an already existing version of the package is installed.
|
|
28
|
+
|
|
29
|
+
## For the devs
|
|
30
|
+
|
|
31
|
+
> Note
|
|
32
|
+
> If you need to add a dependency, which this project depends on, use the command `poetry add` instead of doing a pip install. This will ensure that the `pyproject.toml` file is updated with the new dependency, and all the other devs have the same dependencies and dependencies versions.
|
|
33
|
+
|
|
34
|
+
```bash
|
|
35
|
+
# Activate a poetry shell, and install dependencies
|
|
36
|
+
poetry shell
|
|
37
|
+
|
|
38
|
+
# Install the dependencies if it's your first time working on the project, using:
|
|
39
|
+
# poetry install
|
|
40
|
+
```
|
|
41
|
+
|
|
42
|
+
```bash
|
|
43
|
+
# In order to run the package without building, you can do:
|
|
44
|
+
poetry run python -m scientiflow_cli.main --help
|
|
45
|
+
```
|
|
@@ -0,0 +1,30 @@
|
|
|
1
|
+
[tool.poetry]
|
|
2
|
+
name = "scientiflow-cli"
|
|
3
|
+
version = "0.1.0"
|
|
4
|
+
description = "CLI tool for scientiflow. This application runs on the client side, decodes pipelines, and executes them in the configured order!"
|
|
5
|
+
authors = ["ScientiFlow <scientiflow@gmail.com>"]
|
|
6
|
+
license = "Proprietary"
|
|
7
|
+
readme = "README.md"
|
|
8
|
+
|
|
9
|
+
[tool.poetry.dependencies]
|
|
10
|
+
python = "^3.12"
|
|
11
|
+
requests = "^2.32.3"
|
|
12
|
+
cryptography = "^43.0.1"
|
|
13
|
+
pwinput = "^1.0.3"
|
|
14
|
+
|
|
15
|
+
[tool.poetry.scripts]
|
|
16
|
+
scientiflow-cli = "scientiflow_cli.main:main"
|
|
17
|
+
|
|
18
|
+
[build-system]
|
|
19
|
+
requires = ["poetry-core"]
|
|
20
|
+
build-backend = "poetry.core.masonry.api"
|
|
21
|
+
|
|
22
|
+
# Optional additions
|
|
23
|
+
# NOTE: these keys appear after the [build-system] header, so TOML assigns them to that table; move keywords/classifiers under [tool.poetry] for Poetry to pick them up
|
|
24
|
+
keywords = ["cli", "scientific workflows", "automation", "pipeline"]
|
|
25
|
+
classifiers = [
|
|
26
|
+
"Programming Language :: Python :: 3",
|
|
27
|
+
"License :: Other/Proprietary License",
|
|
28
|
+
"Development Status :: 4 - Beta",
|
|
29
|
+
"Operating System :: OS Independent"
|
|
30
|
+
]
|
|
File without changes
|
|
File without changes
|
|
@@ -0,0 +1,50 @@
|
|
|
1
|
+
import os
|
|
2
|
+
from cryptography.fernet import Fernet
|
|
3
|
+
|
|
4
|
+
TOKEN_FILE_PATH = os.path.expanduser("~/.scientiflow/token")
|
|
5
|
+
KEY_FILE_PATH = os.path.expanduser("~/.scientiflow/key")
|
|
6
|
+
|
|
7
|
+
def setAuthToken(auth_token):
    """Encrypt *auth_token* with a locally stored Fernet key and persist it.

    Creates ~/.scientiflow (if missing), generates an encryption key on first
    use (stored at KEY_FILE_PATH), then writes the encrypted token to
    TOKEN_FILE_PATH. Both files are chmod'ed to 0o600 (owner read/write only).

    Args:
        auth_token: the plaintext token string received from the backend.

    Errors are reported to stdout rather than raised, matching the CLI's
    print-based error style.
    """
    try:
        # exist_ok=True already makes this safe to call repeatedly, so no
        # separate os.path.exists() check is needed (avoids a TOCTOU race).
        os.makedirs(os.path.dirname(TOKEN_FILE_PATH), exist_ok=True)

        if not os.path.exists(KEY_FILE_PATH):
            # First run: create and persist a new encryption key.
            key = Fernet.generate_key()
            with open(KEY_FILE_PATH, "wb") as key_file:
                key_file.write(key)
            # Restrict the key file to the current user.
            os.chmod(KEY_FILE_PATH, 0o600)
        else:
            with open(KEY_FILE_PATH, "rb") as key_file:
                key = key_file.read()

        fernet = Fernet(key)
        encrypted_token = fernet.encrypt(auth_token.encode())

        with open(TOKEN_FILE_PATH, "wb") as token_file:
            token_file.write(encrypted_token)
        os.chmod(TOKEN_FILE_PATH, 0o600)

        print("Token encrypted and saved securely.")
    except Exception as e:
        print(f"Error setting auth token: {e}")
|
|
32
|
+
|
|
33
|
+
def getAuthToken():
    """Decrypt and return the stored auth token, or None when unavailable.

    Returns None when either the token or the key file is missing, or when
    decryption fails (the failure is reported to stdout).
    """
    # Both files are required: the encrypted token and the Fernet key.
    if not (os.path.exists(TOKEN_FILE_PATH) and os.path.exists(KEY_FILE_PATH)):
        return None

    try:
        with open(KEY_FILE_PATH, 'rb') as key_file:
            encryption_key = key_file.read()

        fernet = Fernet(encryption_key)

        with open(TOKEN_FILE_PATH, 'rb') as token_file:
            encrypted_token = token_file.read()

        return fernet.decrypt(encrypted_token).decode()
    except Exception as e:
        print(f"Failed to decrypt token: {e}")
        return None
|
|
@@ -0,0 +1,38 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
import pwinput
|
|
3
|
+
|
|
4
|
+
from scientiflow_cli.cli.auth_utils import setAuthToken
|
|
5
|
+
import re
|
|
6
|
+
|
|
7
|
+
def login_user():
    """Interactively log in to the ScientiFlow backend.

    Prompts for email (syntax-validated) and password, posts the credentials
    to /auth/login, and persists the returned token via setAuthToken().
    All outcomes are reported to stdout.
    """
    email_pattern = r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
    email = input("Enter your email: ")

    # Reject obviously malformed addresses before asking for a password.
    if not re.match(email_pattern, email):
        print(f"'{email}' is not a valid email.")
        return

    password = pwinput.pwinput("Enter your password: ")
    payload = {
        "email": email,
        "password": password,
        "device_name": "Google-Windows",
        "remember": True,
    }

    login_url = "https://www.backend.scientiflow.com/api/auth/login"
    response = requests.post(login_url, json=payload)

    if response.status_code != 200:
        print("Login failed!")
        print(f"Status code: {response.status_code}")
        print(f"Response: {response.text}")
        return

    print("Login successful!")
    auth_token = response.json().get("token")
    if auth_token:
        setAuthToken(auth_token)
    else:
        print("No token received from the server.")
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import requests
|
|
3
|
+
from scientiflow_cli.cli.auth_utils import getAuthToken
|
|
4
|
+
|
|
5
|
+
def logout_user():
    """Log out from the ScientiFlow backend and delete local credentials.

    Requires a decryptable stored token; on a successful server-side logout
    the local token and key files are removed. All outcomes are printed.
    """
    token = getAuthToken()
    if not token:
        print("Token file not found or decryption failed. Please login to continue.")
        return

    logout_url = "https://www.backend.scientiflow.com/api/auth/logout"
    response = requests.post(logout_url, headers={'Authorization': f'Bearer {token}'})

    if response.status_code != 200:
        print("Logout failed!")
        print(f"Status code: {response.status_code}")
        print(f"Response: {response.text}")
        return

    print("Logout successful!")
    # Remove the encrypted token first, then the key used to decrypt it.
    for filename in ("token", "key"):
        os.remove(os.path.expanduser(f"~/.scientiflow/{filename}"))
    print("Token and key files deleted.")
|
|
File without changes
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
import pathlib
|
|
2
|
+
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
def user_is_authenticated() -> bool:
    """Check if user is authenticated.

    NOTE(review): this reads ~/.scientiflow/"key", although auth_utils stores
    the encrypted token at ~/.scientiflow/"token" and the Fernet key at
    "key" — confirm which file is intended here.
    """
    credential_path = pathlib.Path.home() / ".scientiflow" / "key"

    if not credential_path.exists():
        return False

    jwt_token = credential_path.read_text()

    # TODO: Implement token verification using backend — the hard-coded JWT
    # below is a placeholder comparison, not real verification.
    return jwt_token == "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJ1c2VybmFtZSI6ImFkbWluIiwibG9naW5FeHBpcmVzIjoxNzMwMDc4NDU0fQ.Ppa9RsbMfMo7q0ZfhD9To_vArAkmESaDJtUVRmzm920"
|
|
19
|
+
|
|
20
|
+
|
|
21
|
+
|
|
22
|
+
def get_auth_token() -> str:
    """Return the raw contents of ~/.scientiflow/key.

    NOTE(review): despite the name, this reads the *key* file; auth_utils
    writes the (encrypted) auth token to ~/.scientiflow/token — confirm
    which file callers actually expect.

    Raises:
        FileNotFoundError: if the file does not exist.
    """
    return (pathlib.Path.home() / ".scientiflow" / "key").read_text()
|
|
File without changes
|
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import argparse
|
|
3
|
+
from scientiflow_cli.cli.login import login_user#, user_has_auth_token, get_auth_token
|
|
4
|
+
from scientiflow_cli.cli.login import login_user
|
|
5
|
+
from scientiflow_cli.cli.logout import logout_user
|
|
6
|
+
from scientiflow_cli.pipeline.get_jobs import get_jobs
|
|
7
|
+
from scientiflow_cli.utils.file_manager import create_job_dirs, get_job_files
|
|
8
|
+
from scientiflow_cli.pipeline.container_manager import get_job_containers
|
|
9
|
+
from scientiflow_cli.utils.mock import mock_jobs
|
|
10
|
+
from scientiflow_cli.services.executor import execute_jobs
|
|
11
|
+
from scientiflow_cli.cli.auth_utils import getAuthToken
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
# Fallback auth token taken from the environment (see README env setup);
# None when the AUTH_TOKEN variable is unset.
AUTH_TOKEN = os.getenv("AUTH_TOKEN", None)
|
|
15
|
+
|
|
16
|
+
def main():
    """Entry point for the ScientiFlow agent CLI.

    Parses one boolean action flag and dispatches to the matching handler.
    --login and --logout are the only actions usable without a stored token;
    --mock-pipeline-decode is accepted but currently has no handler.
    """
    parser = argparse.ArgumentParser(description="Scientiflow Agent CLI")

    # (flag, help text) pairs; all are independent store_true switches.
    cli_options = (
        ('--login', "Login using your scientiflow credentials"),
        ('--logout', "Logout from scientiflow"),
        ('--get-files', "Get user files"),
        ('--get-containers', "Download containers for the user"),
        ('--mock-pipeline-decode', "Decode the mock pipeline"),
        ('--execute-jobs', "Fetch and execute pending jobs"),
    )
    for flag, help_text in cli_options:
        parser.add_argument(flag, action='store_true', help=help_text)

    args = parser.parse_args()

    try:
        if args.login:
            login_user()
        elif args.logout:
            logout_user()
        elif args.get_files:
            get_job_files(auth_token=AUTH_TOKEN)
        elif args.get_containers:
            get_job_containers(auth_token=AUTH_TOKEN)
        elif args.execute_jobs:
            execute_jobs(auth_token=getAuthToken())
        else:
            print("No arguments specified. Use --help to see available options")
    except Exception as e:
        # Top-level guard: report the failure instead of a raw traceback.
        print("Error: ", e)
        return


if __name__ == "__main__":
    main()
|
|
File without changes
|
|
@@ -0,0 +1,62 @@
|
|
|
1
|
+
import os
|
|
2
|
+
import requests
|
|
3
|
+
import subprocess
|
|
4
|
+
from pathlib import Path
|
|
5
|
+
|
|
6
|
+
API_BASE = "https://www.backend.scientiflow.com/api"
|
|
7
|
+
|
|
8
|
+
def get_job_containers(auth_token: str, job: dict) -> None:
    """Synchronize singularity containers for a job with the backend.

    Fetches the container inventory for the job's pipeline, deletes local
    container files no longer needed by any of the user's pipelines, and
    pulls (via `singularity pull`) any containers required by the current
    pipeline that are not yet present locally.

    Args:
        auth_token: Bearer token for the backend API.
        job: job descriptor; reads job['server']['base_directory'] and
             job['project_job']['id'].
    """
    base_dir = Path(job['server']['base_directory'])
    containers_dir = base_dir / "containers"
    project_job_id = job['project_job']['id']
    if not containers_dir.exists():
        containers_dir.mkdir()

    # Get names of containers already available in the user's machine
    # (file names, e.g. "tool.sif").
    avail_containers: set[str] = {item.name for item in containers_dir.iterdir() if item.is_file()}

    headers = {"Authorization": f"Bearer {auth_token}"}
    params = {"project_job_id": project_job_id}
    # NOTE(review): this request sits outside the try below, so network
    # errors (ConnectionError, timeout) propagate uncaught — confirm intended.
    response = requests.get(f"{API_BASE}/agent-application/get-user-containers", headers=headers, params=params)
    try:
        if response.status_code == 200:
            container_info = response.json()

            if not container_info:
                print("[X] No containers found for current User / Project")
                return

            # NOTE(review): these server-side names are compared against local
            # *file* names which carry a ".sif" suffix (added by the pull
            # command below). If the backend sends bare image names, the set
            # differences never match — verify the backend's naming scheme.
            current_pipeline_containers = set(container_info["current_pipeline_containers"])
            user_all_containers = set(container_info["user_all_unique_containers"])

            # Remove containers that are not needed but are present on the current machine
            containers_to_remove = avail_containers - user_all_containers
            for container_name in containers_to_remove:
                container_path = base_dir / "containers" / container_name
                container_path.unlink()  # Unlinking means deleting

            # Download containers which are not present on the user's machine
            containers_to_download = current_pipeline_containers - avail_containers

            for container_name in container_info['container_image_details']:
                # container_name is actually a dict with 'image_name'/'sylabs_uri'.
                if container_name['image_name'] in containers_to_download:
                    command = f"singularity pull {container_name['image_name']}.sif {container_name['sylabs_uri']}"
                    command = command.split()
                    print(f"[+] Downloading container {container_name['image_name']}... ", end="")
                    # check=True raises CalledProcessError on a non-zero exit.
                    subprocess.run(command, check=True, cwd=containers_dir)
                    print("Done")

        else:
            print(f"Error fetching container info! Error code: {response.status_code}")

    except subprocess.CalledProcessError:
        print("[x] Error executing singularity commands. Try checking your singularity installation")
        return

    except requests.exceptions.JSONDecodeError:
        print("[X] No containers found for current User / Project")
        return

    except Exception as e:
        print(f"Error: {e}")
|
|
62
|
+
|
|
@@ -0,0 +1,111 @@
|
|
|
1
|
+
import subprocess
|
|
2
|
+
import re
|
|
3
|
+
from typing import Dict, List, Any
|
|
4
|
+
from scientiflow_cli.services.status_updater import update_job_status
|
|
5
|
+
|
|
6
|
+
class PipelineExecutor:
    """Decodes a pipeline graph and executes its commands in order.

    The pipeline is a DAG given as `nodes` (dicts with at least 'id', 'type'
    and 'data') and `edges` ('source' -> 'target' id pairs). Node types
    handled by the traversal: "splitterParent", "splitter-child", "terminal"
    and "collector". Terminal-node commands run inside singularity images
    stored under `<base_dir>/containers/`.
    """

    def __init__(self, base_dir: str, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str]):
        self.base_dir = base_dir
        self.project_job_id = project_job_id
        self.project_title = project_title
        self.job_dir_name = job_dir_name
        self.nodes = nodes
        self.edges = edges
        self.environment_variables = environment_variables

        # Map node id -> node dict for O(1) lookup during traversal.
        self.nodes_map = {node['id']: node for node in nodes}

        # Adjacency list (source id -> list of target ids) built from edges.
        self.adj_list = {node['id']: [] for node in nodes}
        for edge in edges:
            self.adj_list[edge['source']].append(edge['target'])

        # Roots are nodes that never appear as an edge target (no incoming edges).
        all_nodes = set(self.nodes_map.keys())
        target_nodes = {edge['target'] for edge in edges}
        self.root_nodes = all_nodes - target_nodes

    def replace_variables(self, command: str) -> str:
        """Substitute ${VAR} placeholders in *command* from environment_variables.

        Unknown variables are left untouched (the literal ${name} is restored).
        """
        # Function to replace each match with the corresponding value from the dictionary
        def replacer(match):
            variable_name = match.group(1)  # Extract the variable name (without ${})
            return self.environment_variables.get(variable_name, f"${{{variable_name}}}")

        pattern = r'\$\{(\w+)\}'  # This regex pattern finds placeholders like ${<variable>}
        result = re.sub(pattern, replacer, command)
        return result

    def execute_command(self, command: str) -> Dict[str, str]:
        """Run *command* in a shell, returning {"status", "result"}.

        On any failure the job is marked FAILURE on the backend and the whole
        program is terminated via SystemExit.

        NOTE(review): shell=True executes backend-supplied command strings
        through the shell — acceptable only if pipeline definitions are
        fully trusted; confirm.
        """
        res = {"status": None, "result": ""}
        try:
            res["result"] = subprocess.run(command, capture_output=True, text=True, check=True, shell=True).stdout
            res["status"] = "SUCCESS"
        except FileNotFoundError as fnf_error:
            res["result"] = f"File not found: {fnf_error}"
            res["status"] = "FAILURE"
            update_job_status(self.project_job_id, "FAILURE")
            raise SystemExit("Terminating the program.")
        except subprocess.CalledProcessError as cpe:
            res["status"] = "FAILURE"
            res["result"] = f"Script failed with error: {cpe.stderr}"
            update_job_status(self.project_job_id, "FAILURE")
            raise SystemExit("Terminating the program.")
        except Exception as e:
            res["status"] = "FAILURE"
            res["result"] = f"Error: {e}"
            update_job_status(self.project_job_id, "FAILURE")
            raise SystemExit("Terminating the program.")
        return res

    def dfs(self, node: str):
        """Walk the pipeline from *node*, dispatching on the node's type.

        Returns a collector node id (so a splitterParent can resume past it)
        or None when a path ends.
        """
        current_node = self.nodes_map[node]

        if current_node['type'] == "splitterParent":
            # Descend into each active child branch; an inactive child ends
            # the split — continue past the collector the branches reached.
            collector = None
            for splitter_child in self.adj_list[node]:
                if self.nodes_map[splitter_child]['data']['active'] == True:
                    collector = self.dfs(splitter_child)
                else:
                    if collector and self.adj_list[collector]:
                        return self.dfs(self.adj_list[collector][0])
                    else:
                        return

        elif current_node['type'] == "splitter-child":
            # Only active branches are followed.
            if current_node['data']['active'] == True:
                if self.adj_list[node]:
                    return self.dfs(self.adj_list[node][0])
                else:
                    return

        elif current_node['type'] == "terminal":
            # Run every command of this node inside its singularity image,
            # from the job's working directory.
            commands = current_node['data']['commands']
            for command in commands:
                cmd = self.replace_variables(command.get('command'))
                if cmd:
                    res = self.execute_command(f"cd {self.base_dir}/{self.project_title}/{self.job_dir_name} && singularity exec {self.base_dir}/containers/{current_node['data']['software']}.sif {cmd}")
                    print(cmd, ":", res["status"], res["result"])
            if self.adj_list[node]:
                return self.dfs(self.adj_list[node][0])
            else:
                return

        elif current_node['type'] == "collector":
            # Hand the collector id back to the splitterParent that started
            # the split, so traversal can continue after the join point.
            if self.adj_list[node]:
                return node
            else:
                return

    def decode_and_execute_pipeline(self):
        """Mark the job RUNNING, traverse from a root node, then mark SUCCESS."""
        update_job_status(self.project_job_id, "RUNNING")
        if self.root_nodes:
            # Start DFS from the first root node
            root_node = next(iter(self.root_nodes))
            self.dfs(root_node)
        # NOTE(review): SUCCESS is reported even when no root node exists —
        # confirm that an empty/rootless graph should count as success.
        update_job_status(self.project_job_id, "SUCCESS")
|
|
107
|
+
|
|
108
|
+
# Module-level convenience wrapper around PipelineExecutor.
def decode_and_execute_pipeline(base_dir: str, project_job_id: int, project_title: str, job_dir_name: str, nodes: List[Dict[str, Any]], edges: List[Dict[str, str]], environment_variables: Dict[str, str]):
    """Build a PipelineExecutor for the given job and run its pipeline."""
    PipelineExecutor(
        base_dir,
        project_job_id,
        project_title,
        job_dir_name,
        nodes,
        edges,
        environment_variables,
    ).decode_and_execute_pipeline()
|
|
@@ -0,0 +1,34 @@
|
|
|
1
|
+
import requests
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
|
|
4
|
+
|
|
5
|
+
API_BASE = "https://www.backend.scientiflow.com/api"
|
|
6
|
+
|
|
7
|
+
|
|
8
|
+
def get_jobs(auth_token: str) -> list[dict]:
    """Fetch the list of jobs pending execution for the authenticated user.

    Prints a small table (index, project title, job title) and returns the
    decoded job list. Returns [] on a non-200 response, invalid JSON, or an
    empty job list.

    Args:
        auth_token: Bearer token placed in the Authorization header.
    """
    headers = { "Authorization": f"Bearer {auth_token}"}
    response = requests.get(f"{API_BASE}/agent-application/check-jobs-to-execute", headers=headers)

    if not response.status_code == 200:
        print("Error fetching jobs - Invalid response")
        return []

    try:
        jobs = response.json()
        if len(jobs) == 0:
            print("No jobs to execute")
            return []
        else:
            print("\n{:<8} {:<20} {:<20}".format("Job ID", "Project Title", "Job Title"))
            print("====== ============= =========")
            # Iterate the already-decoded payload; the original re-parsed
            # response.json() here, decoding the body a second time.
            for index, job in enumerate(jobs, start=1):
                project_title = job['project']['project_title']
                job_title = job['project_job']['job_title']
                print("{:<8} {:<20} {:<20}".format(index, project_title, job_title))
            print("\n")
            return jobs

    except requests.exceptions.JSONDecodeError:
        print("Error fetching jobs - Invalid JSON")
        return []
|
|
File without changes
|
|
File without changes
|