cloudos-cli 2.17.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- cloudos_cli/__init__.py +11 -0
- cloudos_cli/__main__.py +1297 -0
- cloudos_cli/_version.py +1 -0
- cloudos_cli/clos.py +726 -0
- cloudos_cli/jobs/__init__.py +8 -0
- cloudos_cli/jobs/job.py +555 -0
- cloudos_cli/queue/__init__.py +8 -0
- cloudos_cli/queue/queue.py +139 -0
- cloudos_cli/utils/__init__.py +9 -0
- cloudos_cli/utils/errors.py +32 -0
- cloudos_cli/utils/requests.py +75 -0
- cloudos_cli-2.17.0.dist-info/LICENSE +674 -0
- cloudos_cli-2.17.0.dist-info/METADATA +1060 -0
- cloudos_cli-2.17.0.dist-info/RECORD +41 -0
- cloudos_cli-2.17.0.dist-info/WHEEL +5 -0
- cloudos_cli-2.17.0.dist-info/entry_points.txt +2 -0
- cloudos_cli-2.17.0.dist-info/top_level.txt +2 -0
- tests/__init__.py +0 -0
- tests/functions_for_pytest.py +7 -0
- tests/test_clos/__init__.py +0 -0
- tests/test_clos/test_create_cromwell_header.py +35 -0
- tests/test_clos/test_cromwell_switch.py +77 -0
- tests/test_clos/test_detect_workflow.py +47 -0
- tests/test_clos/test_get_cromwell_status.py +77 -0
- tests/test_clos/test_get_curated_workflow_list.py +72 -0
- tests/test_clos/test_get_job_list.py +79 -0
- tests/test_clos/test_get_job_status.py +75 -0
- tests/test_clos/test_get_project_list.py +74 -0
- tests/test_clos/test_get_user_info.py +68 -0
- tests/test_clos/test_get_workflow_list.py +87 -0
- tests/test_clos/test_is_module.py +48 -0
- tests/test_clos/test_process_job_list.py +74 -0
- tests/test_clos/test_process_project_list.py +36 -0
- tests/test_clos/test_process_workflow_list.py +36 -0
- tests/test_clos/test_wait_job_completion.py +40 -0
- tests/test_clos/test_workflow_import.py +77 -0
- tests/test_jobs/__init__.py +0 -0
- tests/test_jobs/test_convert_nextflow_to_json.py +104 -0
- tests/test_jobs/test_project_id.py +67 -0
- tests/test_jobs/test_send_job.py +84 -0
- tests/test_jobs/test_workflow_id.py +67 -0
|
@@ -0,0 +1,68 @@
|
|
|
1
|
+
import mock
|
|
2
|
+
import json
|
|
3
|
+
import pytest
|
|
4
|
+
import responses
|
|
5
|
+
from cloudos_cli.clos import Cloudos
|
|
6
|
+
from cloudos_cli.utils.errors import BadRequestException
|
|
7
|
+
|
|
8
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
9
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
10
|
+
|
|
11
|
+
|
|
12
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_get_user_info_correct_response():
    """Check that 'get_user_info' returns the mocked user payload on a 200 reply."""
    payload = json.dumps({"dockerRegistriesCredentials": []})
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    # Register the mocked GET endpoint before the client issues any request.
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v1/users/me",
        body=payload,
        headers=request_headers,
        status=200)
    # Build the client and exercise the call under test.
    client = Cloudos(apikey=APIKEY, cromwell_token=None,
                     cloudos_url=CLOUDOS_URL)
    user_info = client.get_user_info()
    assert user_info['dockerRegistriesCredentials'] == []
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_get_user_info_incorrect_response():
    """Check that 'get_user_info' raises BadRequestException on a 400 reply."""
    # Canned error body mimicking the server's 400 response.
    error_payload = json.dumps({"statusCode": 400, "code": "BadRequest",
                                "message": "Bad Request.", "time": "2022-11-23_17:31:07"})
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v1/users/me",
        body=error_payload,
        headers=request_headers,
        status=400)
    # The client call should surface the 400 as a BadRequestException.
    with pytest.raises(BadRequestException) as error:
        client = Cloudos(apikey=APIKEY, cromwell_token=None,
                         cloudos_url=CLOUDOS_URL)
        client.get_user_info()
    assert "Server returned status 400." in str(error)
|
|
@@ -0,0 +1,87 @@
|
|
|
1
|
+
import mock
|
|
2
|
+
import json
|
|
3
|
+
import pytest
|
|
4
|
+
import responses
|
|
5
|
+
from responses import matchers
|
|
6
|
+
from cloudos_cli.clos import Cloudos
|
|
7
|
+
from cloudos_cli.utils.errors import BadRequestException
|
|
8
|
+
from tests.functions_for_pytest import load_json_file
|
|
9
|
+
|
|
10
|
+
INPUT = "tests/test_data/workflows/workflows.json"
|
|
11
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
12
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
13
|
+
WORKSPACE_ID = 'lv89ufc838sdig'
|
|
14
|
+
PAGE_SIZE = 10
|
|
15
|
+
PAGE = 1
|
|
16
|
+
ARCHIVED_STATUS = "false"
|
|
17
|
+
|
|
18
|
+
|
|
19
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_get_workflow_list_correct_response():
    """Check that 'get_workflow_list' returns the mocked workflow list on a 200 reply."""
    mocked_body = load_json_file(INPUT)
    query_params = {"teamId": WORKSPACE_ID,
                    "pageSize": PAGE_SIZE,
                    "page": PAGE,
                    "archived.status": ARCHIVED_STATUS}
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    query_string = (f"teamId={WORKSPACE_ID}&pageSize={PAGE_SIZE}"
                    f"&page={PAGE}&archived.status={ARCHIVED_STATUS}")
    # Register the mocked GET endpoint, matching the exact query parameters.
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v3/workflows?{query_string}",
        body=mocked_body,
        headers=request_headers,
        match=[matchers.query_param_matcher(query_params)],
        status=200)
    client = Cloudos(apikey=APIKEY, cromwell_token=None, cloudos_url=CLOUDOS_URL)
    workflows = client.get_workflow_list(WORKSPACE_ID)
    # The canned JSON contains a single workflow entry.
    assert isinstance(workflows, list)
    assert len(workflows) == 1
|
|
52
|
+
|
|
53
|
+
|
|
54
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_get_workflow_list_incorrect_response():
    """Check that 'get_workflow_list' raises BadRequestException on a 400 reply."""
    # Canned error body mimicking the server's 400 response.
    error_payload = json.dumps({"statusCode": 400, "code": "BadRequest",
                                "message": "Bad Request.", "time": "2022-11-23_17:31:07"})
    query_params = {"teamId": WORKSPACE_ID,
                    "pageSize": PAGE_SIZE,
                    "page": PAGE,
                    "archived.status": ARCHIVED_STATUS}
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    query_string = (f"teamId={WORKSPACE_ID}&pageSize={PAGE_SIZE}"
                    f"&page={PAGE}&archived.status={ARCHIVED_STATUS}")
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v3/workflows?{query_string}",
        body=error_payload,
        headers=request_headers,
        match=[matchers.query_param_matcher(query_params)],
        status=400)
    # The client call should surface the 400 as a BadRequestException.
    with pytest.raises(BadRequestException) as error:
        client = Cloudos(apikey=APIKEY, cromwell_token=None, cloudos_url=CLOUDOS_URL)
        client.get_workflow_list(WORKSPACE_ID)
    assert "Server returned status 400." in str(error)
|
|
@@ -0,0 +1,48 @@
|
|
|
1
|
+
"""Pytest for method Cloudos.is_module"""
|
|
2
|
+
import mock
|
|
3
|
+
import responses
|
|
4
|
+
from responses import matchers
|
|
5
|
+
from cloudos_cli.clos import Cloudos
|
|
6
|
+
from tests.functions_for_pytest import load_json_file
|
|
7
|
+
|
|
8
|
+
INPUT = "tests/test_data/process_workflow_list_initial_request.json"
|
|
9
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
10
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
11
|
+
WORKSPACE_ID = 'lv89ufc838sdig'
|
|
12
|
+
PAGE_SIZE = 10
|
|
13
|
+
PAGE = 1
|
|
14
|
+
ARCHIVED_STATUS = "false"
|
|
15
|
+
|
|
16
|
+
|
|
17
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_is_module():
    """Check that 'is_module' reports True for the 'multiqc' workflow in the canned list."""
    mocked_body = load_json_file(INPUT)
    query_params = {"teamId": WORKSPACE_ID,
                    "pageSize": PAGE_SIZE,
                    "page": PAGE,
                    "archived.status": ARCHIVED_STATUS}
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    query_string = (f"teamId={WORKSPACE_ID}&pageSize={PAGE_SIZE}"
                    f"&page={PAGE}&archived.status={ARCHIVED_STATUS}")
    # Register the mocked GET endpoint, matching the exact query parameters.
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v3/workflows?{query_string}",
        body=mocked_body,
        headers=request_headers,
        match=[matchers.query_param_matcher(query_params)],
        status=200)
    client = Cloudos(apikey=APIKEY, cromwell_token=None, cloudos_url=CLOUDOS_URL)
    # 'multiqc' is present in the canned workflow list, so this must be truthy.
    assert client.is_module(workspace_id=WORKSPACE_ID,
                            workflow_name="multiqc")
|
|
@@ -0,0 +1,74 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import pytest
|
|
3
|
+
import requests_mock
|
|
4
|
+
import requests
|
|
5
|
+
import pandas as pd
|
|
6
|
+
import numpy as np
|
|
7
|
+
from cloudos_cli.clos import Cloudos
|
|
8
|
+
|
|
9
|
+
input_json = "tests/test_data/process_job_list_initial_json.json"
|
|
10
|
+
output_df = pd.read_csv("tests/test_data/output_df_of_results.csv",
|
|
11
|
+
index_col=0)
|
|
12
|
+
output_df_full = pd.read_csv("tests/test_data/output_df_of_results_FULL.csv",
|
|
13
|
+
index_col=0)
|
|
14
|
+
|
|
15
|
+
|
|
16
|
+
@pytest.fixture()
def mocked_requests_get():
    """Serve the canned jobs JSON through a mocked GET and return its 'jobs' list."""
    test_workspace_id = 1
    endpoint = f"http://test_cloud_os/api/v1/jobs?teamId={test_workspace_id}&page=1"
    with open(input_json) as json_data:
        canned = json.load(json_data)
    # Route the request through requests_mock so no real network I/O happens.
    with requests_mock.Mocker() as mock_server:
        mock_server.get(endpoint, json=canned)
        reply = requests.get(endpoint)
    return json.loads(reply.content)['jobs']
|
|
26
|
+
|
|
27
|
+
|
|
28
|
+
def test_process_job_list_output_correct_shape(mocked_requests_get):
    """The processed job table must have the same shape as the reference CSV."""
    processed = Cloudos.process_job_list(mocked_requests_get, all_fields=False)
    assert processed.shape == output_df.shape
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def test_process_job_list_output_correct_headers(mocked_requests_get):
    """The processed job table must expose the same column names as the reference CSV."""
    processed = Cloudos.process_job_list(mocked_requests_get, all_fields=False)
    assert list(processed.columns) == list(output_df.columns)
|
|
38
|
+
|
|
39
|
+
|
|
40
|
+
def test_process_job_list_df_values_equal(mocked_requests_get):
    """
    Compare cell values against the reference CSV.

    Three columns (parameters, workflow.description and
    masterInstanceStorageCost) are excluded from the comparison:
    'parameters' gains quoting during CSV round-tripping,
    masterInstanceStorageCost suffers a rounding error and
    workflow.description contains NaN.
    """
    processed = Cloudos.process_job_list(mocked_requests_get)
    comparable_columns = ['_id', 'team', 'name', 'status',
                          'startTime', 'endTime', 'createdAt', 'updatedAt',
                          'computeCostSpent', 'user.id',
                          'workflow._id', 'workflow.name', 'workflow.createdAt',
                          'workflow.updatedAt', 'workflow.workflowType',
                          'project._id', 'project.name',
                          'project.createdAt', 'project.updatedAt']
    assert np.all(processed[comparable_columns] == output_df[comparable_columns])
|
|
56
|
+
|
|
57
|
+
|
|
58
|
+
def test_process_job_list_full_has_correct_columns(mocked_requests_get):
    """With 'all_fields=True' the columns must match the full reference CSV."""
    processed = Cloudos.process_job_list(mocked_requests_get, all_fields=True)
    assert list(processed.columns) == list(output_df_full.columns)
|
|
63
|
+
|
|
64
|
+
|
|
65
|
+
def test_process_job_list_empty_json():
    """A payload without a 'jobs' key must raise KeyError when unpacked."""
    test_workspace_id = 1
    endpoint = f"http://test_cloud_os/api/v1/jobs?teamId={test_workspace_id}&page=1"
    bogus_payload = {"a": {"b": ""}}
    # Serve a payload that lacks the expected 'jobs' key.
    with requests_mock.Mocker() as mock_server:
        mock_server.get(endpoint, json=bogus_payload)
        reply = requests.get(endpoint)
    with pytest.raises(KeyError) as excinfo:
        Cloudos.process_job_list(json.loads(reply.content)['jobs'])
    assert "jobs" in str(excinfo.value)
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""pytest for method Cloudos.process_project_list"""
|
|
2
|
+
import json
|
|
3
|
+
import requests
|
|
4
|
+
from cloudos_cli.clos import Cloudos
|
|
5
|
+
import pytest
|
|
6
|
+
import requests_mock
|
|
7
|
+
import pandas as pd
|
|
8
|
+
|
|
9
|
+
INPUT_JSON = "tests/test_data/projects.json"
|
|
10
|
+
output_df = pd.read_csv("tests/test_data/process_project_list_results.csv")
|
|
11
|
+
output_df_full = pd.read_csv("tests/test_data/process_project_list_results_FULL.csv")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@pytest.fixture(name="mocked_requests_get")
def fixture_mocked_requests_get():
    """Serve the canned projects JSON via a mocked GET and return the raw response."""
    test_workspace_id = 1
    endpoint = f"http://test_cloud_os/api/v1/projects?teamId={test_workspace_id}"
    with open(INPUT_JSON, encoding="utf-8") as json_data:
        canned = json.load(json_data)
    # Route the request through requests_mock so no real network I/O happens.
    with requests_mock.Mocker() as mock_server:
        mock_server.get(endpoint, json=canned)
        reply = requests.get(endpoint)
    return reply
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def test_process_project_list_all_fields_false(mocked_requests_get):
    """With 'all_fields=False' the columns must match the short reference CSV."""
    processed = Cloudos.process_project_list(mocked_requests_get, all_fields=False)
    assert (processed.columns == output_df.columns).any()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def test_process_project_list_all_fields_true(mocked_requests_get):
    """With 'all_fields=True' the columns must match the full reference CSV."""
    processed = Cloudos.process_project_list(mocked_requests_get, all_fields=True)
    assert (processed.columns == output_df_full.columns).any()
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
"""pytest for method Cloudos.process_workflow_list"""
|
|
2
|
+
import json
|
|
3
|
+
import requests
|
|
4
|
+
from cloudos_cli.clos import Cloudos
|
|
5
|
+
import pytest
|
|
6
|
+
import requests_mock
|
|
7
|
+
import pandas as pd
|
|
8
|
+
|
|
9
|
+
INPUT_JSON = "tests/test_data/process_workflow_list_initial_request.json"
|
|
10
|
+
output_df = pd.read_csv("tests/test_data/process_workflow_list_results.csv")
|
|
11
|
+
output_df_full = pd.read_csv("tests/test_data/process_workflow_list_results_FULL.csv")
|
|
12
|
+
|
|
13
|
+
|
|
14
|
+
@pytest.fixture(name="mocked_requests_get")
def fixture_mocked_requests_get():
    """Serve the canned workflows JSON via a mocked GET and return its 'workflows' list."""
    test_workspace_id = 1
    endpoint = f"http://test_cloud_os/api/v1/jobs?teamId={test_workspace_id}"
    with open(INPUT_JSON, encoding="utf-8") as json_data:
        canned = json.load(json_data)
    # Route the request through requests_mock so no real network I/O happens.
    with requests_mock.Mocker() as mock_server:
        mock_server.get(endpoint, json=canned)
        reply = requests.get(endpoint)
    return json.loads(reply.content)['workflows']
|
|
25
|
+
|
|
26
|
+
|
|
27
|
+
def test_process_workflow_list_all_fields_false(mocked_requests_get):
    """With 'all_fields=False' the columns must match the short reference CSV."""
    processed = Cloudos.process_workflow_list(mocked_requests_get, all_fields=False)
    assert (processed.columns == output_df.columns).any()
|
|
31
|
+
|
|
32
|
+
|
|
33
|
+
def test_process_workflow_list_all_fields_true(mocked_requests_get):
    """With 'all_fields=True' the columns must match the full reference CSV."""
    processed = Cloudos.process_workflow_list(mocked_requests_get, all_fields=True)
    assert (processed.columns == output_df_full.columns).any()
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
"""Pytest for method Cloudos.wait_job_completion"""
|
|
2
|
+
import mock
|
|
3
|
+
import responses
|
|
4
|
+
from cloudos_cli.clos import Cloudos
|
|
5
|
+
from tests.functions_for_pytest import load_json_file
|
|
6
|
+
|
|
7
|
+
INPUT = "tests/test_data/get_job_status.json"
|
|
8
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
9
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
10
|
+
JOB_ID = '63bd590f72c38201551c3824'
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_wait_job_completion():
    """Check that 'wait_job_completion' returns the mocked 'running' status dict."""
    mocked_body = load_json_file(INPUT)
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    # Register the mocked job-status endpoint.
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v1/jobs/{JOB_ID}",
        body=mocked_body,
        headers=request_headers,
        status=200)
    client = Cloudos(apikey=APIKEY, cromwell_token=None, cloudos_url=CLOUDOS_URL)
    # Tiny wait/poll intervals keep the test fast while exercising the polling loop.
    job_status = client.wait_job_completion(job_id=JOB_ID,
                                            wait_time=0.01,
                                            request_interval=0.01)
    assert isinstance(job_status, dict)
    assert job_status['status'] == 'running'
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
import mock
|
|
2
|
+
import json
|
|
3
|
+
import pytest
|
|
4
|
+
import responses
|
|
5
|
+
from cloudos_cli.clos import Cloudos
|
|
6
|
+
from cloudos_cli.utils.errors import BadRequestException
|
|
7
|
+
from tests.functions_for_pytest import load_json_file
|
|
8
|
+
|
|
9
|
+
OUTPUT = "tests/test_data/workflows/workflow_import.json"
|
|
10
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
11
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
12
|
+
WORKSPACE_ID = 'lv89ufc838sdig'
|
|
13
|
+
WORKFLOW_URL = 'https://github.com/lifebit-ai/repo'
|
|
14
|
+
WORKFLOW_NAME = 'test-repo'
|
|
15
|
+
WORKFLOW_DOCS_LINK = ''
|
|
16
|
+
REPOSITORY_PROJECT_ID = 1234
|
|
17
|
+
REPOSITORY_ID = 567
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_workflow_import_correct():
    """Check that 'workflow_import' returns the new workflow id on a 200 reply."""
    mocked_body = load_json_file(OUTPUT)
    query_string = f"teamId={WORKSPACE_ID}"
    # Register the mocked POST endpoint for the import.
    responses.add(
        responses.POST,
        url=f"{CLOUDOS_URL}/api/v1/workflows?{query_string}",
        body=mocked_body,
        status=200)
    client = Cloudos(apikey=APIKEY, cromwell_token=None, cloudos_url=CLOUDOS_URL)
    workflow_id = client.workflow_import(WORKSPACE_ID,
                                         WORKFLOW_URL,
                                         WORKFLOW_NAME,
                                         REPOSITORY_PROJECT_ID,
                                         WORKFLOW_DOCS_LINK,
                                         REPOSITORY_ID)
    # The id comes straight from the canned JSON body.
    assert isinstance(workflow_id, str)
    assert workflow_id == '66156ba61d5f06a39b1da573'
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_workflow_import_incorrect():
    """Check that 'workflow_import' raises BadRequestException on a 400 reply."""
    # Canned error body mimicking the server's 400 response.
    error_payload = json.dumps({"statusCode": 400, "code": "BadRequest",
                                "message": "Bad Request.", "time": "2022-11-23_17:31:07"})
    query_string = f"teamId={WORKSPACE_ID}"
    responses.add(
        responses.POST,
        url=f"{CLOUDOS_URL}/api/v1/workflows?{query_string}",
        body=error_payload,
        status=400)
    # The client call should surface the 400 as a BadRequestException.
    with pytest.raises(BadRequestException) as error:
        client = Cloudos(apikey=APIKEY, cromwell_token=None,
                         cloudos_url=CLOUDOS_URL)
        client.workflow_import(WORKSPACE_ID,
                               WORKFLOW_URL,
                               WORKFLOW_NAME,
                               REPOSITORY_PROJECT_ID,
                               WORKFLOW_DOCS_LINK,
                               REPOSITORY_ID)
    assert "Server returned status 400." in str(error)
|
|
File without changes
|
|
@@ -0,0 +1,104 @@
|
|
|
1
|
+
import pytest
|
|
2
|
+
import json
|
|
3
|
+
|
|
4
|
+
from cloudos_cli.jobs.job import Job
|
|
5
|
+
|
|
6
|
+
# Reference JSON produced by a known-good conversion of the example config.
actual_json_file = "tests/test_data/convert_nextflow_to_json_params.json"

# Keyword arguments shared by the conversion tests in this module; all keys
# except "config" are forwarded verbatim to Job.convert_nextflow_to_json.
param_dict = {
    "config": "cloudos_cli/examples/rnatoy.config",
    "parameter": (),
    "example_parameters": [],
    "git_commit": None,
    "git_tag": None,
    "project_id": "6054754029b82f0112762b9c",
    "workflow_id": "60b0ca54303ee601a69b42d1",
    "job_name": "new_job",
    "resumable": True,
    "save_logs": True,
    "batch": False,
    "job_queue_id": None,
    "nextflow_profile": None,
    "nextflow_version": '22.10.8',
    "instance_type": "c5.xlarge",
    "instance_disk": 500,
    "storage_mode": 'regular',
    "lustre_size": 1200,
    "execution_platform": "aws",
    "hpc_id": None,
    "workflow_type": 'nextflow',
    "cromwell_id": None,
    "cost_limit": -1,
    "use_mountpoints": False,
    "docker_login": False
}
|
|
35
|
+
|
|
36
|
+
|
|
37
|
+
def test_convert_nextflow_to_json_output_correct():
    """The JSON produced from the example config must equal the reference file."""
    # Forward every parameter from param_dict except "config", which is positional.
    extra_kwargs = {key: value for key, value in param_dict.items() if key != "config"}
    job_json = Job.convert_nextflow_to_json(1, param_dict["config"], **extra_kwargs)
    with open(actual_json_file) as json_data:
        correct_json = json.load(json_data)
    assert job_json == correct_json
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def test_convert_nextflow_to_json_badly_formed_config():
    """A config without '=' separators must raise ValueError with a helpful message."""
    no_equals_config = "tests/test_data/wrong_params.config"
    # Forward every parameter from param_dict except "config", which is positional.
    extra_kwargs = {key: value for key, value in param_dict.items() if key != "config"}
    with pytest.raises(ValueError) as excinfo:
        Job.convert_nextflow_to_json(1, no_equals_config, **extra_kwargs)
    print(str(excinfo.value))
    # Spaces are stripped on both sides so line-wrapping differences don't matter.
    expected = ("Please, specify your parameters in "
                "tests/test_data/wrong_params.config "
                "using the '=' as spacer. "
                "E.g: name = my_name").replace(" ", "")
    assert expected in str(excinfo.value)
|
|
@@ -0,0 +1,67 @@
|
|
|
1
|
+
"""Pytest added for checking Job.project_id"""
|
|
2
|
+
import mock
|
|
3
|
+
import responses
|
|
4
|
+
from responses import matchers
|
|
5
|
+
from cloudos_cli.jobs import Job
|
|
6
|
+
from tests.functions_for_pytest import load_json_file
|
|
7
|
+
|
|
8
|
+
INPUT_PROJECT = "tests/test_data/projects.json"
|
|
9
|
+
INPUT_WORKFLOW = "tests/test_data/workflows.json"
|
|
10
|
+
APIKEY = 'vnoiweur89u2ongs'
|
|
11
|
+
CLOUDOS_URL = 'http://cloudos.lifebit.ai'
|
|
12
|
+
WORKSPACE_ID = 'lv89ufc838sdig'
|
|
13
|
+
PROJECT_NAME = "lifebit-testing"
|
|
14
|
+
WORKFLOW_NAME = "nf-core-deepvariant"
|
|
15
|
+
PAGE_SIZE = 10
|
|
16
|
+
PAGE = 1
|
|
17
|
+
ARCHIVED_STATUS = "false"
|
|
18
|
+
|
|
19
|
+
|
|
20
|
+
@mock.patch('cloudos_cli.clos', mock.MagicMock())
@responses.activate
def test_project_id():
    """Check that constructing a Job resolves PROJECT_NAME to the expected project id."""
    project_body = load_json_file(INPUT_PROJECT)
    workflow_body = load_json_file(INPUT_WORKFLOW)
    project_params = {"teamId": WORKSPACE_ID}
    workflow_params = {
        "teamId": WORKSPACE_ID,
        "pageSize": PAGE_SIZE,
        "page": PAGE,
        "archived.status": ARCHIVED_STATUS}
    request_headers = {
        "Accept": "application/json, text/plain, */*",
        "Content-Type": "application/json;charset=UTF-8",
        "apikey": APIKEY
    }
    project_query = f"teamId={WORKSPACE_ID}"
    workflow_query = (f"teamId={WORKSPACE_ID}&pageSize={PAGE_SIZE}"
                      f"&page={PAGE}&archived.status={ARCHIVED_STATUS}")
    # Mock the two GET endpoints queried while building the Job object.
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v1/projects?{project_query}",
        body=project_body,
        headers=request_headers,
        match=[matchers.query_param_matcher(project_params)],
        status=200)
    responses.add(
        responses.GET,
        url=f"{CLOUDOS_URL}/api/v3/workflows?{workflow_query}",
        body=workflow_body,
        headers=request_headers,
        match=[matchers.query_param_matcher(workflow_params)],
        status=201)
    # Building the Job triggers the mocked lookups.
    job = Job(apikey=APIKEY,
              cloudos_url=CLOUDOS_URL,
              workspace_id=WORKSPACE_ID,
              cromwell_token=None,
              project_name=PROJECT_NAME,
              workflow_name=WORKFLOW_NAME)
    # The id comes straight from the canned projects JSON.
    assert job.project_id == '1234bc123125'
|