pybiolib 1.2.911__py3-none-any.whl → 1.2.1642__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Note: this release of pybiolib has been flagged as potentially problematic.
- biolib/__init__.py +33 -10
- biolib/_data_record/data_record.py +24 -11
- biolib/_index/index.py +51 -0
- biolib/_index/types.py +7 -0
- biolib/_internal/add_copilot_prompts.py +3 -5
- biolib/_internal/add_gui_files.py +59 -0
- biolib/_internal/data_record/data_record.py +1 -1
- biolib/_internal/data_record/push_data.py +1 -1
- biolib/_internal/data_record/remote_storage_endpoint.py +3 -3
- biolib/_internal/file_utils.py +48 -0
- biolib/_internal/index/__init__.py +1 -0
- biolib/_internal/index/index.py +18 -0
- biolib/_internal/lfs/cache.py +4 -2
- biolib/_internal/push_application.py +89 -23
- biolib/_internal/runtime.py +2 -0
- biolib/_internal/string_utils.py +13 -0
- biolib/_internal/templates/copilot_template/.github/instructions/style-react-ts.instructions.md +47 -0
- biolib/_internal/templates/copilot_template/.github/prompts/biolib_onboard_repo.prompt.md +19 -0
- biolib/_internal/templates/gui_template/.yarnrc.yml +1 -0
- biolib/_internal/templates/gui_template/App.tsx +53 -0
- biolib/_internal/templates/gui_template/Dockerfile +28 -0
- biolib/_internal/templates/gui_template/biolib-sdk.ts +37 -0
- biolib/_internal/templates/gui_template/dev-data/output.json +7 -0
- biolib/_internal/templates/gui_template/index.css +5 -0
- biolib/_internal/templates/gui_template/index.html +13 -0
- biolib/_internal/templates/gui_template/index.tsx +10 -0
- biolib/_internal/templates/gui_template/package.json +27 -0
- biolib/_internal/templates/gui_template/tsconfig.json +24 -0
- biolib/_internal/templates/gui_template/vite-plugin-dev-data.ts +49 -0
- biolib/_internal/templates/gui_template/vite.config.mts +9 -0
- biolib/_internal/templates/init_template/.biolib/config.yml +1 -0
- biolib/_internal/templates/init_template/.github/workflows/biolib.yml +6 -1
- biolib/_internal/templates/init_template/Dockerfile +2 -0
- biolib/_internal/templates/init_template/run.sh +1 -0
- biolib/_internal/templates/templates.py +9 -1
- biolib/_internal/utils/__init__.py +25 -0
- biolib/_internal/utils/job_url.py +33 -0
- biolib/_internal/utils/multinode.py +12 -14
- biolib/_runtime/runtime.py +15 -2
- biolib/_session/session.py +7 -5
- biolib/_shared/__init__.py +0 -0
- biolib/_shared/types/__init__.py +69 -0
- biolib/_shared/types/account.py +12 -0
- biolib/_shared/types/account_member.py +8 -0
- biolib/{_internal → _shared}/types/experiment.py +1 -0
- biolib/_shared/types/resource.py +17 -0
- biolib/_shared/types/resource_deploy_key.py +11 -0
- biolib/{_internal → _shared}/types/resource_permission.py +1 -1
- biolib/{_internal → _shared}/types/user.py +5 -5
- biolib/_shared/utils/__init__.py +7 -0
- biolib/_shared/utils/resource_uri.py +75 -0
- biolib/api/client.py +1 -1
- biolib/app/app.py +96 -45
- biolib/biolib_api_client/app_types.py +1 -0
- biolib/biolib_api_client/biolib_app_api.py +26 -0
- biolib/biolib_binary_format/module_input.py +8 -0
- biolib/biolib_binary_format/remote_endpoints.py +3 -3
- biolib/biolib_binary_format/remote_stream_seeker.py +39 -25
- biolib/biolib_logging.py +1 -1
- biolib/cli/__init__.py +2 -1
- biolib/cli/auth.py +4 -16
- biolib/cli/data_record.py +17 -0
- biolib/cli/index.py +32 -0
- biolib/cli/init.py +93 -11
- biolib/cli/lfs.py +1 -1
- biolib/cli/run.py +1 -1
- biolib/cli/start.py +14 -1
- biolib/compute_node/job_worker/executors/docker_executor.py +31 -9
- biolib/compute_node/job_worker/executors/docker_types.py +1 -1
- biolib/compute_node/job_worker/executors/types.py +6 -5
- biolib/compute_node/job_worker/job_storage.py +2 -1
- biolib/compute_node/job_worker/job_worker.py +155 -90
- biolib/compute_node/job_worker/large_file_system.py +2 -6
- biolib/compute_node/job_worker/network_alloc.py +99 -0
- biolib/compute_node/job_worker/network_buffer.py +240 -0
- biolib/compute_node/job_worker/utilization_reporter_thread.py +2 -2
- biolib/compute_node/remote_host_proxy.py +135 -67
- biolib/compute_node/utils.py +2 -0
- biolib/compute_node/webserver/compute_node_results_proxy.py +188 -0
- biolib/compute_node/webserver/proxy_utils.py +28 -0
- biolib/compute_node/webserver/webserver.py +64 -19
- biolib/experiments/experiment.py +98 -16
- biolib/jobs/job.py +128 -31
- biolib/jobs/job_result.py +73 -33
- biolib/jobs/types.py +1 -0
- biolib/sdk/__init__.py +17 -2
- biolib/typing_utils.py +1 -1
- biolib/utils/cache_state.py +2 -2
- biolib/utils/seq_util.py +1 -1
- {pybiolib-1.2.911.dist-info → pybiolib-1.2.1642.dist-info}/METADATA +4 -2
- pybiolib-1.2.1642.dist-info/RECORD +180 -0
- {pybiolib-1.2.911.dist-info → pybiolib-1.2.1642.dist-info}/WHEEL +1 -1
- biolib/_internal/llm_instructions/.github/instructions/style-react-ts.instructions.md +0 -22
- biolib/_internal/types/__init__.py +0 -6
- biolib/_internal/types/account.py +0 -10
- biolib/utils/app_uri.py +0 -57
- pybiolib-1.2.911.dist-info/RECORD +0 -150
- /biolib/{_internal/llm_instructions → _index}/__init__.py +0 -0
- /biolib/_internal/{llm_instructions → templates/copilot_template}/.github/instructions/general-app-knowledge.instructions.md +0 -0
- /biolib/_internal/{llm_instructions → templates/copilot_template}/.github/instructions/style-general.instructions.md +0 -0
- /biolib/_internal/{llm_instructions → templates/copilot_template}/.github/instructions/style-python.instructions.md +0 -0
- /biolib/_internal/{llm_instructions → templates/copilot_template}/.github/prompts/biolib_app_inputs.prompt.md +0 -0
- /biolib/_internal/{llm_instructions → templates/copilot_template}/.github/prompts/biolib_run_apps.prompt.md +0 -0
- /biolib/{_internal → _shared}/types/app.py +0 -0
- /biolib/{_internal → _shared}/types/data_record.py +0 -0
- /biolib/{_internal → _shared}/types/file_node.py +0 -0
- /biolib/{_internal → _shared}/types/push.py +0 -0
- /biolib/{_internal/types/resource.py → _shared/types/resource_types.py} +0 -0
- /biolib/{_internal → _shared}/types/resource_version.py +0 -0
- /biolib/{_internal → _shared}/types/result.py +0 -0
- /biolib/{_internal → _shared}/types/typing.py +0 -0
- {pybiolib-1.2.911.dist-info → pybiolib-1.2.1642.dist-info}/entry_points.txt +0 -0
- {pybiolib-1.2.911.dist-info → pybiolib-1.2.1642.dist-info/licenses}/LICENSE +0 -0
biolib/biolib_api_client/biolib_app_api.py
CHANGED
@@ -57,6 +57,22 @@ def _get_git_branch_name() -> str:
         return ''
 
 
+def _get_git_commit_hash() -> str:
+    try:
+        github_actions_commit_hash = os.getenv('GITHUB_SHA')
+        if github_actions_commit_hash:
+            return github_actions_commit_hash
+
+        gitlab_ci_commit_hash = os.getenv('CI_COMMIT_SHA')
+        if gitlab_ci_commit_hash:
+            return gitlab_ci_commit_hash
+
+        result = subprocess.run(['git', 'rev-parse', 'HEAD'], check=True, stdout=subprocess.PIPE, text=True)
+        return result.stdout.strip()
+    except BaseException:
+        return ''
+
+
 def _get_git_repository_url() -> str:
     try:
         result = subprocess.run(['git', 'remote', 'get-url', 'origin'], check=True, stdout=subprocess.PIPE, text=True)
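A minimal sketch of the new fallback order (GITHUB_SHA, then CI_COMMIT_SHA, then `git rev-parse HEAD`), assuming the module-level helper above is importable and the variables are otherwise unset:

```python
# Hedged sketch: exercising the commit-hash fallback order. Not part of the package.
import os

from biolib.biolib_api_client.biolib_app_api import _get_git_commit_hash

os.environ.pop('GITHUB_SHA', None)
os.environ['CI_COMMIT_SHA'] = '0123abc'  # hypothetical value standing in for a GitLab CI checkout

print(_get_git_commit_hash())  # prints '0123abc'; with both variables unset it falls back to git, then ''
```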
@@ -99,6 +115,15 @@ class BiolibAppApi:
 
             raise error
 
+    @staticmethod
+    def create_app(uri: str):
+        uri = _get_app_uri_from_str(uri)
+        try:
+            response = biolib.api.client.post(path='/resources/apps/', data={'uri': uri})
+            return response.json()
+        except HttpError as error:
+            raise error
+
     @staticmethod
     def push_app_version(
         app_id,
@@ -116,6 +141,7 @@ class BiolibAppApi:
             'state': 'published',
             'app_version_id_to_copy_images_from': app_version_id_to_copy_images_from,
             'git_branch_name': _get_git_branch_name(),
+            'git_commit_hash': _get_git_commit_hash(),
             'git_repository_url': _get_git_repository_url(),
         }
         if semantic_version:
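A hedged sketch of calling the new wrapper; it assumes an authenticated client, and '@my-org/my-new-app' is a hypothetical URI:

```python
# Hedged sketch: creating an app through the wrapper added above. Requires permission
# to create apps under the given account.
from biolib.biolib_api_client.biolib_app_api import BiolibAppApi

app_dict = BiolibAppApi.create_app('@my-org/my-new-app')
print(app_dict)  # JSON body returned by POST /resources/apps/
```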
biolib/biolib_binary_format/module_input.py
CHANGED
@@ -1,4 +1,5 @@
 from biolib.biolib_binary_format.base_bbf_package import BioLibBinaryFormatBasePackage
+from biolib.biolib_logging import logger
 from biolib.typing_utils import TypedDict, Dict, List
 
 
@@ -14,6 +15,10 @@ class ModuleInput(BioLibBinaryFormatBasePackage):
         self.package_type = 1
 
     def serialize(self, stdin, arguments, files) -> bytes:
+        for path in files.keys():
+            if '//' in path:
+                raise ValueError(f"File path '{path}' contains double slashes which are not allowed")
+
         bbf_data = bytearray()
         bbf_data.extend(self.version.to_bytes(1, 'big'))
         bbf_data.extend(self.package_type.to_bytes(1, 'big'))
@@ -67,6 +72,9 @@ class ModuleInput(BioLibBinaryFormatBasePackage):
             data_len = self.get_data(8, output_type='int')
             path = self.get_data(path_len, output_type='str')
             data = self.get_data(data_len)
+            if '//' in path:
+                # TODO: Raise ValueError here once backwards compatibility period is over
+                logger.warning(f"File path '{path}' contains double slashes which are not allowed")
             files[path] = bytes(data)
 
         return ModuleInputDict(stdin=stdin, arguments=arguments, files=files)
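A standalone illustration (not biolib code) of the path rule `serialize()` now enforces:

```python
# Illustration only: same double-slash check as the new serialize() validation above.
from typing import Dict


def validate_file_paths(files: Dict[str, bytes]) -> None:
    for path in files.keys():
        if '//' in path:
            raise ValueError(f"File path '{path}' contains double slashes which are not allowed")


validate_file_paths({'/data/sample.fasta': b'>seq\nACGT\n'})  # accepted
# validate_file_paths({'/data//sample.fasta': b''})           # would raise ValueError
```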
biolib/biolib_binary_format/remote_endpoints.py
CHANGED
@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone
 
 from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_binary_format.utils import RemoteEndpoint
@@ -17,13 +17,13 @@ class RemoteJobStorageEndpoint(RemoteEndpoint):
         self._storage_type: Literal['input', 'output'] = storage_type
 
     def get_remote_url(self):
-        if not self._presigned_url or datetime.
+        if not self._presigned_url or not self._expires_at or datetime.now(timezone.utc) > self._expires_at:
             self._presigned_url = BiolibJobApi.get_job_storage_download_url(
                 job_auth_token=self._job_auth_token,
                 job_uuid=self._job_uuid,
                 storage_type='results' if self._storage_type == 'output' else 'input',
             )
-            self._expires_at = datetime.
+            self._expires_at = datetime.now(timezone.utc) + timedelta(minutes=8)
             # TODO: Use expires at from url
             # parsed_url = urlparse(self._presigned_url)
             # query_params = parse_qs(parsed_url.query)
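A standalone illustration (not biolib code) of the timezone-aware expiry pattern used above; comparing naive and aware datetimes raises a TypeError, so both sides use UTC-aware values:

```python
# Illustration only: UTC-aware expiry check, mirroring the get_remote_url() logic above.
from datetime import datetime, timedelta, timezone

expires_at = datetime.now(timezone.utc) + timedelta(minutes=8)


def presigned_url_is_expired() -> bool:
    return datetime.now(timezone.utc) > expires_at
```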
biolib/biolib_binary_format/remote_stream_seeker.py
CHANGED
@@ -1,45 +1,59 @@
 from biolib.biolib_binary_format.utils import IndexableBuffer
+from biolib.biolib_logging import logger
 from biolib.typing_utils import Iterable
 
 
 class StreamSeeker:
     def __init__(
+        self,
+        upstream_buffer: IndexableBuffer,
+        files_data_start: int,
+        files_data_end: int,
+        max_chunk_size: int,
     ):
         self._upstream_buffer = upstream_buffer
         self._files_data_end = files_data_end
-        self.
+        self._max_chunk_size = max_chunk_size
 
         self._buffer_start = files_data_start
         self._buffer = bytearray()
 
-    def seek_and_read(self, file_start: int, file_length: int) -> Iterable[bytes]:
+    def seek_and_read(self, file_start: int, file_length: int, read_ahead_bytes: int = 0) -> Iterable[bytes]:
         assert file_start >= self._buffer_start
-        self._buffer = self._buffer[file_start - self._buffer_start:]
+        self._buffer = self._buffer[file_start - self._buffer_start :]
         self._buffer_start = file_start
 
         while True:
             file_byte_count_remaining = file_length - (self._buffer_start - file_start)
-            if file_byte_count_remaining
+            if file_byte_count_remaining <= 0:
                 return
 
+            if len(self._buffer) > 0:
+                take = min(file_byte_count_remaining, len(self._buffer))
+                chunk = self._buffer[:take]
+                if chunk:
+                    yield chunk
+                self._buffer = self._buffer[take:]
+                self._buffer_start += take
+            else:
+                start_of_fetch = self._buffer_start + len(self._buffer)
+                bytes_left_in_stream = self._files_data_end - start_of_fetch
+                if bytes_left_in_stream <= 0:
+                    logger.error(
+                        'StreamSeeker: no bytes left upstream (start_of_fetch=%d, files_data_end=%d)',
+                        start_of_fetch,
+                        self._files_data_end,
+                    )
+                    return
+
+                fetch_size = min(self._max_chunk_size, file_byte_count_remaining + read_ahead_bytes)
+                if fetch_size > bytes_left_in_stream:
+                    logger.error(
+                        'StreamSeeker: fetch_size (%d) > bytes_left_in_stream (%d); clamping',
+                        fetch_size,
+                        bytes_left_in_stream,
+                    )
+                    fetch_size = bytes_left_in_stream
+
+                fetched_data = self._upstream_buffer.get_data(start=start_of_fetch, length=fetch_size)
+                self._buffer.extend(fetched_data)
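A hedged usage sketch of the new read path; `InMemoryBuffer` is a stand-in for biolib's `IndexableBuffer` and only implements the `get_data(start=..., length=...)` call the seeker makes:

```python
from biolib.biolib_binary_format.remote_stream_seeker import StreamSeeker


class InMemoryBuffer:
    """Stand-in for IndexableBuffer; only implements the call StreamSeeker uses."""

    def __init__(self, data: bytes):
        self._data = data

    def get_data(self, start: int, length: int) -> bytes:
        return self._data[start:start + length]


payload = b'0123456789' * 10
seeker = StreamSeeker(
    upstream_buffer=InMemoryBuffer(payload),
    files_data_start=0,
    files_data_end=len(payload),
    max_chunk_size=16,
)
# Read 30 bytes of a "file" starting at offset 20, allowing 8 bytes of read-ahead.
file_bytes = b''.join(seeker.seek_and_read(file_start=20, file_length=30, read_ahead_bytes=8))
assert file_bytes == payload[20:50]
```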
biolib/biolib_logging.py
CHANGED
@@ -62,7 +62,7 @@ def _get_no_user_data_logger() -> _BioLibLogger:
 
     # TODO: Simplify by refactoring to env BIOLIB_ENVIRONMENT_IS_CLOUD: boolean
     if os.getenv('BIOLIB_CLOUD_ENVIRONMENT', '').lower() == 'non-enclave':
-        handler = logging.FileHandler(filename='/
+        handler = logging.FileHandler(filename='/biolib/logs/biolib_no_user_data.log')
         formatter = logging.Formatter(_DEFAULT_LOGGER_FORMAT)
         handler.setFormatter(formatter)
         _logger_no_user_data.addHandler(handler)
biolib/cli/__init__.py
CHANGED
@@ -5,7 +5,7 @@ import click
 
 from biolib import utils
 from biolib.biolib_logging import logger, logger_no_user_data
-from biolib.cli import auth, data_record, download_container, init, lfs, push, run, runtime, sdk, start
+from biolib.cli import auth, data_record, download_container, index, init, lfs, push, run, runtime, sdk, start
 
 
 @click.version_option(version=utils.BIOLIB_PACKAGE_VERSION, prog_name='pybiolib')
@@ -31,6 +31,7 @@ cli.add_command(run.run)
 cli.add_command(runtime.runtime)
 cli.add_command(start.start)
 cli.add_command(data_record.data_record)
+cli.add_command(index.index)
 cli.add_command(sdk.sdk)
 
 # allow this script to be called without poetry in dev e.g. by an IDE debugger
biolib/cli/auth.py
CHANGED
@@ -3,7 +3,7 @@ import sys
 
 import click
 
-from biolib import api
+from biolib import api
 from biolib.biolib_api_client.api_client import BiolibApiClient
 from biolib.biolib_logging import logger, logger_no_user_data
 from biolib.user import sign_in, sign_out
@@ -35,23 +35,11 @@ def logout() -> None:
 def whoami() -> None:
     client = BiolibApiClient.get()
     if client.is_signed_in:
-        if client.access_token is None:
-            print('Unable to fetch user credentials. Please try logging out and logging in again.')
-            exit(1)
-        try:
-            user_uuid = client.decode_jwt_without_checking_signature(jwt=client.access_token)['payload']['public_id']
-        except biolib_errors.BioLibError as error:
-            print(
-                f'Unable to reference user public_id in access token:\n {error.message}',
-                file=sys.stderr,
-            )
-            exit(1)
-        response = api.client.get(path=f'/user/{user_uuid}/')
+        response = api.client.get(path='/users/me/')
         user_dict = response.json()
         email = user_dict['email']
+        display_name = user_dict['account']['display_name']
+
         print(f'Name: {display_name}\nEmail: {email}\nLogged into: {client.base_url}')
     else:
         print('Not logged in', file=sys.stderr)
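A hedged sketch of the single lookup `whoami` now performs; it assumes a signed-in API client:

```python
# Hedged sketch: one call to /users/me/ replaces decoding the access token for a user id.
from biolib import api

user_dict = api.client.get(path='/users/me/').json()
print(user_dict['email'], user_dict['account']['display_name'])
```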
biolib/cli/data_record.py
CHANGED
@@ -80,3 +80,20 @@ def describe(uri: str, output_as_json: bool) -> None:
         size_string = str(file_info['size_bytes'])
         leading_space_string = ' ' * (10 - len(size_string))
         print(f"{leading_space_string}{size_string} {file_info['path']}")
+
+
+@data_record.command(help='Delete a Data Record')
+@click.argument('uri', required=True)
+def delete(uri: str) -> None:
+    record = DataRecord.get_by_uri(uri=uri)
+
+    print(f'You are about to delete the data record: {record.uri}')
+    print('This action cannot be undone.')
+
+    confirmation = input(f'To confirm deletion, please type the data record name "{record.name}": ')
+    if confirmation != record.name:
+        print('Data record name does not match. Deletion cancelled.')
+        return
+
+    record.delete()
+    print(f'Data record {record.uri} has been deleted.')
biolib/cli/index.py
ADDED
@@ -0,0 +1,32 @@
+import json
+import logging
+import sys
+
+import click
+
+from biolib._index.index import Index
+from biolib.biolib_errors import BioLibError
+from biolib.biolib_logging import logger, logger_no_user_data
+
+
+@click.group(help='Manage Indexes')
+def index() -> None:
+    logger.configure(default_log_level=logging.INFO)
+    logger_no_user_data.configure(default_log_level=logging.INFO)
+
+
+@index.command(help='Create an Index')
+@click.argument('uri', required=True)
+@click.option('--config-path', required=True, type=click.Path(exists=True), help='Path to JSON config file')
+def create(uri: str, config_path: str) -> None:
+    try:
+        Index.create_from_config_file(uri=uri, config_path=config_path)
+    except json.JSONDecodeError as error:
+        print(f'Error: Invalid JSON in config file: {error}', file=sys.stderr)
+        sys.exit(1)
+    except BioLibError as error:
+        print(f'Error creating index: {error.message}', file=sys.stderr)
+        sys.exit(1)
+    except Exception as error:
+        print(f'Error reading config file: {error}', file=sys.stderr)
+        sys.exit(1)
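A hedged sketch driving the new command group in-process with click's test runner; the URI and config file name are hypothetical, and the file must exist on disk:

```python
# Hedged sketch: invoking the new `index create` command without spawning a subprocess.
from click.testing import CliRunner

from biolib.cli.index import index

runner = CliRunner()
result = runner.invoke(index, ['create', '@my-org/my-index', '--config-path', 'index-config.json'])
print(result.exit_code, result.output)
```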
biolib/cli/init.py
CHANGED
@@ -4,9 +4,19 @@ import sys
 
 import click
 
-from biolib import
+from biolib import (
+    biolib_errors,
+    utils,  # Import like this to let BASE_URL_IS_PUBLIC_BIOLIB be set correctly
+)
 from biolib._internal.add_copilot_prompts import add_copilot_prompts
+from biolib._internal.add_gui_files import add_gui_files
+from biolib._internal.http_client import HttpError
+from biolib._internal.string_utils import normalize_for_docker_tag
 from biolib._internal.templates import templates
+from biolib.api import client as api_client
+from biolib.biolib_api_client.api_client import BiolibApiClient
+from biolib.biolib_api_client.biolib_app_api import BiolibAppApi
+from biolib.user.sign_in import sign_in
 from biolib.utils import BIOLIB_PACKAGE_VERSION
 
 
@@ -15,25 +25,82 @@ def init() -> None:
     cwd = os.getcwd()
 
     app_uri = input('What URI do you want to create the application under? (leave blank to skip): ')
+
+    if app_uri and not app_uri.startswith('@'):
+        try:
+            response = api_client.get('system/enterprise/config/', authenticate=False)
+            config = response.json()
+            prefix = config.get('resource_hostname_prefix')
+            if prefix:
+                app_uri = f'@{prefix}/{app_uri}'
+                print(f'Detected enterprise deployment, using URI: {app_uri}')
+        except HttpError as e:
+            # 404 indicates endpoint not found, 501 indicates non-enterprise deployment
+            if e.code in [404, 501]:
+                pass
+            else:
+                print(f'Warning: Could not detect enterprise configuration: {e}')
+        except Exception as e:
+            print(f'Warning: Could not detect enterprise configuration: {e}')
+
     app_name = app_uri.split('/')[-1] if app_uri else None
+    docker_tag = normalize_for_docker_tag(app_name) if app_name else None
+
+    if app_uri:
+        try:
+            if BiolibApiClient.is_reauthentication_needed():
+                sign_in_input = input('You need to sign in to validate/create apps. Would you like to sign in? [y/N]: ')
+                if sign_in_input.lower() in ['y', 'yes']:
+                    sign_in()
+                else:
+                    print('Skipping app validation and creation. You can set the URI in .biolib/config.yml later.')
+                    return
+
+            BiolibAppApi.get_by_uri(app_uri)
+            print(f'App {app_uri} already exists.')
+        except biolib_errors.NotFound:
+            create_app_input = input(f'App {app_uri} does not exist. Would you like to create it? [y/N]: ')
+            if create_app_input.lower() in ['y', 'yes']:
+                try:
+                    BiolibAppApi.create_app(app_uri)
+                    print(f'Successfully created app {app_uri}')
+                except Exception as e:
+                    print(f'Failed to create app {app_uri}: {str(e)}')
+                    print('You can create the app manually later or set the URI in .biolib/config.yml')
+            else:
+                print(
+                    'App creation skipped. You can create the app manually later or set the URI in .biolib/config.yml'
+                )
+        except Exception as e:
+            print(f'Failed to validate app {app_uri}: {str(e)}')
+            print('Continuing with initialization...')
+
     if not app_uri:
         print(
             'Remember to set the app URI in the .biolib/config.yml file later, '
             'and docker image name in the .biolib/config.yml and .github/workflows/biolib.yml files.'
         )
+    advanced_setup_input = input('Do you want to set up advanced features like Copilot and GUI? [y/N]: ')
+    advanced_setup = advanced_setup_input.lower() == 'y'
+    include_copilot = False
+    include_gui = False
+    if advanced_setup:
+        copilot_enabled_input = input('Do you want to include Copilot instructions and prompts? [y/N]: ')
+        include_copilot = copilot_enabled_input.lower() == 'y'
+        include_gui_input = input('Do you want to include GUI setup? [y/N]: ')
+        include_gui = include_gui_input.lower() == 'y'
+
+    init_template_dir = templates.init_template()
     conflicting_files = []
     files_to_overwrite = set()
 
     try:
         # First pass: check for conflicts
-        for root, dirs, filenames in os.walk(
+        for root, dirs, filenames in os.walk(init_template_dir):
             dirs[:] = [d for d in dirs if '__pycache__' not in d]
-            relative_dir = os.path.relpath(root,
+            relative_dir = os.path.relpath(root, init_template_dir)
             destination_dir = cwd if relative_dir == '.' else os.path.join(cwd, relative_dir)
+
             for filename in filenames:
                 source_file = os.path.join(root, filename)
                 destination_file = os.path.join(destination_dir, filename)
@@ -54,18 +121,22 @@ def init() -> None:
                 files_to_overwrite.add(conflicting_file)
 
         replace_app_uri = app_uri if app_uri else 'PUT_APP_URI_HERE'
+        replace_app_name = app_name if app_name else 'biolib-app'
 
         # Second pass: copy files (only if no conflicts)
-        for root, dirs, filenames in os.walk(
+        for root, dirs, filenames in os.walk(init_template_dir):
             dirs[:] = [d for d in dirs if '__pycache__' not in d]
-            relative_dir = os.path.relpath(root,
+            relative_dir = os.path.relpath(root, init_template_dir)
             destination_dir = os.path.join(cwd, relative_dir)
+
             os.makedirs(destination_dir, exist_ok=True)
 
             for filename in filenames:
                 if utils.BASE_URL_IS_PUBLIC_BIOLIB and filename == 'biolib.yml':
                     continue
 
+                relative_file_path = os.path.join(relative_dir, filename) if relative_dir != '.' else filename
+
                 source_file = os.path.join(root, filename)
                 destination_file = os.path.join(destination_dir, filename)
                 relative_file_path = os.path.relpath(destination_file, cwd)
@@ -79,8 +150,15 @@ def init() -> None:
                 new_content = new_content.replace('BIOLIB_REPLACE_APP_URI', replace_app_uri)
                 new_content = new_content.replace(
                     'BIOLIB_REPLACE_DOCKER_TAG',
+                    docker_tag if docker_tag else 'PUT_DOCKER_TAG_HERE',
                 )
+                new_content = new_content.replace('BIOLIB_REPLACE_APP_NAME', replace_app_name)
+
+                gui_config = "main_output_file: '/result.html'\n" if include_gui else ''
+                new_content = new_content.replace('BIOLIB_REPLACE_GUI_CONFIG\n', gui_config)
+
+                gui_mv_command = 'mv result.html output/result.html\n' if include_gui else ''
+                new_content = new_content.replace('BIOLIB_REPLACE_GUI_MV_COMMAND\n', gui_mv_command)
 
                 with open(destination_file, 'w') as f:
                     f.write(new_content)
@@ -92,7 +170,11 @@ def init() -> None:
         with open(readme_path, 'w') as readme_file:
            readme_file.write(f'# {app_name}\n')
 
+        if include_copilot:
+            add_copilot_prompts(force=False, silent=True)
+
+        if include_gui:
+            add_gui_files(force=False, silent=True)
 
     except KeyboardInterrupt:
         print('\nInit command cancelled.', file=sys.stderr)
biolib/cli/lfs.py
CHANGED
biolib/cli/run.py
CHANGED
@@ -48,7 +48,7 @@ def _run(local: bool, non_blocking: bool, uri: str, args: Tuple[str]) -> None:
     )
 
     if blocking:
-        job.save_files('biolib_results')
+        job.save_files('biolib_results', overwrite=True)
 
         # Write stdout and stderr if it has not been streamed (Markdown is not streamed)
         if app.version.get('stdout_render_type') == 'markdown' or not sys.stdout.isatty():
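A hedged sketch of the behaviour `biolib run` now relies on, saving results with `overwrite=True`; 'author/app' is a hypothetical app URI and `app.cli` is assumed to block by default:

```python
# Hedged sketch: run an app and save its outputs into an existing results directory.
import biolib

app = biolib.load('author/app')       # hypothetical app URI
job = app.cli('--help')               # runs the app; assumed to block until the job finishes
job.save_files('biolib_results', overwrite=True)
```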
biolib/cli/start.py
CHANGED
@@ -13,7 +13,10 @@ from biolib.typing_utils import Optional
 @click.option('--port', default=5000, type=click.IntRange(1, 65_535), required=False)
 @click.option('--tls-certificate', type=click.Path(exists=True), required=False, hidden=True)
 @click.option('--tls-key', type=click.Path(exists=True), required=False, hidden=True)
+@click.option('--initialize-network-buffer', is_flag=True, help='Initialize the remote host network buffer and exit')
+def start(
+    host: str, port: int, tls_certificate: Optional[str], tls_key: Optional[str], initialize_network_buffer: bool
+) -> None:
     logger.configure(default_log_level=logging.INFO)
     logger_no_user_data.configure(default_log_level=logging.INFO)
     if platform.system() == 'Windows':
@@ -22,6 +25,16 @@ def start(host: str, port: int, tls_certificate: Optional[str], tls_key: Optiona
     if tls_certificate and not tls_key or tls_key and not tls_certificate:
         raise Exception('Options --tls-certificate and --tls-key must be specified together')
 
+    if initialize_network_buffer:
+        from biolib.compute_node.job_worker.network_buffer import (  # pylint: disable=import-outside-toplevel
+            NetworkBuffer,
+        )
+
+        network_buffer = NetworkBuffer.get_instance()
+        created = network_buffer.fill_buffer()
+        logger_no_user_data.info(f'Initialized network buffer (created {created} networks)')
+        return
+
     try:
         from biolib.compute_node.webserver import webserver  # pylint: disable=import-outside-toplevel
 
biolib/compute_node/job_worker/executors/docker_executor.py
CHANGED
@@ -11,10 +11,10 @@ import zipfile
 from copy import copy
 from datetime import datetime
 
-import docker
-import docker.types
-from docker.errors import APIError, ImageNotFound
-from docker.models.containers import Container
+import docker
+import docker.types
+from docker.errors import APIError, ImageNotFound
+from docker.models.containers import Container
 
 from biolib import utils
 from biolib._internal.runtime import RuntimeJobDataDict
@@ -40,7 +40,7 @@ class DockerExecutor:
         self._options: LocalExecutorOptions = options
         self._is_cleaning_up = False
 
-        self._absolute_image_uri = f
+        self._absolute_image_uri = f'{utils.BIOLIB_SITE_HOSTNAME}/{self._options["module"]["image_uri"]}'
         self._send_system_exception = options['send_system_exception']
         self._send_stdout_and_stderr = options['send_stdout_and_stderr']
         self._random_docker_id = compute_node_utils.random_string(15)
@@ -308,7 +308,8 @@ class DockerExecutor:
         job_uuid = self._options['job']['public_id']
         logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container...')
         module = self._options['module']
-        logger.debug(f
+        logger.debug(f'Initializing docker container with command: {module["command"]}')
+        docker_client = BiolibDockerClient.get_docker_client()
 
         docker_volume_mounts = [lfs.docker_mount for lfs in self._options['large_file_systems'].values()]
 
@@ -318,10 +319,12 @@ class DockerExecutor:
         biolib_system_secret = RuntimeJobDataDict(
             version='1.0.0',
             job_requested_machine=self._options['job']['requested_machine'],
+            job_requested_machine_spot=self._options['job'].get('requested_machine_spot', False),
             job_uuid=self._options['job']['public_id'],
             job_auth_token=self._options['job']['auth_token'],
             app_uri=self._options['job']['app_uri'],
             is_environment_biolib_cloud=bool(utils.IS_RUNNING_IN_CLOUD),
+            job_reserved_machines=self._options['job']['reserved_machines'],
         )
         docker_volume_mounts.append(
             self._create_secrets_mount(
@@ -365,9 +368,11 @@ class DockerExecutor:
         )
 
         logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Getting IPs for proxies...')
+
+        networks_to_connect = []
         for proxy in self._options['remote_host_proxies']:
-            proxy_ip = proxy.get_ip_address_on_network(internal_network)
             if proxy.is_app_caller_proxy:
+                proxy_ip = proxy.get_ip_address_on_network(internal_network)
                 logger_no_user_data.debug('Found app caller proxy, setting both base URLs in compute container')
                 environment_vars.update(
                     {
@@ -381,7 +386,11 @@ class DockerExecutor:
                     }
                 )
             else:
-                extra_hosts
+                extra_hosts.update(proxy.get_hostname_to_ip_mapping())
+
+            for network in proxy.get_remote_host_networks():
+                if network != internal_network:
+                    networks_to_connect.append(network)
 
         logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Constructing container args...')
         create_container_args = {
@@ -391,6 +400,9 @@ class DockerExecutor:
             'mounts': docker_volume_mounts,
             'network': internal_network.name,
             'working_dir': module['working_directory'],
+            'networking_config': {
+                internal_network.name: docker_client.api.create_endpoint_config(aliases=['main'])
+            },
         }
 
         if self._options['job'].get('arguments_override_command'):
@@ -429,9 +441,19 @@ class DockerExecutor:
         if docker_runtime is not None:
             create_container_args['runtime'] = docker_runtime
 
-        docker_client = BiolibDockerClient.get_docker_client()
         logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Creating container...')
         self._docker_container = docker_client.containers.create(**create_container_args)
+
+        if networks_to_connect:
+            network_connection_start = time.time()
+            for network in networks_to_connect:
+                network.connect(self._docker_container.id)
+                logger_no_user_data.debug(f'Connected app container to network {network.name}')
+            network_connection_time = time.time() - network_connection_start
+            logger_no_user_data.debug(
+                f'Connected app container to {len(networks_to_connect)} networks in {network_connection_time:.2f}s'
+            )
+
         logger_no_user_data.debug(f'Job "{job_uuid}" finished initializing Docker container.')
     except Exception as exception:
         raise ComputeProcessException(
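The `networking_config` entry mirrors docker-py's endpoint-config mechanism. A rough standalone sketch follows; it assumes a local Docker daemon, a docker SDK version whose high-level `containers.create` accepts `networking_config` (as the version used here evidently does), and placeholder names ('example-net', an alpine image):

```python
# Hedged docker-py sketch: create a container already attached to a network
# with the DNS alias 'main', matching the pattern used in the diff above.
import docker

client = docker.from_env()
network = client.networks.create('example-net', driver='bridge')
container = client.containers.create(
    'alpine:3.19',
    command='sleep 30',
    network='example-net',
    networking_config={'example-net': client.api.create_endpoint_config(aliases=['main'])},
)
```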
biolib/compute_node/job_worker/executors/types.py
CHANGED
@@ -1,11 +1,11 @@
-from docker.models.networks import Network
+from docker.models.networks import Network
 
+from biolib.biolib_api_client.app_types import Module
+from biolib.biolib_api_client.job_types import CloudJob, CreatedJobDict
 from biolib.compute_node.job_worker.large_file_system import LargeFileSystem
-from biolib.compute_node.webserver.webserver_types import ComputeNodeInfo
-from biolib.typing_utils import TypedDict, Callable, Optional, List, Dict
 from biolib.compute_node.remote_host_proxy import RemoteHostProxy
-from biolib.
-from biolib.
+from biolib.compute_node.webserver.webserver_types import ComputeNodeInfo
+from biolib.typing_utils import Callable, Dict, List, Optional, TypedDict
 
 
 class StatusUpdate(TypedDict):
@@ -43,6 +43,7 @@ class LocalExecutorOptions(TypedDict):
     send_system_exception: SendSystemExceptionType
     send_stdout_and_stderr: SendStdoutAndStderrType
 
+
 class MetadataToSaveOutput(TypedDict):
     arguments: List[str]
     startup_error_string: Optional[str]