pybiolib 1.2.1056__py3-none-any.whl → 1.2.1642__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
- biolib/__init__.py +33 -10
- biolib/_data_record/data_record.py +24 -11
- biolib/_index/__init__.py +0 -0
- biolib/_index/index.py +51 -0
- biolib/_index/types.py +7 -0
- biolib/_internal/data_record/data_record.py +1 -1
- biolib/_internal/data_record/push_data.py +1 -1
- biolib/_internal/data_record/remote_storage_endpoint.py +3 -3
- biolib/_internal/file_utils.py +7 -4
- biolib/_internal/index/__init__.py +1 -0
- biolib/_internal/index/index.py +18 -0
- biolib/_internal/lfs/cache.py +4 -2
- biolib/_internal/push_application.py +89 -23
- biolib/_internal/runtime.py +2 -0
- biolib/_internal/templates/gui_template/App.tsx +38 -2
- biolib/_internal/templates/gui_template/Dockerfile +2 -0
- biolib/_internal/templates/gui_template/biolib-sdk.ts +37 -0
- biolib/_internal/templates/gui_template/dev-data/output.json +7 -0
- biolib/_internal/templates/gui_template/package.json +1 -0
- biolib/_internal/templates/gui_template/vite-plugin-dev-data.ts +49 -0
- biolib/_internal/templates/gui_template/vite.config.mts +2 -1
- biolib/_internal/templates/init_template/.github/workflows/biolib.yml +6 -1
- biolib/_internal/templates/init_template/Dockerfile +2 -0
- biolib/_internal/utils/__init__.py +25 -0
- biolib/_internal/utils/job_url.py +33 -0
- biolib/_runtime/runtime.py +9 -0
- biolib/_session/session.py +7 -5
- biolib/_shared/__init__.py +0 -0
- biolib/_shared/types/__init__.py +69 -0
- biolib/_shared/types/resource.py +17 -0
- biolib/_shared/types/resource_deploy_key.py +11 -0
- biolib/{_internal → _shared}/types/resource_permission.py +1 -1
- biolib/_shared/utils/__init__.py +7 -0
- biolib/_shared/utils/resource_uri.py +75 -0
- biolib/api/client.py +1 -1
- biolib/app/app.py +56 -23
- biolib/biolib_api_client/app_types.py +1 -6
- biolib/biolib_api_client/biolib_app_api.py +17 -0
- biolib/biolib_binary_format/module_input.py +8 -0
- biolib/biolib_binary_format/remote_endpoints.py +3 -3
- biolib/biolib_binary_format/remote_stream_seeker.py +39 -25
- biolib/cli/__init__.py +2 -1
- biolib/cli/data_record.py +17 -0
- biolib/cli/index.py +32 -0
- biolib/cli/lfs.py +1 -1
- biolib/cli/start.py +14 -1
- biolib/compute_node/job_worker/executors/docker_executor.py +31 -9
- biolib/compute_node/job_worker/executors/docker_types.py +1 -1
- biolib/compute_node/job_worker/executors/types.py +6 -5
- biolib/compute_node/job_worker/job_worker.py +149 -93
- biolib/compute_node/job_worker/large_file_system.py +2 -6
- biolib/compute_node/job_worker/network_alloc.py +99 -0
- biolib/compute_node/job_worker/network_buffer.py +240 -0
- biolib/compute_node/job_worker/utilization_reporter_thread.py +2 -2
- biolib/compute_node/remote_host_proxy.py +125 -67
- biolib/compute_node/utils.py +2 -0
- biolib/compute_node/webserver/compute_node_results_proxy.py +188 -0
- biolib/compute_node/webserver/proxy_utils.py +28 -0
- biolib/compute_node/webserver/webserver.py +64 -19
- biolib/experiments/experiment.py +98 -16
- biolib/jobs/job.py +119 -29
- biolib/jobs/job_result.py +70 -33
- biolib/jobs/types.py +1 -0
- biolib/sdk/__init__.py +17 -2
- biolib/typing_utils.py +1 -1
- biolib/utils/cache_state.py +2 -2
- biolib/utils/seq_util.py +1 -1
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1642.dist-info}/METADATA +4 -2
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1642.dist-info}/RECORD +84 -66
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1642.dist-info}/WHEEL +1 -1
- biolib/_internal/types/__init__.py +0 -6
- biolib/utils/app_uri.py +0 -57
- /biolib/{_internal → _shared}/types/account.py +0 -0
- /biolib/{_internal → _shared}/types/account_member.py +0 -0
- /biolib/{_internal → _shared}/types/app.py +0 -0
- /biolib/{_internal → _shared}/types/data_record.py +0 -0
- /biolib/{_internal → _shared}/types/experiment.py +0 -0
- /biolib/{_internal → _shared}/types/file_node.py +0 -0
- /biolib/{_internal → _shared}/types/push.py +0 -0
- /biolib/{_internal/types/resource.py → _shared/types/resource_types.py} +0 -0
- /biolib/{_internal → _shared}/types/resource_version.py +0 -0
- /biolib/{_internal → _shared}/types/result.py +0 -0
- /biolib/{_internal → _shared}/types/typing.py +0 -0
- /biolib/{_internal → _shared}/types/user.py +0 -0
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1642.dist-info}/entry_points.txt +0 -0
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1642.dist-info/licenses}/LICENSE +0 -0
biolib/biolib_api_client/app_types.py
CHANGED

@@ -68,11 +68,6 @@ class LargeFileSystemMapping(TypedDict):
     uuid: str


-class PortMapping(TypedDict):
-    from_port: int
-    to_port: int
-
-
 class _Module(TypedDict):
     command: str
     environment: Literal['biolib-app', 'biolib-custom', 'biolib-ecr']

@@ -83,7 +78,7 @@ class _Module(TypedDict):
     large_file_systems: List[LargeFileSystemMapping]
     name: str
     output_files_mappings: List[FilesMapping]
-
+    ports: List[int]
     source_files_mappings: List[FilesMapping]
     working_directory: str

biolib/biolib_api_client/biolib_app_api.py
CHANGED

@@ -57,6 +57,22 @@ def _get_git_branch_name() -> str:
         return ''


+def _get_git_commit_hash() -> str:
+    try:
+        github_actions_commit_hash = os.getenv('GITHUB_SHA')
+        if github_actions_commit_hash:
+            return github_actions_commit_hash
+
+        gitlab_ci_commit_hash = os.getenv('CI_COMMIT_SHA')
+        if gitlab_ci_commit_hash:
+            return gitlab_ci_commit_hash
+
+        result = subprocess.run(['git', 'rev-parse', 'HEAD'], check=True, stdout=subprocess.PIPE, text=True)
+        return result.stdout.strip()
+    except BaseException:
+        return ''
+
+
 def _get_git_repository_url() -> str:
     try:
         result = subprocess.run(['git', 'remote', 'get-url', 'origin'], check=True, stdout=subprocess.PIPE, text=True)

@@ -125,6 +141,7 @@ class BiolibAppApi:
             'state': 'published',
             'app_version_id_to_copy_images_from': app_version_id_to_copy_images_from,
             'git_branch_name': _get_git_branch_name(),
+            'git_commit_hash': _get_git_commit_hash(),
             'git_repository_url': _get_git_repository_url(),
         }
         if semantic_version:

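Note on the new helper (not part of the diff itself): the commit hash is resolved from CI environment variables first and only then from the local git checkout. A minimal standalone sketch of that fallback order follows; the helper name resolve_commit_hash is illustrative and only the standard library is assumed.

import os
import subprocess


def resolve_commit_hash() -> str:
    # Prefer CI-provided values (GitHub Actions, then GitLab CI), mirroring _get_git_commit_hash above.
    for env_var in ('GITHUB_SHA', 'CI_COMMIT_SHA'):
        value = os.getenv(env_var)
        if value:
            return value
    # Fall back to asking git directly; any failure resolves to an empty string.
    try:
        result = subprocess.run(['git', 'rev-parse', 'HEAD'], check=True, stdout=subprocess.PIPE, text=True)
        return result.stdout.strip()
    except (OSError, subprocess.CalledProcessError):
        return ''
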
biolib/biolib_binary_format/module_input.py
CHANGED

@@ -1,4 +1,5 @@
 from biolib.biolib_binary_format.base_bbf_package import BioLibBinaryFormatBasePackage
+from biolib.biolib_logging import logger
 from biolib.typing_utils import TypedDict, Dict, List


@@ -14,6 +15,10 @@ class ModuleInput(BioLibBinaryFormatBasePackage):
         self.package_type = 1

     def serialize(self, stdin, arguments, files) -> bytes:
+        for path in files.keys():
+            if '//' in path:
+                raise ValueError(f"File path '{path}' contains double slashes which are not allowed")
+
         bbf_data = bytearray()
         bbf_data.extend(self.version.to_bytes(1, 'big'))
         bbf_data.extend(self.package_type.to_bytes(1, 'big'))

@@ -67,6 +72,9 @@ class ModuleInput(BioLibBinaryFormatBasePackage):
            data_len = self.get_data(8, output_type='int')
            path = self.get_data(path_len, output_type='str')
            data = self.get_data(data_len)
+           if '//' in path:
+               # TODO: Raise ValueError here once backwards compatibility period is over
+               logger.warning(f"File path '{path}' contains double slashes which are not allowed")
            files[path] = bytes(data)

        return ModuleInputDict(stdin=stdin, arguments=arguments, files=files)

biolib/biolib_binary_format/remote_endpoints.py
CHANGED

@@ -1,4 +1,4 @@
-from datetime import datetime, timedelta
+from datetime import datetime, timedelta, timezone

 from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_binary_format.utils import RemoteEndpoint

@@ -17,13 +17,13 @@ class RemoteJobStorageEndpoint(RemoteEndpoint):
         self._storage_type: Literal['input', 'output'] = storage_type

     def get_remote_url(self):
-        if not self._presigned_url or datetime.
+        if not self._presigned_url or not self._expires_at or datetime.now(timezone.utc) > self._expires_at:
             self._presigned_url = BiolibJobApi.get_job_storage_download_url(
                 job_auth_token=self._job_auth_token,
                 job_uuid=self._job_uuid,
                 storage_type='results' if self._storage_type == 'output' else 'input',
             )
-            self._expires_at = datetime.
+            self._expires_at = datetime.now(timezone.utc) + timedelta(minutes=8)
             # TODO: Use expires at from url
             # parsed_url = urlparse(self._presigned_url)
             # query_params = parse_qs(parsed_url.query)

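For context on the change above: a naive datetime cannot be compared with a timezone-aware one (Python raises TypeError), so both the expiry timestamp and the comparison now use timezone-aware UTC times. An illustrative sketch of the expiry check pattern, standard library only:

from datetime import datetime, timedelta, timezone

expires_at = datetime.now(timezone.utc) + timedelta(minutes=8)
if datetime.now(timezone.utc) > expires_at:
    pass  # the presigned URL would be refreshed here
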
biolib/biolib_binary_format/remote_stream_seeker.py
CHANGED

@@ -1,45 +1,59 @@
 from biolib.biolib_binary_format.utils import IndexableBuffer
+from biolib.biolib_logging import logger
 from biolib.typing_utils import Iterable


 class StreamSeeker:
     def __init__(
-
-
-
-
-
+        self,
+        upstream_buffer: IndexableBuffer,
+        files_data_start: int,
+        files_data_end: int,
+        max_chunk_size: int,
     ):
         self._upstream_buffer = upstream_buffer
         self._files_data_end = files_data_end
-        self.
+        self._max_chunk_size = max_chunk_size

         self._buffer_start = files_data_start
         self._buffer = bytearray()

-    def seek_and_read(self, file_start: int, file_length: int) -> Iterable[bytes]:
+    def seek_and_read(self, file_start: int, file_length: int, read_ahead_bytes: int = 0) -> Iterable[bytes]:
         assert file_start >= self._buffer_start
-        self._buffer = self._buffer[file_start - self._buffer_start:]
+        self._buffer = self._buffer[file_start - self._buffer_start :]
         self._buffer_start = file_start

         while True:
             file_byte_count_remaining = file_length - (self._buffer_start - file_start)
-            if file_byte_count_remaining
+            if file_byte_count_remaining <= 0:
                 return

-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+            if len(self._buffer) > 0:
+                take = min(file_byte_count_remaining, len(self._buffer))
+                chunk = self._buffer[:take]
+                if chunk:
+                    yield chunk
+                self._buffer = self._buffer[take:]
+                self._buffer_start += take
+            else:
+                start_of_fetch = self._buffer_start + len(self._buffer)
+                bytes_left_in_stream = self._files_data_end - start_of_fetch
+                if bytes_left_in_stream <= 0:
+                    logger.error(
+                        'StreamSeeker: no bytes left upstream (start_of_fetch=%d, files_data_end=%d)',
+                        start_of_fetch,
+                        self._files_data_end,
+                    )
+                    return
+
+                fetch_size = min(self._max_chunk_size, file_byte_count_remaining + read_ahead_bytes)
+                if fetch_size > bytes_left_in_stream:
+                    logger.error(
+                        'StreamSeeker: fetch_size (%d) > bytes_left_in_stream (%d); clamping',
+                        fetch_size,
+                        bytes_left_in_stream,
+                    )
+                    fetch_size = bytes_left_in_stream
+
+                fetched_data = self._upstream_buffer.get_data(start=start_of_fetch, length=fetch_size)
+                self._buffer.extend(fetched_data)
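A rough usage sketch of the updated constructor and seek_and_read() signature follows. InMemoryIndexableBuffer is a hypothetical stand-in that only implements the get_data(start=..., length=...) call the seeker uses; the real buffer types live in biolib.biolib_binary_format.utils.

from biolib.biolib_binary_format.remote_stream_seeker import StreamSeeker


class InMemoryIndexableBuffer:
    # Hypothetical stand-in for an IndexableBuffer backed by an in-memory bytes object.
    def __init__(self, data: bytes) -> None:
        self._data = data

    def get_data(self, start: int, length: int) -> bytes:
        return self._data[start:start + length]


payload = b'0123456789' * 100
seeker = StreamSeeker(
    upstream_buffer=InMemoryIndexableBuffer(payload),
    files_data_start=0,
    files_data_end=len(payload),
    max_chunk_size=256,
)
# Stream 50 bytes of a "file" starting at offset 100, prefetching 16 extra bytes.
file_bytes = b''.join(seeker.seek_and_read(file_start=100, file_length=50, read_ahead_bytes=16))
assert file_bytes == payload[100:150]
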
biolib/cli/__init__.py
CHANGED

@@ -5,7 +5,7 @@ import click

 from biolib import utils
 from biolib.biolib_logging import logger, logger_no_user_data
-from biolib.cli import auth, data_record, download_container, init, lfs, push, run, runtime, sdk, start
+from biolib.cli import auth, data_record, download_container, index, init, lfs, push, run, runtime, sdk, start


 @click.version_option(version=utils.BIOLIB_PACKAGE_VERSION, prog_name='pybiolib')

@@ -31,6 +31,7 @@ cli.add_command(run.run)
 cli.add_command(runtime.runtime)
 cli.add_command(start.start)
 cli.add_command(data_record.data_record)
+cli.add_command(index.index)
 cli.add_command(sdk.sdk)

 # allow this script to be called without poetry in dev e.g. by an IDE debugger
biolib/cli/data_record.py
CHANGED

@@ -80,3 +80,20 @@ def describe(uri: str, output_as_json: bool) -> None:
         size_string = str(file_info['size_bytes'])
         leading_space_string = ' ' * (10 - len(size_string))
         print(f"{leading_space_string}{size_string} {file_info['path']}")
+
+
+@data_record.command(help='Delete a Data Record')
+@click.argument('uri', required=True)
+def delete(uri: str) -> None:
+    record = DataRecord.get_by_uri(uri=uri)
+
+    print(f'You are about to delete the data record: {record.uri}')
+    print('This action cannot be undone.')
+
+    confirmation = input(f'To confirm deletion, please type the data record name "{record.name}": ')
+    if confirmation != record.name:
+        print('Data record name does not match. Deletion cancelled.')
+        return
+
+    record.delete()
+    print(f'Data record {record.uri} has been deleted.')
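The new delete command wraps two SDK calls that can also be used directly. A short sketch, assuming DataRecord is imported from biolib._data_record.data_record (the module listed in this diff) and using a made-up URI:

from biolib._data_record.data_record import DataRecord

record = DataRecord.get_by_uri(uri='my-account/my-data-record')  # placeholder URI
record.delete()  # the same call the CLI issues after the name confirmation prompt
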
biolib/cli/index.py
ADDED

@@ -0,0 +1,32 @@
+import json
+import logging
+import sys
+
+import click
+
+from biolib._index.index import Index
+from biolib.biolib_errors import BioLibError
+from biolib.biolib_logging import logger, logger_no_user_data
+
+
+@click.group(help='Manage Indexes')
+def index() -> None:
+    logger.configure(default_log_level=logging.INFO)
+    logger_no_user_data.configure(default_log_level=logging.INFO)
+
+
+@index.command(help='Create an Index')
+@click.argument('uri', required=True)
+@click.option('--config-path', required=True, type=click.Path(exists=True), help='Path to JSON config file')
+def create(uri: str, config_path: str) -> None:
+    try:
+        Index.create_from_config_file(uri=uri, config_path=config_path)
+    except json.JSONDecodeError as error:
+        print(f'Error: Invalid JSON in config file: {error}', file=sys.stderr)
+        sys.exit(1)
+    except BioLibError as error:
+        print(f'Error creating index: {error.message}', file=sys.stderr)
+        sys.exit(1)
+    except Exception as error:
+        print(f'Error reading config file: {error}', file=sys.stderr)
+        sys.exit(1)
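The create command is a thin wrapper, so the same entry point can be called from Python. A sketch, assuming a config file exists at the given path (this diff does not show the expected JSON structure, and both the URI and filename are placeholders):

from biolib._index.index import Index

Index.create_from_config_file(uri='my-account/my-index', config_path='index_config.json')
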
biolib/cli/lfs.py
CHANGED
biolib/cli/start.py
CHANGED

@@ -13,7 +13,10 @@ from biolib.typing_utils import Optional
 @click.option('--port', default=5000, type=click.IntRange(1, 65_535), required=False)
 @click.option('--tls-certificate', type=click.Path(exists=True), required=False, hidden=True)
 @click.option('--tls-key', type=click.Path(exists=True), required=False, hidden=True)
-
+@click.option('--initialize-network-buffer', is_flag=True, help='Initialize the remote host network buffer and exit')
+def start(
+    host: str, port: int, tls_certificate: Optional[str], tls_key: Optional[str], initialize_network_buffer: bool
+) -> None:
     logger.configure(default_log_level=logging.INFO)
     logger_no_user_data.configure(default_log_level=logging.INFO)
     if platform.system() == 'Windows':

@@ -22,6 +25,16 @@ def start(host: str, port: int, tls_certificate: Optional[str], tls_key: Optiona
     if tls_certificate and not tls_key or tls_key and not tls_certificate:
         raise Exception('Options --tls-certificate and --tls-key must be specified together')

+    if initialize_network_buffer:
+        from biolib.compute_node.job_worker.network_buffer import (  # pylint: disable=import-outside-toplevel
+            NetworkBuffer,
+        )
+
+        network_buffer = NetworkBuffer.get_instance()
+        created = network_buffer.fill_buffer()
+        logger_no_user_data.info(f'Initialized network buffer (created {created} networks)')
+        return
+
     try:
         from biolib.compute_node.webserver import webserver  # pylint: disable=import-outside-toplevel

biolib/compute_node/job_worker/executors/docker_executor.py
CHANGED

@@ -11,10 +11,10 @@ import zipfile
 from copy import copy
 from datetime import datetime

-import docker
-import docker.types
-from docker.errors import APIError, ImageNotFound
-from docker.models.containers import Container
+import docker
+import docker.types
+from docker.errors import APIError, ImageNotFound
+from docker.models.containers import Container

 from biolib import utils
 from biolib._internal.runtime import RuntimeJobDataDict

@@ -40,7 +40,7 @@ class DockerExecutor:
         self._options: LocalExecutorOptions = options
         self._is_cleaning_up = False

-        self._absolute_image_uri = f
+        self._absolute_image_uri = f'{utils.BIOLIB_SITE_HOSTNAME}/{self._options["module"]["image_uri"]}'
         self._send_system_exception = options['send_system_exception']
         self._send_stdout_and_stderr = options['send_stdout_and_stderr']
         self._random_docker_id = compute_node_utils.random_string(15)

@@ -308,7 +308,8 @@
            job_uuid = self._options['job']['public_id']
            logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container...')
            module = self._options['module']
-           logger.debug(f
+           logger.debug(f'Initializing docker container with command: {module["command"]}')
+           docker_client = BiolibDockerClient.get_docker_client()

            docker_volume_mounts = [lfs.docker_mount for lfs in self._options['large_file_systems'].values()]

@@ -318,10 +319,12 @@
            biolib_system_secret = RuntimeJobDataDict(
                version='1.0.0',
                job_requested_machine=self._options['job']['requested_machine'],
+               job_requested_machine_spot=self._options['job'].get('requested_machine_spot', False),
                job_uuid=self._options['job']['public_id'],
                job_auth_token=self._options['job']['auth_token'],
                app_uri=self._options['job']['app_uri'],
                is_environment_biolib_cloud=bool(utils.IS_RUNNING_IN_CLOUD),
+               job_reserved_machines=self._options['job']['reserved_machines'],
            )
            docker_volume_mounts.append(
                self._create_secrets_mount(

@@ -365,9 +368,11 @@
            )

            logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Getting IPs for proxies...')
+
+           networks_to_connect = []
            for proxy in self._options['remote_host_proxies']:
-               proxy_ip = proxy.get_ip_address_on_network(internal_network)
                if proxy.is_app_caller_proxy:
+                   proxy_ip = proxy.get_ip_address_on_network(internal_network)
                    logger_no_user_data.debug('Found app caller proxy, setting both base URLs in compute container')
                    environment_vars.update(
                        {

@@ -381,7 +386,11 @@
                        }
                    )
                else:
-                   extra_hosts
+                   extra_hosts.update(proxy.get_hostname_to_ip_mapping())
+
+                   for network in proxy.get_remote_host_networks():
+                       if network != internal_network:
+                           networks_to_connect.append(network)

            logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Constructing container args...')
            create_container_args = {

@@ -391,6 +400,9 @@
                'mounts': docker_volume_mounts,
                'network': internal_network.name,
                'working_dir': module['working_directory'],
+               'networking_config': {
+                   internal_network.name: docker_client.api.create_endpoint_config(aliases=['main'])
+               },
            }

            if self._options['job'].get('arguments_override_command'):

@@ -429,9 +441,19 @@
            if docker_runtime is not None:
                create_container_args['runtime'] = docker_runtime

-           docker_client = BiolibDockerClient.get_docker_client()
            logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Creating container...')
            self._docker_container = docker_client.containers.create(**create_container_args)
+
+           if networks_to_connect:
+               network_connection_start = time.time()
+               for network in networks_to_connect:
+                   network.connect(self._docker_container.id)
+                   logger_no_user_data.debug(f'Connected app container to network {network.name}')
+               network_connection_time = time.time() - network_connection_start
+               logger_no_user_data.debug(
+                   f'Connected app container to {len(networks_to_connect)} networks in {network_connection_time:.2f}s'
+               )
+
            logger_no_user_data.debug(f'Job "{job_uuid}" finished initializing Docker container.')
        except Exception as exception:
            raise ComputeProcessException(

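The networking_config entry gives the job container the DNS alias 'main' on its internal network at creation time, and any extra remote-host networks are attached after creation. A minimal docker-py sketch of that aliasing pattern, assuming a local Docker daemon, a locally available alpine image, and network names chosen here for illustration:

import docker

client = docker.from_env()
internal_net = client.networks.create('example-internal-net', driver='bridge')
container = client.containers.create(
    'alpine',
    command=['sleep', '60'],
    network='example-internal-net',
    networking_config={'example-internal-net': client.api.create_endpoint_config(aliases=['main'])},
)
# Other containers attached to 'example-internal-net' can now resolve this container as 'main'.
extra_net = client.networks.create('example-extra-net', driver='bridge')
extra_net.connect(container.id)  # additional networks are attached after creation, as in the diff above
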
biolib/compute_node/job_worker/executors/types.py
CHANGED

@@ -1,11 +1,11 @@
-from docker.models.networks import Network
+from docker.models.networks import Network

+from biolib.biolib_api_client.app_types import Module
+from biolib.biolib_api_client.job_types import CloudJob, CreatedJobDict
 from biolib.compute_node.job_worker.large_file_system import LargeFileSystem
-from biolib.compute_node.webserver.webserver_types import ComputeNodeInfo
-from biolib.typing_utils import TypedDict, Callable, Optional, List, Dict
 from biolib.compute_node.remote_host_proxy import RemoteHostProxy
-from biolib.
-from biolib.
+from biolib.compute_node.webserver.webserver_types import ComputeNodeInfo
+from biolib.typing_utils import Callable, Dict, List, Optional, TypedDict


 class StatusUpdate(TypedDict):

@@ -43,6 +43,7 @@ class LocalExecutorOptions(TypedDict):
     send_system_exception: SendSystemExceptionType
     send_stdout_and_stderr: SendStdoutAndStderrType

+
 class MetadataToSaveOutput(TypedDict):
     arguments: List[str]
     startup_error_string: Optional[str]