pybiolib 1.1.1747__py3-none-any.whl → 1.1.1881__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. biolib/__init__.py +8 -2
  2. biolib/_internal/data_record/__init__.py +1 -0
  3. biolib/_internal/data_record/data_record.py +153 -0
  4. biolib/_internal/data_record/remote_storage_endpoint.py +27 -0
  5. biolib/_internal/http_client.py +14 -15
  6. biolib/_internal/push_application.py +22 -37
  7. biolib/_internal/runtime.py +73 -0
  8. biolib/_internal/utils/__init__.py +18 -0
  9. biolib/app/app.py +6 -1
  10. biolib/app/search_apps.py +8 -12
  11. biolib/biolib_api_client/api_client.py +14 -9
  12. biolib/biolib_api_client/app_types.py +1 -0
  13. biolib/biolib_api_client/biolib_app_api.py +1 -1
  14. biolib/biolib_binary_format/utils.py +19 -2
  15. biolib/cli/__init__.py +6 -2
  16. biolib/cli/auth.py +58 -0
  17. biolib/cli/data_record.py +43 -0
  18. biolib/cli/download_container.py +3 -1
  19. biolib/cli/init.py +1 -0
  20. biolib/cli/lfs.py +39 -9
  21. biolib/cli/push.py +1 -1
  22. biolib/cli/run.py +3 -2
  23. biolib/cli/start.py +1 -0
  24. biolib/compute_node/cloud_utils/cloud_utils.py +2 -2
  25. biolib/compute_node/job_worker/cache_state.py +1 -1
  26. biolib/compute_node/job_worker/executors/docker_executor.py +9 -7
  27. biolib/compute_node/job_worker/job_worker.py +8 -2
  28. biolib/compute_node/remote_host_proxy.py +30 -2
  29. biolib/jobs/job.py +28 -29
  30. biolib/lfs/__init__.py +0 -2
  31. biolib/lfs/utils.py +23 -107
  32. biolib/runtime/__init__.py +13 -1
  33. biolib/sdk/__init__.py +17 -4
  34. biolib/user/sign_in.py +8 -12
  35. biolib/utils/__init__.py +1 -1
  36. biolib/utils/app_uri.py +11 -4
  37. biolib/utils/cache_state.py +2 -2
  38. biolib/utils/seq_util.py +15 -10
  39. {pybiolib-1.1.1747.dist-info → pybiolib-1.1.1881.dist-info}/METADATA +1 -1
  40. {pybiolib-1.1.1747.dist-info → pybiolib-1.1.1881.dist-info}/RECORD +43 -39
  41. {pybiolib-1.1.1747.dist-info → pybiolib-1.1.1881.dist-info}/WHEEL +1 -1
  42. biolib/biolib_api_client/biolib_account_api.py +0 -8
  43. biolib/biolib_api_client/biolib_large_file_system_api.py +0 -34
  44. biolib/runtime/results.py +0 -20
  45. {pybiolib-1.1.1747.dist-info → pybiolib-1.1.1881.dist-info}/LICENSE +0 -0
  46. {pybiolib-1.1.1747.dist-info → pybiolib-1.1.1881.dist-info}/entry_points.txt +0 -0
biolib/__init__.py CHANGED
@@ -13,10 +13,12 @@ from biolib.biolib_api_client import BiolibApiClient as _BioLibApiClient, App
13
13
  from biolib.jobs import Job as _Job
14
14
  from biolib import user as _user
15
15
  from biolib.typing_utils import List, Optional
16
+ from biolib._internal.data_record import DataRecord as _DataRecord
16
17
 
17
18
  import biolib.api
18
19
  import biolib.app
19
20
  import biolib.cli
21
+ import biolib.sdk
20
22
  import biolib.utils
21
23
 
22
24
 
@@ -33,8 +35,8 @@ def load(uri: str) -> _BioLibApp:
33
35
  def search(
34
36
  search_query: Optional[str] = None,
35
37
  team: Optional[str] = None,
36
- count: int = 100
37
- ) -> List[str]:
38
+ count: int = 100,
39
+ ) -> List[str]:
38
40
  apps: List[str] = search_apps(search_query, team, count)
39
41
  return apps
40
42
 
@@ -47,6 +49,10 @@ def fetch_jobs(count: int = 25) -> List[_Job]:
47
49
  return _Job.fetch_jobs(count)
48
50
 
49
51
 
52
+ def fetch_data_records(uri: Optional[str] = None, count: Optional[int] = None) -> List[_DataRecord]:
53
+ return _DataRecord.fetch(uri, count)
54
+
55
+
50
56
  def get_experiment(name: str) -> Experiment:
51
57
  return Experiment(name)
52
58
 
@@ -0,0 +1 @@
1
+ from .data_record import DataRecord
@@ -0,0 +1,153 @@
1
+ import os
2
+ from collections import namedtuple
3
+ from datetime import datetime
4
+ from fnmatch import fnmatch
5
+ from struct import Struct
6
+ from typing import Callable, Dict, List, Optional, Union, cast
7
+
8
+ from biolib import lfs
9
+ from biolib._internal.data_record.remote_storage_endpoint import DataRecordRemoteStorageEndpoint
10
+ from biolib._internal.http_client import HttpClient
11
+ from biolib.api import client as api_client
12
+ from biolib.biolib_api_client import AppGetResponse
13
+ from biolib.biolib_binary_format import LazyLoadedFile
14
+ from biolib.biolib_binary_format.utils import RemoteIndexableBuffer
15
+ from biolib.biolib_logging import logger
16
+ from biolib.utils.app_uri import parse_app_uri
17
+ from biolib.utils.zip.remote_zip import RemoteZip # type: ignore
18
+
19
+ PathFilter = Union[str, Callable[[str], bool]]
20
+
21
+
22
+ class DataRecord:
23
+ def __init__(self, uri: str):
24
+ self._uri = uri
25
+ uri_parsed = parse_app_uri(uri, use_account_as_name_default=False)
26
+ if not uri_parsed['app_name']:
27
+ raise ValueError('Expected parameter "uri" to contain resource name')
28
+
29
+ self._name = uri_parsed['app_name']
30
+
31
+ @property
32
+ def uri(self) -> str:
33
+ return self._uri
34
+
35
+ @property
36
+ def name(self) -> str:
37
+ return self._name
38
+
39
+ def list_files(self, path_filter: Optional[PathFilter] = None) -> List[LazyLoadedFile]:
40
+ app_response: AppGetResponse = api_client.get(path='/app/', params={'uri': self._uri}).json()
41
+ remote_storage_endpoint = DataRecordRemoteStorageEndpoint(
42
+ resource_version_uuid=app_response['app_version']['public_id'],
43
+ )
44
+ files: List[LazyLoadedFile] = []
45
+ with RemoteZip(url=remote_storage_endpoint.get_remote_url()) as remote_zip:
46
+ central_directory = remote_zip.get_central_directory()
47
+ for file_info in central_directory.values():
48
+ files.append(self._get_file(remote_storage_endpoint, file_info))
49
+
50
+ return self._get_filtered_files(files=files, path_filter=path_filter) if path_filter else files
51
+
52
+ def download_files(self, output_dir: str, path_filter: Optional[PathFilter] = None) -> None:
53
+ filtered_files = self.list_files(path_filter=path_filter)
54
+
55
+ if len(filtered_files) == 0:
56
+ logger.debug('No files to save')
57
+ return
58
+
59
+ for file in filtered_files:
60
+ file_path = os.path.join(output_dir, file.path)
61
+ os.makedirs(os.path.dirname(file_path), exist_ok=True)
62
+ with open(file_path, mode='wb') as file_handle:
63
+ file_handle.write(file.get_data())
64
+
65
+ def save_files(self, output_dir: str, path_filter: Optional[PathFilter] = None) -> None:
66
+ self.download_files(output_dir=output_dir, path_filter=path_filter)
67
+
68
+ @staticmethod
69
+ def create(destination: str, data_path: str, name: Optional[str] = None) -> 'DataRecord':
70
+ assert os.path.isdir(data_path), f'The path "{data_path}" is not a directory.'
71
+ record_name = name if name else 'data-record-' + datetime.now().isoformat().split('.')[0].replace(':', '-')
72
+ record_uri = lfs.create_large_file_system(lfs_uri=f'{destination}/{record_name}')
73
+ record_version_uri = lfs.push_large_file_system(lfs_uri=record_uri, input_dir=data_path)
74
+ return DataRecord(uri=record_version_uri)
75
+
76
+ @staticmethod
77
+ def fetch(uri: Optional[str] = None, count: Optional[int] = None) -> List['DataRecord']:
78
+ max_page_size = 1_000
79
+ params: Dict[str, Union[str, int]] = {
80
+ 'page_size': str(count or max_page_size),
81
+ 'resource_type': 'data-record',
82
+ }
83
+ if uri:
84
+ uri_parsed = parse_app_uri(uri, use_account_as_name_default=False)
85
+ params['account_handle'] = uri_parsed['account_handle_normalized']
86
+
87
+ results = api_client.get(path='/apps/', params=params).json()['results']
88
+ if count is None and len(results) == max_page_size:
89
+ logger.warning(
90
+ f'Fetch results exceeded maximum count of {max_page_size}. Some data records might not be fetched.'
91
+ )
92
+
93
+ return [DataRecord(result['resource_uri']) for result in results]
94
+
95
+ @staticmethod
96
+ def _get_file(remote_storage_endpoint: DataRecordRemoteStorageEndpoint, file_info: Dict) -> LazyLoadedFile:
97
+ local_file_header_signature_bytes = b'\x50\x4b\x03\x04'
98
+ local_file_header_struct = Struct('<H2sHHHIIIHH')
99
+ LocalFileHeader = namedtuple(
100
+ 'LocalFileHeader',
101
+ (
102
+ 'version',
103
+ 'flags',
104
+ 'compression_raw',
105
+ 'mod_time',
106
+ 'mod_date',
107
+ 'crc_32_expected',
108
+ 'compressed_size_raw',
109
+ 'uncompressed_size_raw',
110
+ 'file_name_len',
111
+ 'extra_field_len',
112
+ ),
113
+ )
114
+
115
+ local_file_header_start = file_info['header_offset'] + len(local_file_header_signature_bytes)
116
+ local_file_header_end = local_file_header_start + local_file_header_struct.size
117
+
118
+ def file_start_func() -> int:
119
+ local_file_header_response = HttpClient.request(
120
+ url=remote_storage_endpoint.get_remote_url(),
121
+ headers={'range': f'bytes={local_file_header_start}-{local_file_header_end - 1}'},
122
+ timeout_in_seconds=300,
123
+ )
124
+ local_file_header = LocalFileHeader._make(
125
+ local_file_header_struct.unpack(local_file_header_response.content)
126
+ )
127
+ file_start: int = (
128
+ local_file_header_end + local_file_header.file_name_len + local_file_header.extra_field_len
129
+ )
130
+ return file_start
131
+
132
+ return LazyLoadedFile(
133
+ buffer=RemoteIndexableBuffer(endpoint=remote_storage_endpoint),
134
+ length=file_info['file_size'],
135
+ path=file_info['filename'],
136
+ start=None,
137
+ start_func=file_start_func,
138
+ )
139
+
140
+ @staticmethod
141
+ def _get_filtered_files(files: List[LazyLoadedFile], path_filter: PathFilter) -> List[LazyLoadedFile]:
142
+ if not (isinstance(path_filter, str) or callable(path_filter)):
143
+ raise Exception('Expected path_filter to be a string or a function')
144
+
145
+ if callable(path_filter):
146
+ return list(filter(lambda x: path_filter(x.path), files)) # type: ignore
147
+
148
+ glob_filter = cast(str, path_filter)
149
+
150
+ def _filter_function(file: LazyLoadedFile) -> bool:
151
+ return fnmatch(file.path, glob_filter)
152
+
153
+ return list(filter(_filter_function, files))
@@ -0,0 +1,27 @@
1
+ from datetime import datetime, timedelta
2
+
3
+ from biolib.api import client as api_client
4
+ from biolib.biolib_api_client.lfs_types import LargeFileSystemVersion
5
+ from biolib.biolib_binary_format.utils import RemoteEndpoint
6
+ from biolib.biolib_logging import logger
7
+
8
+
9
+ class DataRecordRemoteStorageEndpoint(RemoteEndpoint):
10
+ def __init__(self, resource_version_uuid: str):
11
+ self._resource_version_uuid: str = resource_version_uuid
12
+ self._expires_at = None
13
+ self._presigned_url = None
14
+
15
+ def get_remote_url(self):
16
+ if not self._presigned_url or datetime.utcnow() > self._expires_at:
17
+ lfs_version: LargeFileSystemVersion = api_client.get(
18
+ path=f'/lfs/versions/{self._resource_version_uuid}/',
19
+ ).json()
20
+ self._presigned_url = lfs_version['presigned_download_url']
21
+ self._expires_at = datetime.utcnow() + timedelta(minutes=8)
22
+ logger.debug(
23
+ f'DataRecord "{self._resource_version_uuid}" fetched presigned URL '
24
+ f'with expiry at {self._expires_at.isoformat()}'
25
+ )
26
+
27
+ return self._presigned_url
@@ -1,25 +1,24 @@
1
1
  import json
2
2
  import platform
3
- import time
4
3
  import socket
5
4
  import ssl
6
5
  import subprocess
7
- import urllib.request
6
+ import time
8
7
  import urllib.error
9
8
  import urllib.parse
9
+ import urllib.request
10
10
 
11
11
  from biolib.biolib_logging import logger_no_user_data
12
- from biolib.typing_utils import Dict, Optional, Union, Literal, cast
12
+ from biolib.typing_utils import Dict, Literal, Optional, Union, cast
13
+
14
+ _HttpMethod = Literal['GET', 'POST', 'PATCH', 'PUT']
13
15
 
14
16
 
15
17
  def _create_ssl_context():
16
18
  context = ssl.create_default_context()
17
19
  try:
18
20
  if platform.system() == 'Darwin':
19
- certificates = subprocess.check_output(
20
- "security find-certificate -a -p",
21
- shell=True
22
- ).decode('utf-8')
21
+ certificates = subprocess.check_output('security find-certificate -a -p', shell=True).decode('utf-8')
23
22
  context.load_verify_locations(cadata=certificates)
24
23
  except BaseException:
25
24
  pass
@@ -33,7 +32,7 @@ class HttpError(urllib.error.HTTPError):
33
32
  code=http_error.code,
34
33
  msg=http_error.msg, # type: ignore
35
34
  hdrs=http_error.hdrs, # type: ignore
36
- fp=http_error.fp
35
+ fp=http_error.fp,
37
36
  )
38
37
 
39
38
  def __str__(self):
@@ -42,7 +41,7 @@ class HttpError(urllib.error.HTTPError):
42
41
 
43
42
 
44
43
  class HttpResponse:
45
- def __init__(self, response):
44
+ def __init__(self, response) -> None:
46
45
  self.headers: Dict[str, str] = dict(response.headers)
47
46
  self.status_code: int = int(response.status)
48
47
  self.content: bytes = response.read()
@@ -61,12 +60,12 @@ class HttpClient:
61
60
 
62
61
  @staticmethod
63
62
  def request(
64
- url: str,
65
- method: Optional[Literal['GET', 'POST', 'PATCH', 'PUT']] = None,
66
- data: Optional[Union[Dict, bytes]] = None,
67
- headers: Optional[Dict[str, str]] = None,
68
- retries: int = 5,
69
- timeout_in_seconds: Optional[int] = None,
63
+ url: str,
64
+ method: Optional[_HttpMethod] = None,
65
+ data: Optional[Union[Dict, bytes]] = None,
66
+ headers: Optional[Dict[str, str]] = None,
67
+ retries: int = 5,
68
+ timeout_in_seconds: Optional[int] = None,
70
69
  ) -> HttpResponse:
71
70
  if not HttpClient.ssl_context:
72
71
  HttpClient.ssl_context = _create_ssl_context()
@@ -1,17 +1,18 @@
1
1
  import os
2
2
  import re
3
3
  from pathlib import Path
4
- import yaml
4
+
5
5
  import rich.progress
6
+ import yaml
6
7
 
7
- from biolib.lfs.utils import get_iterable_zip_stream, get_files_and_size_of_directory
8
- from biolib.typing_utils import Optional, Set, TypedDict, Iterable
8
+ from biolib import api, utils
9
9
  from biolib.biolib_api_client import BiolibApiClient
10
- from biolib.biolib_docker_client import BiolibDockerClient
11
10
  from biolib.biolib_api_client.biolib_app_api import BiolibAppApi
11
+ from biolib.biolib_docker_client import BiolibDockerClient
12
12
  from biolib.biolib_errors import BioLibError
13
13
  from biolib.biolib_logging import logger
14
- from biolib import utils, api
14
+ from biolib.lfs.utils import get_files_and_size_of_directory, get_iterable_zip_stream
15
+ from biolib.typing_utils import Iterable, Optional, Set, TypedDict
15
16
 
16
17
  REGEX_MARKDOWN_INLINE_IMAGE = re.compile(r'!\[(?P<alt>.*)\]\((?P<src>.*)\)')
17
18
 
@@ -38,9 +39,7 @@ def process_docker_status_updates(status_updates: Iterable[DockerStatusUpdate],
38
39
  progress_detail = update['progressDetail']
39
40
 
40
41
  if layer_id not in layer_id_to_task_id:
41
- layer_id_to_task_id[layer_id] = progress.add_task(
42
- description=f'[cyan]{action} layer {layer_id}'
43
- )
42
+ layer_id_to_task_id[layer_id] = progress.add_task(description=f'[cyan]{action} layer {layer_id}')
44
43
 
45
44
  if progress_detail and 'current' in progress_detail and 'total' in progress_detail:
46
45
  progress.update(
@@ -60,7 +59,7 @@ def process_docker_status_updates(status_updates: Iterable[DockerStatusUpdate],
60
59
 
61
60
 
62
61
  def set_app_version_as_active(
63
- app_version_uuid: str,
62
+ app_version_uuid: str,
64
63
  ):
65
64
  logger.debug(f'Setting app version {app_version_uuid} as active.')
66
65
  api.client.patch(
@@ -70,10 +69,10 @@ def set_app_version_as_active(
70
69
 
71
70
 
72
71
  def push_application(
73
- app_uri: str,
74
- app_path: str,
75
- app_version_to_copy_images_from: Optional[str],
76
- is_dev_version: Optional[bool],
72
+ app_uri: str,
73
+ app_path: str,
74
+ app_version_to_copy_images_from: Optional[str],
75
+ is_dev_version: Optional[bool],
77
76
  ):
78
77
  app_path_absolute = Path(app_path).resolve()
79
78
 
@@ -96,7 +95,7 @@ def push_application(
96
95
 
97
96
  input_files_maps_to_root = False
98
97
  try:
99
- with open(config_yml_path, mode='r') as config_yml_file:
98
+ with open(config_yml_path) as config_yml_file:
100
99
  config = yaml.safe_load(config_yml_file.read())
101
100
 
102
101
  license_file_relative_path = config.get('license_file', 'LICENSE')
@@ -109,7 +108,7 @@ def push_application(
109
108
  raise BioLibError(f'Could not find {description_file_relative_path}')
110
109
 
111
110
  zip_filters.add(description_file_relative_path)
112
- with open(description_file_absolute_path, mode='r') as description_file:
111
+ with open(description_file_absolute_path) as description_file:
113
112
  description_file_content = description_file.read()
114
113
 
115
114
  for _, img_src_path in re.findall(REGEX_MARKDOWN_INLINE_IMAGE, description_file_content):
@@ -171,8 +170,9 @@ def push_application(
171
170
  author=app['account_handle'],
172
171
  set_as_active=False,
173
172
  zip_binary=source_files_zip_bytes,
174
- app_version_id_to_copy_images_from=app_response['app_version']['public_id'] if app_version_to_copy_images_from
175
- else None
173
+ app_version_id_to_copy_images_from=app_response['app_version']['public_id']
174
+ if app_version_to_copy_images_from
175
+ else None,
176
176
  )
177
177
 
178
178
  # Don't push docker images if copying from another app version
@@ -180,18 +180,6 @@ def push_application(
180
180
  if not app_version_to_copy_images_from and docker_tags:
181
181
  logger.info('Found docker images to push.')
182
182
 
183
- try:
184
- yaml_file = open(f'{app_path}/.biolib/config.yml', 'r', encoding='utf-8')
185
-
186
- except Exception as error: # pylint: disable=broad-except
187
- raise BioLibError('Could not open the config file .biolib/config.yml') from error
188
-
189
- try:
190
- config_data = yaml.safe_load(yaml_file)
191
-
192
- except Exception as error: # pylint: disable=broad-except
193
- raise BioLibError('Could not parse .biolib/config.yml. Please make sure it is valid YAML') from error
194
-
195
183
  # Auth to be sent to proxy
196
184
  # The tokens are sent as "{access_token},{job_id}". We leave job_id blank on push.
197
185
  tokens = f'{BiolibApiClient.get().access_token},'
@@ -200,14 +188,12 @@ def push_application(
200
188
  docker_client = BiolibDockerClient.get_docker_client()
201
189
 
202
190
  for module_name, repo_and_tag in docker_tags.items():
203
- docker_image_definition = config_data['modules'][module_name]['image']
191
+ docker_image_definition = config['modules'][module_name]['image']
204
192
  repo, tag = repo_and_tag.split(':')
205
193
 
206
194
  if docker_image_definition.startswith('dockerhub://'):
207
195
  docker_image_name = docker_image_definition.replace('dockerhub://', 'docker.io/', 1)
208
- logger.info(
209
- f'Pulling image {docker_image_name} defined on module {module_name} from Dockerhub.'
210
- )
196
+ logger.info(f'Pulling image {docker_image_name} defined on module {module_name} from Dockerhub.')
211
197
  dockerhub_repo, dockerhub_tag = docker_image_name.split(':')
212
198
  pull_status_updates: Iterable[DockerStatusUpdate] = docker_client.api.pull(
213
199
  decode=True,
@@ -238,7 +224,7 @@ def push_application(
238
224
 
239
225
  process_docker_status_updates(push_status_updates, action='Pushing')
240
226
 
241
- except Exception as exception: # pylint: disable=broad-except
227
+ except Exception as exception:
242
228
  raise BioLibError(f'Failed to tag and push image {docker_image_name}.') from exception
243
229
 
244
230
  logger.info(f'Successfully pushed {docker_image_name}')
@@ -249,10 +235,9 @@ def push_application(
249
235
  data={'set_as_active': not is_dev_version},
250
236
  )
251
237
 
252
- sematic_version = \
253
- f"{new_app_version_json['major']}.{new_app_version_json['minor']}.{new_app_version_json['patch']}"
238
+ sematic_version = f"{new_app_version_json['major']}.{new_app_version_json['minor']}.{new_app_version_json['patch']}"
254
239
  logger.info(
255
240
  f"Successfully pushed new {'development ' if is_dev_version else ''}version {sematic_version} of {app_uri}."
256
241
  )
257
242
 
258
- return {"app_uri": app_uri, "sematic_version": sematic_version}
243
+ return {'app_uri': app_uri, 'sematic_version': sematic_version}
@@ -0,0 +1,73 @@
1
+ import json
2
+
3
+ from biolib import api
4
+ from biolib.typing_utils import Optional, TypedDict, cast
5
+
6
+
7
+ class RuntimeJobDataDict(TypedDict):
8
+ version: str
9
+ job_requested_machine: str
10
+ job_uuid: str
11
+ job_auth_token: str
12
+
13
+
14
+ class BioLibRuntimeError(Exception):
15
+ pass
16
+
17
+
18
+ class BioLibRuntimeNotRecognizedError(BioLibRuntimeError):
19
+ def __init__(self, message='The runtime is not recognized as a BioLib app'):
20
+ self.message = message
21
+ super().__init__(self.message)
22
+
23
+
24
+ class Runtime:
25
+ _job_data: Optional[RuntimeJobDataDict] = None
26
+
27
+ @staticmethod
28
+ def check_is_environment_biolib_app() -> bool:
29
+ return bool(Runtime._try_to_get_job_data())
30
+
31
+ @staticmethod
32
+ def get_job_id() -> str:
33
+ return Runtime._get_job_data()['job_uuid']
34
+
35
+ @staticmethod
36
+ def get_job_auth_token() -> str:
37
+ return Runtime._get_job_data()['job_auth_token']
38
+
39
+ @staticmethod
40
+ def get_job_requested_machine() -> str:
41
+ return Runtime._get_job_data()['job_requested_machine']
42
+
43
+ @staticmethod
44
+ def set_main_result_prefix(result_prefix: str) -> None:
45
+ job_data = Runtime._get_job_data()
46
+ api.client.patch(
47
+ data={'result_name_prefix': result_prefix},
48
+ headers={'Job-Auth-Token': job_data['job_auth_token']},
49
+ path=f"/jobs/{job_data['job_uuid']}/main_result/",
50
+ )
51
+
52
+ @staticmethod
53
+ def _try_to_get_job_data() -> Optional[RuntimeJobDataDict]:
54
+ if not Runtime._job_data:
55
+ try:
56
+ with open('/biolib/secrets/biolib_system_secret') as file:
57
+ job_data: RuntimeJobDataDict = json.load(file)
58
+ except BaseException:
59
+ return None
60
+
61
+ if not job_data['version'].startswith('1.'):
62
+ raise BioLibRuntimeError(f"Unexpected system secret version {job_data['version']} expected 1.x.x")
63
+
64
+ Runtime._job_data = job_data
65
+
66
+ return cast(RuntimeJobDataDict, Runtime._job_data)
67
+
68
+ @staticmethod
69
+ def _get_job_data() -> RuntimeJobDataDict:
70
+ job_data = Runtime._try_to_get_job_data()
71
+ if not job_data:
72
+ raise BioLibRuntimeNotRecognizedError() from None
73
+ return job_data
@@ -0,0 +1,18 @@
1
+ import time
2
+ import uuid
3
+
4
+
5
+ def open_browser_window_from_notebook(url_to_open: str) -> None:
6
+ try:
7
+ from IPython.display import ( # type:ignore # pylint: disable=import-error, import-outside-toplevel
8
+ Javascript,
9
+ display,
10
+ update_display,
11
+ )
12
+ except ImportError as error:
13
+ raise Exception('Unexpected environment. This function can only be called from a notebook.') from error
14
+
15
+ display_id = str(uuid.uuid4())
16
+ display(Javascript(f'window.open("{url_to_open}");'), display_id=display_id)
17
+ time.sleep(1)
18
+ update_display(Javascript(''), display_id=display_id)
biolib/app/app.py CHANGED
@@ -263,7 +263,12 @@ Example: "app.cli('--help')"
263
263
  if not key.startswith('--'):
264
264
  key = f'--{key}'
265
265
 
266
- args.extend([key, value])
266
+ args.append(key)
267
+ if isinstance(value, list):
268
+ # TODO: only do this if argument key is of type file list
269
+ args.extend(value)
270
+ else:
271
+ args.append(value)
267
272
 
268
273
  return self.cli(args, **biolib_kwargs)
269
274
 
biolib/app/search_apps.py CHANGED
@@ -7,41 +7,37 @@ from biolib.typing_utils import Optional, List
7
7
  def search_apps(
8
8
  search_query: Optional[str] = None,
9
9
  team: Optional[str] = None,
10
- count: int = 100
11
- ) -> List[str]:
12
-
10
+ count: int = 100,
11
+ ) -> List[str]:
13
12
  query_exceeded_page_size = False
14
13
  params = {
15
14
  'page_size': count,
16
15
  }
17
16
  if team:
18
- if not team.startswith("@"):
19
- team = "@biolib.com/" + team
17
+ if not team.startswith('@'):
18
+ team = '@biolib.com/' + team
20
19
  params['account_handle'] = team
21
20
 
22
21
  if search_query:
23
22
  params['search'] = search_query
24
23
 
25
- apps_json = api.client.get(
26
- path='/apps/',
27
- params=params
28
- ).json()
24
+ apps_json = api.client.get(path='/apps/', params=params).json()
29
25
  if apps_json['count'] > count:
30
26
  query_exceeded_page_size = True
31
27
 
32
28
  apps = [app['resource_uri'] for app in apps_json['results']]
33
29
 
34
- if not utils.BASE_URL_IS_PUBLIC_BIOLIB and (not team or team.lower().startswith("@biolib.com")):
30
+ if not utils.BASE_URL_IS_PUBLIC_BIOLIB and (not team or team.lower().startswith('@biolib.com')):
35
31
  # Also get federated apps if running on enterprise deployment
36
32
  public_biolib_apps_json = api.client.get(
37
33
  authenticate=False,
38
34
  path='https://biolib.com/api/apps/',
39
- params=params
35
+ params=params,
40
36
  ).json()
41
37
  if public_biolib_apps_json['count'] > count:
42
38
  query_exceeded_page_size = True
43
39
 
44
- apps.extend([f'@biolib.com/{app["resource_uri"]}' for app in public_biolib_apps_json['results']])
40
+ apps.extend([f"@biolib.com/{app['resource_uri']}" for app in public_biolib_apps_json['results']])
45
41
 
46
42
  if query_exceeded_page_size:
47
43
  print(f'Search results exceeded {count}, use the argument "count" to increase the amount of results returned')
@@ -6,6 +6,7 @@ import os
6
6
  from datetime import datetime, timezone
7
7
  from json.decoder import JSONDecodeError
8
8
 
9
+ from biolib._internal.runtime import Runtime
9
10
  from biolib._internal.http_client import HttpClient
10
11
  from biolib.typing_utils import Optional
11
12
  from biolib.biolib_errors import BioLibError
@@ -61,16 +62,18 @@ class _ApiClient:
61
62
  return
62
63
 
63
64
  if self.access_token:
64
- decoded_token = self._decode_jwt_without_checking_signature(self.access_token)
65
+ decoded_token = self.decode_jwt_without_checking_signature(self.access_token)
65
66
  if datetime.now(tz=timezone.utc).timestamp() < decoded_token['payload']['exp'] - 60: # 60 second buffer
66
67
  # Token has not expired yet
67
68
  return
68
69
 
69
70
  # TODO: Implement nicer error handling
70
71
  try:
71
- response = HttpClient.request(method='POST',
72
- url=f'{self.base_url}/api/user/token/refresh/',
73
- data={'refresh': self.refresh_token})
72
+ response = HttpClient.request(
73
+ method='POST',
74
+ url=f'{self.base_url}/api/user/token/refresh/',
75
+ data={'refresh': self.refresh_token},
76
+ )
74
77
  except Exception as exception:
75
78
  logger.error('Sign in with refresh token failed')
76
79
  raise exception
@@ -111,9 +114,11 @@ class _ApiClient:
111
114
  def sign_in_with_api_token(self, api_token: str) -> None:
112
115
  logger_no_user_data.debug('ApiClient: Signing in with BIOLIB_TOKEN...')
113
116
  try:
114
- response = HttpClient.request(method='POST',
115
- url=f'{self.base_url}/api/user/api_tokens/exchange/',
116
- data={'token': api_token})
117
+ response = HttpClient.request(
118
+ method='POST',
119
+ url=f'{self.base_url}/api/user/api_tokens/exchange/',
120
+ data={'token': api_token},
121
+ )
117
122
  except Exception as exception:
118
123
  logger.error('Sign in with API token failed')
119
124
  raise exception
@@ -127,7 +132,7 @@ class _ApiClient:
127
132
  self.refresh_token = json_response['refresh_token']
128
133
 
129
134
  @staticmethod
130
- def _decode_jwt_without_checking_signature(jwt: str) -> Dict[str, Any]:
135
+ def decode_jwt_without_checking_signature(jwt: str) -> Dict[str, Any]:
131
136
  jwt_bytes = jwt.encode('utf-8')
132
137
 
133
138
  try:
@@ -189,7 +194,7 @@ class BiolibApiClient:
189
194
  @staticmethod
190
195
  def assert_is_signed_in(authenticated_action_description: str) -> None:
191
196
  api_client = BiolibApiClient.get()
192
- if not api_client.is_signed_in:
197
+ if not api_client.is_signed_in and not Runtime.check_is_environment_biolib_app():
193
198
  raise BioLibError(
194
199
  f'You must be signed in to {authenticated_action_description}. '
195
200
  f'Please set the environment variable "BIOLIB_TOKEN"'
@@ -99,6 +99,7 @@ class _AppVersionOnJob(TypedDict):
99
99
 
100
100
  class AppOnJob(TypedDict):
101
101
  allow_client_side_execution: bool
102
+ can_push_data_record_for_user: bool
102
103
  state: Literal['public', 'draft']
103
104
 
104
105
 
@@ -35,7 +35,7 @@ def encode_multipart(data, files):
35
35
  line_array.append('')
36
36
 
37
37
  data_encoded = b'\r\n'.join([line.encode() if isinstance(line, str) else line for line in line_array])
38
- return 'multipart/form-data; boundary={}'.format(boundary), data_encoded
38
+ return f'multipart/form-data; boundary={boundary}', data_encoded
39
39
 
40
40
 
41
41
  def _get_git_branch_name() -> str: