pybiolib 1.1.1971__py3-none-any.whl → 1.1.1979__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- biolib/app/app.py +32 -71
- biolib/biolib_binary_format/remote_endpoints.py +12 -10
- biolib/cli/auth.py +1 -1
- biolib/jobs/job.py +75 -10
- biolib/jobs/job_result.py +16 -16
- biolib/jobs/types.py +1 -1
- {pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/METADATA +1 -1
- {pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/RECORD +11 -11
- {pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/LICENSE +0 -0
- {pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/WHEEL +0 -0
- {pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/entry_points.txt +0 -0
biolib/app/app.py
CHANGED
@@ -1,29 +1,26 @@
-import os
 import io
-import random
 import json
+import os
+import random
 import string
-
 from pathlib import Path
+
 from biolib import utils
-from biolib.
-from biolib.compute_node.job_worker.job_worker import JobWorker
-from biolib.experiments.experiment import Experiment
-from biolib.jobs import Job
-from biolib.typing_utils import Optional, cast
-from biolib.biolib_api_client import CreatedJobDict, JobState
-from biolib.jobs.types import JobDict
+from biolib.biolib_api_client import JobState
 from biolib.biolib_api_client.app_types import App, AppVersion
-from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_api_client.biolib_app_api import BiolibAppApi
+from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_binary_format import ModuleInput
 from biolib.biolib_errors import BioLibError
 from biolib.biolib_logging import logger
+from biolib.compute_node.job_worker.job_worker import JobWorker
+from biolib.experiments.experiment import Experiment
+from biolib.jobs import Job
+from biolib.typing_utils import Optional
 from biolib.utils.app_uri import parse_app_uri


 class BioLibApp:
-
     def __init__(self, uri: str):
         app_response = BiolibAppApi.get_by_uri(uri)
         self._app: App = app_response['app']
@@ -48,17 +45,17 @@ class BioLibApp:
         return self._app_version

     def cli(
-
-
-
-
-
-
-
-
-
-
-
+        self,
+        args=None,
+        stdin=None,
+        files=None,
+        override_command=False,
+        machine='',
+        blocking: bool = True,
+        experiment_id: Optional[str] = None,
+        result_prefix: Optional[str] = None,
+        timeout: Optional[int] = None,
+        notify: bool = False,
     ) -> Job:
         if not experiment_id:
             experiment = Experiment.get_experiment_in_context()
@@ -78,7 +75,9 @@ class BioLibApp:

             return self._run_locally(module_input_serialized)

-        job =
+        job = Job._start_job_in_cloud(  # pylint: disable=protected-access
+            app_uri=self._app_uri,
+            app_version_uuid=self._app_version['public_id'],
             experiment_id=experiment_id,
             machine=machine,
             module_input_serialized=module_input_serialized,
@@ -93,8 +92,8 @@ class BioLibApp:
             utils.STREAM_STDOUT = True

             enable_print = bool(
-                utils.STREAM_STDOUT
-                (self._app_version.get('main_output_file') or self._app_version.get('stdout_render_type') == 'text')
+                utils.STREAM_STDOUT
+                and (self._app_version.get('main_output_file') or self._app_version.get('stdout_render_type') == 'text')
             )
             job._stream_logs(enable_print=enable_print)  # pylint: disable=protected-access

@@ -108,11 +107,11 @@ class BioLibApp:
             self.cli()

         else:
-            raise BioLibError(
+            raise BioLibError("""
 Calling an app directly with app() is currently being reworked.
 To use the previous functionality, please call app.cli() instead.
 Example: "app.cli('--help')"
-
+""")

     @staticmethod
     def _get_serialized_module_input(args=None, stdin=None, files=None) -> bytes:
@@ -142,9 +141,9 @@ Example: "app.cli('--help')"
                    args[idx] = Path(arg).name

                # support --myarg=file.txt
-                elif os.path.isfile(arg.split(
-                    files.append(arg.split(
-                    args[idx] = arg.split(
+                elif os.path.isfile(arg.split('=')[-1]) or os.path.isdir(arg.split('=')[-1]):
+                    files.append(arg.split('=')[-1])
+                    args[idx] = arg.split('=')[0] + '=' + Path(arg.split('=')[-1]).name
                else:
                    pass  # a normal string arg was given
            else:
@@ -154,7 +153,7 @@ Example: "app.cli('--help')"
            elif isinstance(arg, io.BytesIO):
                file_data = arg.getvalue()
            else:
-                raise Exception(f
+                raise Exception(f'Unexpected type of argument: {arg}')
            files_dict[f'/{tmp_filename}'] = file_data
            args[idx] = tmp_filename

@@ -192,48 +191,10 @@ Example: "app.cli('--help')"
         )
         return module_input_serialized

-    def _start_in_cloud(
-        self,
-        module_input_serialized: bytes,
-        override_command: bool = False,
-        machine: Optional[str] = None,
-        experiment_id: Optional[str] = None,
-        result_prefix: Optional[str] = None,
-        timeout: Optional[int] = None,
-        notify: bool = False,
-    ) -> Job:
-        if len(module_input_serialized) < 500_000:
-            _job_dict = BiolibJobApi.create_job_with_data(
-                app_resource_name_prefix=parse_app_uri(self._app_uri)['resource_name_prefix'],
-                app_version_uuid=self._app_version['public_id'],
-                arguments_override_command=override_command,
-                experiment_uuid=experiment_id,
-                module_input_serialized=module_input_serialized,
-                notify=notify,
-                requested_machine=machine,
-                requested_timeout_seconds=timeout,
-                result_name_prefix=result_prefix,
-            )
-            return Job(cast(JobDict, _job_dict))
-
-        job_dict: CreatedJobDict = BiolibJobApi.create(
-            app_resource_name_prefix=parse_app_uri(self._app_uri)['resource_name_prefix'],
-            app_version_id=self._app_version['public_id'],
-            experiment_uuid=experiment_id,
-            machine=machine,
-            notify=notify,
-            override_command=override_command,
-            timeout=timeout,
-        )
-        JobStorage.upload_module_input(job=job_dict, module_input_serialized=module_input_serialized)
-        cloud_job = BiolibJobApi.create_cloud_job(job_id=job_dict['public_id'], result_name_prefix=result_prefix)
-        logger.debug(f"Cloud: Job created with id {cloud_job['public_id']}")
-        return Job(cast(JobDict, job_dict))
-
     def _run_locally(self, module_input_serialized: bytes) -> Job:
         job_dict = BiolibJobApi.create(
             app_version_id=self._app_version['public_id'],
-            app_resource_name_prefix=parse_app_uri(self._app_uri)['resource_name_prefix']
+            app_resource_name_prefix=parse_app_uri(self._app_uri)['resource_name_prefix'],
         )
         job = Job(job_dict)
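For context, the reworked `cli()` signature above exposes its options as keyword arguments. A minimal usage sketch, assuming the usual `biolib.load` entry point; the app URI and argument values below are hypothetical placeholders, only the parameter names come from the diff:

    import biolib

    app = biolib.load('author/app-name')  # hypothetical app URI
    job = app.cli(
        args='--help',
        blocking=False,          # return a Job immediately instead of streaming logs
        machine='',              # optionally request a specific machine type
        result_prefix='my-run',  # hypothetical prefix for the result name
        timeout=3600,            # forwarded as requested_timeout_seconds when the job is created
        notify=False,            # forwarded to the job-creation API (assumed to control completion notifications)
    )
    print(job.get_status())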
biolib/biolib_binary_format/remote_endpoints.py
CHANGED
@@ -1,25 +1,27 @@
 from datetime import datetime, timedelta
-# from urllib.parse import urlparse, parse_qs
-
-from biolib.biolib_logging import logger

 from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_binary_format.utils import RemoteEndpoint

+# from urllib.parse import urlparse, parse_qs
+from biolib.biolib_logging import logger
+from biolib.typing_utils import Literal
+

-class RemoteJobStorageResultEndpoint(RemoteEndpoint):
-    def __init__(self,
-        self._job_id = job_id
-        self._job_auth_token = job_auth_token
+class RemoteJobStorageEndpoint(RemoteEndpoint):
+    def __init__(self, job_uuid: str, job_auth_token: str, storage_type: Literal['input', 'output']):
         self._expires_at = None
+        self._job_auth_token = job_auth_token
+        self._job_uuid = job_uuid
         self._presigned_url = None
+        self._storage_type: Literal['input', 'output'] = storage_type

     def get_remote_url(self):
         if not self._presigned_url or datetime.utcnow() > self._expires_at:
             self._presigned_url = BiolibJobApi.get_job_storage_download_url(
                 job_auth_token=self._job_auth_token,
-                job_uuid=self.
-                storage_type='results'
+                job_uuid=self._job_uuid,
+                storage_type='results' if self._storage_type == 'output' else 'input',
             )
             self._expires_at = datetime.utcnow() + timedelta(minutes=8)
             # TODO: Use expires at from url
@@ -27,6 +29,6 @@ class RemoteJobStorageResultEndpoint(RemoteEndpoint):
             # query_params = parse_qs(parsed_url.query)
             # time_at_generation = datetime.datetime.strptime(query_params['X-Amz-Date'][0], '%Y%m%dT%H%M%SZ')
             # self._expires_at = time_at_generation + timedelta(seconds=int(query_params['X-Amz-Expires'][0]))
-            logger.debug(f'Job "{self.
+            logger.debug(f'Job "{self._job_uuid}" fetched presigned URL with expiry at {self._expires_at.isoformat()}')

         return self._presigned_url
biolib/cli/auth.py
CHANGED
@@ -52,7 +52,7 @@ def whoami() -> None:
         email = user_dict['email']
         intrinsic_account = [account for account in user_dict['accounts'] if account['role'] == 'intrinsic'][0]
         display_name = intrinsic_account['display_name']
-        print(f'Name: {display_name}\nEmail: {email}')
+        print(f'Name: {display_name}\nEmail: {email}\nLogged into: {client.base_url}')
     else:
         print('Not logged in', file=sys.stderr)
         exit(1)
biolib/jobs/job.py
CHANGED
@@ -9,18 +9,22 @@ from urllib.parse import urlparse
 from biolib import api, utils
 from biolib._internal.http_client import HttpClient
 from biolib._internal.utils import open_browser_window_from_notebook
-from biolib.biolib_api_client import BiolibApiClient
+from biolib.biolib_api_client import BiolibApiClient, CreatedJobDict
+from biolib.biolib_api_client.biolib_app_api import BiolibAppApi
 from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
 from biolib.biolib_binary_format import LazyLoadedFile, ModuleInput, ModuleInputDict, ModuleOutputV2
+from biolib.biolib_binary_format.remote_endpoints import RemoteJobStorageEndpoint
 from biolib.biolib_binary_format.stdout_and_stderr import StdoutAndStderr
 from biolib.biolib_errors import BioLibError, CloudJobFinishedError
 from biolib.biolib_logging import logger, logger_no_user_data
+from biolib.compute_node.job_worker.job_storage import JobStorage
 from biolib.compute_node.utils import SystemExceptionCodeMap, SystemExceptionCodes
 from biolib.jobs.job_result import JobResult
 from biolib.jobs.types import CloudJobDict, CloudJobStartedDict, JobDict
 from biolib.tables import BioLibTable
 from biolib.typing_utils import Dict, List, Optional, cast
 from biolib.utils import IS_RUNNING_IN_NOTEBOOK
+from biolib.utils.app_uri import parse_app_uri


 class Job:
@@ -56,7 +60,7 @@ class Job:
     @property
     def result(self) -> JobResult:
         if not self._result:
-            if self.get_status() ==
+            if self.get_status() == 'completed':
                 self._result = JobResult(job_uuid=self._uuid, job_auth_token=self._auth_token)
             else:
                 raise BioLibError(f"Result is not available for {self._uuid}: status is {self._job_dict['state']}.")
@@ -65,17 +69,17 @@ class Job:

     @property
     def stdout(self) -> bytes:
-        logger.warning(
+        logger.warning('The property .stdout is deprecated, please use .get_stdout()')
         return self.result.get_stdout()

     @property
     def stderr(self) -> bytes:
-        logger.warning(
+        logger.warning('The property .stderr is deprecated, please use .get_stderr()')
         return self.result.get_stderr()

     @property
     def exitcode(self) -> int:
-        logger.warning(
+        logger.warning('The property .exitcode is deprecated, please use .get_exit_code()')
         return self.result.get_exit_code()

     def is_finished(self) -> bool:
@@ -109,8 +113,8 @@ class Job:
     def load_file_as_numpy(self, *args, **kwargs):
         try:
             import numpy  # type: ignore # pylint: disable=import-outside-toplevel,import-error
-        except:  # pylint: disable=raise-missing-from
-            raise Exception(
+        except ImportError:  # pylint: disable=raise-missing-from
+            raise Exception('Failed to import numpy, please make sure it is installed.') from None
         file_handle = self.result.get_output_file(*args, **kwargs).get_file_handle()
         return numpy.load(file_handle, allow_pickle=False)  # type: ignore

@@ -198,6 +202,27 @@ class Job:
         except Exception as error:
             logger.error(f'Failed to cancel job {self._uuid} due to: {error}')

+    def recompute(self, app_uri: Optional[str] = None, machine: Optional[str] = None, blocking: bool = True) -> 'Job':
+        app_response = BiolibAppApi.get_by_uri(uri=app_uri or self._job_dict['app_uri'])
+
+        job_storage_input = RemoteJobStorageEndpoint(
+            job_auth_token=self._auth_token,
+            job_uuid=self._uuid,
+            storage_type='input',
+        )
+        http_response = HttpClient.request(url=job_storage_input.get_remote_url())
+        module_input_serialized = http_response.content
+
+        job = self._start_job_in_cloud(
+            app_uri=app_response['app_uri'],
+            app_version_uuid=app_response['app_version']['public_id'],
+            module_input_serialized=module_input_serialized,
+            machine=machine,
+        )
+        if blocking:
+            job.stream_logs()
+
+        return job

     def _get_cloud_job(self) -> CloudJobDict:
         self._refetch_job_dict(force_refetch=True)
@@ -290,7 +315,7 @@ class Job:
             status_json = self._get_job_status_from_compute_node(compute_node_url)
             if not status_json:
                 # this can happen if the job is finished but already removed from the compute node
-                logger.warning(
+                logger.warning('WARN: We were unable to retrieve the full log of the job, please try again')
                 break
             job_is_completed = status_json['is_completed']
             for status_update in status_json['status_updates']:
@@ -353,9 +378,9 @@ class Job:
             return HttpClient.request(url=f'{compute_node_url}/v1/job/{self._uuid}/status/').json()
         except Exception:  # pylint: disable=broad-except
             cloud_job = self._get_cloud_job()
-            logger.debug(
+            logger.debug('Failed to get status from compute node, retrying...')
             if cloud_job['finished_at']:
-                logger.debug(
+                logger.debug('Job no longer exists on compute node, checking for error...')
                 if cloud_job['error_code'] != SystemExceptionCodes.COMPLETED_SUCCESSFULLY.value:
                     error_message = SystemExceptionCodeMap.get(
                         cloud_job['error_code'], f'Unknown error code {cloud_job["error_code"]}'
@@ -378,3 +403,43 @@ class Job:

         self._job_dict = self._get_job_dict(self._uuid, self._auth_token)
         self._job_dict_last_fetched_at = datetime.utcnow()
+
+    @staticmethod
+    def _start_job_in_cloud(
+        app_uri: str,
+        app_version_uuid: str,
+        module_input_serialized: bytes,
+        override_command: bool = False,
+        machine: Optional[str] = None,
+        experiment_id: Optional[str] = None,
+        result_prefix: Optional[str] = None,
+        timeout: Optional[int] = None,
+        notify: bool = False,
+    ) -> 'Job':
+        if len(module_input_serialized) < 500_000:
+            _job_dict = BiolibJobApi.create_job_with_data(
+                app_resource_name_prefix=parse_app_uri(app_uri)['resource_name_prefix'],
+                app_version_uuid=app_version_uuid,
+                arguments_override_command=override_command,
+                experiment_uuid=experiment_id,
+                module_input_serialized=module_input_serialized,
+                notify=notify,
+                requested_machine=machine,
+                requested_timeout_seconds=timeout,
+                result_name_prefix=result_prefix,
+            )
+            return Job(cast(JobDict, _job_dict))
+
+        job_dict: CreatedJobDict = BiolibJobApi.create(
+            app_resource_name_prefix=parse_app_uri(app_uri)['resource_name_prefix'],
+            app_version_id=app_version_uuid,
+            experiment_uuid=experiment_id,
+            machine=machine,
+            notify=notify,
+            override_command=override_command,
+            timeout=timeout,
+        )
+        JobStorage.upload_module_input(job=job_dict, module_input_serialized=module_input_serialized)
+        cloud_job = BiolibJobApi.create_cloud_job(job_id=job_dict['public_id'], result_name_prefix=result_prefix)
+        logger.debug(f"Cloud: Job created with id {cloud_job['public_id']}")
+        return Job(cast(JobDict, job_dict))
biolib/jobs/job_result.py
CHANGED
@@ -1,25 +1,24 @@
-from pathlib import Path
-from fnmatch import fnmatch
 import time
+from fnmatch import fnmatch
+from pathlib import Path

 from biolib.biolib_binary_format import ModuleOutputV2
+from biolib.biolib_binary_format.remote_endpoints import RemoteJobStorageEndpoint
 from biolib.biolib_binary_format.remote_stream_seeker import StreamSeeker
-from biolib.biolib_binary_format.utils import
-from biolib.biolib_binary_format.remote_endpoints import RemoteJobStorageResultEndpoint
+from biolib.biolib_binary_format.utils import LazyLoadedFile, RemoteIndexableBuffer
 from biolib.biolib_errors import BioLibError
 from biolib.biolib_logging import logger
-from biolib.typing_utils import
+from biolib.typing_utils import Callable, List, Optional, Union, cast

 PathFilter = Union[str, Callable[[str], bool]]


 class JobResult:
-
     def __init__(
-
-
-
-
+        self,
+        job_uuid: str,
+        job_auth_token: str,
+        module_output: Optional[ModuleOutputV2] = None,
     ):
         self._job_uuid: str = job_uuid
         self._job_auth_token: str = job_auth_token
@@ -75,10 +74,10 @@ class JobResult:
         files = self._get_module_output().get_files()
         filtered_files = self._get_filtered_files(files, path_filter=filename)
         if not filtered_files:
-            raise BioLibError(f
+            raise BioLibError(f'File {filename} not found in results.')

         if len(filtered_files) != 1:
-            raise BioLibError(f
+            raise BioLibError(f'Found multiple results for filename {filename}.')

         return filtered_files[0]

@@ -100,8 +99,8 @@ class JobResult:
         glob_filter = cast(str, path_filter)

         # since all file paths start with /, make sure filter does too
-        if not glob_filter.startswith(
-            glob_filter =
+        if not glob_filter.startswith('/'):
+            glob_filter = '/' + glob_filter

         def _filter_function(file: LazyLoadedFile) -> bool:
             return fnmatch(file.path, glob_filter)
@@ -110,9 +109,10 @@ class JobResult:

     def _get_module_output(self) -> ModuleOutputV2:
         if self._module_output is None:
-            remote_job_storage_endpoint =
-                job_id=self._job_uuid,
+            remote_job_storage_endpoint = RemoteJobStorageEndpoint(
                 job_auth_token=self._job_auth_token,
+                job_uuid=self._job_uuid,
+                storage_type='output',
             )
             buffer = RemoteIndexableBuffer(endpoint=remote_job_storage_endpoint)
             self._module_output = ModuleOutputV2(buffer)
biolib/jobs/types.py
CHANGED
{pybiolib-1.1.1971.dist-info → pybiolib-1.1.1979.dist-info}/RECORD
CHANGED
@@ -16,7 +16,7 @@ biolib/_internal/utils/__init__.py,sha256=p5vsIFyu-zYqBgdSMfwW9NC_jk7rXvvCbV4Bzd
 biolib/api/__init__.py,sha256=iIO8ZRdn7YDhY5cR47-Wo1YsNOK8H6RN6jn8yor5WJI,137
 biolib/api/client.py,sha256=MtDkH2Amr2Fko-bCR5DdookJu0yZ1q-6K_PPg4KK_Ek,2941
 biolib/app/__init__.py,sha256=cdPtcfb_U-bxb9iSL4fCEq2rpD9OjkyY4W-Zw60B0LI,37
-biolib/app/app.py,sha256=
+biolib/app/app.py,sha256=8AvPYL1W2wxQ7t7BB2KeVU2WPrm3UL6vVuHPGs8g9L0,8388
 biolib/app/search_apps.py,sha256=K4a41f5XIWth2BWI7OffASgIsD0ko8elCax8YL2igaY,1470
 biolib/biolib_api_client/__init__.py,sha256=E5EMa19wJoblwSdQPYrxc_BtIeRsAuO0L_jQweWw-Yk,182
 biolib/biolib_api_client/api_client.py,sha256=J03jRVvod1bgwwAZ3BZVKlUSJi43-ev2DUB0j63GZpc,7189
@@ -33,7 +33,7 @@ biolib/biolib_binary_format/base_bbf_package.py,sha256=vxRV4iKy0dXeDOlFWnMFI0hGn
 biolib/biolib_binary_format/file_in_container.py,sha256=j1eEPRxf_ew8I6G8sDiiZZxn4Wx1ppagfM9K8zTDG4U,3591
 biolib/biolib_binary_format/module_input.py,sha256=led2QhHeec_ymBPw5uEn3_3vJKI-1T8zrFQGqwEWLMY,2788
 biolib/biolib_binary_format/module_output_v2.py,sha256=J5ZO5gCSeudpE12EVDrjYrNTS2DwgszY-SVXT7Qjuyg,5913
-biolib/biolib_binary_format/remote_endpoints.py,sha256=
+biolib/biolib_binary_format/remote_endpoints.py,sha256=V48mwOj3eAQAKp-8DjtWUdEKUyC0WKc1pEiKTmtjrJY,1651
 biolib/biolib_binary_format/remote_stream_seeker.py,sha256=uyi6kJBU1C1DWfiuR0kRUQIY7nalG7ocgwgngd3Ul4U,1999
 biolib/biolib_binary_format/saved_job.py,sha256=nFHVFRNTNcAFGODLSiBntCtMk55QKwreUq6qLX80dI4,1125
 biolib/biolib_binary_format/stdout_and_stderr.py,sha256=WfUUJFFCBrtfXjuWIaRPiWCpuBLxfko68oxoTKhrwx8,1023
@@ -45,7 +45,7 @@ biolib/biolib_download_container.py,sha256=8TmBV8iv3bCvkNlHa1SSsc4zl0wX_eaxhfnW5
 biolib/biolib_errors.py,sha256=5m4lK2l39DafpoXBImEBD4EPH3ayXBX0JgtPzmGClow,689
 biolib/biolib_logging.py,sha256=J3E5H_LL5k6ZUim2C8gqN7E6lCBZMTpO4tnMpOPwG9U,2854
 biolib/cli/__init__.py,sha256=0v3c_J-U0k46c5ZWeQjLG_kTaKDJm81LBxQpDO2B_aI,1286
-biolib/cli/auth.py,sha256=
+biolib/cli/auth.py,sha256=rpWGmXs6Fz6CGrO9K8ibPRszOdXG78Vig_boKaVCD9A,2082
 biolib/cli/data_record.py,sha256=piN3QUbRAkMi4wpayghN4unFfuiNE5VCjI1gag4d8qg,1725
 biolib/cli/download_container.py,sha256=HIZVHOPmslGE5M2Dsp9r2cCkAEJx__vcsDz5Wt5LRos,483
 biolib/cli/init.py,sha256=wQOfii_au-d30Hp7DdH-WVw-WVraKvA_zY4za1w7DE8,821
@@ -89,9 +89,9 @@ biolib/experiments/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuF
 biolib/experiments/experiment.py,sha256=kUQsH9AGAckPKT_nzaRuTh8Mb2pVUpxnuX9IstRTOEo,6351
 biolib/experiments/types.py,sha256=n9GxdFA7cLMfHvLLqLmZzX31ELeSSkMXFoEEdFsdWGY,171
 biolib/jobs/__init__.py,sha256=aIb2H2DHjQbM2Bs-dysFijhwFcL58Blp0Co0gimED3w,32
-biolib/jobs/job.py,sha256=
-biolib/jobs/job_result.py,sha256=
-biolib/jobs/types.py,sha256=
+biolib/jobs/job.py,sha256=aWKnf_2pYdr76gh3hxPiVs2iuXlpwZkKPTK81Pz4G2U,19072
+biolib/jobs/job_result.py,sha256=UGxE9MNLtwJiWmhw2UNjOQW7EZi7B-e2lL0PXYvsXeA,4925
+biolib/jobs/types.py,sha256=qhadtH2KDC2WUOOqPiwke0YgtQY4FtuB71Stekq1k48,970
 biolib/lfs/__init__.py,sha256=Qv8vdYeK43JecT4SsE93ZYE2VmNiZENdNpW8P9-omxs,115
 biolib/lfs/cache.py,sha256=pQS2np21rdJ6I3DpoOutnzPHpLOZgUIS8TMltUJk_k4,2226
 biolib/lfs/utils.py,sha256=HSs7F2wXklYhhv5tabfaeC5noXJyxRjbGD5IhWOVdxs,5918
@@ -109,8 +109,8 @@ biolib/utils/cache_state.py,sha256=u256F37QSRIVwqKlbnCyzAX4EMI-kl6Dwu6qwj-Qmag,3
 biolib/utils/multipart_uploader.py,sha256=XvGP1I8tQuKhAH-QugPRoEsCi9qvbRk-DVBs5PNwwJo,8452
 biolib/utils/seq_util.py,sha256=jC5WhH63FTD7SLFJbxQGA2hOt9NTwq9zHl_BEec1Z0c,4907
 biolib/utils/zip/remote_zip.py,sha256=0wErYlxir5921agfFeV1xVjf29l9VNgGQvNlWOlj2Yc,23232
-pybiolib-1.1.
-pybiolib-1.1.
-pybiolib-1.1.
-pybiolib-1.1.
-pybiolib-1.1.
+pybiolib-1.1.1979.dist-info/LICENSE,sha256=F2h7gf8i0agDIeWoBPXDMYScvQOz02pAWkKhTGOHaaw,1067
+pybiolib-1.1.1979.dist-info/METADATA,sha256=gaN82MLET8VrqRkqovA4ARsoe1iBCt5eWuA0YoalPaY,1508
+pybiolib-1.1.1979.dist-info/WHEEL,sha256=sP946D7jFCHeNz5Iq4fL4Lu-PrWrFsgfLXbbkciIZwg,88
+pybiolib-1.1.1979.dist-info/entry_points.txt,sha256=p6DyaP_2kctxegTX23WBznnrDi4mz6gx04O5uKtRDXg,42
+pybiolib-1.1.1979.dist-info/RECORD,,
File without changes
|
File without changes
|
File without changes
|