pybiolib 1.1.1881__py3-none-any.whl → 1.1.2193__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. biolib/__init__.py +11 -4
  2. biolib/_data_record/data_record.py +278 -0
  3. biolib/_internal/data_record/__init__.py +1 -1
  4. biolib/_internal/data_record/data_record.py +95 -151
  5. biolib/_internal/data_record/remote_storage_endpoint.py +18 -7
  6. biolib/_internal/file_utils.py +77 -0
  7. biolib/_internal/fuse_mount/__init__.py +1 -0
  8. biolib/_internal/fuse_mount/experiment_fuse_mount.py +209 -0
  9. biolib/_internal/http_client.py +29 -9
  10. biolib/_internal/lfs/__init__.py +1 -0
  11. biolib/_internal/libs/__init__.py +1 -0
  12. biolib/_internal/libs/fusepy/__init__.py +1257 -0
  13. biolib/_internal/push_application.py +1 -1
  14. biolib/_internal/runtime.py +2 -56
  15. biolib/_internal/types/__init__.py +4 -0
  16. biolib/_internal/types/app.py +9 -0
  17. biolib/_internal/types/data_record.py +40 -0
  18. biolib/_internal/types/experiment.py +10 -0
  19. biolib/_internal/types/resource.py +14 -0
  20. biolib/_internal/types/typing.py +7 -0
  21. biolib/_runtime/runtime.py +80 -0
  22. biolib/api/__init__.py +1 -0
  23. biolib/api/client.py +39 -17
  24. biolib/app/app.py +34 -71
  25. biolib/biolib_api_client/api_client.py +9 -2
  26. biolib/biolib_api_client/app_types.py +2 -2
  27. biolib/biolib_api_client/biolib_job_api.py +6 -0
  28. biolib/biolib_api_client/job_types.py +4 -4
  29. biolib/biolib_api_client/lfs_types.py +8 -2
  30. biolib/biolib_binary_format/remote_endpoints.py +12 -10
  31. biolib/biolib_binary_format/utils.py +23 -3
  32. biolib/cli/auth.py +1 -1
  33. biolib/cli/data_record.py +43 -6
  34. biolib/cli/lfs.py +10 -6
  35. biolib/compute_node/cloud_utils/cloud_utils.py +13 -16
  36. biolib/compute_node/job_worker/executors/docker_executor.py +126 -108
  37. biolib/compute_node/job_worker/job_storage.py +3 -4
  38. biolib/compute_node/job_worker/job_worker.py +25 -15
  39. biolib/compute_node/remote_host_proxy.py +61 -84
  40. biolib/compute_node/webserver/webserver_types.py +0 -1
  41. biolib/experiments/experiment.py +75 -44
  42. biolib/jobs/job.py +98 -19
  43. biolib/jobs/job_result.py +46 -21
  44. biolib/jobs/types.py +1 -1
  45. biolib/runtime/__init__.py +2 -1
  46. biolib/sdk/__init__.py +18 -7
  47. biolib/typing_utils.py +2 -7
  48. biolib/user/sign_in.py +2 -2
  49. biolib/utils/seq_util.py +38 -35
  50. {pybiolib-1.1.1881.dist-info → pybiolib-1.1.2193.dist-info}/METADATA +1 -1
  51. {pybiolib-1.1.1881.dist-info → pybiolib-1.1.2193.dist-info}/RECORD +55 -44
  52. biolib/experiments/types.py +0 -9
  53. biolib/lfs/__init__.py +0 -4
  54. biolib/lfs/utils.py +0 -153
  55. /biolib/{lfs → _internal/lfs}/cache.py +0 -0
  56. {pybiolib-1.1.1881.dist-info → pybiolib-1.1.2193.dist-info}/LICENSE +0 -0
  57. {pybiolib-1.1.1881.dist-info → pybiolib-1.1.2193.dist-info}/WHEEL +0 -0
  58. {pybiolib-1.1.1881.dist-info → pybiolib-1.1.2193.dist-info}/entry_points.txt +0 -0
biolib/compute_node/remote_host_proxy.py
@@ -1,21 +1,21 @@
 import io
-import tarfile
 import subprocess
+import tarfile
 import time
+from urllib.parse import urlparse

-from docker.models.containers import Container  # type: ignore
 from docker.errors import ImageNotFound  # type: ignore
+from docker.models.containers import Container  # type: ignore
 from docker.models.images import Image  # type: ignore
 from docker.models.networks import Network  # type: ignore

 from biolib import utils
-from biolib.biolib_errors import BioLibError
-from biolib.compute_node.cloud_utils import CloudUtils
-from biolib.typing_utils import Optional, List
-from biolib.biolib_api_client import RemoteHost
+from biolib.biolib_api_client import BiolibApiClient, RemoteHost
 from biolib.biolib_docker_client import BiolibDockerClient
+from biolib.biolib_errors import BioLibError
 from biolib.biolib_logging import logger_no_user_data
-from biolib.biolib_api_client import BiolibApiClient
+from biolib.compute_node.cloud_utils import CloudUtils
+from biolib.typing_utils import List, Optional


 # Prepare for remote hosts with specified port
@@ -24,31 +24,23 @@ class RemoteHostExtended(RemoteHost):


 class RemoteHostProxy:
-
     def __init__(
-            self,
-            remote_host: RemoteHost,
-            public_network: Network,
-            internal_network: Optional[Network],
-            job_id: str,
-            ports: List[int],
-            can_push_data_record_for_user: bool,
+        self,
+        remote_host: RemoteHost,
+        public_network: Network,
+        internal_network: Optional[Network],
+        job_id: str,
+        ports: List[int],
     ):
-        self._can_push_data_record_for_user: bool = can_push_data_record_for_user
         self.is_app_caller_proxy = remote_host['hostname'] == 'AppCallerProxy'
-
-        # Default to port 443 for now until backend serves remote_hosts with port specified
-        self._remote_host: RemoteHostExtended = RemoteHostExtended(
-            hostname=remote_host['hostname'],
-            ports=ports
-        )
+        self._remote_host: RemoteHostExtended = RemoteHostExtended(hostname=remote_host['hostname'], ports=ports)
         self._public_network: Network = public_network
         self._internal_network: Optional[Network] = internal_network

         if not job_id:
             raise Exception('RemoteHostProxy missing argument "job_id"')

-        self._name = f"biolib-remote-host-proxy-{job_id}-{self.hostname}"
+        self._name = f'biolib-remote-host-proxy-{job_id}-{self.hostname}'
         self._job_uuid = job_id
         self._container: Optional[Container] = None
         self._enclave_traffic_forwarder_processes: List[subprocess.Popen] = []
@@ -154,32 +146,21 @@ class RemoteHostProxy:
             raise Exception('RemoteHostProxy container not defined when attempting to write NGINX config')

         docker = BiolibDockerClient.get_docker_client()
-        base_url = BiolibApiClient.get().base_url
+        upstream_hostname = urlparse(BiolibApiClient.get().base_url).hostname
         if self.is_app_caller_proxy:
+            if not utils.IS_RUNNING_IN_CLOUD:
+                raise BioLibError('Calling apps inside apps is not supported in local compute environment')
+
             logger_no_user_data.debug(f'Job "{self._job_uuid}" writing config for and starting App Caller Proxy...')
-            if utils.BIOLIB_CLOUD_BASE_URL:
-                cloud_base_url = utils.BIOLIB_CLOUD_BASE_URL
-            else:
-                if base_url in ('https://biolib.com', 'https://staging-elb.biolib.com'):
-                    cloud_base_url = 'https://biolibcloud.com'
-                else:
-                    raise BioLibError('Calling apps inside apps is not supported in local compute environment')
-
-            if utils.IS_RUNNING_IN_CLOUD:
-                config = CloudUtils.get_webserver_config()
-                s3_results_bucket_name = config['s3_general_storage_bucket_name']
-                s3_results_base_url = f'https://{s3_results_bucket_name}.s3.amazonaws.com'
-            else:
-                if base_url in ('https://biolib.com', 'https://staging-elb.biolib.com'):
-                    s3_results_base_url = 'https://biolib-cloud-api.s3.amazonaws.com'
-                else:
-                    raise BioLibError("Calling apps inside apps locally is only supported on biolib.com")
+            config = CloudUtils.get_webserver_config()
+            compute_node_uuid = config['compute_node_info']['public_id']
+            compute_node_auth_token = config['compute_node_info']['auth_token']

             # TODO: Get access_token from new API class instead
             access_token = BiolibApiClient.get().access_token
             bearer_token = f'Bearer {access_token}' if access_token else ''

-            nginx_config = f'''
+            nginx_config = f"""
 events {{
     worker_connections 1024;
 }}
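The key refactor above: instead of interpolating the full `base_url` into every `proxy_pass` directive, the proxy now extracts a bare hostname once with `urlparse` and lets NGINX assemble the upstream URL. A minimal sketch of that extraction (the URL value here is illustrative):

```python
from urllib.parse import urlparse

# Illustrative value; at runtime this comes from BiolibApiClient.get().base_url
base_url = 'https://biolib.com'

# .hostname strips the scheme, port, and path, leaving only the host
upstream_hostname = urlparse(base_url).hostname
assert upstream_hostname == 'biolib.com'
```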
@@ -196,29 +177,19 @@ http {{
         default "";
     }}

-    map $request_method $bearer_token_on_patch {{
-        PATCH "{bearer_token}";
-        default "";
-    }}
-
     map $request_method $bearer_token_on_patch_and_get {{
         PATCH "{bearer_token}";
        GET "{bearer_token}";
         default "";
     }}

-    map $request_method $bearer_token_on_post_and_get {{
-        POST "{bearer_token}";
-        GET "{bearer_token}";
-        default "";
-    }}
-
     server {{
         listen 80;
-        resolver 127.0.0.11 valid=30s;
+        resolver 127.0.0.11 ipv6=off valid=30s;
+        set $upstream_hostname {upstream_hostname};

         location ~* "^/api/jobs/cloud/(?<job_id>[a-z0-9-]{{36}})/status/$" {{
-            proxy_pass {base_url}/api/jobs/cloud/$job_id/status/;
+            proxy_pass https://$upstream_hostname/api/jobs/cloud/$job_id/status/;
             proxy_set_header authorization $bearer_token_on_get;
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
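The switch from a literal URL to `https://$upstream_hostname` is more than cosmetic: when `proxy_pass` contains a variable, NGINX re-resolves the name through the configured `resolver` (127.0.0.11 is Docker's embedded DNS) for each request, honoring the `valid=30s` TTL instead of caching the IP once at startup; `ipv6=off` skips AAAA lookups. A hedged sketch of how such a server block could be rendered in Python, mirroring the f-string pattern the diff uses (the hostname value is an assumption):

```python
# Sketch: render a minimal server block whose upstream is resolved per request.
upstream_hostname = 'biolib.com'  # illustrative; derived via urlparse in the real code

server_block = f"""
server {{
    listen 80;
    resolver 127.0.0.11 ipv6=off valid=30s;
    set $upstream_hostname {upstream_hostname};

    location /api/ {{
        # A variable in proxy_pass forces runtime DNS resolution via the resolver
        proxy_pass https://$upstream_hostname/api/;
        proxy_ssl_server_name on;
    }}
}}
"""
print(server_block)
```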
@@ -226,35 +197,35 @@ http {{

         location ~* "^/api/jobs/cloud/$" {{
             # Note: Using $1 here as URI part from regex must be used for proxy_pass
-            proxy_pass {base_url}/api/jobs/cloud/$1;
+            proxy_pass https://$upstream_hostname/api/jobs/cloud/$1;
             proxy_set_header authorization $bearer_token_on_post;
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

         location ~* "^/api/jobs/(?<job_id>[a-z0-9-]{{36}})/storage/input/start_upload/$" {{
-            proxy_pass {base_url}/api/jobs/$job_id/storage/input/start_upload/;
+            proxy_pass https://$upstream_hostname/api/jobs/$job_id/storage/input/start_upload/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

         location ~* "^/api/jobs/(?<job_id>[a-z0-9-]{{36}})/storage/input/presigned_upload_url/$" {{
-            proxy_pass {base_url}/api/jobs/$job_id/storage/input/presigned_upload_url/$is_args$args;
+            proxy_pass https://$upstream_hostname/api/jobs/$job_id/storage/input/presigned_upload_url/$is_args$args;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

         location ~* "^/api/jobs/(?<job_id>[a-z0-9-]{{36}})/storage/input/complete_upload/$" {{
-            proxy_pass {base_url}/api/jobs/$job_id/storage/input/complete_upload/;
+            proxy_pass https://$upstream_hostname/api/jobs/$job_id/storage/input/complete_upload/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

         location ~* "^/api/jobs/(?<job_id>[a-z0-9-]{{36}})/main_result/$" {{
-            proxy_pass {base_url}/api/jobs/$job_id/main_result/;
+            proxy_pass https://$upstream_hostname/api/jobs/$job_id/main_result/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_pass_request_headers on;
@@ -262,7 +233,7 @@ http {{
         }}

         location ~* "^/api/jobs/(?<job_id>[a-z0-9-]{{36}})/$" {{
-            proxy_pass {base_url}/api/jobs/$job_id/;
+            proxy_pass https://$upstream_hostname/api/jobs/$job_id/;
             proxy_set_header authorization $bearer_token_on_patch_and_get;
             proxy_set_header caller-job-uuid "{self._job_uuid}";
             proxy_set_header cookie "";
@@ -271,7 +242,7 @@ http {{

         location ~* "^/api/jobs/create_job_with_data/$" {{
             # Note: Using $1 here as URI part from regex must be used for proxy_pass
-            proxy_pass {base_url}/api/jobs/create_job_with_data/$1;
+            proxy_pass https://$upstream_hostname/api/jobs/create_job_with_data/$1;
             proxy_set_header authorization $bearer_token_on_post;
             proxy_set_header caller-job-uuid "{self._job_uuid}";
             proxy_set_header cookie "";
@@ -280,59 +251,65 @@ http {{

         location ~* "^/api/jobs/$" {{
             # Note: Using $1 here as URI part from regex must be used for proxy_pass
-            proxy_pass {base_url}/api/jobs/$1;
+            proxy_pass https://$upstream_hostname/api/jobs/$1;
             proxy_set_header authorization $bearer_token_on_post;
             proxy_set_header caller-job-uuid "{self._job_uuid}";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

-        location /api/lfs/ {{
-            proxy_pass {base_url}/api/lfs/;
-            proxy_set_header authorization {'$bearer_token_on_post_and_get' if self._can_push_data_record_for_user else '""'};
+        location ~ "^/api/jobs/{self._job_uuid}/notes/$" {{
+            # Note: Using $1 here as URI part from regex must be used for proxy_pass
+            proxy_pass https://$upstream_hostname/api/jobs/{self._job_uuid}/notes/$1;
+            proxy_set_header authorization "";
+            proxy_set_header job-auth-token "";
+            proxy_set_header compute-node-auth-token "{compute_node_auth_token}";
+            proxy_set_header compute-node-uuid "{compute_node_uuid}";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

-        location /api/ {{
-            proxy_pass {base_url}/api/;
+        location /api/lfs/ {{
+            proxy_pass https://$upstream_hostname/api/lfs/;
             proxy_set_header authorization "";
+            proxy_set_header compute-node-auth-token "{compute_node_auth_token}";
+            proxy_set_header job-uuid "{self._job_uuid}";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

-        location /cloud-proxy/ {{
-            proxy_pass {cloud_base_url}/cloud-proxy/;
+        location /api/app/ {{
+            proxy_pass https://$upstream_hostname/api/app/;
             proxy_set_header authorization "";
+            proxy_set_header compute-node-auth-token "{compute_node_auth_token}";
+            proxy_set_header job-uuid "{self._job_uuid}";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

-        location /job-storage/ {{
-            proxy_pass {s3_results_base_url}/job-storage/;
+        location /api/ {{
+            proxy_pass https://$upstream_hostname/api/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

         location /proxy/storage/job-storage/ {{
-            proxy_pass {cloud_base_url}/proxy/storage/job-storage/;
+            proxy_pass https://$upstream_hostname/proxy/storage/job-storage/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}

-        {f"""
         location /proxy/storage/lfs/versions/ {{
-            proxy_pass {cloud_base_url}/proxy/storage/lfs/versions/;
+            proxy_pass https://$upstream_hostname/proxy/storage/lfs/versions/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
         }}
-        """ if self._can_push_data_record_for_user else ''}

         location /proxy/cloud/ {{
-            proxy_pass {cloud_base_url}/proxy/cloud/;
+            proxy_pass https://$upstream_hostname/proxy/cloud/;
             proxy_set_header authorization "";
             proxy_set_header cookie "";
             proxy_ssl_server_name on;
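Note the authentication shift in the rewritten `/api/lfs/` and new `/api/app/` locations: the proxy strips any `authorization` header and instead injects the compute node's own auth token plus the calling job's UUID, letting the backend scope what a running job may access rather than trusting the user's bearer token. A hypothetical sketch of a call made from inside a job container (the proxy hostname is an assumption for illustration, not taken from this diff):

```python
import urllib.request

# Hypothetical network alias for the app caller proxy inside the job's Docker
# network; the real alias is configured elsewhere in pybiolib.
PROXY_HOST = 'app-caller-proxy'

# The request itself carries no credentials; the NGINX proxy adds the
# compute-node-auth-token and job-uuid headers before forwarding upstream.
request = urllib.request.Request(f'http://{PROXY_HOST}/api/app/')
with urllib.request.urlopen(request) as response:
    print(response.status)
```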
@@ -343,15 +320,15 @@ http {{
         }}
     }}
 }}
-'''
+"""
         else:
-            nginx_config = '''
+            nginx_config = """
 events {}
 error_log /dev/stdout info;
 stream {
-    resolver 127.0.0.11 valid=30s;'''
+    resolver 127.0.0.11 valid=30s;"""
             for idx, upstream_server_port in enumerate(upstream_server_ports):
-                nginx_config += f'''
+                nginx_config += f"""
 map "" $upstream_{idx} {{
     default {upstream_server_name}:{upstream_server_port};
 }}
@@ -364,11 +341,11 @@ stream {
 server {{
     listen {self._remote_host['ports'][idx]} udp;
     proxy_pass $upstream_{idx};
-}}'''
+}}"""

-            nginx_config += '''
+            nginx_config += """
 }
-'''
+"""

         nginx_config_bytes = nginx_config.encode()
         tarfile_in_memory = io.BytesIO()
biolib/compute_node/webserver/webserver_types.py
@@ -16,5 +16,4 @@ class WebserverConfig(TypedDict):
     base_url: str
     compute_node_info: ComputeNodeInfo
     is_dev: bool
-    s3_general_storage_bucket_name: str
     shutdown_times: ShutdownTimes
biolib/experiments/experiment.py
@@ -1,30 +1,29 @@
 import time
 from collections import OrderedDict

-from biolib.biolib_errors import BioLibError
-from biolib.jobs.types import JobsPaginatedResponse
-from biolib.typing_utils import List, Optional
-
+import biolib._internal.types as _types
 from biolib import api
-from biolib.experiments.types import ExperimentDict
+from biolib.biolib_errors import BioLibError
 from biolib.jobs.job import Job
-from biolib.typing_utils import Dict, Union
-
+from biolib.jobs.types import JobsPaginatedResponse
 from biolib.tables import BioLibTable
+from biolib.typing_utils import Dict, List, Optional, Union


 class Experiment:
     _BIOLIB_EXPERIMENTS: List['Experiment'] = []

     # Columns to print in table when showing Job
-    _table_columns_to_row_map = OrderedDict({
-        'Name': {'key': 'name', 'params': {}},
-        'Job Count': {'key': 'job_count', 'params': {}},
-        'Created At': {'key': 'created_at', 'params': {}}
-    })
+    _table_columns_to_row_map = OrderedDict(
+        {
+            'Name': {'key': 'name', 'params': {}},
+            'Job Count': {'key': 'job_count', 'params': {}},
+            'Created At': {'key': 'created_at', 'params': {}},
+        }
+    )

-    def __init__(self, name: str):
-        self._experiment_dict: ExperimentDict = self._create_in_backend_or_get_experiment_dict(name)
+    def __init__(self, uri: str, _resource_dict: Optional[_types.ResourceDict] = None):
+        self._resource_dict: _types.ResourceDict = _resource_dict or self._get_or_create_resource_dict(uri)

     def __enter__(self):
         Experiment._BIOLIB_EXPERIMENTS.append(self)
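With this change an `Experiment` is constructed from a URI and backed by a generic resource dict; a bare string without `/` is still accepted and treated as a name. A usage sketch (account and experiment names are illustrative):

```python
from biolib.experiments.experiment import Experiment

# A bare name is sent as 'name'; a string containing '/' is sent as 'uri'
experiment = Experiment('my-experiment')
experiment_by_uri = Experiment('my-account/my-experiment')

print(experiment.uuid, experiment.uri)
```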
@@ -33,18 +32,29 @@ class Experiment:
         Experiment._BIOLIB_EXPERIMENTS.pop()

     def __str__(self):
-        return f'Experiment: {self.name}'
+        return f'Experiment: {self.uri}'

     def __repr__(self):
-        return f'Experiment: {self.name}'
+        return f'Experiment: {self.uri}'

     @property
     def uuid(self) -> str:
-        return self._experiment_dict['uuid']
+        return self._resource_dict['uuid']

     @property
     def name(self) -> str:
-        return self._experiment_dict['name']
+        return self._resource_dict['name']
+
+    @property
+    def uri(self) -> str:
+        return self._resource_dict['uri']
+
+    @property
+    def _experiment_dict(self) -> _types.ExperimentSlimDict:
+        if not self._resource_dict['experiment']:
+            raise ValueError(f'Resource {self.uri} is not an Experiment')
+
+        return self._resource_dict['experiment']

     @staticmethod
     def get_experiment_in_context() -> Optional['Experiment']:
@@ -55,32 +65,46 @@ class Experiment:
     # Prints a table listing info about experiments accessible to the user
     @staticmethod
     def show_experiments(count: int = 25) -> None:
-        experiment_dicts = api.client.get(
-            path='/experiments/',
-            params={
-                'page_size': str(count)
-            }
-        ).json()['results']
+        experiment_dicts = api.client.get(path='/experiments/', params={'page_size': str(count)}).json()['results']
         BioLibTable(
             columns_to_row_map=Experiment._table_columns_to_row_map,
             rows=experiment_dicts,
-            title='Experiments'
+            title='Experiments',
         ).print_table()

+    @staticmethod
+    def get_by_uri(uri: str) -> 'Experiment':
+        query_param_key = 'uri' if '/' in uri else 'name'
+        resource_dict: _types.ResourceDict = api.client.get('/resource/', params={query_param_key: uri}).json()
+        if not resource_dict['experiment']:
+            raise ValueError(f'Resource {uri} is not an experiment')
+
+        return Experiment(uri=resource_dict['uri'], _resource_dict=resource_dict)
+
     def wait(self) -> None:
-        self._refetch_experiment_dict()
+        self._refetch()
         while self._experiment_dict['job_running_count'] > 0:
             print(f"Waiting for {self._experiment_dict['job_running_count']} jobs to finish", end='\r')
             time.sleep(5)
-            self._refetch_experiment_dict()
+            self._refetch()

         print(f'All jobs of experiment {self.name} have finished')

     def add_job(self, job_id: str) -> None:
-        api.client.patch(
-            path=f'/jobs/{job_id}/',
-            data={'experiment_uuid': self.uuid}
-        )
+        api.client.patch(path=f'/jobs/{job_id}/', data={'experiment_uuid': self.uuid})
+
+    def mount_files(self, mount_path: str) -> None:
+        try:
+            # Only attempt to import FUSE dependencies when strictly necessary
+            from biolib._internal.fuse_mount import (  # pylint: disable=import-outside-toplevel
+                ExperimentFuseMount as _ExperimentFuseMount,
+            )
+        except ImportError as error:
+            raise ImportError(
+                'Failed to import FUSE mounting utils. Please ensure FUSE is installed on your system.'
+            ) from error
+
+        _ExperimentFuseMount.mount_experiment(experiment=self, mount_path=mount_path)

     def export_job_list(self, export_format='dicts'):
         valid_formats = ('dicts', 'dataframe')
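Two additions stand out in this hunk: `get_by_uri`, which resolves an existing experiment through the `/resource/` endpoint instead of implicitly creating one, and `mount_files`, which lazily imports the new FUSE machinery so the dependency is only needed when mounting is actually used. A usage sketch (URI and mount path are illustrative):

```python
from biolib.experiments.experiment import Experiment

# Fetch an existing experiment without creating one as a side effect
experiment = Experiment.get_by_uri('my-account/my-experiment')

# Expose the experiment's files as a local directory via FUSE
# (requires FUSE to be installed on the host)
experiment.mount_files(mount_path='/tmp/my-experiment')
```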
@@ -98,7 +122,7 @@ class Experiment:
                 raise ImportError(
                     'Pandas must be installed to use this method. '
                     'Alternatively, use .get_jobs() to get a list of job objects.'
-                    ) from error
+                ) from error

         jobs_df = pd.DataFrame.from_dict(job_dict_list)
         jobs_df.started_at = pd.to_datetime(jobs_df.started_at)
@@ -125,7 +149,7 @@ class Experiment:
         BioLibTable(
             columns_to_row_map=Job.table_columns_to_row_map,
             rows=[job._job_dict for job in jobs],  # pylint: disable=protected-access
-            title=f'Jobs in experiment: "{self.name}"'
+            title=f'Jobs in experiment: "{self.name}"',
         ).print_table()

     def get_jobs(self, status: Optional[str] = None) -> List[Job]:
@@ -147,15 +171,22 @@ class Experiment:

         return jobs

-    def _create_in_backend_or_get_experiment_dict(self, name: str) -> ExperimentDict:
-        # This endpoint returns experiment dict if already created
-        experiment_dict: ExperimentDict = api.client.post(
-            path='/experiments/',
-            data={
-                'name': name
-            }
-        ).json()
-        return experiment_dict
+    def rename(self, destination: str) -> None:
+        api.client.patch(f'/resources/{self.uuid}/', data={'uri': destination})
+        self._refetch()
+
+    @staticmethod
+    def _get_resource_dict_by_uuid(uuid: str) -> _types.ResourceDict:
+        resource_dict: _types.ResourceDict = api.client.get(f'/resources/{uuid}/').json()
+        if not resource_dict['experiment']:
+            raise ValueError('Resource from URI is not an experiment')
+
+        return resource_dict
+
+    @staticmethod
+    def _get_or_create_resource_dict(uri: str) -> _types.ResourceDict:
+        response_dict = api.client.post(path='/experiments/', data={'uri' if '/' in uri else 'name': uri}).json()
+        return Experiment._get_resource_dict_by_uuid(uuid=response_dict['uuid'])

-    def _refetch_experiment_dict(self) -> None:
-        self._experiment_dict = api.client.get(path=f'/experiments/{self.uuid}/').json()
+    def _refetch(self) -> None:
+        self._resource_dict = self._get_resource_dict_by_uuid(uuid=self._resource_dict['uuid'])
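The new `rename` method PATCHes the resource's URI and then refetches, so the in-memory state stays consistent with the backend. A usage sketch (both URIs are illustrative):

```python
from biolib.experiments.experiment import Experiment

experiment = Experiment.get_by_uri('my-account/old-name')
experiment.rename(destination='my-account/new-name')
print(experiment.uri)  # reflects the new URI after the internal refetch
```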