pybiolib 1.2.1056__py3-none-any.whl → 1.2.1727__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
Potentially problematic release.
This version of pybiolib might be problematic.
- biolib/__init__.py +33 -10
- biolib/_data_record/data_record.py +103 -26
- biolib/_index/__init__.py +0 -0
- biolib/_index/index.py +51 -0
- biolib/_index/types.py +7 -0
- biolib/_internal/data_record/data_record.py +1 -1
- biolib/_internal/data_record/push_data.py +65 -16
- biolib/_internal/data_record/remote_storage_endpoint.py +3 -3
- biolib/_internal/file_utils.py +7 -4
- biolib/_internal/index/__init__.py +1 -0
- biolib/_internal/index/index.py +18 -0
- biolib/_internal/lfs/cache.py +4 -2
- biolib/_internal/push_application.py +89 -23
- biolib/_internal/runtime.py +2 -0
- biolib/_internal/templates/gui_template/App.tsx +38 -2
- biolib/_internal/templates/gui_template/Dockerfile +2 -0
- biolib/_internal/templates/gui_template/biolib-sdk.ts +37 -0
- biolib/_internal/templates/gui_template/dev-data/output.json +7 -0
- biolib/_internal/templates/gui_template/package.json +1 -0
- biolib/_internal/templates/gui_template/vite-plugin-dev-data.ts +49 -0
- biolib/_internal/templates/gui_template/vite.config.mts +2 -1
- biolib/_internal/templates/init_template/.github/workflows/biolib.yml +6 -1
- biolib/_internal/templates/init_template/Dockerfile +2 -0
- biolib/_internal/utils/__init__.py +40 -0
- biolib/_internal/utils/auth.py +46 -0
- biolib/_internal/utils/job_url.py +33 -0
- biolib/_runtime/runtime.py +9 -0
- biolib/_session/session.py +7 -5
- biolib/_shared/__init__.py +0 -0
- biolib/_shared/types/__init__.py +74 -0
- biolib/_shared/types/resource.py +37 -0
- biolib/_shared/types/resource_deploy_key.py +11 -0
- biolib/{_internal → _shared}/types/resource_version.py +8 -2
- biolib/_shared/utils/__init__.py +7 -0
- biolib/_shared/utils/resource_uri.py +75 -0
- biolib/api/client.py +3 -47
- biolib/app/app.py +57 -33
- biolib/biolib_api_client/api_client.py +3 -47
- biolib/biolib_api_client/app_types.py +1 -6
- biolib/biolib_api_client/biolib_app_api.py +17 -0
- biolib/biolib_binary_format/module_input.py +8 -0
- biolib/biolib_binary_format/remote_endpoints.py +3 -3
- biolib/biolib_binary_format/remote_stream_seeker.py +39 -25
- biolib/cli/__init__.py +2 -1
- biolib/cli/data_record.py +82 -0
- biolib/cli/index.py +32 -0
- biolib/cli/init.py +39 -1
- biolib/cli/lfs.py +1 -1
- biolib/cli/run.py +8 -5
- biolib/cli/start.py +14 -1
- biolib/compute_node/job_worker/executors/docker_executor.py +31 -9
- biolib/compute_node/job_worker/executors/docker_types.py +1 -1
- biolib/compute_node/job_worker/executors/types.py +6 -5
- biolib/compute_node/job_worker/job_worker.py +149 -93
- biolib/compute_node/job_worker/large_file_system.py +2 -6
- biolib/compute_node/job_worker/network_alloc.py +99 -0
- biolib/compute_node/job_worker/network_buffer.py +240 -0
- biolib/compute_node/job_worker/utilization_reporter_thread.py +2 -2
- biolib/compute_node/remote_host_proxy.py +139 -79
- biolib/compute_node/utils.py +2 -0
- biolib/compute_node/webserver/compute_node_results_proxy.py +188 -0
- biolib/compute_node/webserver/proxy_utils.py +28 -0
- biolib/compute_node/webserver/webserver.py +64 -19
- biolib/experiments/experiment.py +111 -16
- biolib/jobs/job.py +119 -29
- biolib/jobs/job_result.py +70 -33
- biolib/jobs/types.py +1 -0
- biolib/sdk/__init__.py +17 -2
- biolib/typing_utils.py +1 -1
- biolib/utils/cache_state.py +2 -2
- biolib/utils/multipart_uploader.py +24 -18
- biolib/utils/seq_util.py +1 -1
- pybiolib-1.2.1727.dist-info/METADATA +41 -0
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1727.dist-info}/RECORD +103 -85
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1727.dist-info}/WHEEL +1 -1
- pybiolib-1.2.1727.dist-info/entry_points.txt +2 -0
- biolib/_internal/types/__init__.py +0 -6
- biolib/_internal/types/resource.py +0 -18
- biolib/utils/app_uri.py +0 -57
- pybiolib-1.2.1056.dist-info/METADATA +0 -50
- pybiolib-1.2.1056.dist-info/entry_points.txt +0 -3
- /biolib/{_internal → _shared}/types/account.py +0 -0
- /biolib/{_internal → _shared}/types/account_member.py +0 -0
- /biolib/{_internal → _shared}/types/app.py +0 -0
- /biolib/{_internal → _shared}/types/data_record.py +0 -0
- /biolib/{_internal → _shared}/types/experiment.py +0 -0
- /biolib/{_internal → _shared}/types/file_node.py +0 -0
- /biolib/{_internal → _shared}/types/push.py +0 -0
- /biolib/{_internal → _shared}/types/resource_permission.py +0 -0
- /biolib/{_internal → _shared}/types/result.py +0 -0
- /biolib/{_internal → _shared}/types/typing.py +0 -0
- /biolib/{_internal → _shared}/types/user.py +0 -0
- {pybiolib-1.2.1056.dist-info → pybiolib-1.2.1727.dist-info/licenses}/LICENSE +0 -0
biolib/_internal/push_application.py
CHANGED

@@ -1,3 +1,4 @@
+import json
 import os
 import re
 import sys
@@ -14,14 +15,14 @@ from biolib._internal.data_record.push_data import (
 )
 from biolib._internal.errors import AuthenticationError
 from biolib._internal.file_utils import get_files_and_size_of_directory, get_iterable_zip_stream
-from biolib.
+from biolib._shared.types import PushResponseDict
+from biolib._shared.utils import parse_resource_uri
 from biolib.biolib_api_client import BiolibApiClient
 from biolib.biolib_api_client.biolib_app_api import BiolibAppApi
 from biolib.biolib_docker_client import BiolibDockerClient
 from biolib.biolib_errors import BioLibError
 from biolib.biolib_logging import logger
-from biolib.typing_utils import Iterable, Optional, Set, TypedDict
-from biolib.utils.app_uri import parse_app_uri
+from biolib.typing_utils import Dict, Iterable, Optional, Set, TypedDict, Union

 REGEX_MARKDOWN_INLINE_IMAGE = re.compile(r'!\[(?P<alt>.*)\]\((?P<src>.*)\)')

@@ -108,8 +109,10 @@ def _process_docker_status_updates_with_progress_bar(status_updates: Iterable[Do


 def _process_docker_status_updates_with_logging(status_updates: Iterable[DockerStatusUpdate], action: str) -> None:
-    layer_progress = {}
-    layer_status = {}
+    layer_progress: Dict[str, float] = {}
+    layer_status: Dict[str, str] = {}
+    layer_details: Dict[str, Dict[str, int]] = {}
+    layer_bytes_at_last_log: Dict[str, int] = {}
     last_log_time = time.time()

     logger.info(f'{action} Docker image...')
@@ -127,6 +130,7 @@ def _process_docker_status_updates_with_logging(status_updates: Iterable[DockerS
             percentage = (current / total * 100) if total > 0 else 0
             layer_progress[layer_id] = percentage
             layer_status[layer_id] = f'{action.lower()}'
+            layer_details[layer_id] = {'current': current, 'total': total}
         elif update.get('status') == 'Layer already exists':
             layer_progress[layer_id] = 100
             layer_status[layer_id] = 'already exists'
@@ -145,16 +149,33 @@ def _process_docker_status_updates_with_logging(status_updates: Iterable[DockerS
             logger.info(f'{action} Docker image - {status}')

         if current_time - last_log_time >= 10.0:
-            _log_progress_summary(
+            _log_progress_summary(
+                action,
+                layer_progress,
+                layer_status,
+                layer_details,
+                layer_bytes_at_last_log,
+                current_time - last_log_time,
+            )
+            layer_bytes_at_last_log = {lid: details['current'] for lid, details in layer_details.items()}
             last_log_time = current_time

-    _log_progress_summary(
+    _log_progress_summary(
+        action, layer_progress, layer_status, layer_details, layer_bytes_at_last_log, time.time() - last_log_time
+    )
     if action == 'Pushing':
         logger.info('Pushing final image manifest...')
     logger.info(f'{action} Docker image completed')


-def _log_progress_summary(
+def _log_progress_summary(
+    action: str,
+    layer_progress: Dict[str, float],
+    layer_status: Dict[str, str],
+    layer_details: Dict[str, Dict[str, int]],
+    layer_bytes_at_last_log: Dict[str, int],
+    time_delta: float,
+) -> None:
     if not layer_progress and not layer_status:
         return

@@ -173,7 +194,36 @@ def _log_progress_summary(action: str, layer_progress: dict, layer_status: dict)
         if status in ['preparing', 'waiting', 'pushing', 'uploading'] and layer_progress.get(layer_id, 0) < 100
     ]

-    if active_layers:
+    if active_layers and layer_details:
+        total_bytes_transferred = 0
+        layer_info_parts = []
+
+        for layer_id in active_layers[:5]:
+            if layer_id in layer_details:
+                details = layer_details[layer_id]
+                current = details['current']
+                total = details['total']
+                percentage = layer_progress.get(layer_id, 0)
+
+                bytes_since_last = current - layer_bytes_at_last_log.get(layer_id, 0)
+                total_bytes_transferred += bytes_since_last
+
+                current_mb = current / (1024 * 1024)
+                total_mb = total / (1024 * 1024)
+                layer_info_parts.append(f'{layer_id}: {current_mb:.1f}/{total_mb:.1f} MB ({percentage:.1f}%)')
+
+        speed_info = ''
+        if time_delta > 0 and total_bytes_transferred > 0:
+            speed_mbps = (total_bytes_transferred / (1024 * 1024)) / time_delta
+            speed_info = f' @ {speed_mbps:.2f} MB/s'
+
+        more_layers_info = ''
+        if len(active_layers) > 5:
+            more_layers_info = f' (+ {len(active_layers) - 5} more)'
+
+        if layer_info_parts:
+            logger.info(f'Active layers: {", ".join(layer_info_parts)}{speed_info}{more_layers_info}')
+    elif active_layers:
         logger.info(f'Active layers: {", ".join(active_layers[:5])}{"..." if len(active_layers) > 5 else ""}')


@@ -195,13 +245,12 @@ def push_application(
     set_as_published: bool,
     dry_run: bool = False,
 ) -> Optional[PushResponseDict]:
-
-
+    app_uri = app_uri.rstrip('/')
+    parsed_uri = parse_resource_uri(app_uri)
+    resource_name = parsed_uri['resource_name']

-    app_uri_prefix =
-
-    )
-    app_uri_to_fetch = f"{app_uri_prefix}{parsed_uri['account_handle_normalized']}/{app_name}"
+    app_uri_prefix = f"@{parsed_uri['resource_prefix']}/" if parsed_uri['resource_prefix'] is not None else ''
+    app_uri_to_fetch = f"{app_uri_prefix}{parsed_uri['account_handle_normalized']}/{resource_name}"

     version = parsed_uri['version']
     semantic_version = f"{version['major']}.{version['minor']}.{version['patch']}" if version else None
@@ -238,19 +287,34 @@ def push_application(
     app_data_path: Optional[Path] = None
     try:
         with open(config_yml_path) as config_yml_file:
-
+            try:
+                config = json.loads(json.dumps(yaml.safe_load(config_yml_file.read())))
+            except (TypeError, ValueError) as e:
+                raise BioLibError(
+                    f'The .biolib/config.yml file contains data types that are not supported '
+                    f'(must be JSON-serializable). Please ensure only standard JSON types '
+                    f'(str, int, float, bool, list, dict, null) are used. Original error: {e}'
+                ) from e
+
+        if 'assets' in config and 'app_data' not in config:
+            config['app_data'] = config.pop('assets')
+        elif 'assets' in config and 'app_data' in config:
+            raise BioLibError(
+                'In .biolib/config.yml you cannot specify both "app_data" and "assets" fields. Please use only one.'
+            )

         app_data = config.get('app_data')
         if app_data:
+            field_name = 'app_data' if 'app_data' in config else 'assets'
             if not isinstance(app_data, str):
                 raise BioLibError(
-                    f'In .biolib/config.yml the value of "
+                    f'In .biolib/config.yml the value of "{field_name}" must be a string but got {type(app_data)}'
                 )

             app_data_path = app_path_absolute.joinpath(app_data).resolve()
             if not app_data_path.is_dir():
                 raise BioLibError(
-                    'In .biolib/config.yml the value of "
+                    f'In .biolib/config.yml the value of "{field_name}" must be a path to a directory '
                     'in the application directory'
                 )

@@ -321,10 +385,6 @@ def push_application(
     app_response = BiolibAppApi.get_by_uri(app_uri_to_fetch)
     app = app_response['app']

-    if app_data and not app['allow_client_side_execution']:
-        raise BioLibError(
-            'To push a version with app_data the app must be set to "Allow Client-Side Source Code Access"'
-        )
     if dry_run:
         logger.info('Successfully completed dry-run. No new version was pushed.')
         return None
@@ -410,9 +470,15 @@ def push_application(
     logger.info(f'Successfully pushed {docker_image_name}')

     app_version_uuid = new_app_version_json['public_id']
+    complete_push_data: Dict[str, Union[bool, str]] = {
+        'set_as_active': set_as_active,
+        'set_as_published': set_as_published,
+    }
+    if parsed_uri['tag']:
+        complete_push_data['tag'] = parsed_uri['tag']
     api.client.post(
         path=f'/app-versions/{app_version_uuid}/complete-push/',
-        data=
+        data=complete_push_data,
     )

     sematic_version = f"{new_app_version_json['major']}.{new_app_version_json['minor']}.{new_app_version_json['patch']}"
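
The config-parsing change above validates `.biolib/config.yml` by round-tripping the parsed YAML through JSON, so values that YAML accepts but JSON cannot represent are rejected with a clear error instead of failing later in the push. A minimal standalone sketch of that round-trip check (the YAML content below is illustrative, not a real config):

import json

import yaml  # PyYAML, the same parser push_application uses above

RAW_CONFIG = """
description_file: README.md
created: 2024-01-01  # YAML parses an unquoted date as datetime.date, which json.dumps rejects
"""

try:
    # json.dumps fails on non-JSON types, so the round trip doubles as a validity check
    config = json.loads(json.dumps(yaml.safe_load(RAW_CONFIG)))
except (TypeError, ValueError) as error:
    print(f'config contains values that are not JSON-serializable: {error}')
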

biolib/_internal/runtime.py
CHANGED

@@ -4,10 +4,12 @@ from biolib.typing_utils import TypedDict
 class RuntimeJobDataDict(TypedDict):
     version: str
     job_requested_machine: str
+    job_requested_machine_spot: bool
     job_uuid: str
     job_auth_token: str
     app_uri: str
     is_environment_biolib_cloud: bool
+    job_reserved_machines: int


 class BioLibRuntimeError(Exception):

biolib/_internal/templates/gui_template/App.tsx
CHANGED

@@ -1,16 +1,52 @@
+import { useState, useEffect } from "react";
+import biolib from "./biolib-sdk";
+
 export default function App() {
+  const [outputFileData, setOutputFileData] = useState<Uint8Array | null>(null);
+  const [loading, setLoading] = useState(true);
+
+  const loadOutputData = async () => {
+    setLoading(true);
+    try {
+      const data = await biolib.getOutputFileData("output.json");
+      setOutputFileData(data);
+    } catch (error) {
+      console.error("Error loading output data:", error);
+      setOutputFileData(null);
+    } finally {
+      setLoading(false);
+    }
+  };
+
+  useEffect(() => {
+    loadOutputData();
+  }, []);
+
   return (
     <div className="min-h-screen bg-gray-100 flex items-center justify-center">
-      <div className="text-center">
+      <div className="text-center max-w-2xl mx-auto p-8">
         <h1 className="text-4xl font-bold mb-4">
           Hello, BioLib!
         </h1>
         <p className="text-lg mb-2">
           You have successfully set up your BioLib GUI application.
         </p>
-        <p className="italic">
+        <p className="italic mb-6">
           This is a simple React template with Tailwind CSS styling.
         </p>
+
+        <div className="mt-8 p-4 bg-white rounded-lg shadow">
+          <h2 className="text-xl font-semibold mb-4">Example: Reading Output Files</h2>
+          {loading ? (
+            <p className="text-gray-500">Loading output.json...</p>
+          ) : outputFileData ? (
+            <div className="p-3 bg-gray-50 rounded text-left">
+              <pre className="text-sm">{new TextDecoder().decode(outputFileData)}</pre>
+            </div>
+          ) : (
+            <p className="text-red-500">Failed to load output.json</p>
+          )}
+        </div>
       </div>
     </div>
   );

biolib/_internal/templates/gui_template/biolib-sdk.ts
ADDED

@@ -0,0 +1,37 @@
+interface IBioLibGlobals {
+  getOutputFileData: (path: string) => Promise<Uint8Array>;
+}
+
+declare global {
+  const biolib: IBioLibGlobals;
+}
+
+// DO NOT MODIFY: Development data files are injected at build time from gui/dev-data/ folder
+const DEV_DATA_FILES: Record<string, string> = {};
+
+const devSdkBioLib: IBioLibGlobals = {
+  getOutputFileData: async (path: string): Promise<Uint8Array> => {
+    console.log(`[SDK] getOutputFileData called with path: ${path}`);
+
+    const normalizedPath = path.startsWith('/') ? path.slice(1) : path;
+
+    if (typeof DEV_DATA_FILES !== 'undefined' && normalizedPath in DEV_DATA_FILES) {
+      const base64Data = DEV_DATA_FILES[normalizedPath];
+      const binaryString = atob(base64Data);
+      const bytes = new Uint8Array(binaryString.length);
+      for (let i = 0; i < binaryString.length; i++) {
+        bytes[i] = binaryString.charCodeAt(i);
+      }
+      return bytes;
+    }
+
+    throw new Error(`File not found: ${path}. Add this file to the dev-data/ folder for local development.`);
+  },
+};
+
+const biolib: IBioLibGlobals =
+  process.env.NODE_ENV === "development"
+    ? devSdkBioLib
+    : (window as any).biolib;
+
+export default biolib;

biolib/_internal/templates/gui_template/vite-plugin-dev-data.ts
ADDED

@@ -0,0 +1,49 @@
+import type { Plugin } from 'vite';
+import fs from 'fs';
+import path from 'path';
+
+export function devDataPlugin(): Plugin {
+  let isDev = false;
+
+  return {
+    name: 'dev-data-plugin',
+    configResolved(config) {
+      isDev = config.mode === 'development';
+    },
+    transform(code: string, id: string) {
+      if (id.endsWith('biolib-sdk.ts')) {
+        let injectedCode: string;
+
+        if (isDev) {
+          const devDataDir = path.join(__dirname, 'dev-data');
+          const devDataMap: Record<string, string> = {};
+
+          if (fs.existsSync(devDataDir)) {
+            const files = fs.readdirSync(devDataDir);
+            for (const file of files) {
+              const filePath = path.join(devDataDir, file);
+              if (fs.statSync(filePath).isFile()) {
+                const content = fs.readFileSync(filePath);
+                const base64Content = content.toString('base64');
+                devDataMap[file] = base64Content;
+              }
+            }
+          }
+
+          const devDataJson = JSON.stringify(devDataMap);
+          injectedCode = code.replace(
+            "const DEV_DATA_FILES = {};",
+            `const DEV_DATA_FILES = ${devDataJson};`
+          );
+        } else {
+          injectedCode = code;
+        }
+
+        return {
+          code: injectedCode,
+          map: null
+        };
+      }
+    }
+  };
+}

biolib/_internal/templates/gui_template/vite.config.mts
CHANGED

@@ -2,7 +2,8 @@ import { defineConfig } from "vite";
 import react from "@vitejs/plugin-react";
 import tailwindcss from "@tailwindcss/vite";
 import { viteSingleFile } from "vite-plugin-singlefile";
+import { devDataPlugin } from "./gui/vite-plugin-dev-data";

 export default defineConfig({
-  plugins: [react(), tailwindcss(), viteSingleFile()],
+  plugins: [react(), tailwindcss(), devDataPlugin(), viteSingleFile()],
 });

biolib/_internal/templates/init_template/.github/workflows/biolib.yml
CHANGED

@@ -11,6 +11,11 @@ jobs:
       - name: Build
         run: docker build -t BIOLIB_REPLACE_DOCKER_TAG:latest .
      - name: Push
-        run:
+        run: |
+          if [ "$GITHUB_REF_NAME" == "main" ]; then
+            biolib push BIOLIB_REPLACE_APP_URI
+          else
+            biolib push --dev BIOLIB_REPLACE_APP_URI:latest-dev
+          fi
         env:
           BIOLIB_TOKEN: ${{ secrets.BIOLIB_TOKEN }}

biolib/_internal/utils/__init__.py
CHANGED

@@ -1,5 +1,31 @@
+import base64
 import time
 import uuid
+from fnmatch import fnmatch
+
+from biolib.biolib_binary_format.utils import LazyLoadedFile
+from biolib.typing_utils import Callable, List, Union, cast
+
+PathFilter = Union[str, Callable[[str], bool]]
+
+
+def filter_lazy_loaded_files(files: List[LazyLoadedFile], path_filter: PathFilter) -> List[LazyLoadedFile]:
+    if not (isinstance(path_filter, str) or callable(path_filter)):
+        raise Exception('Expected path_filter to be a string or a function')
+
+    if callable(path_filter):
+        return list(filter(lambda x: path_filter(x.path), files))  # type: ignore
+
+    glob_filter = cast(str, path_filter)
+
+    # since all file paths start with /, make sure filter does too
+    if not glob_filter.startswith('/'):
+        glob_filter = '/' + glob_filter
+
+    def _filter_function(file: LazyLoadedFile) -> bool:
+        return fnmatch(file.path, glob_filter)
+
+    return list(filter(_filter_function, files))


 def open_browser_window_from_notebook(url_to_open: str) -> None:
@@ -16,3 +42,17 @@ def open_browser_window_from_notebook(url_to_open: str) -> None:
     display(Javascript(f'window.open("{url_to_open}");'), display_id=display_id)
     time.sleep(1)
     update_display(Javascript(''), display_id=display_id)
+
+
+def base64_encode_string(input_str: str) -> str:
+    input_bytes = input_str.encode('utf-8')
+    base64_bytes = base64.b64encode(input_bytes)
+    base64_str = base64_bytes.decode('utf-8')
+    return base64_str
+
+
+def decode_base64_string(base64_str: str) -> str:
+    base64_bytes = base64_str.encode('utf-8')
+    input_bytes = base64.b64decode(base64_bytes)
+    input_str = input_bytes.decode('utf-8')
+    return input_str
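
The new `filter_lazy_loaded_files` helper accepts either a callable or a glob string, and prefixes the glob with `/` so it matches the absolute paths used internally. A standalone sketch of that matching behaviour using plain strings instead of the internal `LazyLoadedFile` objects (the example paths are made up):

from fnmatch import fnmatch

paths = ['/output.json', '/results/summary.csv', '/results/plots/plot1.png']

glob_filter = 'results/*.csv'
# mirror the helper above: all internal paths start with '/', so the glob must too
if not glob_filter.startswith('/'):
    glob_filter = '/' + glob_filter

print([p for p in paths if fnmatch(p, glob_filter)])  # ['/results/summary.csv']
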

biolib/_internal/utils/auth.py
ADDED

@@ -0,0 +1,46 @@
+import base64
+import binascii
+import json
+from typing import Any, Dict
+
+
+class JwtDecodeError(Exception):
+    pass
+
+
+def decode_jwt_without_checking_signature(jwt: str) -> Dict[str, Any]:
+    jwt_bytes = jwt.encode('utf-8')
+
+    try:
+        signing_input, _ = jwt_bytes.rsplit(b'.', 1)
+        header_segment, payload_segment = signing_input.split(b'.', 1)
+    except ValueError as error:
+        raise JwtDecodeError('Not enough segments') from error
+
+    try:
+        header_data = base64.urlsafe_b64decode(header_segment)
+    except (TypeError, binascii.Error) as error:
+        raise JwtDecodeError('Invalid header padding') from error
+
+    try:
+        header = json.loads(header_data)
+    except ValueError as error:
+        raise JwtDecodeError(f'Invalid header string: {error}') from error
+
+    if not isinstance(header, dict):
+        raise JwtDecodeError('Invalid header string: must be a json object')
+
+    try:
+        payload_data = base64.urlsafe_b64decode(payload_segment)
+    except (TypeError, binascii.Error) as error:
+        raise JwtDecodeError('Invalid payload padding') from error
+
+    try:
+        payload = json.loads(payload_data)
+    except ValueError as error:
+        raise JwtDecodeError(f'Invalid payload string: {error}') from error
+
+    if not isinstance(payload, dict):
+        raise JwtDecodeError('Invalid payload string: must be a json object')
+
+    return dict(header=header, payload=payload)
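
`decode_jwt_without_checking_signature` only splits the token and base64-decodes the header and payload; it performs no signature verification, so it is for inspecting claims, not for trust decisions. A small sketch with a hand-built, unsigned token (the claims are made up, and the padded base64 segments are kept so the decode succeeds):

import base64
import json

from biolib._internal.utils.auth import decode_jwt_without_checking_signature


def _b64url(data: dict) -> str:
    return base64.urlsafe_b64encode(json.dumps(data).encode('utf-8')).decode('utf-8')


# toy token: header.payload.signature, with a dummy signature segment that is never checked
token = f"{_b64url({'alg': 'none'})}.{_b64url({'exp': 1700000000})}.dummy-signature"

decoded = decode_jwt_without_checking_signature(token)
print(decoded['payload']['exp'])  # 1700000000
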

biolib/_internal/utils/job_url.py
ADDED

@@ -0,0 +1,33 @@
+import re
+from urllib.parse import urlparse
+
+import biolib.utils
+from biolib.typing_utils import Optional, Tuple
+
+
+def parse_result_id_or_url(result_id_or_url: str, default_token: Optional[str] = None) -> Tuple[str, Optional[str]]:
+    result_id_or_url = result_id_or_url.strip()
+
+    if '/' not in result_id_or_url:
+        return (result_id_or_url, default_token)
+
+    if not result_id_or_url.startswith('http://') and not result_id_or_url.startswith('https://'):
+        result_id_or_url = 'https://' + result_id_or_url
+
+    parsed_url = urlparse(result_id_or_url)
+
+    if biolib.utils.BIOLIB_BASE_URL:
+        expected_base = urlparse(biolib.utils.BIOLIB_BASE_URL)
+        if parsed_url.scheme != expected_base.scheme or parsed_url.netloc != expected_base.netloc:
+            raise ValueError(f'URL must start with {biolib.utils.BIOLIB_BASE_URL}, got: {result_id_or_url}')
+
+    pattern = r'/results?/(?P<uuid>[a-f0-9-]+)/?(?:\?token=(?P<token>[^&]+))?'
+    match = re.search(pattern, result_id_or_url, re.IGNORECASE)
+
+    if not match:
+        raise ValueError(f'URL must be in format <base_url>/results/<UUID>/?token=<token>, got: {result_id_or_url}')
+
+    uuid = match.group('uuid')
+    token = match.group('token') or default_token
+
+    return (uuid, token)
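
The new `parse_result_id_or_url` helper accepts either a bare result UUID or a shared result URL and returns a `(uuid, token)` tuple. A usage sketch, assuming the configured base URL is `https://biolib.com` (the UUID and token below are placeholders):

from biolib._internal.utils.job_url import parse_result_id_or_url

# a bare UUID is returned unchanged, with the token falling back to the default (None here)
print(parse_result_id_or_url('1a2b3c4d-0000-0000-0000-000000000000'))

# a shared result URL yields the UUID together with the embedded access token
print(parse_result_id_or_url('https://biolib.com/results/1a2b3c4d-0000-0000-0000-000000000000/?token=abc123'))
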

biolib/_runtime/runtime.py
CHANGED

@@ -37,10 +37,19 @@ class Runtime:
             return None
         return job_requested_machine

+    @staticmethod
+    def is_spot_machine_requested() -> bool:
+        job_data = Runtime._get_job_data()
+        return job_data.get('job_requested_machine_spot', False)
+
     @staticmethod
     def get_app_uri() -> str:
         return Runtime._get_job_data()['app_uri']

+    @staticmethod
+    def get_max_workers() -> int:
+        return Runtime._get_job_data()['job_reserved_machines']
+
     @staticmethod
     def get_secret(secret_name: str) -> bytes:
         assert re.match(
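
`Runtime.is_spot_machine_requested()` and `Runtime.get_max_workers()` expose the new `job_requested_machine_spot` and `job_reserved_machines` runtime fields. A sketch of how application code running inside a BioLib job might use them, importing from the internal module path shown above (the runtime job data is only available inside a job, and the scaling logic here is illustrative):

from biolib._runtime.runtime import Runtime

# size a worker pool from the machines reserved for this job,
# and leave headroom when the job was requested on a spot machine
max_workers = Runtime.get_max_workers()
if Runtime.is_spot_machine_requested():
    max_workers = max(1, max_workers - 1)

print(f'{Runtime.get_app_uri()}: running with up to {max_workers} workers')
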

biolib/_session/session.py
CHANGED

@@ -1,21 +1,23 @@
 from biolib import utils
-from biolib.
+from biolib.typing_utils import Optional
 from biolib.api.client import ApiClient, ApiClientInitDict
 from biolib.app import BioLibApp


 class Session:
-    def __init__(self, _init_dict: ApiClientInitDict) -> None:
+    def __init__(self, _init_dict: ApiClientInitDict, _experiment: Optional[str] = None) -> None:
         self._api = ApiClient(_init_dict=_init_dict)
+        self._experiment = _experiment

     @staticmethod
-    def get_session(refresh_token: str, base_url: Optional[str] = None, client_type: Optional[str] = None) -> 'Session':
+    def get_session(refresh_token: str, base_url: Optional[str] = None, client_type: Optional[str] = None, experiment: Optional[str] = None) -> 'Session':
         return Session(
             _init_dict=ApiClientInitDict(
                 refresh_token=refresh_token,
                 base_url=base_url or utils.load_base_url_from_env(),
                 client_type=client_type,
-            )
+            ),
+            _experiment=experiment,
         )

     def load(self, uri: str, suppress_version_warning: bool = False) -> BioLibApp:
@@ -39,4 +41,4 @@ class Session:
         >>> app = biolib.load('https://biolib.com/biolib/myapp/')
         >>> result = app.cli('--help')
         """
-        return BioLibApp(uri=uri, _api_client=self._api, suppress_version_warning=suppress_version_warning)
+        return BioLibApp(uri=uri, _api_client=self._api, suppress_version_warning=suppress_version_warning, _experiment=self._experiment)
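
`Session.get_session` now accepts an `experiment` argument that is stored on the session and forwarded to apps loaded through it. A usage sketch using the internal module path from the diff (the token and names are placeholders; a public wrapper for creating sessions may exist):

from biolib._session.session import Session

session = Session.get_session(refresh_token='<api-token>', experiment='my-experiment')
app = session.load('biolib/myapp')  # the experiment name is forwarded to the loaded app, per the diff above
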