pybiolib 1.2.1295__py3-none-any.whl → 1.2.1304__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.

This version of pybiolib might be problematic.

biolib/compute_node/job_worker/executors/docker_executor.py

@@ -367,9 +367,11 @@ class DockerExecutor:
             )

             logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Getting IPs for proxies...')
+
+            networks_to_connect = []
             for proxy in self._options['remote_host_proxies']:
-                proxy_ip = proxy.get_ip_address_on_network(internal_network)
                 if proxy.is_app_caller_proxy:
+                    proxy_ip = proxy.get_ip_address_on_network(internal_network)
                     logger_no_user_data.debug('Found app caller proxy, setting both base URLs in compute container')
                     environment_vars.update(
                         {
@@ -383,7 +385,11 @@ class DockerExecutor:
                         }
                     )
                 else:
-                    extra_hosts[proxy.hostname] = proxy_ip
+                    extra_hosts.update(proxy.get_hostname_to_ip_mapping())
+
+                    for network in proxy.get_remote_host_networks():
+                        if network != internal_network:
+                            networks_to_connect.append(network)

             logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Constructing container args...')
             create_container_args = {
@@ -436,6 +442,17 @@ class DockerExecutor:

             logger_no_user_data.debug(f'Job "{job_uuid}" initializing Docker container. Creating container...')
             self._docker_container = docker_client.containers.create(**create_container_args)
+
+            if networks_to_connect:
+                network_connection_start = time.time()
+                for network in networks_to_connect:
+                    network.connect(self._docker_container.id)
+                    logger_no_user_data.debug(f'Connected app container to network {network.name}')
+                network_connection_time = time.time() - network_connection_start
+                logger_no_user_data.debug(
+                    f'Connected app container to {len(networks_to_connect)} networks in {network_connection_time:.2f}s'
+                )
+
             logger_no_user_data.debug(f'Job "{job_uuid}" finished initializing Docker container.')
         except Exception as exception:
             raise ComputeProcessException(
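
The DockerExecutor hunks switch to a two-step networking pattern: the job container is created attached only to the internal network, and each remote-host network is connected after creation. A minimal docker-py sketch of the same pattern (the image and network names are illustrative, not pybiolib's):

    import time
    import docker  # docker-py, the SDK pybiolib drives via BiolibDockerClient

    client = docker.from_env()

    # Stand-ins for the job's internal network and one remote-host network
    internal = client.networks.create('job-internal', driver='bridge', internal=True)
    remote_host_network = client.networks.create('job-remote-host-0', driver='bridge', internal=True)

    # Step 1: create the container attached only to the internal network
    container = client.containers.create('alpine', command='sleep 30', network=internal.name)

    # Step 2: attach any extra networks after creation
    start = time.time()
    remote_host_network.connect(container.id)
    print(f'Connected 1 extra network in {time.time() - start:.2f}s')
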
biolib/compute_node/job_worker/job_worker.py

@@ -1,44 +1,56 @@
 import io
 import json
-import socket
+import multiprocessing
+import os
 import shlex
+import signal
+import socket
 import sys
 import tempfile
 import zipfile
-from time import time
 from queue import Queue
-import multiprocessing
-import os
-import signal
+from time import time
 from types import FrameType

 from docker.models.networks import Network  # type: ignore
+from docker.types import IPAMConfig, IPAMPool

-from biolib._internal.http_client import HttpClient
-from biolib.biolib_binary_format.stdout_and_stderr import StdoutAndStderr
-from biolib.compute_node.job_worker.job_legacy_input_wait_timeout_thread import JobLegacyInputWaitTimeout
-from biolib.compute_node.job_worker.job_storage import JobStorage
-from biolib.compute_node.job_worker.large_file_system import LargeFileSystem
-from biolib.biolib_errors import DockerContainerNotFoundDuringExecutionException, BioLibError, \
-    StorageDownloadFailed
-from biolib.compute_node.job_worker.job_max_runtime_timer_thread import JobMaxRuntimeTimerThread
-from biolib.compute_node.remote_host_proxy import RemoteHostProxy
-from biolib.typing_utils import Optional, List, Dict
 from biolib import utils
-from biolib.biolib_api_client import ModuleEnvironment, CreatedJobDict, JobWrapper, Module, AppVersionOnJob, \
-    BiolibApiClient, RemoteHost
+from biolib._internal.http_client import HttpClient
+from biolib.biolib_api_client import (
+    AppVersionOnJob,
+    BiolibApiClient,
+    CreatedJobDict,
+    JobWrapper,
+    Module,
+    ModuleEnvironment,
+)
 from biolib.biolib_api_client.biolib_job_api import BiolibJobApi
+from biolib.biolib_binary_format import (
+    InMemoryIndexableBuffer,
+    ModuleInput,
+    ModuleOutputV2,
+    SavedJob,
+    SystemException,
+    SystemStatusUpdate,
+)
+from biolib.biolib_binary_format.stdout_and_stderr import StdoutAndStderr
 from biolib.biolib_docker_client import BiolibDockerClient
+from biolib.biolib_errors import BioLibError, DockerContainerNotFoundDuringExecutionException, StorageDownloadFailed
 from biolib.biolib_logging import logger, logger_no_user_data
 from biolib.compute_node.job_worker.executors import DockerExecutor
 from biolib.compute_node.job_worker.executors.types import LocalExecutorOptions, StatusUpdate
-from biolib.compute_node.socker_listener_thread import SocketListenerThread
-from biolib.compute_node.socket_sender_thread import SocketSenderThread
+from biolib.compute_node.job_worker.job_legacy_input_wait_timeout_thread import JobLegacyInputWaitTimeout
+from biolib.compute_node.job_worker.job_max_runtime_timer_thread import JobMaxRuntimeTimerThread
+from biolib.compute_node.job_worker.job_storage import JobStorage
+from biolib.compute_node.job_worker.large_file_system import LargeFileSystem
 from biolib.compute_node.job_worker.mappings import Mappings, path_without_first_folder
 from biolib.compute_node.job_worker.utils import ComputeProcessException, log_disk_and_memory_usage_info
-from biolib.compute_node.utils import get_package_type, SystemExceptionCodes, SystemExceptionCodeMap
-from biolib.biolib_binary_format import SavedJob, SystemStatusUpdate, ModuleInput, SystemException, \
-    ModuleOutputV2, InMemoryIndexableBuffer
+from biolib.compute_node.remote_host_proxy import RemoteHostMapping, RemoteHostProxy, get_static_ip_from_network
+from biolib.compute_node.socker_listener_thread import SocketListenerThread
+from biolib.compute_node.socket_sender_thread import SocketSenderThread
+from biolib.compute_node.utils import SystemExceptionCodeMap, SystemExceptionCodes, get_package_type
+from biolib.typing_utils import Dict, List, Optional

 SOCKET_HOST = '127.0.0.1'

@@ -199,6 +211,7 @@ class JobWorker:
             executor.cleanup()

         proxy_count = len(self._remote_host_proxies)
+        cleaned_networks = set()
         if proxy_count > 0:
             logger_no_user_data.debug('Cleaning up proxies...')
             proxy_cleanup_start_time = time()
@@ -210,11 +223,20 @@
                     logger_no_user_data.error('Failed to clean up remote host proxy')
                     logger.error(exception)

+                for network in proxy.get_remote_host_networks():
+                    try:
+                        self._cleanup_network(network)
+                        cleaned_networks.add(network.id)
+                    except Exception as exception:  # pylint: disable=broad-except
+                        logger_no_user_data.error(f'Failed to clean up network {network.name}')
+                        logger.error(exception)
+
             self._remote_host_proxies = []
             logger_no_user_data.debug(f'Cleaned up {proxy_count} proxies in {time() - proxy_cleanup_start_time}')

         logger_no_user_data.debug('Cleaning up networks...')
-        self._cleanup_network(self._internal_network)
+        if self._internal_network and self._internal_network.id not in cleaned_networks:
+            self._cleanup_network(self._internal_network)
         self._internal_network = None
         logger_no_user_data.debug('Cleaned up networks...')

@@ -265,13 +287,6 @@
         app_version = job['app_version']
         job_id = job['public_id']
         remote_hosts = app_version['remote_hosts']
-        if utils.IS_RUNNING_IN_CLOUD:
-            remote_hosts.append(
-                {
-                    'hostname': 'AppCallerProxy',
-                },
-            )
-
         docker_client = BiolibDockerClient.get_docker_client()
         try:
             self._internal_network = docker_client.networks.create(
@@ -288,7 +303,7 @@
             ) from exception

         if len(remote_hosts) > 0:
-            logger_no_user_data.debug(f'Job "{job_id}" starting proxies for remote hosts: {remote_hosts}')
+            logger_no_user_data.debug(f'Job "{job_id}" starting proxy for remote hosts: {remote_hosts}')
            try:
                hostname_to_ports: Dict[str, List[int]] = {}
                for remote_host in remote_hosts:
@@ -304,15 +319,44 @@
                else:
                    hostname_to_ports[hostname] = [port]

+                remote_host_mappings: List[RemoteHostMapping] = []
                for hostname, ports in hostname_to_ports.items():
+                    network_index = len(remote_host_mappings)
+                    subnet = f'172.28.{network_index}.0/24'
+
+                    ipam_pool = IPAMPool(subnet=subnet)
+                    ipam_config = IPAMConfig(pool_configs=[ipam_pool])
+
+                    network = docker_client.networks.create(
+                        name=f'biolib-remote-host-network-{job_id}-{network_index}',
+                        internal=True,
+                        driver='bridge',
+                        ipam=ipam_config,
+                    )
+
+                    static_ip = get_static_ip_from_network(network, offset=2)
+
+                    mapping = RemoteHostMapping(
+                        hostname=hostname,
+                        ports=ports,
+                        network=network,
+                        static_ip=static_ip,
+                    )
+                    remote_host_mappings.append(mapping)
+                    logger_no_user_data.debug(
+                        f'Created network {network.name} with static IP {static_ip} for host {hostname}'
+                    )
+
+                if remote_host_mappings:
                    remote_host_proxy = RemoteHostProxy(
-                        remote_host=RemoteHost(hostname=hostname),
-                        internal_network=self._internal_network,
+                        remote_host_mappings=remote_host_mappings,
                        job_id=job_id,
-                        ports=ports,
+                        app_caller_network=None,
                    )
                    remote_host_proxy.start()
                    self._remote_host_proxies.append(remote_host_proxy)
+                    num_hosts = len(remote_host_mappings)
+                    logger_no_user_data.debug(f'Started single proxy container for {num_hosts} remote hosts')

            except Exception as exception:
                raise ComputeProcessException(
@@ -322,7 +366,23 @@
                    may_contain_user_data=False
                ) from exception

-        logger_no_user_data.debug(f'Job "{job_id}" startup of remote host proxies completed')
+        if utils.IS_RUNNING_IN_CLOUD:
+            try:
+                app_caller_proxy = RemoteHostProxy(
+                    remote_host_mappings=[],
+                    job_id=job_id,
+                    app_caller_network=self._internal_network,
+                )
+                app_caller_proxy.start()
+                self._remote_host_proxies.append(app_caller_proxy)
+                logger_no_user_data.debug('Started app caller proxy')
+            except Exception as exception:
+                raise ComputeProcessException(
+                    exception,
+                    SystemExceptionCodes.FAILED_TO_START_REMOTE_HOST_PROXIES.value,
+                    self.send_system_exception,
+                    may_contain_user_data=False
+                ) from exception

    def _run_app_version(
        self,
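
The JobWorker now carves one /24 subnet per remote host out of 172.28.0.0/16 and reserves a predictable proxy address in each. A self-contained sketch of the addressing arithmetic, mirroring the f'172.28.{network_index}.0/24' scheme and get_static_ip_from_network; the offset of 2 assumes Docker's convention of taking .1 for the network gateway:

    import ipaddress

    def subnet_for_host(network_index: int) -> str:
        # One /24 per remote host, indexed in allocation order
        return f'172.28.{network_index}.0/24'

    def static_ip(subnet_str: str, offset: int = 2) -> str:
        # Skip the network address (.0) and the assumed Docker gateway (.1)
        subnet = ipaddress.ip_network(subnet_str, strict=False)
        return str(subnet.network_address + offset)

    assert static_ip(subnet_for_host(0)) == '172.28.0.2'
    assert static_ip(subnet_for_host(3)) == '172.28.3.2'
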
biolib/compute_node/remote_host_proxy.py

@@ -1,5 +1,6 @@
 import base64
 import io
+import ipaddress
 import tarfile
 import time
 from urllib.parse import urlparse
@@ -19,52 +20,73 @@ from biolib.compute_node.webserver.proxy_utils import get_biolib_nginx_proxy_ima
 from biolib.typing_utils import Dict, List, Optional


+def get_static_ip_from_network(network: Network, offset: int = 2) -> str:
+    ipam_config = network.attrs['IPAM']['Config']
+    if not ipam_config:
+        raise BioLibError(f'Network {network.name} has no IPAM configuration')
+
+    subnet_str = ipam_config[0]['Subnet']
+    subnet = ipaddress.ip_network(subnet_str, strict=False)
+
+    static_ip = str(subnet.network_address + offset)
+
+    return static_ip
+
+
 # Prepare for remote hosts with specified port
 class RemoteHostExtended(RemoteHost):
     ports: List[int]


+class RemoteHostMapping:
+    def __init__(self, hostname: str, ports: List[int], network: Network, static_ip: str):
+        self.hostname = hostname
+        self.ports = ports
+        self.network = network
+        self.static_ip = static_ip
+
+
 class RemoteHostProxy:
     def __init__(
         self,
-        remote_host: RemoteHost,
-        internal_network: Optional[Network],
+        remote_host_mappings: List[RemoteHostMapping],
         job_id: str,
-        ports: List[int],
+        app_caller_network: Optional[Network] = None,
     ):
-        self.is_app_caller_proxy = remote_host['hostname'] == 'AppCallerProxy'
-        self._remote_host: RemoteHostExtended = RemoteHostExtended(hostname=remote_host['hostname'], ports=ports)
-        self._internal_network: Optional[Network] = internal_network
+        self._remote_host_mappings = remote_host_mappings
+        self._app_caller_network = app_caller_network
+        self.is_app_caller_proxy = app_caller_network is not None

         if not job_id:
             raise Exception('RemoteHostProxy missing argument "job_id"')

-        self._name = f'biolib-remote-host-proxy-{job_id}-{self.hostname}'
+        suffix = '-AppCallerProxy' if app_caller_network else ''
+        self._name = f'biolib-remote-host-proxy-{job_id}{suffix}'
         self._job_uuid = job_id
         self._container: Optional[Container] = None
         self._docker = BiolibDockerClient().get_docker_client()

-    @property
-    def hostname(self) -> str:
-        return self._remote_host['hostname']
+    def get_hostname_to_ip_mapping(self) -> Dict[str, str]:
+        return {mapping.hostname: mapping.static_ip for mapping in self._remote_host_mappings}
+
+    def get_remote_host_networks(self) -> List[Network]:
+        networks = [mapping.network for mapping in self._remote_host_mappings]
+        return networks

     def get_ip_address_on_network(self, network: Network) -> str:
         if not self._container:
-            raise Exception('RemoteHostProxy not yet started')
+            raise BioLibError('RemoteHostProxy not yet started')

         container_networks = self._container.attrs['NetworkSettings']['Networks']
         if network.name in container_networks:
             ip_address: str = container_networks[network.name]['IPAddress']
+            if not ip_address:
+                raise BioLibError(f'No IP address found for network {network.name}')
             return ip_address

-        raise Exception(f'RemoteHostProxy not connected to network {network.name}')
+        raise BioLibError(f'RemoteHostProxy not connected to network {network.name}')

     def start(self) -> None:
-        # TODO: Implement nice error handling in this method
-
-        upstream_server_name = self._remote_host['hostname']
-        upstream_server_ports = self._remote_host['ports']
-
         docker = BiolibDockerClient.get_docker_client()

         networking_config: Optional[Dict[str, EndpointConfig]] = (
@@ -97,13 +119,19 @@ class RemoteHostProxy:
         if not self._container or not self._container.id:
             raise BioLibError(f'Exceeded re-try limit for creating container {self._name}')

-        self._write_nginx_config_to_container(
-            upstream_server_name,
-            upstream_server_ports,
-        )
+        for mapping in self._remote_host_mappings:
+            mapping.network.connect(self._container.id, ipv4_address=mapping.static_ip)
+            logger_no_user_data.debug(
+                f'Connected proxy to network {mapping.network.name} with static IP {mapping.static_ip}'
+            )
+
+        if self._app_caller_network:
+            self._app_caller_network.connect(self._container.id)
+            logger_no_user_data.debug(
+                f'Connected app caller proxy to network {self._app_caller_network.name}'
+            )

-        if self._internal_network:
-            self._internal_network.connect(self._container.id)
+        self._write_nginx_config_to_container()

         self._container.start()

@@ -129,8 +157,7 @@ class RemoteHostProxy:
         if self._container:
             self._container.remove(force=True)

-
-    def _write_nginx_config_to_container(self, upstream_server_name: str, upstream_server_ports: List[int]) -> None:
+    def _write_nginx_config_to_container(self) -> None:
         if not self._container:
             raise Exception('RemoteHostProxy container not defined when attempting to write NGINX config')

@@ -378,25 +405,34 @@ http {{
 }}
 """
         else:
+            port_to_mappings: Dict[int, List[RemoteHostMapping]] = {}
+            for mapping in self._remote_host_mappings:
+                for port in mapping.ports:
+                    if port not in port_to_mappings:
+                        port_to_mappings[port] = []
+                    port_to_mappings[port].append(mapping)
+
             nginx_config = """
events {}
error_log /dev/stdout info;
stream {
    resolver 127.0.0.11 valid=30s;"""
-            for idx, upstream_server_port in enumerate(upstream_server_ports):
+
+            for port, mappings in port_to_mappings.items():
                 nginx_config += f"""
-    map "" $upstream_{idx} {{
-        default {upstream_server_name}:{upstream_server_port};
-    }}
+    map $server_addr $backend_{port} {{"""
+                for mapping in mappings:
+                    nginx_config += f'\n        {mapping.static_ip} {mapping.hostname}:{port};'

+                nginx_config += f"""
+    }}
     server {{
-        listen {self._remote_host['ports'][idx]};
-        proxy_pass $upstream_{idx};
+        listen 0.0.0.0:{port};
+        proxy_pass $backend_{port};
     }}
-
     server {{
-        listen {self._remote_host['ports'][idx]} udp;
-        proxy_pass $upstream_{idx};
+        listen 0.0.0.0:{port} udp;
+        proxy_pass $backend_{port};
     }}"""

         nginx_config += """
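
The rewritten template routes on the proxy-local address a client connects to: the compute container resolves each remote hostname to the proxy's static IP on that host's network (via extra_hosts), and nginx's $server_addr variable then selects the matching upstream. For two hypothetical hosts sharing port 443, the rendered stream config would look roughly like this:

    events {}
    error_log /dev/stdout info;
    stream {
        resolver 127.0.0.11 valid=30s;
        map $server_addr $backend_443 {
            172.28.0.2 api.example.org:443;
            172.28.1.2 data.example.net:443;
        }

        server {
            listen 0.0.0.0:443;
            proxy_pass $backend_443;
        }
        server {
            listen 0.0.0.0:443 udp;
            proxy_pass $backend_443;
        }
    }

Because each mapping gets its own static IP from a dedicated /24, the $server_addr keys cannot collide even when several hosts share a port.
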
{pybiolib-1.2.1295 → pybiolib-1.2.1304}.dist-info/METADATA

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: pybiolib
-Version: 1.2.1295
+Version: 1.2.1304
 Summary: BioLib Python Client
 License: MIT
 License-File: LICENSE
{pybiolib-1.2.1295 → pybiolib-1.2.1304}.dist-info/RECORD

@@ -115,19 +115,19 @@ biolib/compute_node/job_worker/cache_state.py,sha256=MwjSRzcJJ_4jybqvBL4xdgnDYSI
 biolib/compute_node/job_worker/cache_types.py,sha256=ajpLy8i09QeQS9dEqTn3T6NVNMY_YsHQkSD5nvIHccQ,818
 biolib/compute_node/job_worker/docker_image_cache.py,sha256=ansHIkJIq_EMW1nZNlW-RRLVVeKWTbzNICYaOHpKiRE,7460
 biolib/compute_node/job_worker/executors/__init__.py,sha256=bW6t1qi3PZTlHM4quaTLa8EI4ALTCk83cqcVJfJfJfE,145
-biolib/compute_node/job_worker/executors/docker_executor.py,sha256=bEgk7M96T8JsFKVlGm200GwP_iBOtQQz2HSmeIJAjIQ,31988
+biolib/compute_node/job_worker/executors/docker_executor.py,sha256=VEeyBMKmIWEDRpHHv_wAkCVL1wBgTfGNT461J0SHG5U,32794
 biolib/compute_node/job_worker/executors/docker_types.py,sha256=Hh8SwQYBLdIMGWgITwD2fzoo_sbW2ESx1G8j9Zq2n30,216
 biolib/compute_node/job_worker/executors/tars/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 biolib/compute_node/job_worker/executors/types.py,sha256=dlp7p8KKkd19nC68o-RuAzRBhpdYFMWKg-__LFjvscs,1611
 biolib/compute_node/job_worker/job_legacy_input_wait_timeout_thread.py,sha256=_cvEiZbOwfkv6fYmfrvdi_FVviIEYr_dSClQcOQaUWM,1198
 biolib/compute_node/job_worker/job_max_runtime_timer_thread.py,sha256=K_xgz7IhiIjpLlXRk8sqaMyLoApcidJkgu29sJX0gb8,1174
 biolib/compute_node/job_worker/job_storage.py,sha256=J6B5wkBo3cqmT1AV-qJnm2Lt9Qmcp3qn-1AabjO9m60,4686
-biolib/compute_node/job_worker/job_worker.py,sha256=IP9fkPD_sK-H1ulZ_xFzzcmJTm8LNF_F2cWiKTToXuk,28681
+biolib/compute_node/job_worker/job_worker.py,sha256=mDx0lUAV57MZzeWjo0xi1f1ZThIPk4tR7EsyTsmjYTc,31178
 biolib/compute_node/job_worker/large_file_system.py,sha256=Xe_LILVfTD9LXb-0HwLqGsp1fWiI-pU55BqgJ-6t8-0,10357
 biolib/compute_node/job_worker/mappings.py,sha256=Z48Kg4nbcOvsT2-9o3RRikBkqflgO4XeaWxTGz-CNvI,2499
 biolib/compute_node/job_worker/utilization_reporter_thread.py,sha256=7tm5Yk9coqJ9VbEdnO86tSXI0iM0omwIyKENxdxiVXk,8575
 biolib/compute_node/job_worker/utils.py,sha256=wgxcIA8yAhUPdCwyvuuJ0JmreyWmmUoBO33vWtG60xg,1282
-biolib/compute_node/remote_host_proxy.py,sha256=CD6k3Jvf2iPuL1KZ-Xq_dUQBsO8qE7qeY9iMw2YIG-g,16940
+biolib/compute_node/remote_host_proxy.py,sha256=YqfyVxQObqNtwfpkBOs1nndWQMxm76y3oq-Yl2e2uoU,18425
 biolib/compute_node/socker_listener_thread.py,sha256=T5_UikA3MB9bD5W_dckYLPTgixh72vKUlgbBvj9dbM0,1601
 biolib/compute_node/socket_sender_thread.py,sha256=YgamPHeUm2GjMFGx8qk-99WlZhEs-kAb3q_2O6qByig,971
 biolib/compute_node/utils.py,sha256=fvdbetPKMdfHkPqNZRw6eln_i13myu-n8tuceTUcfPU,4913
@@ -158,8 +158,8 @@ biolib/utils/cache_state.py,sha256=u256F37QSRIVwqKlbnCyzAX4EMI-kl6Dwu6qwj-Qmag,3
 biolib/utils/multipart_uploader.py,sha256=XvGP1I8tQuKhAH-QugPRoEsCi9qvbRk-DVBs5PNwwJo,8452
 biolib/utils/seq_util.py,sha256=rImaghQGuIqTVWks6b9P2yKuN34uePUYPUFW_Wyoa4A,6737
 biolib/utils/zip/remote_zip.py,sha256=0wErYlxir5921agfFeV1xVjf29l9VNgGQvNlWOlj2Yc,23232
-pybiolib-1.2.1295.dist-info/METADATA,sha256=URIKfpKORBz-C5Wvjq34LuMh7unEverSx9TMzQzMFhE,1644
-pybiolib-1.2.1295.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
-pybiolib-1.2.1295.dist-info/entry_points.txt,sha256=p6DyaP_2kctxegTX23WBznnrDi4mz6gx04O5uKtRDXg,42
-pybiolib-1.2.1295.dist-info/licenses/LICENSE,sha256=F2h7gf8i0agDIeWoBPXDMYScvQOz02pAWkKhTGOHaaw,1067
-pybiolib-1.2.1295.dist-info/RECORD,,
+pybiolib-1.2.1304.dist-info/METADATA,sha256=9hWDP_mnzSSpALZigTvNoptUNQcEbYdCiasZlR8z4a8,1644
+pybiolib-1.2.1304.dist-info/WHEEL,sha256=zp0Cn7JsFoX2ATtOhtaFYIiE2rmFAD4OcMhtUki8W3U,88
+pybiolib-1.2.1304.dist-info/entry_points.txt,sha256=p6DyaP_2kctxegTX23WBznnrDi4mz6gx04O5uKtRDXg,42
+pybiolib-1.2.1304.dist-info/licenses/LICENSE,sha256=F2h7gf8i0agDIeWoBPXDMYScvQOz02pAWkKhTGOHaaw,1067
+pybiolib-1.2.1304.dist-info/RECORD,,