pybiolib 1.2.846__py3-none-any.whl → 1.2.856__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
biolib/__init__.py CHANGED
@@ -11,7 +11,7 @@ from biolib.biolib_errors import BioLibError
  from biolib.biolib_logging import logger as _logger, logger_no_user_data as _logger_no_user_data
  from biolib.experiments.experiment import Experiment
  from biolib.biolib_api_client import BiolibApiClient as _BioLibApiClient, App
- from biolib.jobs import Job as _Job
+ from biolib.jobs.job import Result as _Result
  from biolib import user as _user
  from biolib.typing_utils import List, Optional, cast as _cast
  from biolib._data_record.data_record import DataRecord as _DataRecord
@@ -82,7 +82,7 @@ def search(
  return apps


- def get_job(job_id: str, job_token: Optional[str] = None) -> _Job:
+ def get_job(job_id: str, job_token: Optional[str] = None) -> _Result:
  r"""Get a job by its ID.

  Args:
@@ -99,10 +99,10 @@ def get_job(job_id: str, job_token: Optional[str] = None) -> _Job:
  >>> # Access shared job
  >>> job = biolib.get_job('abc123', job_token='xyz789')
  """
- return _Job.create_from_uuid(uuid=job_id, auth_token=job_token)
+ return _Result.create_from_uuid(uuid=job_id, auth_token=job_token)


- def get_result(result_id: str, result_token: Optional[str] = None) -> _Job:
+ def get_result(result_id: str, result_token: Optional[str] = None) -> _Result:
  r"""Get a result by its ID.

  Args:
@@ -119,7 +119,7 @@ def get_result(result_id: str, result_token: Optional[str] = None) -> _Job:
  >>> # Access shared result
  >>> result = biolib.get_result('abc123', result_token='xyz789')
  """
- return _Job.create_from_uuid(uuid=result_id, auth_token=result_token)
+ return _Result.create_from_uuid(uuid=result_id, auth_token=result_token)


  def get_data_record(uri: str) -> _DataRecord:
@@ -138,7 +138,7 @@ def get_data_record(uri: str) -> _DataRecord:
  return _DataRecord.get_by_uri(uri)


- def fetch_jobs(count: int = 25, status: Optional[str] = None) -> List[_Job]:
+ def fetch_jobs(count: int = 25, status: Optional[str] = None) -> List[_Result]:
  r"""Fetch a list of jobs from the server.

  Args:
@@ -156,7 +156,7 @@ def fetch_jobs(count: int = 25, status: Optional[str] = None) -> List[_Job]:
  >>> # Get last 100 jobs of any status
  >>> all_jobs = biolib.fetch_jobs(100)
  """
- return _Job.fetch_jobs(count, status)
+ return _Result.fetch_jobs(count, status)


  def fetch_data_records(uri: Optional[str] = None, count: Optional[int] = None) -> List[_DataRecord]:
@@ -216,7 +216,7 @@ def show_jobs(count: int = 25) -> None:
  >>> biolib.show_jobs() # Show last 25 jobs
  >>> biolib.show_jobs(100) # Show last 100 jobs
  """
- _Job.show_jobs(count=count)
+ _Result.show_jobs(count=count)


  def show_experiments(count: int = 25) -> None:
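
The hunks above change the public helpers biolib.get_job, biolib.get_result, biolib.fetch_jobs and biolib.show_jobs to return the renamed Result class. A minimal usage sketch of the updated API, based on the docstring examples in this diff; the ID and token values are placeholders:

    import biolib

    # Placeholder ID/token taken from the docstring examples above.
    result = biolib.get_result('abc123', result_token='xyz789')
    job = biolib.get_job('abc123')  # same return type as get_result in this release

    if result.is_pending():
        result.stream_logs()  # follow logs until the run completes
    print(result.list_output_files('*.pdb'))  # glob-filter the output files

    recent = biolib.fetch_jobs(100)  # last 100 runs, now typed as List[Result]
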
biolib/_internal/templates/init_template/.gitignore ADDED
@@ -0,0 +1,2 @@
+ *__pycache__*
+ *.pyc
biolib/app/app.py CHANGED
@@ -16,7 +16,7 @@ from biolib.biolib_errors import BioLibError, JobResultNonZeroExitCodeError
  from biolib.biolib_logging import logger
  from biolib.compute_node.job_worker.job_worker import JobWorker
  from biolib.experiments.experiment import Experiment
- from biolib.jobs import Job
+ from biolib.jobs.job import Result
  from biolib.typing_utils import Dict, Optional
  from biolib.utils.app_uri import parse_app_uri
  from biolib._runtime.runtime import Runtime
@@ -75,7 +75,7 @@ class BioLibApp:
  temporary_client_secrets: Optional[Dict[str, str]] = None,
  check: bool = False,
  stream_logs: bool = False,
- ) -> Job:
+ ) -> Result:
  if experiment_id and experiment:
  raise ValueError('Only one of experiment_id and experiment can be specified')

@@ -100,7 +100,7 @@ class BioLibApp:

  return self._run_locally(module_input_serialized)

- job = Job._start_job_in_cloud( # pylint: disable=protected-access
+ job = Result._start_job_in_cloud( # pylint: disable=protected-access
  app_uri=self._app_uri,
  app_version_uuid=self._app_version['public_id'],
  experiment_id=experiment_id,
@@ -225,12 +225,12 @@ Example: "app.cli('--help')"
  )
  return module_input_serialized

- def _run_locally(self, module_input_serialized: bytes) -> Job:
+ def _run_locally(self, module_input_serialized: bytes) -> Result:
  job_dict = BiolibJobApi.create(
  app_version_id=self._app_version['public_id'],
  app_resource_name_prefix=parse_app_uri(self._app_uri)['resource_name_prefix'],
  )
- job = Job(job_dict)
+ job = Result(job_dict)

  try:
  BiolibJobApi.update_state(job.id, JobState.IN_PROGRESS)
@@ -243,7 +243,7 @@ Example: "app.cli('--help')"

  return job

- def run(self, **kwargs) -> Job:
+ def run(self, **kwargs) -> Result:
  args = []
  biolib_kwargs = {}
  for key, value in kwargs.items():
@@ -273,5 +273,5 @@ Example: "app.cli('--help')"

  return self.cli(args, **biolib_kwargs)

- def start(self, **kwargs) -> Job:
+ def start(self, **kwargs) -> Result:
  return self.run(biolib_blocking=False, **kwargs)
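
With these changes BioLibApp.cli, run and start are all annotated to return the new Result type. A minimal sketch of how the return value is typically consumed, assuming the standard biolib.load entry point; the app URI and the sequence argument are placeholders for whatever the target app accepts:

    import biolib

    app = biolib.load('author_name/app_name')  # placeholder app URI

    result = app.run(sequence='MSVKVAVLG')     # blocking run, returns a Result
    pending = app.start(sequence='MSVKVAVLG')  # non-blocking, also returns a Result

    print(pending.is_finished())               # poll the non-blocking run
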
biolib/cli/init.py CHANGED
@@ -29,7 +29,8 @@ def init() -> None:

  try:
  # First pass: check for conflicts
- for root, _, filenames in os.walk(template_dir):
+ for root, dirs, filenames in os.walk(template_dir):
+ dirs[:] = [d for d in dirs if '__pycache__' not in d]
  relative_dir = os.path.relpath(root, template_dir)
  destination_dir = cwd if relative_dir == '.' else os.path.join(cwd, relative_dir)
  for filename in filenames:
@@ -48,7 +49,8 @@ def init() -> None:
  replace_app_uri = app_uri if app_uri else 'PUT_APP_URI_HERE'

  # Second pass: copy files (only if no conflicts)
- for root, _, filenames in os.walk(template_dir):
+ for root, dirs, filenames in os.walk(template_dir):
+ dirs[:] = [d for d in dirs if '__pycache__' not in d]
  relative_dir = os.path.relpath(root, template_dir)
  destination_dir = os.path.join(cwd, relative_dir)
  os.makedirs(destination_dir, exist_ok=True)
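
Both walk loops now assign to the dirs slice instead of ignoring it. Because os.walk recurses into the same list object it yields, pruning it in place stops the walk from ever descending into __pycache__ directories, which is why the template copy no longer picks up compiled artifacts. A self-contained sketch of the pattern; the template_dir path is a placeholder:

    import os

    def iter_template_files(template_dir: str):
        """Yield relative file paths under template_dir, skipping __pycache__ subtrees."""
        for root, dirs, filenames in os.walk(template_dir):
            # In-place slice assignment mutates the list os.walk will recurse into,
            # so pruned directories (and their *.pyc contents) are never visited.
            dirs[:] = [d for d in dirs if '__pycache__' not in d]
            for filename in filenames:
                yield os.path.relpath(os.path.join(root, filename), template_dir)

    for path in iter_template_files('/tmp/init_template'):  # placeholder directory
        print(path)
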
biolib/jobs/job.py CHANGED
@@ -30,8 +30,8 @@ from biolib.utils import IS_RUNNING_IN_NOTEBOOK
  from biolib.utils.app_uri import parse_app_uri


- class Job:
- # Columns to print in table when showing Job
+ class Result:
+ # Columns to print in table when showing Result
  table_columns_to_row_map = OrderedDict(
  {
  'ID': {'key': 'uuid', 'params': {'width': 36}},
@@ -54,7 +54,7 @@ class Job:
  self._cached_input_arguments: Optional[List[str]] = None

  def __str__(self):
- return f"Job for {self._job_dict['app_uri']} created at {self._job_dict['created_at']} ({self._uuid})"
+ return f"Result of {self._job_dict['app_uri']} created at {self._job_dict['created_at']} ({self._uuid})"

  def __repr__(self):
  # Get job status and shareable link
@@ -134,20 +134,20 @@ class Job:
  return bool(self._job_dict['ended_at'])

  def is_pending(self) -> bool:
- """Returns whether the job is in a pending state.
+ """Returns whether the result is in a pending state.

- A job is considered pending if it's not finished yet.
- The job state is re-fetched when this method is called.
+ A result is considered pending if it's not finished yet.
+ The result state is re-fetched when this method is called.

  Returns:
- bool: True if the job is in a pending state, False otherwise.
+ bool: True if the result is in a pending state, False otherwise.

  Example::
- >>> job = biolib.get_job("job_id")
- >>> if job.is_pending():
- >>> print("Job is still running")
+ >>> result = biolib.get_result("result_id")
+ >>> if result.is_pending():
+ >>> print("Result is still running")
  >>> else:
- >>> print("Job has finished")
+ >>> print("Result has finished")
  """
  return not self.is_finished()

@@ -170,7 +170,7 @@ class Job:
  self,
  path_filter: Optional[PathFilter] = None,
  ) -> List[LazyLoadedFile]:
- """List output files from the job.
+ """List output files from the result.

  Args:
  path_filter (PathFilter, optional): Filter to apply to the output files.
@@ -180,10 +180,10 @@ class Job:
  List[LazyLoadedFile]: List of output files.

  Example::
- >>> job = biolib.get_job("job_id")
- >>> output_files = job.list_output_files()
+ >>> result = biolib.get_result("result_id")
+ >>> output_files = result.list_output_files()
  >>> # Filter files with a glob pattern
- >>> output_files = job.list_output_files("*.pdb")
+ >>> output_files = result.list_output_files("*.pdb")
  """
  return self.result.list_output_files(path_filter=path_filter)

@@ -267,7 +267,7 @@ class Job:
  logger.info(f'Waiting for job {self.id} to finish...')
  while not self.is_finished():
  time.sleep(2)
- logger.info(f'Job {self.id} has finished.')
+ logger.info(f'Result {self.id} has finished.')

  def get_shareable_link(self, embed_view: Optional[bool] = None) -> str:
  api_client = BiolibApiClient.get()
@@ -292,20 +292,20 @@ class Job:
  headers={'Job-Auth-Token': self._auth_token} if self._auth_token else None,
  data={'state': 'cancelled'},
  )
- logger.info(f'Job {self._uuid} canceled')
+ logger.info(f'Result {self._uuid} canceled')
  except Exception as error:
- logger.error(f'Failed to cancel job {self._uuid} due to: {error}')
+ logger.error(f'Failed to cancel result {self._uuid} due to: {error}')

  def delete(self) -> None:
- """Delete the job.
+ """Delete the result.

  Example::
- >>> job = biolib.get_job("job_id")
- >>> job.delete()
+ >>> result = biolib.get_result("result_id")
+ >>> result.delete()
  """
  try:
  biolib.api.client.delete(path=f'/jobs/{self._uuid}/')
- logger.info(f'Job {self._uuid} deleted')
+ logger.info(f'Result {self._uuid} deleted')
  except Exception as error:
  raise BioLibError(f'Failed to delete job {self._uuid} due to: {error}') from error

@@ -318,7 +318,7 @@ class Job:
  )
  self._refetch_job_dict(force_refetch=True)
  updated_name = self.get_name()
- logger.info(f'Job {self._uuid} renamed to "{updated_name}"')
+ logger.info(f'Result {self._uuid} renamed to "{updated_name}"')
  except Exception as error:
  raise BioLibError(f'Failed to rename job {self._uuid} due to: {error}') from error

@@ -328,28 +328,28 @@ class Job:
  machine: Optional[str] = None,
  blocking: bool = True,
  arguments: Optional[List[str]] = None,
- ) -> 'Job':
- """Recompute the job with the same input files but potentially different arguments.
+ ) -> 'Result':
+ """Recompute the result with the same input files but potentially different arguments.

  Args:
  app_uri (Optional[str], optional): The URI of the app to use for recomputation.
  If None, uses the original app URI. Defaults to None.
- machine (Optional[str], optional): The machine to run the job on.
+ machine (Optional[str], optional): The machine to run the result on.
  If None, uses the original requested machine. Defaults to None.
- blocking (bool, optional): Whether to block until the job completes.
+ blocking (bool, optional): Whether to block until the result completes.
  If True, streams logs until completion. Defaults to True.
- arguments (Optional[List[str]], optional): New arguments to use for the job.
+ arguments (Optional[List[str]], optional): New arguments to use for the result.
  If None, uses the original arguments. Defaults to None.

  Returns:
- Job: A new Job instance for the recomputed job.
+ Result: A new Result instance for the recomputed result.

  Example::
- >>> job = biolib.get_job("job_id")
+ >>> result = biolib.get_result("result_id")
  >>> # Recompute with the same arguments
- >>> new_job = job.recompute()
+ >>> new_result = result.recompute()
  >>> # Recompute with different arguments
- >>> new_job = job.recompute(arguments=["--new-arg", "value"])
+ >>> new_result = result.recompute(arguments=["--new-arg", "value"])
  """
  app_response = BiolibAppApi.get_by_uri(uri=app_uri or self._job_dict['app_uri'])

@@ -388,7 +388,7 @@ class Job:
  def _get_cloud_job(self) -> CloudJobDict:
  self._refetch_job_dict(force_refetch=True)
  if self._job_dict['cloud_job'] is None:
- raise BioLibError(f'Job {self._uuid} did not register correctly. Try creating a new job.')
+ raise BioLibError(f'Result {self._uuid} did not register correctly. Try creating a new result.')

  return self._job_dict['cloud_job']

@@ -396,13 +396,13 @@ class Job:
  self._result = JobResult(job_uuid=self._uuid, job_auth_token=self._auth_token, module_output=module_output)

  @staticmethod
- def fetch_jobs(count: int, status: Optional[str] = None) -> List['Job']:
- job_dicts = Job._get_job_dicts(count, status)
- return [Job(job_dict) for job_dict in job_dicts]
+ def fetch_jobs(count: int, status: Optional[str] = None) -> List['Result']:
+ job_dicts = Result._get_job_dicts(count, status)
+ return [Result(job_dict) for job_dict in job_dicts]

  @staticmethod
  def show_jobs(count: int = 25) -> None:
- job_dicts = Job._get_job_dicts(count)
+ job_dicts = Result._get_job_dicts(count)
  BioLibTable(columns_to_row_map=Job.table_columns_to_row_map, rows=job_dicts, title='Jobs').print_table()

  @staticmethod
@@ -439,9 +439,9 @@ class Job:
  return job_dict

  @staticmethod
- def create_from_uuid(uuid: str, auth_token: Optional[str] = None) -> 'Job':
- job_dict = Job._get_job_dict(uuid=uuid, auth_token=auth_token)
- return Job(job_dict)
+ def create_from_uuid(uuid: str, auth_token: Optional[str] = None) -> 'Result':
+ job_dict = Result._get_job_dict(uuid=uuid, auth_token=auth_token)
+ return Result(job_dict)

  @staticmethod
  def print_logs_packages(stdout_and_stderr_packages_b64):
@@ -458,7 +458,9 @@ class Job:
  def show(self) -> None:
  self._refetch_job_dict()
  BioLibTable(
- columns_to_row_map=Job.table_columns_to_row_map, rows=[self._job_dict], title=f'Job: {self._uuid}'
+ columns_to_row_map=Result.table_columns_to_row_map,
+ rows=[self._job_dict],
+ title=f'Result: {self._uuid}',
  ).print_table()

  def stream_logs(self) -> None:
@@ -468,7 +470,7 @@ class Job:
  try:
  cloud_job = self._get_cloud_job_awaiting_started()
  except CloudJobFinishedError:
- logger.info(f'--- The job {self.id} has already completed (no streaming will take place) ---')
+ logger.info(f'--- The result {self.id} has already completed (no streaming will take place) ---')
  logger.info('--- The stdout log is printed below: ---')
  sys.stdout.flush()
  print(self.get_stdout().decode(), file=sys.stdout)
@@ -562,7 +564,7 @@ class Job:
  cloud_job = self._get_cloud_job()
  logger.debug('Failed to get status from compute node, retrying...')
  if cloud_job['finished_at']:
- logger.debug('Job no longer exists on compute node, checking for error...')
+ logger.debug('Result no longer exists on compute node, checking for error...')
  if cloud_job['error_code'] != SystemExceptionCodes.COMPLETED_SUCCESSFULLY.value:
  error_message = SystemExceptionCodeMap.get(
  cloud_job['error_code'], f'Unknown error code {cloud_job["error_code"]}'
@@ -600,7 +602,7 @@ class Job:
  requested_machine_count: Optional[int] = None,
  temporary_client_secrets: Optional[Dict[str, str]] = None,
  api_client: Optional[ApiClient] = None,
- ) -> 'Job':
+ ) -> 'Result':
  if len(module_input_serialized) < 500_000 and temporary_client_secrets is None:
  _job_dict = BiolibJobApi.create_job_with_data(
  app_resource_name_prefix=parse_app_uri(app_uri)['resource_name_prefix'],
@@ -615,7 +617,7 @@ class Job:
  requested_machine_count=requested_machine_count,
  api_client=api_client,
  )
- return Job(cast(JobDict, _job_dict))
+ return Result(cast(JobDict, _job_dict))

  job_dict: CreatedJobDict = BiolibJobApi.create(
  app_resource_name_prefix=parse_app_uri(app_uri)['resource_name_prefix'],
@@ -632,4 +634,11 @@ class Job:
  JobStorage.upload_module_input(job=job_dict, module_input_serialized=module_input_serialized)
  cloud_job = BiolibJobApi.create_cloud_job(job_id=job_dict['public_id'], result_name_prefix=result_prefix)
  logger.debug(f"Cloud: Job created with id {cloud_job['public_id']}")
- return Job(cast(JobDict, job_dict), _api_client=api_client)
+ return Result(cast(JobDict, job_dict), _api_client=api_client)
+
+
+ class Job(Result):
+ """
+ Deprecated class. `Job` extends the `Result` class and is retained for backward compatibility.
+ Please use the `Result` class instead.
+ """
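
The final hunk keeps the old name around as an empty subclass, so imports and type checks written against Job keep working while new code targets Result. A small compatibility sketch; the describe helper is purely illustrative:

    from biolib.jobs.job import Job, Result

    # The deprecated name is a subclass of the new one, so legacy instances
    # still satisfy any check written against Result.
    assert issubclass(Job, Result)

    def describe(result: Result) -> str:
        # Accepts both Result instances and legacy Job instances.
        return str(result)
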
pybiolib-1.2.846.dist-info/METADATA → pybiolib-1.2.856.dist-info/METADATA RENAMED
@@ -1,6 +1,6 @@
  Metadata-Version: 2.3
  Name: pybiolib
- Version: 1.2.846
+ Version: 1.2.856
  Summary: BioLib Python Client
  License: MIT
  Keywords: biolib
pybiolib-1.2.846.dist-info/RECORD → pybiolib-1.2.856.dist-info/RECORD RENAMED
@@ -1,4 +1,4 @@
- biolib/__init__.py,sha256=RL-YmqW1WUKk2FGuEt-8kzzlK1QZKcEtzBUsifWUNQM,10626
+ biolib/__init__.py,sha256=o0cpNzP7OwPYfdw5Cq-XgsMYooQdQynZTUO3W-LFuKo,10657
  biolib/_data_record/data_record.py,sha256=zKvnh5T-dIVY46-kgVzMBoZ666ZhcTCFQnWvZT0D6RM,12026
  biolib/_internal/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  biolib/_internal/add_copilot_prompts.py,sha256=6gbqYsrTTUFTEEV62fp-UTDwZeJIjDfvywWgtW6AowQ,1967
@@ -26,6 +26,7 @@ biolib/_internal/runtime.py,sha256=BiHl4klUHr36MCpqKaUso4idHeBZfPAahLYRQrabFqA,4
  biolib/_internal/templates/__init__.py,sha256=NVbhLUMC8HITzkLvP88Qu7FHaL-SvQord-DX3gh1Ykk,24
  biolib/_internal/templates/init_template/.biolib/config.yml,sha256=y4ndTgbFvUE1UiGcIOqogT2Wm8jahGffeyU5rlCEltQ,427
  biolib/_internal/templates/init_template/.github/workflows/biolib.yml,sha256=sphjoiycV_oc4VbaA8wbUEokSMpnrdB6N-bYju_5Ibo,522
+ biolib/_internal/templates/init_template/.gitignore,sha256=dR_jhtT0boUspgk3S5PPUwuO0o8gKGIbdwu8IH638CY,20
  biolib/_internal/templates/init_template/Dockerfile,sha256=Wv2r9aiazkL1wOf_BT1f5Kx_-txfrqSkYTtWI8HGdL8,169
  biolib/_internal/templates/init_template/requirements.txt,sha256=2GnBHsKg4tX5F06Z4YeLuId6jQO3-HGTITsaVBTDG0Y,42
  biolib/_internal/templates/init_template/run.py,sha256=GS2qGGmFGIthdxdSxZbGktwZc8x3Q2IVLubpp7hEROw,529
@@ -50,7 +51,7 @@ biolib/_session/session.py,sha256=US1Y1jfFIAm86-Lq3C7nCXpZXUJXXBVBkND9djMNYxI,16
  biolib/api/__init__.py,sha256=mQ4u8FijqyLzjYMezMUUbbBGNB3iFmkNdjXnWPZ7Jlw,138
  biolib/api/client.py,sha256=2GpKE7QrPgyPdgJgrV7XnZByIJf1n26UCy3aoaHBs1M,7881
  biolib/app/__init__.py,sha256=cdPtcfb_U-bxb9iSL4fCEq2rpD9OjkyY4W-Zw60B0LI,37
- biolib/app/app.py,sha256=rezW3dV73dD-1XGefGtgKI1Mf6cMkH-JZQhy8q_MA7w,10529
+ biolib/app/app.py,sha256=7AD0S1ih_drWbWXFKixGXyOKwQurt3ecND4m0Vb-Aik,10554
  biolib/app/search_apps.py,sha256=K4a41f5XIWth2BWI7OffASgIsD0ko8elCax8YL2igaY,1470
  biolib/biolib_api_client/__init__.py,sha256=E5EMa19wJoblwSdQPYrxc_BtIeRsAuO0L_jQweWw-Yk,182
  biolib/biolib_api_client/api_client.py,sha256=IONzXeFCHl4wuct6fqOC_7NiTv_zFy6ys0hsAtvLzTA,7578
@@ -82,7 +83,7 @@ biolib/cli/__init__.py,sha256=IHC2bEyA27pvgp-18SGfFVJOP456elanz7suDP8D084,1316
  biolib/cli/auth.py,sha256=rpWGmXs6Fz6CGrO9K8ibPRszOdXG78Vig_boKaVCD9A,2082
  biolib/cli/data_record.py,sha256=t8DfJK2EZ_SNZ9drDA_N5Jqy8DNwf9f5SlFrIaOvtv0,3501
  biolib/cli/download_container.py,sha256=HIZVHOPmslGE5M2Dsp9r2cCkAEJx__vcsDz5Wt5LRos,483
- biolib/cli/init.py,sha256=VDarbLkkfhyEjlNc8UbjaF3y6J59DOn6ZUjY2MVmNJk,3945
+ biolib/cli/init.py,sha256=v6ebi969q4lMfBBjJEUbG50Z8J5vA1fkTYeVfm7iZ7Y,4083
  biolib/cli/lfs.py,sha256=z2qHUwink85mv9yDgifbVKkVwuyknGhMDTfly_gLKJM,4151
  biolib/cli/push.py,sha256=pSFEUQkQ69M__eR1nIT9ejW4V4_MtX3lb8ydEc1uKiM,1484
  biolib/cli/run.py,sha256=MCo0ZqW2pHBxOoCI3i5gAx5D0auW9fmxHqkAF4TRhms,2134
@@ -123,7 +124,7 @@ biolib/compute_node/webserver/worker_thread.py,sha256=7uD9yQPhePYvP2HCJ27EeZ_h6p
  biolib/experiments/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  biolib/experiments/experiment.py,sha256=gY1-d7FgVrcQO3YzOJ0mHS6j7z7rl5XzXEuk7dFi1gU,10460
  biolib/jobs/__init__.py,sha256=aIb2H2DHjQbM2Bs-dysFijhwFcL58Blp0Co0gimED3w,32
- biolib/jobs/job.py,sha256=X33-s9u8x3uZI0x_fSDgEFAW0ke-8Qi96M-VB-W29MA,26703
+ biolib/jobs/job.py,sha256=INQuNpEpe8rQPWBWeDFRjcAd-i4U6hF63FAsJ0BkJ_Y,27086
  biolib/jobs/job_result.py,sha256=rALHiKYNaC9lHi_JJqBob1RubzNLwG9Z386kwRJjd2M,5885
  biolib/jobs/types.py,sha256=ezvaoTANsWazK6PmfpYcqezdfjP7MNBEBfqIZGoZhz8,997
  biolib/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -139,8 +140,8 @@ biolib/utils/cache_state.py,sha256=u256F37QSRIVwqKlbnCyzAX4EMI-kl6Dwu6qwj-Qmag,3
  biolib/utils/multipart_uploader.py,sha256=XvGP1I8tQuKhAH-QugPRoEsCi9qvbRk-DVBs5PNwwJo,8452
  biolib/utils/seq_util.py,sha256=Ozk0blGtPur_D9MwShD02r_mphyQmgZkx-lOHOwnlIM,6730
  biolib/utils/zip/remote_zip.py,sha256=0wErYlxir5921agfFeV1xVjf29l9VNgGQvNlWOlj2Yc,23232
- pybiolib-1.2.846.dist-info/LICENSE,sha256=F2h7gf8i0agDIeWoBPXDMYScvQOz02pAWkKhTGOHaaw,1067
- pybiolib-1.2.846.dist-info/METADATA,sha256=JJWM8zRs2Dicm12fzJu1aqB4FeUL0nckB3mItyWSmQo,1570
- pybiolib-1.2.846.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
- pybiolib-1.2.846.dist-info/entry_points.txt,sha256=p6DyaP_2kctxegTX23WBznnrDi4mz6gx04O5uKtRDXg,42
- pybiolib-1.2.846.dist-info/RECORD,,
+ pybiolib-1.2.856.dist-info/LICENSE,sha256=F2h7gf8i0agDIeWoBPXDMYScvQOz02pAWkKhTGOHaaw,1067
+ pybiolib-1.2.856.dist-info/METADATA,sha256=8dJ2I-UmDG62OdLmb9QEm4g4DuF-Xu6AEIH1VaaGQrs,1570
+ pybiolib-1.2.856.dist-info/WHEEL,sha256=b4K_helf-jlQoXBBETfwnf4B04YC67LOev0jo4fX5m8,88
+ pybiolib-1.2.856.dist-info/entry_points.txt,sha256=p6DyaP_2kctxegTX23WBznnrDi4mz6gx04O5uKtRDXg,42
+ pybiolib-1.2.856.dist-info/RECORD,,