cloudos-cli 2.30.0__tar.gz → 2.31.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/PKG-INFO +39 -1
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/README.md +38 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/__init__.py +1 -1
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/__main__.py +187 -27
- cloudos_cli-2.31.0/cloudos_cli/_version.py +1 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/clos.py +141 -1
- cloudos_cli-2.31.0/cloudos_cli/utils/__init__.py +11 -0
- cloudos_cli-2.31.0/cloudos_cli/utils/cloud.py +37 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/utils/errors.py +23 -1
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/PKG-INFO +39 -1
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/SOURCES.txt +1 -0
- cloudos_cli-2.30.0/cloudos_cli/_version.py +0 -1
- cloudos_cli-2.30.0/cloudos_cli/utils/__init__.py +0 -9
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/LICENSE +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/configure/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/configure/configure.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/datasets/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/datasets/datasets.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/import_wf/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/import_wf/import_wf.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/jobs/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/jobs/job.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/queue/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/queue/queue.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/utils/requests.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli/utils/resources.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/dependency_links.txt +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/entry_points.txt +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/requires.txt +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/cloudos_cli.egg-info/top_level.txt +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/setup.cfg +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/setup.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/tests/__init__.py +0 -0
- {cloudos_cli-2.30.0 → cloudos_cli-2.31.0}/tests/functions_for_pytest.py +0 -0
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: cloudos_cli
|
|
3
|
-
Version: 2.
|
|
3
|
+
Version: 2.31.0
|
|
4
4
|
Summary: Python package for interacting with CloudOS
|
|
5
5
|
Home-page: https://github.com/lifebit-ai/cloudos-cli
|
|
6
6
|
Author: David Piñeyro
|
|
@@ -420,6 +420,44 @@ command.
|
|
|
420
420
|
Other options like `--wait-completion` are also available and work in the same way as for the `cloudos job run` command.
|
|
421
421
|
Check `cloudos bash job --help` for more details.
|
|
422
422
|
|
|
423
|
+
#### Get path to logs of job from CloudOS
|
|
424
|
+
|
|
425
|
+
Get the path to "Nextflow logs", "Nextflow standard output", and "trace" files. It can be used only on your user's jobs, with any status.
|
|
426
|
+
|
|
427
|
+
Example
|
|
428
|
+
```console
|
|
429
|
+
cloudos job logs --cloudos-url $CLOUDOS \
|
|
430
|
+
--apikey $MY_API_KEY \
|
|
431
|
+
--workspace-id $WORKSPACE_ID \
|
|
432
|
+
--job-id "12345678910"
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
Executing logs...
|
|
436
|
+
Logs URI: s3://path/to/location/of/logs
|
|
437
|
+
|
|
438
|
+
Nextflow log: s3://path/to/location/of/logs/.nextflow.log
|
|
439
|
+
|
|
440
|
+
Nextflow standard output: s3://path/to/location/of/logs/stdout.txt
|
|
441
|
+
|
|
442
|
+
Trace file: s3://path/to/location/of/logs/trace.txt
|
|
443
|
+
```
|
|
444
|
+
|
|
445
|
+
#### Get path to result files of jobs from CloudOS
|
|
446
|
+
|
|
447
|
+
Get the path where CloudOS stores the output files for a job. This can be used only on your user's jobs and for jobs with "completed" status.
|
|
448
|
+
|
|
449
|
+
Example
|
|
450
|
+
```console
|
|
451
|
+
cloudos job logs --cloudos-url $CLOUDOS \
|
|
452
|
+
--apikey $MY_API_KEY \
|
|
453
|
+
--workspace-id $WORKSPACE_ID \
|
|
454
|
+
--job-id "12345678910"
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
Executing results...
|
|
458
|
+
results: s3://path/to/location/of/results/results/
|
|
459
|
+
```
|
|
460
|
+
|
|
423
461
|
#### Abort single or multiple jobs from CloudOS
|
|
424
462
|
|
|
425
463
|
Aborts jobs in the CloudOS workspace that are either running or initialising. It can be used with one or more job IDs provided as a comma separated string using the `--job-ids` parameter.
|
|
@@ -385,6 +385,44 @@ command.
|
|
|
385
385
|
Other options like `--wait-completion` are also available and work in the same way as for the `cloudos job run` command.
|
|
386
386
|
Check `cloudos bash job --help` for more details.
|
|
387
387
|
|
|
388
|
+
#### Get path to logs of job from CloudOS
|
|
389
|
+
|
|
390
|
+
Get the path to "Nextflow logs", "Nextflow standard output", and "trace" files. It can be used only on your user's jobs, with any status.
|
|
391
|
+
|
|
392
|
+
Example
|
|
393
|
+
```console
|
|
394
|
+
cloudos job logs --cloudos-url $CLOUDOS \
|
|
395
|
+
--apikey $MY_API_KEY \
|
|
396
|
+
--workspace-id $WORKSPACE_ID \
|
|
397
|
+
--job-id "12345678910"
|
|
398
|
+
|
|
399
|
+
|
|
400
|
+
Executing logs...
|
|
401
|
+
Logs URI: s3://path/to/location/of/logs
|
|
402
|
+
|
|
403
|
+
Nextflow log: s3://path/to/location/of/logs/.nextflow.log
|
|
404
|
+
|
|
405
|
+
Nextflow standard output: s3://path/to/location/of/logs/stdout.txt
|
|
406
|
+
|
|
407
|
+
Trace file: s3://path/to/location/of/logs/trace.txt
|
|
408
|
+
```
|
|
409
|
+
|
|
410
|
+
#### Get path to result files of jobs from CloudOS
|
|
411
|
+
|
|
412
|
+
Get the path where CloudOS stores the output files for a job. This can be used only on your user's jobs and for jobs with "completed" status.
|
|
413
|
+
|
|
414
|
+
Example
|
|
415
|
+
```console
|
|
416
|
+
cloudos job logs --cloudos-url $CLOUDOS \
|
|
417
|
+
--apikey $MY_API_KEY \
|
|
418
|
+
--workspace-id $WORKSPACE_ID \
|
|
419
|
+
--job-id "12345678910"
|
|
420
|
+
|
|
421
|
+
|
|
422
|
+
Executing results...
|
|
423
|
+
results: s3://path/to/location/of/results/results/
|
|
424
|
+
```
|
|
425
|
+
|
|
388
426
|
#### Abort single or multiple jobs from CloudOS
|
|
389
427
|
|
|
390
428
|
Aborts jobs in the CloudOS workspace that are either running or initialising. It can be used with one or more job IDs provided as a comma separated string using the `--job-ids` parameter.
|
|
@@ -8,4 +8,4 @@ Python package for interacting with Cloud OS (https://cloudos.lifebit.ai/)
|
|
|
8
8
|
from .clos import Cloudos
|
|
9
9
|
from ._version import __version__
|
|
10
10
|
|
|
11
|
-
__all__ = ['jobs', 'utils', 'clos', 'queue']
|
|
11
|
+
__all__ = ['jobs', 'utils', 'clos', 'queue', 'configure', 'datasets', 'import_wf']
|
|
@@ -31,13 +31,14 @@ ABORT_JOB_STATES = ['running', 'initializing']
|
|
|
31
31
|
CLOUDOS_URL = 'https://cloudos.lifebit.ai'
|
|
32
32
|
INIT_PROFILE = 'initialisingProfile'
|
|
33
33
|
|
|
34
|
+
|
|
34
35
|
@click.group()
|
|
35
36
|
@click.version_option(__version__)
|
|
36
37
|
@click.pass_context
|
|
37
38
|
def run_cloudos_cli(ctx):
|
|
38
39
|
"""CloudOS python package: a package for interacting with CloudOS."""
|
|
39
40
|
ctx.ensure_object(dict)
|
|
40
|
-
if ctx.invoked_subcommand not in ['datasets']
|
|
41
|
+
if ctx.invoked_subcommand not in ['datasets']:
|
|
41
42
|
print(run_cloudos_cli.__doc__ + '\n')
|
|
42
43
|
print('Version: ' + __version__ + '\n')
|
|
43
44
|
config_manager = ConfigurationProfile()
|
|
@@ -60,6 +61,8 @@ def run_cloudos_cli(ctx):
|
|
|
60
61
|
'abort': shared_config,
|
|
61
62
|
'status': shared_config,
|
|
62
63
|
'list': shared_config,
|
|
64
|
+
'logs': shared_config,
|
|
65
|
+
'results': shared_config,
|
|
63
66
|
'details': shared_config
|
|
64
67
|
},
|
|
65
68
|
'workflow': {
|
|
@@ -103,6 +106,8 @@ def run_cloudos_cli(ctx):
|
|
|
103
106
|
'abort': shared_config,
|
|
104
107
|
'status': shared_config,
|
|
105
108
|
'list': shared_config,
|
|
109
|
+
'logs': shared_config,
|
|
110
|
+
'results': shared_config,
|
|
106
111
|
'details': shared_config
|
|
107
112
|
},
|
|
108
113
|
'workflow': {
|
|
@@ -515,9 +520,9 @@ def run(ctx,
|
|
|
515
520
|
nextflow_version = '22.10.8'
|
|
516
521
|
if execution_platform == 'azure':
|
|
517
522
|
print(f'[Message] The selected worflow \'{workflow_name}\' ' +
|
|
518
|
-
'is a CloudOS module. For these workflows, worker nodes '+
|
|
519
|
-
'are managed internally. For this reason, the options '+
|
|
520
|
-
'azure-worker-instance-type, azure-worker-instance-disk and '+
|
|
523
|
+
'is a CloudOS module. For these workflows, worker nodes ' +
|
|
524
|
+
'are managed internally. For this reason, the options ' +
|
|
525
|
+
'azure-worker-instance-type, azure-worker-instance-disk and ' +
|
|
521
526
|
'azure-worker-instance-spot are not taking effect.')
|
|
522
527
|
else:
|
|
523
528
|
queue = Queue(cloudos_url=cloudos_url, apikey=apikey, cromwell_token=cromwell_token,
|
|
@@ -574,7 +579,7 @@ def run(ctx,
|
|
|
574
579
|
print(f'\tNextflow version: {nextflow_version}')
|
|
575
580
|
j_id = j.send_job(job_config=job_config,
|
|
576
581
|
parameter=parameter,
|
|
577
|
-
is_module
|
|
582
|
+
is_module=is_module,
|
|
578
583
|
git_commit=git_commit,
|
|
579
584
|
git_tag=git_tag,
|
|
580
585
|
git_branch=git_branch,
|
|
@@ -701,6 +706,152 @@ def job_status(ctx,
|
|
|
701
706
|
'or repeat the command you just used.')
|
|
702
707
|
|
|
703
708
|
|
|
709
|
+
@job.command('logs')
|
|
710
|
+
@click.option('-k',
|
|
711
|
+
'--apikey',
|
|
712
|
+
help='Your CloudOS API key',
|
|
713
|
+
required=True)
|
|
714
|
+
@click.option('-c',
|
|
715
|
+
'--cloudos-url',
|
|
716
|
+
help=(f'The CloudOS url you are trying to access to. Default={CLOUDOS_URL}.'),
|
|
717
|
+
default=CLOUDOS_URL)
|
|
718
|
+
@click.option('--workspace-id',
|
|
719
|
+
help='The specific CloudOS workspace id.',
|
|
720
|
+
required=True)
|
|
721
|
+
@click.option('--job-id',
|
|
722
|
+
help='The job id in CloudOS to search for.',
|
|
723
|
+
required=True)
|
|
724
|
+
@click.option('--verbose',
|
|
725
|
+
help='Whether to print information messages or not.',
|
|
726
|
+
is_flag=True)
|
|
727
|
+
@click.option('--disable-ssl-verification',
|
|
728
|
+
help=('Disable SSL certificate verification. Please, remember that this option is ' +
|
|
729
|
+
'not generally recommended for security reasons.'),
|
|
730
|
+
is_flag=True)
|
|
731
|
+
@click.option('--ssl-cert',
|
|
732
|
+
help='Path to your SSL certificate file.')
|
|
733
|
+
@click.option('--profile', help='Profile to use from the config file', default=None)
|
|
734
|
+
@click.pass_context
|
|
735
|
+
def job_logs(ctx,
|
|
736
|
+
apikey,
|
|
737
|
+
cloudos_url,
|
|
738
|
+
workspace_id,
|
|
739
|
+
job_id,
|
|
740
|
+
verbose,
|
|
741
|
+
disable_ssl_verification,
|
|
742
|
+
ssl_cert,
|
|
743
|
+
profile):
|
|
744
|
+
"""Get the path to the logs of a specified job."""
|
|
745
|
+
profile = profile or ctx.default_map['job']['logs']['profile']
|
|
746
|
+
# Create a dictionary with required and non-required params
|
|
747
|
+
required_dict = {
|
|
748
|
+
'apikey': True,
|
|
749
|
+
'workspace_id': True,
|
|
750
|
+
'workflow_name': False,
|
|
751
|
+
'project_name': False
|
|
752
|
+
}
|
|
753
|
+
# determine if the user provided all required parameters
|
|
754
|
+
config_manager = ConfigurationProfile()
|
|
755
|
+
apikey, cloudos_url, workspace_id, workflow_name, repository_platform, execution_platform, project_name = (
|
|
756
|
+
config_manager.load_profile_and_validate_data(
|
|
757
|
+
ctx,
|
|
758
|
+
INIT_PROFILE,
|
|
759
|
+
CLOUDOS_URL,
|
|
760
|
+
profile=profile,
|
|
761
|
+
required_dict=required_dict,
|
|
762
|
+
apikey=apikey,
|
|
763
|
+
cloudos_url=cloudos_url,
|
|
764
|
+
workspace_id=workspace_id
|
|
765
|
+
)
|
|
766
|
+
)
|
|
767
|
+
|
|
768
|
+
print('Executing logs...')
|
|
769
|
+
verify_ssl = ssl_selector(disable_ssl_verification, ssl_cert)
|
|
770
|
+
if verbose:
|
|
771
|
+
print('\t...Preparing objects')
|
|
772
|
+
cl = Cloudos(cloudos_url, apikey, None)
|
|
773
|
+
if verbose:
|
|
774
|
+
print('\tThe following Cloudos object was created:')
|
|
775
|
+
print('\t' + str(cl) + '\n')
|
|
776
|
+
print(f'\tSearching for job id: {job_id}')
|
|
777
|
+
logs = cl.get_job_logs(job_id, workspace_id, verify_ssl)
|
|
778
|
+
for name, path in logs.items():
|
|
779
|
+
print(f"{name}: {path}\n")
|
|
780
|
+
|
|
781
|
+
|
|
782
|
+
@job.command('results')
|
|
783
|
+
@click.option('-k',
|
|
784
|
+
'--apikey',
|
|
785
|
+
help='Your CloudOS API key',
|
|
786
|
+
required=True)
|
|
787
|
+
@click.option('-c',
|
|
788
|
+
'--cloudos-url',
|
|
789
|
+
help=(f'The CloudOS url you are trying to access to. Default={CLOUDOS_URL}.'),
|
|
790
|
+
default=CLOUDOS_URL)
|
|
791
|
+
@click.option('--workspace-id',
|
|
792
|
+
help='The specific CloudOS workspace id.',
|
|
793
|
+
required=True)
|
|
794
|
+
@click.option('--job-id',
|
|
795
|
+
help='The job id in CloudOS to search for.',
|
|
796
|
+
required=True)
|
|
797
|
+
@click.option('--verbose',
|
|
798
|
+
help='Whether to print information messages or not.',
|
|
799
|
+
is_flag=True)
|
|
800
|
+
@click.option('--disable-ssl-verification',
|
|
801
|
+
help=('Disable SSL certificate verification. Please, remember that this option is ' +
|
|
802
|
+
'not generally recommended for security reasons.'),
|
|
803
|
+
is_flag=True)
|
|
804
|
+
@click.option('--ssl-cert',
|
|
805
|
+
help='Path to your SSL certificate file.')
|
|
806
|
+
@click.option('--profile', help='Profile to use from the config file', default=None)
|
|
807
|
+
@click.pass_context
|
|
808
|
+
def job_results(ctx,
|
|
809
|
+
apikey,
|
|
810
|
+
cloudos_url,
|
|
811
|
+
workspace_id,
|
|
812
|
+
job_id,
|
|
813
|
+
verbose,
|
|
814
|
+
disable_ssl_verification,
|
|
815
|
+
ssl_cert,
|
|
816
|
+
profile):
|
|
817
|
+
"""Get the path to the results of a specified job."""
|
|
818
|
+
profile = profile or ctx.default_map['job']['results']['profile']
|
|
819
|
+
# Create a dictionary with required and non-required params
|
|
820
|
+
required_dict = {
|
|
821
|
+
'apikey': True,
|
|
822
|
+
'workspace_id': True,
|
|
823
|
+
'workflow_name': False,
|
|
824
|
+
'project_name': False
|
|
825
|
+
}
|
|
826
|
+
# determine if the user provided all required parameters
|
|
827
|
+
config_manager = ConfigurationProfile()
|
|
828
|
+
apikey, cloudos_url, workspace_id, workflow_name, repository_platform, execution_platform, project_name = (
|
|
829
|
+
config_manager.load_profile_and_validate_data(
|
|
830
|
+
ctx,
|
|
831
|
+
INIT_PROFILE,
|
|
832
|
+
CLOUDOS_URL,
|
|
833
|
+
profile=profile,
|
|
834
|
+
required_dict=required_dict,
|
|
835
|
+
apikey=apikey,
|
|
836
|
+
cloudos_url=cloudos_url,
|
|
837
|
+
workspace_id=workspace_id
|
|
838
|
+
)
|
|
839
|
+
)
|
|
840
|
+
|
|
841
|
+
print('Executing results...')
|
|
842
|
+
verify_ssl = ssl_selector(disable_ssl_verification, ssl_cert)
|
|
843
|
+
if verbose:
|
|
844
|
+
print('\t...Preparing objects')
|
|
845
|
+
cl = Cloudos(cloudos_url, apikey, None)
|
|
846
|
+
if verbose:
|
|
847
|
+
print('\tThe following Cloudos object was created:')
|
|
848
|
+
print('\t' + str(cl) + '\n')
|
|
849
|
+
print(f'\tSearching for job id: {job_id}')
|
|
850
|
+
logs = cl.get_job_results(job_id, workspace_id, verify_ssl)
|
|
851
|
+
for name, path in logs.items():
|
|
852
|
+
print(f"{name}: {path}\n")
|
|
853
|
+
|
|
854
|
+
|
|
704
855
|
@job.command('details')
|
|
705
856
|
@click.option('-k',
|
|
706
857
|
'--apikey',
|
|
@@ -714,7 +865,8 @@ def job_status(ctx,
|
|
|
714
865
|
help='The job id in CloudOS to search for.',
|
|
715
866
|
required=True)
|
|
716
867
|
@click.option('--output-format',
|
|
717
|
-
help='The desired display for the output, either directly in standard output or saved as file.
|
|
868
|
+
help=('The desired display for the output, either directly in standard output or saved as file. ' +
|
|
869
|
+
'Default=stdout.'),
|
|
718
870
|
type=click.Choice(['stdout', 'json'], case_sensitive=False),
|
|
719
871
|
default='stdout')
|
|
720
872
|
@click.option('--output-basename',
|
|
@@ -738,16 +890,16 @@ def job_status(ctx,
|
|
|
738
890
|
@click.option('--profile', help='Profile to use from the config file', default=None)
|
|
739
891
|
@click.pass_context
|
|
740
892
|
def job_details(ctx,
|
|
741
|
-
|
|
742
|
-
|
|
743
|
-
|
|
744
|
-
|
|
745
|
-
|
|
746
|
-
|
|
747
|
-
|
|
748
|
-
|
|
749
|
-
|
|
750
|
-
|
|
893
|
+
apikey,
|
|
894
|
+
cloudos_url,
|
|
895
|
+
job_id,
|
|
896
|
+
output_format,
|
|
897
|
+
output_basename,
|
|
898
|
+
parameters,
|
|
899
|
+
verbose,
|
|
900
|
+
disable_ssl_verification,
|
|
901
|
+
ssl_cert,
|
|
902
|
+
profile):
|
|
751
903
|
"""Retrieve job details in CloudOS."""
|
|
752
904
|
profile = profile or ctx.default_map['job']['details']['profile']
|
|
753
905
|
# Create a dictionary with required and non-required params
|
|
@@ -826,7 +978,7 @@ def job_details(ctx,
|
|
|
826
978
|
|
|
827
979
|
# Determine the execution platform based on jobType
|
|
828
980
|
executors = {
|
|
829
|
-
'nextflowAWS':'Batch AWS',
|
|
981
|
+
'nextflowAWS': 'Batch AWS',
|
|
830
982
|
'nextflowAzure': 'Batch Azure',
|
|
831
983
|
'nextflowGcp': 'GCP',
|
|
832
984
|
'nextflowHpc': 'HPC',
|
|
@@ -888,7 +1040,7 @@ def job_details(ctx,
|
|
|
888
1040
|
"Master Instance": str(j_details_h["masterInstance"]["usedInstance"]["type"]),
|
|
889
1041
|
"Storage": str(j_details_h["storageSizeInGb"]) + " GB",
|
|
890
1042
|
"Accelerated File Staging": str(j_details_h.get("usesFusionFileSystem", "None")),
|
|
891
|
-
"Task Resources": f"{str(j_details_h['resourceRequirements']['cpu'])} CPUs, " +
|
|
1043
|
+
"Task Resources": f"{str(j_details_h['resourceRequirements']['cpu'])} CPUs, " +
|
|
892
1044
|
f"{str(j_details_h['resourceRequirements']['ram'])} GB RAM"
|
|
893
1045
|
|
|
894
1046
|
}
|
|
@@ -1308,9 +1460,11 @@ def import_wf(ctx,
|
|
|
1308
1460
|
)
|
|
1309
1461
|
|
|
1310
1462
|
verify_ssl = ssl_selector(disable_ssl_verification, ssl_cert)
|
|
1311
|
-
repo_import = ImportWorflow(
|
|
1312
|
-
|
|
1313
|
-
|
|
1463
|
+
repo_import = ImportWorflow(
|
|
1464
|
+
cloudos_url=cloudos_url, cloudos_apikey=apikey, workspace_id=workspace_id, platform=repository_platform,
|
|
1465
|
+
workflow_name=workflow_name, workflow_url=workflow_url, workflow_docs_link=workflow_docs_link,
|
|
1466
|
+
cost_limit=cost_limit, workflow_description=workflow_description, verify=verify_ssl
|
|
1467
|
+
)
|
|
1314
1468
|
workflow_id = repo_import.import_workflow()
|
|
1315
1469
|
print(f'\tWorkflow {workflow_name} was imported successfully with the ' +
|
|
1316
1470
|
f'following ID: {workflow_id}')
|
|
@@ -2178,7 +2332,8 @@ def list_files(ctx,
|
|
|
2178
2332
|
@click.option('-c', '--cloudos-url', default=CLOUDOS_URL, required=False, help='The CloudOS URL.')
|
|
2179
2333
|
@click.option('--workspace-id', required=True, help='The CloudOS workspace ID.')
|
|
2180
2334
|
@click.option('--project-name', required=True, help='The source project name.')
|
|
2181
|
-
@click.option('--destination-project-name', required=False,
|
|
2335
|
+
@click.option('--destination-project-name', required=False,
|
|
2336
|
+
help='The destination project name. Defaults to the source project.')
|
|
2182
2337
|
@click.option('--disable-ssl-verification', is_flag=True, help='Disable SSL certificate verification.')
|
|
2183
2338
|
@click.option('--ssl-cert', help='Path to your SSL certificate file.')
|
|
2184
2339
|
@click.option('--profile', default=None, help='Profile to use from the config file.')
|
|
@@ -2189,8 +2344,10 @@ def move_files(ctx, source_path, destination_path, apikey, cloudos_url, workspac
|
|
|
2189
2344
|
"""
|
|
2190
2345
|
Move a file or folder from a source path to a destination path within or across CloudOS projects.
|
|
2191
2346
|
|
|
2192
|
-
SOURCE_PATH [path]
|
|
2193
|
-
|
|
2347
|
+
SOURCE_PATH [path]: the full path to the file or folder to move. It must be a 'Data' folder path.
|
|
2348
|
+
E.g.: 'Data/folderA/file.txt'\n
|
|
2349
|
+
DESTINATION_PATH [path]: the full path to the destination folder. It must be a 'Data' folder path.
|
|
2350
|
+
E.g.: 'Data/folderB'
|
|
2194
2351
|
"""
|
|
2195
2352
|
|
|
2196
2353
|
profile = profile or ctx.default_map['datasets']['move'].get('profile')
|
|
@@ -2270,7 +2427,8 @@ def move_files(ctx, source_path, destination_path, apikey, cloudos_url, workspac
|
|
|
2270
2427
|
if found_source:
|
|
2271
2428
|
break
|
|
2272
2429
|
if not found_source:
|
|
2273
|
-
click.echo(f"[ERROR] Item '{source_item_name}' not found in '{source_parent_path or '[project root]'}'",
|
|
2430
|
+
click.echo(f"[ERROR] Item '{source_item_name}' not found in '{source_parent_path or '[project root]'}'",
|
|
2431
|
+
err=True)
|
|
2274
2432
|
sys.exit(1)
|
|
2275
2433
|
|
|
2276
2434
|
source_id = found_source["_id"]
|
|
@@ -2300,7 +2458,8 @@ def move_files(ctx, source_path, destination_path, apikey, cloudos_url, workspac
|
|
|
2300
2458
|
except Exception as e:
|
|
2301
2459
|
click.echo(f"[ERROR] Could not resolve destination path '{destination_path}': {str(e)}", err=True)
|
|
2302
2460
|
sys.exit(1)
|
|
2303
|
-
click.echo(f"Moving {source_kind} '{source_item_name}' to '{destination_path}'
|
|
2461
|
+
click.echo(f"Moving {source_kind} '{source_item_name}' to '{destination_path}' " +
|
|
2462
|
+
f"in project '{destination_project_name} ...")
|
|
2304
2463
|
# === Perform Move ===
|
|
2305
2464
|
try:
|
|
2306
2465
|
response = source_client.move_files_and_folders(
|
|
@@ -2310,7 +2469,8 @@ def move_files(ctx, source_path, destination_path, apikey, cloudos_url, workspac
|
|
|
2310
2469
|
target_kind=target_kind
|
|
2311
2470
|
)
|
|
2312
2471
|
if response.ok:
|
|
2313
|
-
|
|
2472
|
+
click.secho(f"[SUCCESS] {source_kind} '{source_item_name}' moved to '{destination_path}' " +
|
|
2473
|
+
f"in project '{destination_project_name}'.", fg="green", bold=True)
|
|
2314
2474
|
else:
|
|
2315
2475
|
click.echo(f"[ERROR] Move failed: {response.status_code} - {response.text}", err=True)
|
|
2316
2476
|
sys.exit(1)
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
__version__ = '2.31.0'
|
|
@@ -6,7 +6,8 @@ import requests
|
|
|
6
6
|
import time
|
|
7
7
|
import json
|
|
8
8
|
from dataclasses import dataclass
|
|
9
|
-
from cloudos_cli.utils.
|
|
9
|
+
from cloudos_cli.utils.cloud import find_cloud
|
|
10
|
+
from cloudos_cli.utils.errors import BadRequestException, JoBNotCompletedException, NotAuthorisedException
|
|
10
11
|
from cloudos_cli.utils.requests import retry_requests_get, retry_requests_post, retry_requests_put
|
|
11
12
|
import pandas as pd
|
|
12
13
|
|
|
@@ -134,6 +135,145 @@ class Cloudos:
|
|
|
134
135
|
f'\t\t--job-id {job_id}\n')
|
|
135
136
|
return {'name': j_name, 'id': job_id, 'status': j_status_h}
|
|
136
137
|
|
|
138
|
+
def get_storage_contents(self, cloud_name, cloud_meta, container, path, workspace_id, verify):
|
|
139
|
+
"""
|
|
140
|
+
Retrieves the contents of a storage container from the specified cloud service.
|
|
141
|
+
|
|
142
|
+
This method fetches the contents of a specified path within a storage container
|
|
143
|
+
on a cloud service (e.g., AWS S3 or Azure Blob). The request is authenticated
|
|
144
|
+
using an API key and requires valid parameters such as the workspace ID and path.
|
|
145
|
+
|
|
146
|
+
Parameters:
|
|
147
|
+
cloud_name (str): The name of the cloud service (e.g., 'aws' or 'azure').
|
|
148
|
+
container (str): The name of the storage container or bucket.
|
|
149
|
+
path (str): The file path or directory within the storage container.
|
|
150
|
+
workspace_id (str): The identifier of the workspace or team.
|
|
151
|
+
verify (bool): Whether to verify SSL certificates for the request.
|
|
152
|
+
|
|
153
|
+
Returns:
|
|
154
|
+
list: A list of contents retrieved from the specified cloud storage.
|
|
155
|
+
|
|
156
|
+
Raises:
|
|
157
|
+
BadRequestException: If the request to retrieve the contents fails with a
|
|
158
|
+
status code indicating an error.
|
|
159
|
+
"""
|
|
160
|
+
headers = {
|
|
161
|
+
"Content-type": "application/json",
|
|
162
|
+
"apikey": self.apikey
|
|
163
|
+
}
|
|
164
|
+
cloud_data = {
|
|
165
|
+
"aws": {
|
|
166
|
+
"url": f"{self.cloudos_url}/api/v1/data-access/s3/bucket-contents",
|
|
167
|
+
"params": {
|
|
168
|
+
"bucket": container,
|
|
169
|
+
"path": path,
|
|
170
|
+
"teamId": workspace_id
|
|
171
|
+
}
|
|
172
|
+
},
|
|
173
|
+
"azure": {
|
|
174
|
+
"url": f"{self.cloudos_url}/api/v1/data-access/azure/container-contents",
|
|
175
|
+
"container": "containerName",
|
|
176
|
+
"params": {
|
|
177
|
+
"containerName": container,
|
|
178
|
+
"path": path + "/",
|
|
179
|
+
"storageAccountName": "",
|
|
180
|
+
"teamId": workspace_id
|
|
181
|
+
}
|
|
182
|
+
}
|
|
183
|
+
}
|
|
184
|
+
if cloud_name == "azure":
|
|
185
|
+
cloud_data[cloud_name]["params"]["storageAccountName"] = cloud_meta["storage"]["storageAccount"]
|
|
186
|
+
params = cloud_data[cloud_name]["params"]
|
|
187
|
+
contents_req = retry_requests_get(cloud_data[cloud_name]["url"], params=params, headers=headers, verify=verify)
|
|
188
|
+
if contents_req.status_code >= 400:
|
|
189
|
+
raise BadRequestException(contents_req)
|
|
190
|
+
return contents_req.json()["contents"]
|
|
191
|
+
|
|
192
|
+
def get_job_logs(self, j_id, workspace_id, verify=True):
|
|
193
|
+
"""
|
|
194
|
+
Get the location of the logs for the specified job
|
|
195
|
+
"""
|
|
196
|
+
cloudos_url = self.cloudos_url
|
|
197
|
+
apikey = self.apikey
|
|
198
|
+
headers = {
|
|
199
|
+
"Content-type": "application/json",
|
|
200
|
+
"apikey": apikey
|
|
201
|
+
}
|
|
202
|
+
r = retry_requests_get(f"{cloudos_url}/api/v1/jobs/{j_id}", headers=headers, verify=verify)
|
|
203
|
+
if r.status_code == 401:
|
|
204
|
+
raise NotAuthorisedException
|
|
205
|
+
elif r.status_code >= 400:
|
|
206
|
+
raise BadRequestException(r)
|
|
207
|
+
r_json = r.json()
|
|
208
|
+
logs_obj = r_json["logs"]
|
|
209
|
+
job_workspace = r_json["team"]
|
|
210
|
+
if job_workspace != workspace_id:
|
|
211
|
+
raise ValueError("Workspace provided or configured is different from workspace where the job was executed")
|
|
212
|
+
cloud_name, cloud_meta, cloud_storage = find_cloud(self.cloudos_url, self.apikey, workspace_id, logs_obj)
|
|
213
|
+
container_name = cloud_storage["container"]
|
|
214
|
+
prefix_name = cloud_storage["prefix"]
|
|
215
|
+
logs_bucket = logs_obj[container_name]
|
|
216
|
+
logs_path = logs_obj[prefix_name]
|
|
217
|
+
contents_obj = self.get_storage_contents(cloud_name, cloud_meta, logs_bucket, logs_path, workspace_id, verify)
|
|
218
|
+
logs = {}
|
|
219
|
+
cloude_scheme = cloud_storage["scheme"]
|
|
220
|
+
storage_account_prefix = ''
|
|
221
|
+
if cloude_scheme == 'az':
|
|
222
|
+
storage_account_prefix = f'{workspace_id}.blob.core.windows.net/'
|
|
223
|
+
for item in contents_obj:
|
|
224
|
+
if not item["isDir"]:
|
|
225
|
+
filename = item["name"]
|
|
226
|
+
if filename == "stdout.txt":
|
|
227
|
+
filename = "Nextflow standard output"
|
|
228
|
+
if filename == ".nextflow.log":
|
|
229
|
+
filename = "Nextflow log"
|
|
230
|
+
if filename == "trace.txt":
|
|
231
|
+
filename = "Trace file"
|
|
232
|
+
logs[filename] = f"{cloude_scheme}://{storage_account_prefix}{logs_bucket}/{item['path']}"
|
|
233
|
+
return logs
|
|
234
|
+
|
|
235
|
+
def get_job_results(self, j_id, workspace_id, verify=True):
|
|
236
|
+
"""
|
|
237
|
+
Get the location of the results for the specified job
|
|
238
|
+
"""
|
|
239
|
+
cloudos_url = self.cloudos_url
|
|
240
|
+
apikey = self.apikey
|
|
241
|
+
headers = {
|
|
242
|
+
"Content-type": "application/json",
|
|
243
|
+
"apikey": apikey
|
|
244
|
+
}
|
|
245
|
+
status = self.get_job_status(j_id, verify).json()["status"]
|
|
246
|
+
if status != JOB_COMPLETED:
|
|
247
|
+
raise JoBNotCompletedException(j_id, status)
|
|
248
|
+
|
|
249
|
+
r = retry_requests_get(f"{cloudos_url}/api/v1/jobs/{j_id}",
|
|
250
|
+
headers=headers, verify=verify)
|
|
251
|
+
if r.status_code == 401:
|
|
252
|
+
raise NotAuthorisedException
|
|
253
|
+
if r.status_code >= 400:
|
|
254
|
+
raise BadRequestException(r)
|
|
255
|
+
req_obj = r.json()
|
|
256
|
+
job_workspace = req_obj["team"]
|
|
257
|
+
if job_workspace != workspace_id:
|
|
258
|
+
raise ValueError("Workspace provided or configured is different from workspace where the job was executed")
|
|
259
|
+
cloud_name, meta, cloud_storage = find_cloud(self.cloudos_url, self.apikey, workspace_id, req_obj["logs"])
|
|
260
|
+
# cont_name
|
|
261
|
+
results_obj = req_obj["results"]
|
|
262
|
+
results_container = results_obj[cloud_storage["container"]]
|
|
263
|
+
results_path = results_obj[cloud_storage["prefix"]]
|
|
264
|
+
scheme = cloud_storage["scheme"]
|
|
265
|
+
contents_obj = self.get_storage_contents(cloud_name, meta, results_container,
|
|
266
|
+
results_path, workspace_id, verify)
|
|
267
|
+
storage_account_prefix = ''
|
|
268
|
+
if scheme == 'az':
|
|
269
|
+
storage_account_prefix = f'{workspace_id}.blob.core.windows.net/'
|
|
270
|
+
results = dict()
|
|
271
|
+
for item in contents_obj:
|
|
272
|
+
if item["isDir"]:
|
|
273
|
+
filename = item["name"]
|
|
274
|
+
results[filename] = f"{scheme}://{storage_account_prefix}{results_container}/{item['path']}"
|
|
275
|
+
return results
|
|
276
|
+
|
|
137
277
|
def _create_cromwell_header(self):
|
|
138
278
|
"""Generates cromwell header.
|
|
139
279
|
|
|
@@ -0,0 +1,11 @@
|
|
|
1
|
+
"""
|
|
2
|
+
Utility functions and classes to use across the package.
|
|
3
|
+
"""
|
|
4
|
+
|
|
5
|
+
from .errors import BadRequestException, TimeOutException, AccountNotLinkedException, JoBNotCompletedException, NotAuthorisedException, NoCloudForWorkspaceException
|
|
6
|
+
from .requests import retry_requests_get, retry_requests_post, retry_requests_put
|
|
7
|
+
from .resources import format_bytes, ssl_selector
|
|
8
|
+
from .cloud import find_cloud
|
|
9
|
+
from .cloud import find_cloud
|
|
10
|
+
|
|
11
|
+
__all__ = ['errors', 'requests', 'resources', 'cloud']
|
|
@@ -0,0 +1,37 @@
|
|
|
1
|
+
from cloudos_cli.utils.requests import retry_requests_get
|
|
2
|
+
from cloudos_cli.utils import BadRequestException
|
|
3
|
+
from cloudos_cli.utils.errors import NoCloudForWorkspaceException
|
|
4
|
+
|
|
5
|
+
|
|
6
|
+
def find_cloud(cloudos_url, apikey, workspace_id, logs):
|
|
7
|
+
if "s3BucketName" in logs:
|
|
8
|
+
cloud_name = "aws"
|
|
9
|
+
meta = {}
|
|
10
|
+
storage = {
|
|
11
|
+
"container": "s3BucketName",
|
|
12
|
+
"prefix": "s3Prefix",
|
|
13
|
+
"scheme": "s3"
|
|
14
|
+
}
|
|
15
|
+
return cloud_name, meta, storage
|
|
16
|
+
else:
|
|
17
|
+
headers = {
|
|
18
|
+
"Content-Type": "application/json",
|
|
19
|
+
"Accept": "application/json",
|
|
20
|
+
"apikey": apikey
|
|
21
|
+
}
|
|
22
|
+
params = dict(teamId=workspace_id)
|
|
23
|
+
url = f"{cloudos_url}/api/v1/cloud/azure"
|
|
24
|
+
r = retry_requests_get(url, headers=headers, params=params)
|
|
25
|
+
if r.status_code >= 400:
|
|
26
|
+
raise BadRequestException(r)
|
|
27
|
+
if r.json() and r.text != "null":
|
|
28
|
+
cloud_data = r.json()
|
|
29
|
+
cloud_name = "azure"
|
|
30
|
+
storage = {
|
|
31
|
+
"container": "blobContainerName",
|
|
32
|
+
"prefix": "blobPrefix",
|
|
33
|
+
"scheme": "az"
|
|
34
|
+
}
|
|
35
|
+
return cloud_name, cloud_data, storage
|
|
36
|
+
|
|
37
|
+
raise NoCloudForWorkspaceException(workspace_id)
|
|
@@ -41,4 +41,26 @@ class AccountNotLinkedException(Exception):
|
|
|
41
41
|
msg = (f"The pipeline at the URL {wf_url} cannot be imported. Check that you repository account " +
|
|
42
42
|
"has been linked in your cloudOS workspace")
|
|
43
43
|
super(AccountNotLinkedException, self).__init__(msg)
|
|
44
|
-
self.wf_url = wf_url
|
|
44
|
+
self.wf_url = wf_url
|
|
45
|
+
|
|
46
|
+
|
|
47
|
+
class JoBNotCompletedException(Exception):
    """Raised when results are requested for a job that has not completed.

    Note: the class name keeps its historical spelling ("JoB") because it is
    part of the package's public API and is imported elsewhere.
    """

    def __init__(self, job, status):
        # Results only exist once a job reaches the "completed" state.
        message = (
            f"Job {job} has status {status}. "
            "Results are only available for jobs with status \"completed\""
        )
        super().__init__(message)
        self.job = job
        self.status = status
|
|
53
|
+
|
|
54
|
+
|
|
55
|
+
class NotAuthorisedException(Exception):
    """Raised when an operation is rejected for lack of authorisation."""

    def __init__(self):
        # Covers both a bad API key and a resource living outside the
        # workspace configured in the CLI.
        message = (
            "Not authorised to run this operation. Check your API key, "
            "and that the resource you request is "
            "in the same workspace as the workspace specified in the cloudOS cli"
        )
        super().__init__(message)
|
|
60
|
+
|
|
61
|
+
|
|
62
|
+
class NoCloudForWorkspaceException(Exception):
    """Raised when a workspace maps to no supported cloud provider."""

    def __init__(self, workspace_id):
        message = (
            f"Workspace ID {workspace_id} is not associated with "
            "supported cloud providers. Check the workspace ID"
        )
        super().__init__(message)
        # Kept on the instance so callers can report which workspace failed.
        self.workspace_id = workspace_id
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.4
|
|
2
2
|
Name: cloudos_cli
|
|
3
|
-
Version: 2.30.0
|
|
3
|
+
Version: 2.31.0
|
|
4
4
|
Summary: Python package for interacting with CloudOS
|
|
5
5
|
Home-page: https://github.com/lifebit-ai/cloudos-cli
|
|
6
6
|
Author: David Piñeyro
|
|
@@ -420,6 +420,44 @@ command.
|
|
|
420
420
|
Other options like `--wait-completion` are also available and work in the same way as for the `cloudos job run` command.
|
|
421
421
|
Check `cloudos bash job --help` for more details.
|
|
422
422
|
|
|
423
|
+
#### Get path to logs of job from CloudOS
|
|
424
|
+
|
|
425
|
+
Get the path to "Nextflow logs", "Nextflow standard output", and "trace" files. It can be used only on your user's jobs, with any status.
|
|
426
|
+
|
|
427
|
+
Example
|
|
428
|
+
```console
|
|
429
|
+
cloudos job logs --cloudos-url $CLOUDOS \
|
|
430
|
+
--apikey $MY_API_KEY \
|
|
431
|
+
--workspace-id $WORKSPACE_ID \
|
|
432
|
+
--job-id "12345678910"
|
|
433
|
+
|
|
434
|
+
|
|
435
|
+
Executing logs...
|
|
436
|
+
Logs URI: s3://path/to/location/of/logs
|
|
437
|
+
|
|
438
|
+
Nextflow log: s3://path/to/location/of/logs/.nextflow.log
|
|
439
|
+
|
|
440
|
+
Nextflow standard output: s3://path/to/location/of/logs/stdout.txt
|
|
441
|
+
|
|
442
|
+
Trace file: s3://path/to/location/of/logs/trace.txt
|
|
443
|
+
```
|
|
444
|
+
|
|
445
|
+
#### Get path to result files of jobs from CloudOS
|
|
446
|
+
|
|
447
|
+
Get the path where CloudOS stores the output files for a job. This can be used only on your user's jobs and for jobs with "completed" status.
|
|
448
|
+
|
|
449
|
+
Example
|
|
450
|
+
```console
|
|
451
|
+
cloudos job results --cloudos-url $CLOUDOS \
|
|
452
|
+
--apikey $MY_API_KEY \
|
|
453
|
+
--workspace-id $WORKSPACE_ID \
|
|
454
|
+
--job-id "12345678910"
|
|
455
|
+
|
|
456
|
+
|
|
457
|
+
Executing results...
|
|
458
|
+
results: s3://path/to/location/of/results/results/
|
|
459
|
+
```
|
|
460
|
+
|
|
423
461
|
#### Abort single or multiple jobs from CloudOS
|
|
424
462
|
|
|
425
463
|
Aborts jobs in the CloudOS workspace that are either running or initialising. It can be used with one or more job IDs provided as a comma separated string using the `--job-ids` parameter.
|
|
@@ -1 +0,0 @@
|
|
|
1
|
-
__version__ = '2.30.0'
|
|
@@ -1,9 +0,0 @@
|
|
|
1
|
-
"""
|
|
2
|
-
Utility functions and classes to use across the package.
|
|
3
|
-
"""
|
|
4
|
-
|
|
5
|
-
from .errors import BadRequestException, TimeOutException
|
|
6
|
-
from .requests import retry_requests_get, retry_requests_post
|
|
7
|
-
from .resources import format_bytes, ssl_selector
|
|
8
|
-
|
|
9
|
-
__all__ = ['errors', 'requests', 'resources']
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|
|
File without changes
|