dxpy 0.384.0__tar.gz → 0.386.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dxpy-0.384.0 → dxpy-0.386.0}/PKG-INFO +9 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/Readme.md +8 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxapplet.py +8 -4
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxfile.py +1 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxjob.py +20 -14
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxproject.py +18 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/exec_io.py +2 -2
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/parsers.py +4 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/scripts/dx.py +20 -5
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/scripts/dx_build_app.py +5 -5
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/system_requirements.py +1 -1
- dxpy-0.386.0/dxpy/toolkit_version.py +1 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/describe.py +3 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/exec_utils.py +2 -2
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/file_load_utils.py +2 -6
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/local_exec_utils.py +4 -4
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/PKG-INFO +9 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dx_app_wizard.py +0 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dx_bash_helpers.py +24 -8
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxclient.py +75 -25
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxpy.py +13 -3
- dxpy-0.384.0/dxpy/toolkit_version.py +0 -1
- {dxpy-0.384.0 → dxpy-0.386.0}/MANIFEST.in +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/api.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/app_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/app_categories.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/asset_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/dataset.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/json_validation_by_schema.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/vizclient.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/auth.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/download_all_inputs.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxanalysis.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxapp.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxdatabase.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxdatabase_functions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxfile_functions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxrecord.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/dxworkflow.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/mount_all_inputs.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/bindings/search.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/cp.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/dataset_utilities.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/download.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/help_messages.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/org.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/output_handling.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/cli/workflow.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/compat.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/dxlog.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/exceptions.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/executable_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/ImageRef.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/awscli_assets.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/collect_images.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextaur_assets.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextaur_assets.staging.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextflow_assets.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextflow_assets.staging.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextflow_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextflow_templates.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/nextflow/nextflow_utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/packages/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/scripts/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/scripts/dx_app_wizard.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/scripts/dx_build_applet.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/ssh_tunnel_app_support.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/bash.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/python.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/Readme.md +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/templating/utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/__init__.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/batch_utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/completer.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/config.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/executable_unbuilder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/file_handle.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/genomic_utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/job_log_client.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/pathmatch.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/pretty_print.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/printing.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/resolver.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/spelling_corrector.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/utils/version.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy/workflow_builder.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/SOURCES.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/dependency_links.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/entry_points.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/not-zip-safe +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/requires.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/dxpy.egg-info/top_level.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/requirements.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/requirements_setuptools.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/requirements_test.txt +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-clone-asset +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-docker +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-download-all-inputs +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-fetch-bundled-depends +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-generate-dxapp +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-add-output +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-dxlink +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-get-identity-token +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-new-job +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-parse-link +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-jobutil-report-error +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-log-stream +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-mount-all-inputs +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-notebook-reconnect +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-print-bash-vars +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/scripts/dx-upload-all-outputs +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/setup.cfg +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/setup.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_batch.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_create_cohort.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_describe.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dx-docker.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dx_completion.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dx_symlink.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxabs.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxasset.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxpy_utils.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_dxunpack.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_extract_assay.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_extract_dataset.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_extract_expression.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_extract_somatic.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_nextflow.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_nextflow_ImageRef.py +0 -0
- {dxpy-0.384.0 → dxpy-0.386.0}/test/test_nextflow_ImageRefFactory.py +0 -0

````diff
--- dxpy-0.384.0/PKG-INFO
+++ dxpy-0.386.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.384.0
+Version: 0.386.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -48,6 +48,14 @@ Example:
 $ _DX_DEBUG=1 dx ls
 ```
 
+### Debugging inside the IDE (PyCharm)
+To be able to debug dx-toolkit (dx commands) directly in the IDE, 'Run/Debug Configurations' needs to be changed.
+1. Go to Run → Edit Configurations...
+2. Add New Configuration (Python)
+3. Change script to module (dxpy.scripts.dx)
+4. To Script parameters field write dx command you want to run (eg 'ls' runs 'dx ls')
+5. Apply and OK (now it is possible to start debugging via main() function in dx.py)
+
 Python coding style
 -------------------
 
````
````diff
--- dxpy-0.384.0/Readme.md
+++ dxpy-0.386.0/Readme.md
@@ -27,6 +27,14 @@ Example:
 $ _DX_DEBUG=1 dx ls
 ```
 
+### Debugging inside the IDE (PyCharm)
+To be able to debug dx-toolkit (dx commands) directly in the IDE, 'Run/Debug Configurations' needs to be changed.
+1. Go to Run → Edit Configurations...
+2. Add New Configuration (Python)
+3. Change script to module (dxpy.scripts.dx)
+4. To Script parameters field write dx command you want to run (eg 'ls' runs 'dx ls')
+5. Apply and OK (now it is possible to start debugging via main() function in dx.py)
+
 Python coding style
 -------------------
 
````
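
The new Readme section configures PyCharm to run `dxpy.scripts.dx` as a module. A minimal sketch of what that configuration amounts to outside the IDE, assuming dxpy is installed in the active interpreter (the `ls` argument is only an example):

```python
# Roughly what the "module: dxpy.scripts.dx" run configuration executes.
import sys
from dxpy.scripts import dx

sys.argv = ["dx", "ls"]  # mirrors the "Script parameters" field; equivalent to `dx ls`
dx.main()                # set a breakpoint here (or inside dx.py) before starting the debugger
```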
```diff
--- dxpy-0.384.0/dxpy/bindings/dxapplet.py
+++ dxpy-0.386.0/dxpy/bindings/dxapplet.py
@@ -58,11 +58,12 @@ class DXExecutable:
 if kwargs.get(arg) is not None:
 run_input[arg] = kwargs[arg]
 
-if kwargs.get(
+if any(kwargs.get(key) is not None for key in ['instance_type', 'cluster_spec', 'fpga_driver', 'nvidia_driver']):
 instance_type_srd = SystemRequirementsDict.from_instance_type(kwargs.get('instance_type'))
 cluster_spec_srd = SystemRequirementsDict(kwargs.get('cluster_spec'))
 fpga_driver_srd = SystemRequirementsDict(kwargs.get('fpga_driver'))
-
+nvidia_driver_srd = SystemRequirementsDict(kwargs.get('nvidia_driver'))
+run_input["systemRequirements"] = (instance_type_srd + cluster_spec_srd + fpga_driver_srd + nvidia_driver_srd).as_dict()
 
 if kwargs.get('system_requirements') is not None:
 run_input["systemRequirements"] = kwargs.get('system_requirements')
@@ -195,7 +196,7 @@ class DXExecutable:
 depends_on=None, allow_ssh=None, debug=None, delay_workspace_destruction=None, priority=None, head_job_on_demand=None,
 ignore_reuse=None, ignore_reuse_stages=None, detach=None, cost_limit=None, rank=None, max_tree_spot_wait_time=None,
 max_job_spot_wait_time=None, preserve_job_outputs=None, detailed_job_metrics=None, extra_args=None,
-fpga_driver=None, system_requirements=None, system_requirements_by_executable=None, **kwargs):
+fpga_driver=None, system_requirements=None, system_requirements_by_executable=None, nvidia_driver=None, **kwargs):
 '''
 :param executable_input: Hash of the executable's input arguments
 :type executable_input: dict
@@ -252,6 +253,8 @@ class DXExecutable:
 :type system_requirements: dict
 :param system_requirements_by_executable: System requirement by executable double mapping
 :type system_requirements_by_executable: dict
+:param nvidia_driver: a dict mapping function names to nvidia driver requests
+:type nvidia_driver: dict
 :rtype: :class:`~dxpy.bindings.dxjob.DXJob`
 
 Creates a new job that executes the function "main" of this executable with
@@ -292,7 +295,8 @@ class DXExecutable:
 extra_args=extra_args,
 fpga_driver=fpga_driver,
 system_requirements=system_requirements,
-system_requirements_by_executable=system_requirements_by_executable
+system_requirements_by_executable=system_requirements_by_executable,
+nvidia_driver=nvidia_driver)
 return self._run_impl(run_input, **kwargs)
 
 
```
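
`DXExecutable.run()` now accepts an `nvidia_driver` argument that is folded into `systemRequirements` alongside `instance_type`, `cluster_spec` and `fpga_driver`. A hedged sketch of how a caller might pass it, assuming the same per-entrypoint shape the sibling arguments use; the applet and file IDs are placeholders, while `"R535"` and the GPU instance name come from the test changes further down:

```python
import dxpy

applet = dxpy.DXApplet("applet-xxxx")            # placeholder ID, not from this diff
job = applet.run(
    {"reads": dxpy.dxlink("file-xxxx")},         # placeholder input
    instance_type="mem2_ssd1_gpu_x16",
    # Assumed shape: entry point -> {"nvidiaDriver": <version>}, mirroring
    # how cluster_spec/fpga_driver are wrapped by SystemRequirementsDict.
    nvidia_driver={"main": {"nvidiaDriver": "R535"}},
)
```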
```diff
--- dxpy-0.384.0/dxpy/bindings/dxfile.py
+++ dxpy-0.386.0/dxpy/bindings/dxfile.py
@@ -728,7 +728,7 @@ class DXFile(DXDataObject):
 report_progress_fn(self, len(data))
 
 def wait_until_parts_uploaded(self, **kwargs):
-self._wait_until_parts_uploaded(
+self._wait_until_parts_uploaded(**kwargs)
 
 
 def get_download_url(self, duration=None, preauthenticated=False, filename=None, project=None, **kwargs):
```
```diff
--- dxpy-0.384.0/dxpy/bindings/dxjob.py
+++ dxpy-0.386.0/dxpy/bindings/dxjob.py
@@ -38,14 +38,15 @@ from ..system_requirements import SystemRequirementsDict
 from ..utils.local_exec_utils import queue_entry_point
 from ..compat import basestring
 
+
 #########
 # DXJob #
 #########
 
-
-
-
-**kwargs):
+
+def new_dxjob(fn_input, fn_name, name=None, tags=None, properties=None, details=None, instance_type=None,
+depends_on=None, cluster_spec=None, fpga_driver=None, system_requirements=None,
+system_requirements_by_executable=None, nvidia_driver=None, **kwargs):
 '''
 :param fn_input: Function input
 :type fn_input: dict
@@ -71,6 +72,8 @@ def new_dxjob(fn_input, fn_name, name=None, tags=None, properties=None, details=
 :type system_requirements: dict
 :param system_requirements_by_executable: System requirement by executable double mapping
 :type system_requirements_by_executable: dict
+:param nvidia_driver: a dict mapping function names to nvidia driver requests
+:type nvidia_driver: dict
 :rtype: :class:`~dxpy.bindings.dxjob.DXJob`
 
 Creates and enqueues a new job that will execute a particular
@@ -94,12 +97,13 @@ def new_dxjob(fn_input, fn_name, name=None, tags=None, properties=None, details=
 
 '''
 dxjob = DXJob()
-dxjob.new(fn_input, fn_name, name=name, tags=tags, properties=properties,
-
-
-
+dxjob.new(fn_input, fn_name, name=name, tags=tags, properties=properties, details=details,
+instance_type=instance_type, depends_on=depends_on, cluster_spec=cluster_spec, fpga_driver=fpga_driver,
+system_requirements=system_requirements, system_requirements_by_executable=system_requirements_by_executable,
+nvidia_driver=nvidia_driver, **kwargs)
 return dxjob
 
+
 class DXJob(DXObject):
 '''
 Remote job object handler.
@@ -112,10 +116,9 @@ class DXJob(DXObject):
 DXObject.__init__(self, dxid=dxid)
 self.set_id(dxid)
 
-def new(self, fn_input, fn_name, name=None, tags=None, properties=None, details=None,
-
-
-**kwargs):
+def new(self, fn_input, fn_name, name=None, tags=None, properties=None, details=None, instance_type=None,
+depends_on=None, cluster_spec=None, fpga_driver=None, system_requirements=None,
+system_requirements_by_executable=None, nvidia_driver=None, **kwargs):
 '''
 :param fn_input: Function input
 :type fn_input: dict
@@ -141,6 +144,8 @@ class DXJob(DXObject):
 :type system_requirements: dict
 :param system_requirements_by_executable: System requirement by executable double mapping
 :type system_requirements_by_executable: dict
+:param nvidia_driver: a dict mapping function names to nvidia driver requests
+:type nvidia_driver: dict
 
 Creates and enqueues a new job that will execute a particular
 function (from the same app or applet as the one the current job
@@ -179,11 +184,12 @@ class DXJob(DXObject):
 req_input["tags"] = tags
 if properties is not None:
 req_input["properties"] = properties
-if
+if any(requirement is not None for requirement in [instance_type, cluster_spec, fpga_driver, nvidia_driver]):
 instance_type_srd = SystemRequirementsDict.from_instance_type(instance_type, fn_name)
 cluster_spec_srd = SystemRequirementsDict(cluster_spec)
 fpga_driver_srd = SystemRequirementsDict(fpga_driver)
-
+nvidia_driver_srd = SystemRequirementsDict(nvidia_driver)
+req_input["systemRequirements"] = (instance_type_srd + cluster_spec_srd + fpga_driver_srd + nvidia_driver_srd).as_dict()
 if system_requirements is not None:
 req_input["systemRequirements"] = system_requirements
 if system_requirements_by_executable is not None:
```
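
`new_dxjob()` and `DXJob.new()` pick up the same `nvidia_driver` mapping when spawning a subjob from inside a running job. A rough sketch under the same shape assumption as above; this only makes sense inside a DNAnexus execution environment, and the `"process"` entry point is hypothetical:

```python
from dxpy.bindings.dxjob import new_dxjob

# Spawn a subjob for a hypothetical "process" entry point, requesting a
# specific NVIDIA driver for it (per-entrypoint shape assumed, not confirmed by this diff).
subjob = new_dxjob(
    fn_input={"chunk": 3},
    fn_name="process",
    nvidia_driver={"process": {"nvidiaDriver": "R535"}},
)
```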
```diff
--- dxpy-0.384.0/dxpy/bindings/dxproject.py
+++ dxpy-0.386.0/dxpy/bindings/dxproject.py
@@ -285,6 +285,7 @@ class DXProject(DXContainer):
 restricted=None, download_restricted=None, contains_phi=None,
 tags=None, properties=None, bill_to=None, database_ui_view_only=None,
 external_upload_restricted=None, default_symlink=None,
+database_results_restricted=None,
 **kwargs):
 """
 :param name: The name of the project
@@ -313,6 +314,8 @@ class DXProject(DXContainer):
 :type database_ui_view_only: boolean
 :param external_upload_restricted: If provided, whether project members can upload data to project from external sources, e.g. outside of job
 :type external_upload_restricted: boolean
+:param database_results_restricted: If provided, minimum amount of data that project members with VIEW access can see from databases in the project
+:type database_results_restricted: int
 :param default_symlink: If provided, the details needed to have writable symlinks in the project. Dict must include drive, container, and optional prefix.
 :type default_symlink: dict
 
@@ -346,6 +349,8 @@ class DXProject(DXContainer):
 input_hash["databaseUIViewOnly"] = database_ui_view_only
 if external_upload_restricted is not None:
 input_hash["externalUploadRestricted"] = external_upload_restricted
+if database_results_restricted is not None:
+input_hash["databaseResultsRestricted"] = database_results_restricted
 if tags is not None:
 input_hash["tags"] = tags
 if properties is not None:
@@ -360,7 +365,9 @@ class DXProject(DXContainer):
 def update(self, name=None, summary=None, description=None, protected=None,
 restricted=None, download_restricted=None, version=None,
 allowed_executables=None, unset_allowed_executables=None,
-database_ui_view_only=None, external_upload_restricted=None,
+database_ui_view_only=None, external_upload_restricted=None,
+database_results_restricted=None, unset_database_results_restricted=None,
+**kwargs):
 """
 :param name: If provided, the new project name
 :type name: string
@@ -376,10 +383,16 @@ class DXProject(DXContainer):
 :type download_restricted: boolean
 :param allowed_executables: If provided, these are the only executable ID(s) allowed to run as root executions in this project
 :type allowed_executables: list
+:param unset_allowed_executables: If provided, removes any restrictions set by allowed_executables
+:type unset_allowed_executables: boolean
 :param database_ui_view_only: If provided, whether the viewers on the project can access the database data directly
 :type database_ui_view_only: boolean
 :param external_upload_restricted: If provided, whether project members can upload data to project from external sources, e.g. outside of job
 :type external_upload_restricted: boolean
+:param database_results_restricted: If provided, minimum amount of data that project members with VIEW access can see from databases in the project
+:type database_results_restricted: int
+:param unset_database_results_restricted: If provided, removes any restrictions set by database_results_restricted
+:type unset_database_results_restricted: boolean
 :param version: If provided, the update will only occur if the value matches the current project's version number
 :type version: int
 
@@ -413,6 +426,10 @@ class DXProject(DXContainer):
 update_hash["databaseUIViewOnly"] = database_ui_view_only
 if external_upload_restricted is not None:
 update_hash["externalUploadRestricted"] = external_upload_restricted
+if database_results_restricted is not None:
+update_hash["databaseResultsRestricted"] = database_results_restricted
+if unset_database_results_restricted is not None:
+update_hash["databaseResultsRestricted"] = None
 dxpy.api.project_update(self._dxid, update_hash, **kwargs)
 
 def invite(self, invitee, level, send_email=True, **kwargs):
```
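
`DXProject.new()` and `DXProject.update()` gain `database_results_restricted` (plus an unset flag on `update()`), wired to the `databaseResultsRestricted` API field. A short sketch of the new keywords; the project ID and threshold value are placeholders:

```python
import dxpy

project = dxpy.DXProject("project-xxxx")                 # placeholder ID
project.update(database_results_restricted=100)          # sets databaseResultsRestricted
project.update(unset_database_results_restricted=True)   # clears the restriction again
```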
```diff
--- dxpy-0.384.0/dxpy/cli/exec_io.py
+++ dxpy-0.386.0/dxpy/cli/exec_io.py
@@ -22,7 +22,7 @@ from __future__ import print_function, unicode_literals, division, absolute_import
 
 # TODO: refactor all dx run helper functions here
 
-import os, sys, json, collections,
+import os, sys, json, collections, shlex
 from ..bindings.dxworkflow import DXWorkflow
 
 import dxpy
@@ -327,7 +327,7 @@ def format_choices_or_suggestions(header, items, obj_class, initial_indent=' ' *
 # TODO: in interactive prompts the quotes here may be a bit
 # misleading. Perhaps it should be a separate mode to print
 # "interactive-ready" suggestions.
-return fill(header + ' ' + ', '.join([
+return fill(header + ' ' + ', '.join([shlex.quote(str(item)) for item in items]),
 initial_indent=initial_indent,
 subsequent_indent=subsequent_indent)
 
```
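
Several files in this release (exec_io.py, exec_utils.py, file_load_utils.py, local_exec_utils.py and the tests) switch their shell quoting to `shlex.quote`, completing the `pipes.quote` migration noted in the comment removed from file_load_utils.py below. A quick illustration of what the helper does:

```python
import shlex

name = "my file; rm -rf ~"
print("dx-unpack {}".format(shlex.quote(name)))
# dx-unpack 'my file; rm -rf ~'   <- the argument can no longer break out of the shell command
```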
```diff
--- dxpy-0.384.0/dxpy/cli/parsers.py
+++ dxpy-0.386.0/dxpy/cli/parsers.py
@@ -446,6 +446,10 @@ def get_update_project_args(args):
 input_params['allowedExecutables'] = args.allowed_executables
 if args.unset_allowed_executables:
 input_params['allowedExecutables'] = None
+if args.database_results_restricted is not None:
+input_params['databaseResultsRestricted'] = args.database_results_restricted
+if args.unset_database_results_restricted:
+input_params['databaseResultsRestricted'] = None
 if args.external_upload_restricted is not None:
 input_params['externalUploadRestricted'] = args.external_upload_restricted == 'true'
 return input_params
```
```diff
--- dxpy-0.384.0/dxpy/scripts/dx.py
+++ dxpy-0.386.0/dxpy/scripts/dx.py
@@ -1432,6 +1432,8 @@ def new_project(args):
 inputs["containsPHI"] = True
 if args.database_ui_view_only:
 inputs["databaseUIViewOnly"] = True
+if args.database_results_restricted is not None:
+inputs["databaseResultsRestricted"] = args.database_results_restricted
 if args.monthly_compute_limit is not None:
 inputs["monthlyComputeLimit"] = args.monthly_compute_limit
 if args.monthly_egress_bytes_limit is not None:
@@ -1440,6 +1442,8 @@ def new_project(args):
 inputs["monthlyStorageLimit"] = args.monthly_storage_limit
 if args.default_symlink is not None:
 inputs["defaultSymlink"] = json.loads(args.default_symlink)
+if args.drive is not None:
+inputs["drive"] = args.drive
 try:
 resp = dxpy.api.project_new(inputs)
 if args.brief:
@@ -3195,10 +3199,12 @@ def run_body(args, executable, dest_proj, dest_path, preset_inputs=None, input_n
 cloned_instance_type = SystemRequirementsDict.from_sys_requirements(cloned_system_requirements, _type='instanceType')
 cloned_cluster_spec = SystemRequirementsDict.from_sys_requirements(cloned_system_requirements, _type='clusterSpec')
 cloned_fpga_driver = SystemRequirementsDict.from_sys_requirements(cloned_system_requirements, _type='fpgaDriver')
+cloned_nvidia_driver = SystemRequirementsDict.from_sys_requirements(cloned_system_requirements, _type='nvidiaDriver')
 cloned_system_requirements_by_executable = args.cloned_job_desc.get("mergedSystemRequirementsByExecutable", {}) or {}
 else:
 cloned_system_requirements = {}
-cloned_instance_type, cloned_cluster_spec, cloned_fpga_driver =
+cloned_instance_type, cloned_cluster_spec, cloned_fpga_driver, cloned_nvidia_driver = (
+SystemRequirementsDict({}), SystemRequirementsDict({}), SystemRequirementsDict({}), SystemRequirementsDict({}))
 cloned_system_requirements_by_executable = {}
 
 # convert runtime --instance-type into mapping {entrypoint:{'instanceType':xxx}}
@@ -3227,12 +3233,15 @@ def run_body(args, executable, dest_proj, dest_path, preset_inputs=None, input_n
 else:
 requested_cluster_spec = cloned_cluster_spec
 
-# fpga driver now does not have corresponding dx run option,
+# fpga/nvidia driver now does not have corresponding dx run option,
+# so it can only be requested using the cloned value
 requested_fpga_driver = cloned_fpga_driver
+requested_nvidia_driver = cloned_nvidia_driver
 
-# combine the requested instance type, full cluster spec, fpga spec
+# combine the requested instance type, full cluster spec, fpga spec, nvidia spec
 # into the runtime systemRequirements
-requested_system_requirements = (requested_instance_type + requested_cluster_spec + requested_fpga_driver
+requested_system_requirements = (requested_instance_type + requested_cluster_spec + requested_fpga_driver +
+requested_nvidia_driver).as_dict()
 
 if (args.instance_type and cloned_system_requirements_by_executable):
 warning = BOLD("WARNING") + ": --instance-type argument: {} may get overridden by".format(args.instance_type)
@@ -3283,6 +3292,7 @@ def run_body(args, executable, dest_proj, dest_path, preset_inputs=None, input_n
 "instance_type": None,
 "cluster_spec": None,
 "fpga_driver": None,
+"nvidia_driver": None,
 "stage_instance_types": args.stage_instance_types,
 "stage_folders": args.stage_folders,
 "rerun_stages": args.rerun_stages,
@@ -5419,6 +5429,9 @@ parser_update_project.add_argument('--bill-to', help="Update the user or org ID
 allowed_executables_group = parser_update_project.add_mutually_exclusive_group()
 allowed_executables_group.add_argument('--allowed-executables', help='Executable ID(s) this project is allowed to run. This operation overrides any existing list of executables.', type=str, nargs="+")
 allowed_executables_group.add_argument('--unset-allowed-executables', help='Removes any restriction to run executables as set by --allowed-executables', action='store_true')
+database_results_restricted_group = parser_update_project.add_mutually_exclusive_group()
+database_results_restricted_group.add_argument('--database-results-restricted', help='Viewers on the project can access only more than specified size of visual data from databases', type=positive_integer)
+database_results_restricted_group.add_argument('--unset-database-results-restricted', help='Removes any restriction to return data from databases as set by --database-results-restricted', action='store_true')
 
 parser_update_project.set_defaults(func=update_project)
 register_parser(parser_update_project, subparsers_action=subparsers_update, categories="metadata")
@@ -5805,10 +5818,12 @@ parser_new_project.add_argument('--phi', help='Add PHI protection to project', d
 action='store_true')
 parser_new_project.add_argument('--database-ui-view-only', help='Viewers on the project cannot access database data directly', default=False,
 action='store_true')
+parser_new_project.add_argument('--database-results-restricted', help='Viewers on the project can access only more than specified size of visual data from databases', type=positive_integer)
 parser_new_project.add_argument('--monthly-compute-limit', type=positive_integer, help='Monthly project spending limit for compute')
 parser_new_project.add_argument('--monthly-egress-bytes-limit', type=positive_integer, help='Monthly project spending limit for egress (in Bytes)')
 parser_new_project.add_argument('--monthly-storage-limit', type=positive_number, help='Monthly project spending limit for storage')
-parser_new_project.add_argument('--default-symlink', help='Default symlink for external
+parser_new_project.add_argument('--default-symlink', help='Default symlink for external storage account')
+parser_new_project.add_argument('--drive', help='Drive for external storage account')
 parser_new_project.set_defaults(func=new_project)
 register_parser(parser_new_project, subparsers_action=subparsers_new, categories='fs')
 
```
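
On the CLI side, `dx new project` gains `--database-results-restricted` and `--drive`, `dx update project` gains `--database-results-restricted` / `--unset-database-results-restricted`, and `dx run` now also clones any `nvidiaDriver` entry from a source job's system requirements. A hedged sketch of the `project/new` hash the new flags translate into (names and IDs below are placeholders, not taken from this diff):

```python
import dxpy

project_input = {
    "name": "my-project",                                  # placeholder name
    "databaseResultsRestricted": 100,                      # from --database-results-restricted 100
    "drive": "drive-xxxx",                                 # from --drive (placeholder ID)
    "defaultSymlink": {"drive": "drive-xxxx", "container": "my-bucket"},  # from --default-symlink
}
# dxpy.api.project_new(project_input)  # uncomment when run against a live DNAnexus session
```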
```diff
--- dxpy-0.384.0/dxpy/scripts/dx_build_app.py
+++ dxpy-0.386.0/dxpy/scripts/dx_build_app.py
@@ -357,11 +357,11 @@ def _verify_app_source_dir_impl(src_dir, temp_dir, mode, enforce=True):
 if "interpreter" not in manifest['runSpec']:
 raise dxpy.app_builder.AppBuilderException('runSpec.interpreter field was not present')
 
-if "
-
-
-
-
+if "distribution" not in manifest['runSpec']:
+raise dxpy.app_builder.AppBuilderException('Required field runSpec.distribution is not present')
+
+if "release" not in manifest['runSpec']:
+raise dxpy.app_builder.AppBuilderException('Required field runSpec.release is not present')
 
 if manifest['runSpec']['interpreter'] in ["python2.7", "bash", "python3"]:
 if "file" in manifest['runSpec']:
```
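
The app builder now rejects app(let) sources whose dxapp.json `runSpec` omits `distribution` or `release`. A sketch of a `runSpec` that passes the new check, written as the equivalent Python dict; the interpreter, file path, and the Ubuntu 20.04 pairing are illustrative values, not mandated by this diff:

```python
# Minimal runSpec fields the stricter validation in _verify_app_source_dir_impl expects.
run_spec = {
    "interpreter": "python3",
    "file": "src/code.py",
    "distribution": "Ubuntu",   # now required by the builder
    "release": "20.04",         # now required by the builder
}
```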
```diff
--- dxpy-0.384.0/dxpy/system_requirements.py
+++ dxpy-0.386.0/dxpy/system_requirements.py
@@ -85,7 +85,7 @@ class SystemRequirementsDict(object):
 It can extract only entrypoints with specific fields ('clusterSpec',
 'instanceType', etc), depending on the value of _type.
 """
-allowed_types = ['all', 'clusterSpec', 'instanceType', 'fpgaDriver']
+allowed_types = ['all', 'clusterSpec', 'instanceType', 'fpgaDriver', 'nvidiaDriver']
 if _type not in (allowed_types):
 raise DXError("Expected '_type' to be one of the following: {}".format(allowed_types))
 
```
```diff
--- /dev/null
+++ dxpy-0.386.0/dxpy/toolkit_version.py
@@ -0,0 +1 @@
+version = '0.386.0'
```
```diff
--- dxpy-0.384.0/dxpy/utils/describe.py
+++ dxpy-0.386.0/dxpy/utils/describe.py
@@ -415,7 +415,7 @@ def print_project_desc(desc, verbose=False):
 'containsPHI', 'databaseUIViewOnly', 'externalUploadRestricted', 'region', 'storageCost', 'pendingTransfer',
 'atSpendingLimit', 'currentMonthComputeAvailableBudget', 'currentMonthEgressBytesAvailableBudget',
 'currentMonthStorageAvailableBudget', 'currentMonthComputeUsage', 'currentMonthEgressBytesUsage',
-'currentMonthExpectedStorageUsage', 'defaultSymlink'
+'currentMonthExpectedStorageUsage', 'defaultSymlink', 'databaseResultsRestricted',
 # Following are app container-specific
 'destroyAt', 'project', 'type', 'app', 'appName'
 ]
@@ -455,6 +455,8 @@ def print_project_desc(desc, verbose=False):
 print_json_field('External Upload Restricted', desc['externalUploadRestricted'])
 if 'defaultSymlink' in desc and verbose:
 print_json_field('Default Symlink', desc['defaultSymlink'])
+if 'databaseResultsRestricted' in desc and desc['databaseResultsRestricted']:
+print_json_field('Database Results Restricted', desc['databaseResultsRestricted'])
 
 # Usage
 print_field("Created", render_timestamp(desc['created']))
```
```diff
--- dxpy-0.384.0/dxpy/utils/exec_utils.py
+++ dxpy-0.386.0/dxpy/utils/exec_utils.py
@@ -23,7 +23,7 @@ from __future__ import print_function, unicode_literals, division, absolute_import
 import os, sys, json, re, collections, logging, argparse, string, itertools, subprocess, tempfile
 from functools import wraps
 from collections import namedtuple
-import
+import shlex
 
 import dxpy
 from ..compat import USING_PYTHON2, open, Mapping
@@ -435,7 +435,7 @@ class DXExecDependencyInstaller(object):
 dxpy.download_dxfile(bundle["id"], bundle["name"], project=dxpy.WORKSPACE_ID)
 except dxpy.exceptions.ResourceNotFound:
 dxpy.download_dxfile(bundle["id"], bundle["name"])
-self.run("dx-unpack {}".format(
+self.run("dx-unpack {}".format(shlex.quote(bundle["name"])))
 else:
 self.log('Skipping bundled dependency "{name}" because it does not refer to a file'.format(**bundle))
 
```
```diff
--- dxpy-0.384.0/dxpy/utils/file_load_utils.py
+++ dxpy-0.386.0/dxpy/utils/file_load_utils.py
@@ -83,7 +83,7 @@ will download into the execution environment:
 from __future__ import print_function, unicode_literals, division, absolute_import
 
 import json
-import
+import shlex
 import os
 import fnmatch
 import sys
@@ -401,10 +401,6 @@ def analyze_bash_vars(job_input_file, job_homedir):
 return file_key_descs, rest_hash
 
 
-#
-# Note: pipes.quote() to be replaced with shlex.quote() in Python 3
-# (see http://docs.python.org/2/library/pipes.html#pipes.quote)
-#
 def gen_bash_vars(job_input_file, job_homedir=None, check_name_collision=True):
 """
 :param job_input_file: path to a JSON file describing the job inputs
@@ -427,7 +423,7 @@ def gen_bash_vars(job_input_file, job_homedir=None, check_name_collision=True):
 result = json.dumps(dxpy.dxlink(elem))
 else:
 result = json.dumps(elem)
-return
+return shlex.quote(result)
 
 def string_of_value(val):
 if isinstance(val, list):
```
```diff
--- dxpy-0.384.0/dxpy/utils/local_exec_utils.py
+++ dxpy-0.386.0/dxpy/utils/local_exec_utils.py
@@ -16,7 +16,7 @@
 
 from __future__ import print_function, unicode_literals, division, absolute_import
 
-import os, sys, json, subprocess,
+import os, sys, json, subprocess, shlex
 import collections, datetime
 
 import dxpy
@@ -351,9 +351,9 @@ def run_one_entry_point(job_id, function, input_hash, run_spec, depends_on, name
 if [[ $(type -t {function}) == "function" ]];
 then {function};
 else echo "$0: Global scope execution complete. Not invoking entry point function {function} because it was not found" 1>&2;
-fi'''.format(homedir=
-env_path=
-code_path=
+fi'''.format(homedir=shlex.quote(job_homedir),
+env_path=shlex.quote(os.path.join(job_env['HOME'], 'environment')),
+code_path=shlex.quote(environ['DX_TEST_CODE_PATH']),
 function=function)
 invocation_args = ['bash', '-c', '-e'] + (['-x'] if environ.get('DX_TEST_X_FLAG') else []) + [script]
 elif run_spec['interpreter'] == 'python2.7':
```
````diff
--- dxpy-0.384.0/dxpy.egg-info/PKG-INFO
+++ dxpy-0.386.0/dxpy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.384.0
+Version: 0.386.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -48,6 +48,14 @@ Example:
 $ _DX_DEBUG=1 dx ls
 ```
 
+### Debugging inside the IDE (PyCharm)
+To be able to debug dx-toolkit (dx commands) directly in the IDE, 'Run/Debug Configurations' needs to be changed.
+1. Go to Run → Edit Configurations...
+2. Add New Configuration (Python)
+3. Change script to module (dxpy.scripts.dx)
+4. To Script parameters field write dx command you want to run (eg 'ls' runs 'dx ls')
+5. Apply and OK (now it is possible to start debugging via main() function in dx.py)
+
 Python coding style
 -------------------
 
````
```diff
--- dxpy-0.384.0/test/test_dx_bash_helpers.py
+++ dxpy-0.386.0/test/test_dx_bash_helpers.py
@@ -23,7 +23,7 @@ import dxpy
 import dxpy_testutil as testutil
 import json
 import os
-import
+import shlex
 import pytest
 import shutil
 import tempfile
@@ -42,7 +42,7 @@ from dxpy.bindings.download_all_inputs import _get_num_parallel_threads
 def run(command, **kwargs):
 try:
 if isinstance(command, list) or isinstance(command, tuple):
-print("$ %s" % " ".join(
+print("$ %s" % " ".join(shlex.quote(f) for f in command))
 output = check_output(command, **kwargs)
 else:
 print("$ %s" % (command,))
```
```diff
--- dxpy-0.384.0/test/test_dxclient.py
+++ dxpy-0.386.0/test/test_dxclient.py
@@ -919,22 +919,38 @@ class TestDXJobutilNewJob(DXTestCase):
 ),
 # instance type: mapping
 ("--instance-type " +
-
+shlex.quote(json.dumps({"main": "mem2_hdd2_x2" , "other_function": "mem2_hdd2_x1" })),
 {"systemRequirements": {"main": { "instanceType": "mem2_hdd2_x2" },
 "other_function": { "instanceType": "mem2_hdd2_x1" }}}),
 ("--instance-type-by-executable " +
-
+shlex.quote(json.dumps({"my_applet": {"main": "mem2_hdd2_x2",
 "other_function": "mem3_ssd2_fpga1_x8"}})),
 {"systemRequirementsByExecutable": {"my_applet": {"main": {"instanceType": "mem2_hdd2_x2"},
 "other_function": {"instanceType": "mem3_ssd2_fpga1_x8"}}}}),
 ("--instance-type-by-executable " +
-
+shlex.quote(json.dumps({"my_applet": {"main": "mem1_ssd1_v2_x2",
 "other_function": "mem3_ssd2_fpga1_x8"}})) +
 " --extra-args " +
-
+shlex.quote(json.dumps({"systemRequirementsByExecutable": {"my_applet": {"main": {"instanceType": "mem2_hdd2_x2", "clusterSpec": {"initialInstanceCount": 3}},
 "other_function": {"fpgaDriver": "edico-1.4.5"}}}})),
 {"systemRequirementsByExecutable": {"my_applet":{"main": { "instanceType": "mem2_hdd2_x2", "clusterSpec":{"initialInstanceCount": 3}},
 "other_function": { "instanceType": "mem3_ssd2_fpga1_x8", "fpgaDriver": "edico-1.4.5"} }}}),
+# nvidia driver
+("--instance-type-by-executable " +
+shlex.quote(json.dumps({
+"my_applet": {
+"main": "mem1_ssd1_v2_x2",
+"other_function": "mem2_ssd1_gpu_x16"}})) +
+" --extra-args " +
+shlex.quote(json.dumps({
+"systemRequirementsByExecutable": {
+"my_applet": {
+"main": {"instanceType": "mem2_hdd2_x2"},
+"other_function": {"nvidiaDriver": "R535"}}}})),
+{"systemRequirementsByExecutable": {
+"my_applet": {"main": {"instanceType": "mem2_hdd2_x2"},
+"other_function": {"instanceType": "mem2_ssd1_gpu_x16",
+"nvidiaDriver": "R535"}}}}),
 # properties - mapping
 (
 "--property foo=foo_value --property bar=bar_value",
@@ -947,14 +963,14 @@ class TestDXJobutilNewJob(DXTestCase):
 self.assertNewJobInputHash(cmd_snippet, arguments_hash)
 
 def test_extra_arguments(self):
-cmd_snippet = "--extra-args " +
+cmd_snippet = "--extra-args " + shlex.quote(
 json.dumps({"details": {"d1": "detail1", "d2": 1234}, "foo": "foo_value"})
 )
 arguments_hash = {"details": {"d1": "detail1", "d2": 1234}, "foo": "foo_value"}
 self.assertNewJobInputHash(cmd_snippet, arguments_hash)
 
 # override previously specified args
-cmd_snippet = "--name JobName --extra-args " +
+cmd_snippet = "--name JobName --extra-args " + shlex.quote(
 json.dumps({"name": "FinalName"})
 )
 arguments_hash = {"name": "FinalName"}
```