dxpy 0.394.0__tar.gz → 0.396.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dxpy-0.394.0 → dxpy-0.396.0}/PKG-INFO +1 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/__init__.py +18 -5
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/app_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/asset_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxdatabase.py +3 -6
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxdatabase_functions.py +1 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxfile.py +27 -35
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxfile_functions.py +1 -3
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxproject.py +6 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/__init__.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/dataset_utilities.py +1 -6
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/exec_io.py +1 -3
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/org.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/parsers.py +2 -0
- dxpy-0.396.0/dxpy/compat.py +38 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dxlog.py +1 -8
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/exceptions.py +2 -5
- dxpy-0.396.0/dxpy/nextflow/app_asset_projects_ids_prod.json +9 -0
- dxpy-0.396.0/dxpy/nextflow/app_asset_projects_ids_staging.json +9 -0
- dxpy-0.396.0/dxpy/nextflow/nextaur_assets.json +9 -0
- dxpy-0.396.0/dxpy/nextflow/nextaur_assets.staging.json +9 -0
- dxpy-0.396.0/dxpy/nextflow/nextflow_assets.json +9 -0
- dxpy-0.396.0/dxpy/nextflow/nextflow_assets.staging.json +9 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/nextflow_templates.py +1 -6
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/nextflow_utils.py +31 -3
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/scripts/dx.py +11 -28
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/scripts/dx_app_wizard.py +0 -2
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/scripts/dx_build_app.py +3 -14
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/utils.py +0 -1
- dxpy-0.396.0/dxpy/toolkit_version.py +1 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/__init__.py +2 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/batch_utils.py +1 -4
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/completer.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/config.py +27 -33
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/describe.py +3 -7
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/exec_utils.py +6 -37
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/executable_unbuilder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/file_load_utils.py +9 -9
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/genomic_utils.py +1 -6
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/local_exec_utils.py +19 -29
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/pretty_print.py +2 -2
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/printing.py +2 -2
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/resolver.py +1 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/workflow_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/PKG-INFO +1 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/SOURCES.txt +2 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-log-stream +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-upload-all-outputs +1 -3
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_batch.py +3 -9
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dx_app_wizard.py +2 -8
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dx_completion.py +2 -5
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dx_symlink.py +0 -64
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxclient.py +29 -45
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxpy.py +3 -21
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxpy_utils.py +2 -6
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_nextflow.py +2 -12
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_nextflow_ImageRefFactory.py +1 -10
- dxpy-0.394.0/dxpy/compat.py +0 -232
- dxpy-0.394.0/dxpy/nextflow/nextaur_assets.json +0 -9
- dxpy-0.394.0/dxpy/nextflow/nextaur_assets.staging.json +0 -9
- dxpy-0.394.0/dxpy/nextflow/nextflow_assets.json +0 -9
- dxpy-0.394.0/dxpy/nextflow/nextflow_assets.staging.json +0 -9
- dxpy-0.394.0/dxpy/toolkit_version.py +0 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/MANIFEST.in +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/Readme.md +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/api.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/app_categories.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/dataset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/json_validation_by_schema.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/vizclient.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/auth.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/download_all_inputs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxanalysis.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxapp.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxapplet.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxjob.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxrecord.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/dxworkflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/mount_all_inputs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/bindings/search.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/cp.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/download.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/help_messages.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/output_handling.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/cli/workflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/executable_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/ImageRef.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/awscli_assets.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/collect_images.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/nextflow/nextflow_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/packages/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/scripts/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/scripts/dx_build_applet.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/ssh_tunnel_app_support.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/system_requirements.py +1 -1
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/bash.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/python.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/Readme.md +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/file_handle.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/job_log_client.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/pathmatch.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/spelling_corrector.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy/utils/version.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/dependency_links.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/entry_points.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/not-zip-safe +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/requires.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/dxpy.egg-info/top_level.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/requirements.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/requirements_setuptools.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/requirements_test.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-clone-asset +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-docker +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-download-all-inputs +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-fetch-bundled-depends +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-generate-dxapp +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-add-output +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-dxlink +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-get-identity-token +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-new-job +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-parse-link +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-jobutil-report-error +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-mount-all-inputs +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-notebook-reconnect +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/scripts/dx-print-bash-vars +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/setup.cfg +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/setup.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_create_cohort.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_describe.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dx-docker.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dx_bash_helpers.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxabs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxasset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_dxunpack.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_extract_assay.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_extract_dataset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_extract_expression.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_extract_somatic.py +0 -0
- {dxpy-0.394.0 → dxpy-0.396.0}/test/test_nextflow_ImageRef.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.394.0
+Version: 0.396.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -136,9 +136,11 @@ import socket
 import threading
 import certifi
 from collections import namedtuple
+from http.client import BadStatusLine
+from io import StringIO
+from reprlib import Repr

 from . import exceptions
-from .compat import BadStatusLine, StringIO, bytes, Repr
 from .utils.printing import BOLD, BLUE, YELLOW, GREEN, RED, WHITE

 from random import randint
@@ -353,7 +355,14 @@ def _calculate_retry_delay(response, num_attempts):
     '''
     if response is not None and response.status in (503, 429) and 'retry-after' in response.headers:
         try:
-
+            suggested_delay = int(response.headers['retry-after'])
+
+            # By default, apiserver doesn't track attempts and doesn't provide increased timeout over attempts.
+            # So, increasing backoff for throttled requests up to x5 times from the original one.
+            # The current implementation of apiserver returns a retry-after header ranging from 20 to 30 seconds.
+            # Thus, after the 20th attempt the delay will always be between 100 and 150 seconds.
+            return suggested_delay if suggested_delay >= 60 \
+                else suggested_delay + int(0.25 * min(num_attempts - 1, 20) * suggested_delay)
         except ValueError:
             # In RFC 2616, retry-after can be formatted as absolute time
             # instead of seconds to wait. We don't bother to parse that,
@@ -361,7 +370,8 @@ def _calculate_retry_delay(response, num_attempts):
             pass
     if num_attempts <= 1:
         return 1
-    num_attempts = min(num_attempts,
+    num_attempts = min(num_attempts, 8)
+    # After the 8th attempt the delay will always be between 64 and 128 seconds
     return randint(2 ** (num_attempts - 2), 2 ** (num_attempts - 1))

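The throttling branch above reads as: use the server's retry-after value unchanged once it is at least 60 seconds, otherwise grow it by 25% per prior attempt, capped at 20 attempts. A minimal standalone sketch of that arithmetic (the helper name and sample values are illustrative, not from the package):

    # Sketch of the new throttled-request backoff in _calculate_retry_delay
    def throttle_backoff(suggested_delay, num_attempts):
        if suggested_delay >= 60:
            return suggested_delay
        return suggested_delay + int(0.25 * min(num_attempts - 1, 20) * suggested_delay)

    print(throttle_backoff(20, 1))   # 20  -> first attempt keeps the server-suggested delay
    print(throttle_backoff(20, 3))   # 30  -> 20 + int(0.25 * 2 * 20)
    print(throttle_backoff(20, 21))  # 120 -> growth stops increasing after 20 prior attempts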
@@ -514,6 +524,9 @@ def DXHTTPRequest(resource, data, method='POST', headers=None, auth=True,
     through to :func:`DXHTTPRequest`.

     '''
+    # option wasn't named correctly, so to not break existing clients rename it locally for clarity
+    safe_to_retry = always_retry
+
     if headers is None:
         headers = {}

@@ -719,7 +732,7 @@ def DXHTTPRequest(resource, data, method='POST', headers=None, auth=True,
                 # up to (max_retries) subsequent retries.
                 total_allowed_tries = max_retries + 1
                 ok_to_retry = False
-                is_retryable =
+                is_retryable = safe_to_retry or (method == 'GET') or _is_retryable_exception(e)
                 # Because try_index is not incremented until we escape
                 # this iteration of the loop, try_index is equal to the
                 # number of tries that have failed so far, minus one.
@@ -800,7 +813,7 @@ def DXHTTPRequest(resource, data, method='POST', headers=None, auth=True,
                          want_full_response=want_full_response,
                          decode_response_body=decode_response_body, prepend_srv=prepend_srv,
                          session_handler=session_handler,
-                         max_retries=max_retries, always_retry=
+                         max_retries=max_retries, always_retry=safe_to_retry, **kwargs)
     raise AssertionError('Should never reach this line: should never break out of loop')

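The `safe_to_retry` name is purely internal; the public keyword argument is still `always_retry`, and it feeds the `is_retryable` decision above together with GET requests and retryable exceptions. An illustrative call, assuming an API token is already configured (the route is just an example):

    import dxpy

    # Idempotent call: always_retry tells DXHTTPRequest it is safe to retry even
    # after the request may already have been received by the server.
    whoami = dxpy.DXHTTPRequest("/system/whoami", {}, always_retry=True, max_retries=5)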
@@ -23,19 +23,16 @@ This remote database handler is a Python database-like object.

 from __future__ import print_function, unicode_literals, division, absolute_import

-import os,
-import
-import mmap
+import os, copy, time
+from io import BytesIO
 from threading import Lock
 from multiprocessing import cpu_count

 import dxpy
 from . import DXDataObject
-from ..exceptions import DXFileError, DXIncompleteReadsError
-from ..utils import warn
 from ..utils.resolver import object_exists_in_project
-from ..compat import BytesIO, basestring, USING_PYTHON2
 from .. import logger
+from ..compat import basestring


 DXFILE_HTTP_THREADS = min(cpu_count(), 8)
@@ -36,7 +36,7 @@ from .. import logger
 from . import dxfile, DXFile
 from . import dxdatabase, DXDatabase
 from .dxfile import FILE_REQUEST_TIMEOUT
-from ..compat import
+from ..compat import md5_hasher
 from ..exceptions import DXFileError, DXChecksumMismatchError, DXIncompleteReadsError, err_exit
 from ..utils import response_iterator
 import subprocess
@@ -23,7 +23,8 @@ This remote file handler is a Python file-like object.

 from __future__ import print_function, unicode_literals, division, absolute_import

-import os, sys,
+import os, sys, traceback, copy, time
+from io import BytesIO
 import math
 import mmap
 from threading import Lock
@@ -34,7 +35,7 @@ from . import DXDataObject
 from ..exceptions import DXFileError, DXIncompleteReadsError
 from ..utils import warn
 from ..utils.resolver import object_exists_in_project
-from ..compat import
+from ..compat import md5_hasher, basestring


 DXFILE_HTTP_THREADS = min(cpu_count(), 8)
@@ -488,8 +489,7 @@ class DXFile(DXDataObject):
         does not affect where the next :meth:`write` will occur.

         '''
-
-        assert(isinstance(data, bytes))
+        assert(isinstance(data, bytes))

         self._ensure_write_bufsize(**kwargs)

@@ -546,31 +546,28 @@ class DXFile(DXDataObject):
         does not affect where the next :meth:`write` will occur.

         '''
-
-
+        # In python3, the underlying system methods use the 'bytes' type, not 'string'
+        #
+        # This is, hopefully, a temporary hack. It is not a good idea for two reasons:
+        # 1) Performance, we need to make a pass on the data, and need to allocate
+        #    another buffer of similar size
+        # 2) The types are wrong. The "bytes" type should be visible to the caller
+        #    of the write method, instead of being hidden.
+
+        # Should we throw an exception if the file is opened in binary mode,
+        # and the data is unicode/text?
+        if isinstance(data, str):
+            bt = data.encode("utf-8")
+        elif isinstance(data, bytearray):
+            bt = bytes(data)
+        elif isinstance(data, bytes):
+            bt = data
+        elif isinstance(data, mmap.mmap):
+            bt = bytes(data)
         else:
-
-
-
-            # 1) Performance, we need to make a pass on the data, and need to allocate
-            #    another buffer of similar size
-            # 2) The types are wrong. The "bytes" type should be visible to the caller
-            #    of the write method, instead of being hidden.
-
-            # Should we throw an exception if the file is opened in binary mode,
-            # and the data is unicode/text?
-            if isinstance(data, str):
-                bt = data.encode("utf-8")
-            elif isinstance(data, bytearray):
-                bt = bytes(data)
-            elif isinstance(data, bytes):
-                bt = data
-            elif isinstance(data, mmap.mmap):
-                bt = bytes(data)
-            else:
-                raise DXFileError("Invalid type {} for write data argument".format(type(data)))
-            assert(isinstance(bt, bytes))
-            self._write2(bt, multithread=multithread, **kwargs)
+            raise DXFileError("Invalid type {} for write data argument".format(type(data)))
+        assert(isinstance(bt, bytes))
+        self._write2(bt, multithread=multithread, **kwargs)

     def closed(self, **kwargs):
         '''
@@ -606,10 +603,7 @@ class DXFile(DXDataObject):
             # settings allow last empty part upload, try to upload
             # an empty part (otherwise files with 0 parts cannot be closed).
             try:
-
-                self.upload_part('', 1, **kwargs)
-            else:
-                self.upload_part(b'', 1, **kwargs)
+                self.upload_part(b'', 1, **kwargs)
             except dxpy.exceptions.InvalidState:
                 pass

@@ -648,9 +642,7 @@ class DXFile(DXDataObject):
         defaults to 1. This probably only makes sense if this is the
         only part to be uploaded.
         """
-
-        # In python3, the underlying system methods use the 'bytes' type, not 'string'
-        assert(isinstance(data, bytes))
+        assert(isinstance(data, bytes))

         req_input = {}
         if index is not None:
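With the Python 2 branches gone, DXFile.write() now always coerces its argument to bytes before calling _write2(): str is UTF-8 encoded, bytearray and mmap objects are copied into bytes, raw bytes pass through, and anything else raises DXFileError. A hedged usage sketch (the file name is a placeholder and a workspace/project is assumed to be configured):

    import dxpy

    # Illustrative only: both writes end up as UTF-8 bytes on the platform.
    f = dxpy.new_dxfile(name="example.txt", mode="w")
    f.write("text is encoded as UTF-8\n")             # str  -> encoded to bytes
    f.write(b"bytes are passed through unchanged\n")  # bytes -> used as-is
    f.close()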
@@ -30,8 +30,6 @@ import traceback
 import warnings
 from collections import defaultdict
 import multiprocessing
-from random import randint
-from time import sleep
 import crc32c
 import zlib
 import base64
@@ -41,7 +39,7 @@ from .. import logger
 from . import dxfile, DXFile
 from .dxfile import FILE_REQUEST_TIMEOUT
 from ..exceptions import DXError, DXFileError, DXPartLengthMismatchError, DXChecksumMismatchError, DXIncompleteReadsError, err_exit
-from ..compat import
+from ..compat import md5_hasher
 from ..utils import response_iterator
 import subprocess
 import concurrent.futures
@@ -367,7 +367,7 @@ class DXProject(DXContainer):
                allowed_executables=None, unset_allowed_executables=None,
                database_ui_view_only=None, external_upload_restricted=None,
                database_results_restricted=None, unset_database_results_restricted=None,
-               **kwargs):
+               https_app_isolated_browsing=None, **kwargs):
        """
        :param name: If provided, the new project name
        :type name: string
@@ -395,6 +395,9 @@ class DXProject(DXContainer):
        :type unset_database_results_restricted: boolean
        :param version: If provided, the update will only occur if the value matches the current project's version number
        :type version: int
+       :param https_app_isolated_browsing: Whether all https access to jobs in this project should be wrapped in Isolated Browsing.
+           If true, httpsApp-enabled executables must have httpsApp.shared_access set to 'NONE' to run in this project.
+       :type https_app_isolated_browsing: boolean

        Updates the project with the new fields. All fields are
        optional. Fields that are not provided are not changed.
@@ -430,6 +433,8 @@ class DXProject(DXContainer):
            update_hash["databaseResultsRestricted"] = database_results_restricted
        if unset_database_results_restricted is not None:
            update_hash["databaseResultsRestricted"] = None
+        if https_app_isolated_browsing is not None:
+            update_hash["httpsAppIsolatedBrowsing"] = https_app_isolated_browsing
        dxpy.api.project_update(self._dxid, update_hash, **kwargs)

    def invite(self, invitee, level, send_email=True, **kwargs):
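DXProject.update() now forwards the new flag as httpsAppIsolatedBrowsing in the project/update call; the cli/parsers.py hunk further below does the same for the command-line path. A minimal illustrative call (the project ID is a placeholder):

    import dxpy

    # Require Isolated Browsing for httpsApp access to jobs in this project.
    project = dxpy.DXProject("project-xxxx")
    project.update(https_app_isolated_browsing=True)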
@@ -26,7 +26,6 @@ import sys
 INTERACTIVE_CLI = sys.stdin and sys.stdin.isatty() and sys.stdout and sys.stdout.isatty()

 from ..exceptions import err_exit, default_expected_exceptions, DXError
-from ..compat import input

 def try_call_err_exit():
     err_exit(expected_exceptions=default_expected_exceptions + (DXError,))
@@ -36,13 +36,11 @@ from ..bindings.dxfile import DXFile
 from ..utils.resolver import resolve_existing_path, is_hashid, ResolutionError, resolve_path, check_folder_exists
 from ..utils.file_handle import as_handle
 from ..utils.describe import print_desc
-from ..compat import USING_PYTHON2
 from ..exceptions import (
     err_exit,
     PermissionDenied,
     InvalidInput,
     InvalidState,
-    ResourceNotFound,
     default_expected_exceptions,
 )

@@ -1633,10 +1631,7 @@ def validate_cohort_ids(descriptor, project, resp, ids):
     gpk_type = descriptor.model["entities"][entity_name]["fields"][field_name]["mapping"]["column_sql_type"]
     # Prepare a payload to find entries matching the input ids in the dataset
     if gpk_type in ["integer", "bigint"]:
-
-            lambda_for_list_conv = lambda a, b: a+[long(b)]
-        else:
-            lambda_for_list_conv = lambda a, b: a+[int(b)]
+        lambda_for_list_conv = lambda a, b: a+[int(b)]
     elif gpk_type in ["float", "double"]:
         lambda_for_list_conv = lambda a, b: a+[float(b)]
     elif gpk_type in ["string"]:
@@ -33,7 +33,7 @@ from ..utils.describe import (get_find_executions_string, get_ls_l_desc, get_ls_
 from ..utils.resolver import (parse_input_keyval, is_hashid, is_job_id, is_localjob_id, paginate_and_pick, pick,
                               resolve_existing_path, resolve_multiple_existing_paths, split_unescaped, is_analysis_id)
 from ..utils import OrderedDefaultdict
-from ..compat import
+from ..compat import basestring
 try:
     # Import gnureadline if installed for macOS
     import gnureadline as readline
@@ -604,8 +604,6 @@ class ExecutableInputs(object):
         try:
             parsed_input_value = json.loads(input_value, object_pairs_hook=collections.OrderedDict)
             immediate_types = {collections.OrderedDict, list, int, float}
-            if USING_PYTHON2:
-                immediate_types.add(long)  # noqa
             if type(parsed_input_value) not in immediate_types:
                 raise Exception()
         except:
@@ -20,7 +20,6 @@ the org-based commands of the dx command-line client.
 '''
 from __future__ import print_function, unicode_literals, division, absolute_import

-from ..compat import input
 import dxpy
 from . import try_call, prompt_for_yn, INTERACTIVE_CLI
 from .parsers import process_find_by_property_args, process_phi_param
@@ -452,6 +452,8 @@ def get_update_project_args(args):
         input_params['databaseResultsRestricted'] = None
     if args.external_upload_restricted is not None:
         input_params['externalUploadRestricted'] = args.external_upload_restricted == 'true'
+    if args.https_app_isolated_browsing is not None:
+        input_params['httpsAppIsolatedBrowsing'] = args.https_app_isolated_browsing == 'true'
     return input_params

 def process_phi_param(args):
@@ -0,0 +1,38 @@
+# Copyright (C) 2013-2016 DNAnexus, Inc.
+#
+# This file is part of dx-toolkit (DNAnexus platform client libraries).
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import print_function, unicode_literals, division, absolute_import
+
+import sys, locale, threading, hashlib
+
+try:
+    sys_encoding = locale.getdefaultlocale()[1] or "UTF-8"
+except Exception:
+    sys_encoding = "UTF-8"
+
+USING_PYTHON2 = True if sys.version_info < (3, 0) else False
+
+basestring = (str, bytes)
+
+THREAD_TIMEOUT_MAX = threading.TIMEOUT_MAX
+
+# Support FIPS enabled Python
+def md5_hasher():
+    try:
+        md5_hasher = hashlib.new('md5', usedforsecurity=False)
+    except:
+        md5_hasher = hashlib.new('md5')
+    return md5_hasher
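The rewritten compat module is now a small Python 3-only shim; its main addition is md5_hasher(), which asks hashlib for an MD5 object with usedforsecurity=False so checksumming keeps working on FIPS-enabled Python builds, and quietly falls back where that keyword is unsupported. Illustrative usage (the data is made up; the digest is what the file bindings use for part verification):

    from dxpy.compat import md5_hasher

    hasher = md5_hasher()            # works on FIPS and non-FIPS Python builds
    hasher.update(b"file part payload")
    print(hasher.hexdigest())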
@@ -21,7 +21,6 @@ import socket, json, time, os, logging
 from logging.handlers import SysLogHandler

 from dxpy.exceptions import DXError
-from dxpy.compat import USING_PYTHON2

 class DXLogHandler(SysLogHandler):
     '''
@@ -70,7 +69,7 @@ class DXLogHandler(SysLogHandler):
         return self.priority_names[self.priority_map.get(record.levelname, "warning")]

     def truncate_message(self, message):
-        msg_bytes = message
+        msg_bytes = message.encode('utf-8')

         if len(json.dumps(message)) <= 8015:
             return message
@@ -80,12 +79,9 @@ class DXLogHandler(SysLogHandler):
             msg_bytes = msg_bytes[:-1]

         message = _bytes2utf8(msg_bytes)
-        message = message.encode('utf-8') if USING_PYTHON2 else message
         return message + "... [truncated]"

     def is_resource_log(self, message):
-        if USING_PYTHON2:
-            return message.startswith(b"CPU: ")
         return message.startswith("CPU: ")

     def emit(self, record):
@@ -95,9 +91,6 @@ class DXLogHandler(SysLogHandler):
         # with the extra padding introduced by the log function, the
         # incoming message needs to be smaller - we truncate it to
         # at most 8015 bytes here.
-        # Note: we use Python 2 semantics here (byte strings). This
-        # script is not Python 3 ready. If *line* was a unicode string
-        # with wide chars, its byte length would exceed the limit.
         message = self.truncate_message(message)

         data = json.dumps({"source": self.source, "timestamp": int(round(time.time() * 1000)),
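The dxlog change drops the Python 2 branches and always measures the message in UTF-8 bytes, which is what the 8015-byte syslog limit applies to; a character count alone would under-count multi-byte text. A tiny illustration:

    # Characters vs. UTF-8 bytes: the byte length is what the log limit is about.
    msg = "résumé" * 2000
    print(len(msg))                  # 12000 characters
    print(len(msg.encode("utf-8")))  # 16000 bytes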
@@ -28,7 +28,6 @@ import socket
 from urllib3.exceptions import HTTPError

 import dxpy
-from .compat import USING_PYTHON2
 import urllib3
 import ssl

@@ -246,10 +245,8 @@ network_exceptions = (urllib3.exceptions.ProtocolError,
                       urllib3.exceptions.SSLError,
                       ssl.SSLError,
                       HTTPError,
-                      socket.error
-
-network_exceptions += (ConnectionResetError,)
-
+                      socket.error,
+                      ConnectionResetError,)

 try:
     json_exceptions = (json.decoder.JSONDecodeError,)
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "project-F4gVXbj5qkxXq3v038V7qjxq",
+  "aws:eu-central-1": "project-F3zqG304yvb1GVkZ6987jP9F",
+  "aws:eu-west-2-g": "project-G4B9FBpKKgK6g9v13q423FXk",
+  "aws:me-south-1": "project-GvGXBYV3pK4v9X7F2jv9zz94",
+  "aws:us-east-1": "project-B6JG897KGbkGb6Z7pQ9Q02jG",
+  "azure:westeurope": "project-FGZZFkjBPJqBXq1X84pq4VJf",
+  "azure:westus": "project-F3vk1q09FX8Jxxp20pq6z8P9"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "project-G0zG49j5K4gjfv5V4FfqFK9X",
+  "aws:eu-central-1": "project-G0zG4F04KqpFYVz84FQkvF1P",
+  "aws:eu-west-2-g": "project-G4gyXkXK8BX9B0jp1V6029vJ",
+  "aws:me-south-1": "project-Gv8PVXk32gZ83kfp0pZ1v3V2",
+  "aws:us-east-1": "project-G0zG4B808VFFYVz84FQkvF1K",
+  "azure:westeurope": "project-G0zG4BQBq1XQby2F4FqQpPXv",
+  "azure:westus": "project-G0zG4B09ZJ5gx1Vz4Fb8Q9XX"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzyP89850x72260z2yYyZQfJ",
+  "aws:eu-central-1": "record-GzyP7584vB9Yp0j7qfv5F134",
+  "aws:eu-west-2-g": "record-GzyP80XKPQ6K81G8XV89fyvZ",
+  "aws:me-south-1": "record-GzyP85V306B9xkQ4Pv1yf7Kz",
+  "aws:us-east-1": "record-GzyP6580KGjqV4gp6QXFGBzX",
+  "azure:westeurope": "record-GzyP9z0BzJx8zjf6bpvY6Yqv",
+  "azure:westus": "record-GzyP9vQ9zggPyfkj5GKKYVG5"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzyJ2F0506kK1651KKg3K1Xq",
+  "aws:eu-central-1": "record-GzyJ2Yj4f2xpzzypx8G484FB",
+  "aws:eu-west-2-g": "record-GzyJ28XKKGJ7758Yj62X97bV",
+  "aws:me-south-1": "record-GzyJ2GV311jBJYzx2XpgZXQ0",
+  "aws:us-east-1": "record-GzyJ08002j9B36YfkbQQxpkf",
+  "azure:westeurope": "record-GzyJ4B8B095jZ8F4BF5p2Ggz",
+  "azure:westus": "record-GzyJ3q89X1QBpvfvgx9bZzgP"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzyP8B05q2Y748ZfZXQ4139j",
+  "aws:eu-central-1": "record-GzyP7FQ4p6pX7q2Qbx06zX9v",
+  "aws:eu-west-2-g": "record-GzyP8FXKkjkqfjBjjZzFv68F",
+  "aws:me-south-1": "record-GzyP8593PgfZjYXPJP9FFqV4",
+  "aws:us-east-1": "record-GzyP6X00F3gq8p88gP7PQFkx",
+  "azure:westeurope": "record-GzyPB98BB2b867v2KK1BXx8p",
+  "azure:westus": "record-GzyPB6j9b5Yy81G8XV89jkZX"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzyJ2Y05x444p61z5XKFP54Q",
+  "aws:eu-central-1": "record-GzyJ2xQ4Pffppvfvgx9bZzg6",
+  "aws:eu-west-2-g": "record-GzyJ2Z2KppXG1ZXvQ2kp08YQ",
+  "aws:me-south-1": "record-GzyJ2Y93z3b2Q4202Jpx1j78",
+  "aws:us-east-1": "record-GzyJ19j08Ffppvfvgx9bZzfz",
+  "azure:westeurope": "record-GzyJ4QjByJz29GJg3K3Gv2yB",
+  "azure:westus": "record-GzyJ45Q9xQPB36YfkbQQxpp8"
+}
@@ -5,7 +5,6 @@ from .nextflow_utils import (get_template_dir, get_source_file_name, get_resourc
 import json
 import os
 from dxpy import TOOLKIT_VERSION
-from dxpy.compat import USING_PYTHON2, sys_encoding


 def get_nextflow_dxapp(
@@ -94,11 +93,7 @@ def get_nextflow_src(custom_inputs=None, profile=None, resources_dir=None):
     src = src.replace("@@PROFILE_ARG@@", profile_arg)
     src = src.replace("@@EXCLUDE_INPUT_DOWNLOAD@@", exclude_input_download)
     src = src.replace("@@DXPY_BUILD_VERSION@@", TOOLKIT_VERSION)
-
-        src = src.replace("@@RESOURCES_SUBPATH@@",
-                          get_resources_subpath(resources_dir).encode(sys_encoding))
-    else:
-        src = src.replace("@@RESOURCES_SUBPATH@@",
+    src = src.replace("@@RESOURCES_SUBPATH@@",
                           get_resources_subpath(resources_dir))

     return src
@@ -5,6 +5,7 @@ import errno
 import dxpy
 import json
 import shutil
+import logging
 from dxpy.exceptions import ResourceNotFound
 from dxpy.nextflow.collect_images import run_nextaur_collect, bundle_docker_images

@@ -37,6 +38,22 @@ def get_template_dir():
     return path.join(path.dirname(dxpy.__file__), 'templating', 'templates', 'nextflow')


+def get_project_with_assets(region):
+    nextflow_basepath = path.join(path.dirname(dxpy.__file__), 'nextflow')
+    projects_path = path.join(nextflow_basepath, "app_asset_projects_ids_prod.json")
+
+    try:
+        with open(projects_path, 'r') as projects_f:
+            project = json.load(projects_f)[region]
+        dxpy.describe(project, fields={})  # existence check
+    except ResourceNotFound:
+        projects_path = path.join(nextflow_basepath, "app_asset_projects_ids_staging.json")
+        with open(projects_path, 'r') as projects_f:
+            project = json.load(projects_f)[region]
+
+    return project
+
+
 def is_importer_job():
     try:
         with open("/home/dnanexus/dnanexus-job.json", "r") as f:
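get_project_with_assets() resolves which platform project hosts the bundled Nextflow, Nextaur, and aws-cli assets for a given region: it first consults the shipped production region-to-project mapping and, if that project cannot be described (for example on the staging platform), falls back to the staging mapping. An illustrative call (the region string is just one of the keys in the JSON mappings above):

    from dxpy.nextflow.nextflow_utils import get_project_with_assets

    # Returns a project ID ("project-...") for the requested region,
    # preferring the production mapping and falling back to staging.
    project_with_assets = get_project_with_assets("aws:us-east-1")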
@@ -102,6 +119,8 @@ def get_regional_options(region, resources_dir, profile, cache_docker, nextflow_
         image_bundled = bundle_docker_images(image_refs)
     else:
         image_bundled = {}
+
+    project_with_assets = get_project_with_assets(region)
     regional_options = {
         region: {
             "systemRequirements": {
@@ -110,9 +129,18 @@ def get_regional_options(region, resources_dir, profile, cache_docker, nextflow_
                 }
             },
             "assetDepends": [
-                {"id":
-
-
+                {"id": {"$dnanexus_link": {
+                    "id": nextaur_asset,
+                    "project": project_with_assets
+                }}},
+                {"id": {"$dnanexus_link": {
+                    "id": nextflow_asset,
+                    "project": project_with_assets
+                }}},
+                {"id": {"$dnanexus_link": {
+                    "id": awscli_asset,
+                    "project": project_with_assets
+                }}}
             ],
             "bundledDepends": image_bundled
         }