dxpy 0.394.0__tar.gz → 0.395.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {dxpy-0.394.0 → dxpy-0.395.0}/PKG-INFO +1 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/__init__.py +13 -3
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/app_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/asset_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxdatabase.py +3 -6
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxdatabase_functions.py +1 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxfile.py +27 -35
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxfile_functions.py +1 -3
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/__init__.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/dataset_utilities.py +1 -6
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/exec_io.py +1 -3
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/org.py +0 -1
- dxpy-0.395.0/dxpy/compat.py +38 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dxlog.py +1 -8
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/exceptions.py +2 -5
- dxpy-0.395.0/dxpy/nextflow/nextaur_assets.json +9 -0
- dxpy-0.395.0/dxpy/nextflow/nextaur_assets.staging.json +9 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/nextflow_templates.py +1 -6
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/scripts/dx.py +7 -28
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/scripts/dx_app_wizard.py +0 -2
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/scripts/dx_build_app.py +3 -14
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/utils.py +0 -1
- dxpy-0.395.0/dxpy/toolkit_version.py +1 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/__init__.py +2 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/batch_utils.py +1 -4
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/completer.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/config.py +27 -33
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/describe.py +3 -7
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/exec_utils.py +6 -37
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/executable_unbuilder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/file_load_utils.py +9 -9
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/genomic_utils.py +1 -6
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/local_exec_utils.py +19 -29
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/pretty_print.py +2 -2
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/printing.py +2 -2
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/resolver.py +1 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/workflow_builder.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/PKG-INFO +1 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-log-stream +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-upload-all-outputs +1 -3
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_batch.py +3 -9
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dx_app_wizard.py +2 -8
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dx_completion.py +2 -5
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dx_symlink.py +0 -64
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxclient.py +9 -35
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxpy.py +3 -21
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxpy_utils.py +2 -6
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_nextflow.py +2 -12
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_nextflow_ImageRefFactory.py +1 -10
- dxpy-0.394.0/dxpy/compat.py +0 -232
- dxpy-0.394.0/dxpy/nextflow/nextaur_assets.json +0 -9
- dxpy-0.394.0/dxpy/nextflow/nextaur_assets.staging.json +0 -9
- dxpy-0.394.0/dxpy/toolkit_version.py +0 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/MANIFEST.in +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/Readme.md +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/api.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/app_categories.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/dataset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/json_validation_by_schema.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/vizclient.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/auth.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/download_all_inputs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxanalysis.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxapp.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxapplet.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxjob.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxproject.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxrecord.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/dxworkflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/mount_all_inputs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/bindings/search.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/cp.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/download.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/help_messages.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/output_handling.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/parsers.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/cli/workflow.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/executable_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/ImageRef.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/awscli_assets.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/collect_images.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/nextflow_assets.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/nextflow_assets.staging.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/nextflow_builder.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/nextflow/nextflow_utils.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/packages/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/scripts/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/scripts/dx_build_applet.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/ssh_tunnel_app_support.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/system_requirements.py +1 -1
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/__init__.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/bash.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/python.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/Readme.md +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/file_handle.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/job_log_client.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/pathmatch.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/spelling_corrector.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy/utils/version.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/SOURCES.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/dependency_links.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/entry_points.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/not-zip-safe +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/requires.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/dxpy.egg-info/top_level.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/requirements.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/requirements_setuptools.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/requirements_test.txt +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-clone-asset +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-docker +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-download-all-inputs +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-fetch-bundled-depends +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-generate-dxapp +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-add-output +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-dxlink +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-get-identity-token +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-new-job +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-parse-link +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-jobutil-report-error +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-mount-all-inputs +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-notebook-reconnect +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/scripts/dx-print-bash-vars +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/setup.cfg +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/setup.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_create_cohort.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_describe.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dx-docker.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dx_bash_helpers.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxabs.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxasset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_dxunpack.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_extract_assay.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_extract_dataset.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_extract_expression.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_extract_somatic.py +0 -0
- {dxpy-0.394.0 → dxpy-0.395.0}/test/test_nextflow_ImageRef.py +0 -0
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.394.0
+Version: 0.395.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -136,9 +136,11 @@ import socket
 import threading
 import certifi
 from collections import namedtuple
+from http.client import BadStatusLine
+from io import StringIO
+from reprlib import Repr
 
 from . import exceptions
-from .compat import BadStatusLine, StringIO, bytes, Repr
 from .utils.printing import BOLD, BLUE, YELLOW, GREEN, RED, WHITE
 
 from random import randint
@@ -353,7 +355,14 @@ def _calculate_retry_delay(response, num_attempts):
     '''
     if response is not None and response.status in (503, 429) and 'retry-after' in response.headers:
         try:
-
+            suggested_delay = int(response.headers['retry-after'])
+
+            # By default, apiserver doesn't track attempts and doesn't provide increased timeout over attempts.
+            # So, increasing backoff for throttled requests up to x5 times from the original one.
+            # The current implementation of apiserver returns a retry-after header ranging from 20 to 30 seconds.
+            # Thus, after the 20th attempt the delay will always be between 100 and 150 seconds.
+            return suggested_delay if suggested_delay >= 60 \
+                else suggested_delay + int(0.25 * min(num_attempts - 1, 20) * suggested_delay)
         except ValueError:
             # In RFC 2616, retry-after can be formatted as absolute time
             # instead of seconds to wait. We don't bother to parse that,
@@ -361,7 +370,8 @@ def _calculate_retry_delay(response, num_attempts):
         pass
     if num_attempts <= 1:
         return 1
-    num_attempts = min(num_attempts,
+    num_attempts = min(num_attempts, 8)
+    # After the 8th attempt the delay will always be between 64 and 128 seconds
     return randint(2 ** (num_attempts - 2), 2 ** (num_attempts - 1))
 
 
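A minimal sketch of how the reworked backoff behaves, assuming only what the two hunks above show (a server-suggested retry-after scaled by up to 5x over the first 20 attempts, plus an exponential fallback capped at the 8th attempt); the helper names and the print calls are illustrative, not part of dxpy:

    from random import randint

    def throttled_delay(retry_after, num_attempts):
        # Mirrors the retry-after branch: scale the suggested delay by up to 5x.
        if retry_after >= 60:
            return retry_after
        return retry_after + int(0.25 * min(num_attempts - 1, 20) * retry_after)

    def fallback_delay(num_attempts):
        # Mirrors the exponential branch: capped at attempt 8 (64-128 seconds).
        if num_attempts <= 1:
            return 1
        num_attempts = min(num_attempts, 8)
        return randint(2 ** (num_attempts - 2), 2 ** (num_attempts - 1))

    # With a 20-second retry-after header: attempt 1 -> 20s, attempt 21+ -> 120s.
    print(throttled_delay(20, 1), throttled_delay(20, 21))
    # Without the header the delay never exceeds 128 seconds.
    print(fallback_delay(3), fallback_delay(50))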
@@ -23,19 +23,16 @@ This remote database handler is a Python database-like object.
 
 from __future__ import print_function, unicode_literals, division, absolute_import
 
-import os,
-import
-import mmap
+import os, copy, time
+from io import BytesIO
 from threading import Lock
 from multiprocessing import cpu_count
 
 import dxpy
 from . import DXDataObject
-from ..exceptions import DXFileError, DXIncompleteReadsError
-from ..utils import warn
 from ..utils.resolver import object_exists_in_project
-from ..compat import BytesIO, basestring, USING_PYTHON2
 from .. import logger
+from ..compat import basestring
 
 
 DXFILE_HTTP_THREADS = min(cpu_count(), 8)
@@ -36,7 +36,7 @@ from .. import logger
 from . import dxfile, DXFile
 from . import dxdatabase, DXDatabase
 from .dxfile import FILE_REQUEST_TIMEOUT
-from ..compat import
+from ..compat import md5_hasher
 from ..exceptions import DXFileError, DXChecksumMismatchError, DXIncompleteReadsError, err_exit
 from ..utils import response_iterator
 import subprocess
@@ -23,7 +23,8 @@ This remote file handler is a Python file-like object.
 
 from __future__ import print_function, unicode_literals, division, absolute_import
 
-import os, sys,
+import os, sys, traceback, copy, time
+from io import BytesIO
 import math
 import mmap
 from threading import Lock
@@ -34,7 +35,7 @@ from . import DXDataObject
 from ..exceptions import DXFileError, DXIncompleteReadsError
 from ..utils import warn
 from ..utils.resolver import object_exists_in_project
-from ..compat import
+from ..compat import md5_hasher, basestring
 
 
 DXFILE_HTTP_THREADS = min(cpu_count(), 8)
@@ -488,8 +489,7 @@ class DXFile(DXDataObject):
         does not affect where the next :meth:`write` will occur.
 
         '''
-
-        assert(isinstance(data, bytes))
+        assert(isinstance(data, bytes))
 
         self._ensure_write_bufsize(**kwargs)
 
@@ -546,31 +546,28 @@ class DXFile(DXDataObject):
         does not affect where the next :meth:`write` will occur.
 
         '''
-
-
+        # In python3, the underlying system methods use the 'bytes' type, not 'string'
+        #
+        # This is, hopefully, a temporary hack. It is not a good idea for two reasons:
+        # 1) Performance, we need to make a pass on the data, and need to allocate
+        #    another buffer of similar size
+        # 2) The types are wrong. The "bytes" type should be visible to the caller
+        #    of the write method, instead of being hidden.
+
+        # Should we throw an exception if the file is opened in binary mode,
+        # and the data is unicode/text?
+        if isinstance(data, str):
+            bt = data.encode("utf-8")
+        elif isinstance(data, bytearray):
+            bt = bytes(data)
+        elif isinstance(data, bytes):
+            bt = data
+        elif isinstance(data, mmap.mmap):
+            bt = bytes(data)
         else:
-
-
-
-            # 1) Performance, we need to make a pass on the data, and need to allocate
-            #    another buffer of similar size
-            # 2) The types are wrong. The "bytes" type should be visible to the caller
-            #    of the write method, instead of being hidden.
-
-            # Should we throw an exception if the file is opened in binary mode,
-            # and the data is unicode/text?
-            if isinstance(data, str):
-                bt = data.encode("utf-8")
-            elif isinstance(data, bytearray):
-                bt = bytes(data)
-            elif isinstance(data, bytes):
-                bt = data
-            elif isinstance(data, mmap.mmap):
-                bt = bytes(data)
-            else:
-                raise DXFileError("Invalid type {} for write data argument".format(type(data)))
-            assert(isinstance(bt, bytes))
-            self._write2(bt, multithread=multithread, **kwargs)
+            raise DXFileError("Invalid type {} for write data argument".format(type(data)))
+        assert(isinstance(bt, bytes))
+        self._write2(bt, multithread=multithread, **kwargs)
 
     def closed(self, **kwargs):
         '''
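The new write() path normalizes every accepted input to bytes before handing off to _write2(). A self-contained sketch of just that coercion, with the same type checks in the same order; DXFileError is replaced by TypeError here so the snippet stands alone:

    import mmap

    def to_bytes(data):
        # Same coercion as DXFile.write above: str is UTF-8 encoded, bytearray
        # and mmap objects are copied into bytes, bytes pass through untouched.
        if isinstance(data, str):
            return data.encode("utf-8")
        elif isinstance(data, bytearray):
            return bytes(data)
        elif isinstance(data, bytes):
            return data
        elif isinstance(data, mmap.mmap):
            return bytes(data)
        raise TypeError("Invalid type {} for write data argument".format(type(data)))

    print(to_bytes("héllo"))          # b'h\xc3\xa9llo'
    print(to_bytes(bytearray(b"x")))  # b'x'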
@@ -606,10 +603,7 @@ class DXFile(DXDataObject):
             # settings allow last empty part upload, try to upload
             # an empty part (otherwise files with 0 parts cannot be closed).
             try:
-
-                self.upload_part('', 1, **kwargs)
-            else:
-                self.upload_part(b'', 1, **kwargs)
+                self.upload_part(b'', 1, **kwargs)
             except dxpy.exceptions.InvalidState:
                 pass
 
@@ -648,9 +642,7 @@ class DXFile(DXDataObject):
         defaults to 1. This probably only makes sense if this is the
         only part to be uploaded.
         """
-
-        # In python3, the underlying system methods use the 'bytes' type, not 'string'
-        assert(isinstance(data, bytes))
+        assert(isinstance(data, bytes))
 
         req_input = {}
         if index is not None:
@@ -30,8 +30,6 @@ import traceback
 import warnings
 from collections import defaultdict
 import multiprocessing
-from random import randint
-from time import sleep
 import crc32c
 import zlib
 import base64
@@ -41,7 +39,7 @@ from .. import logger
 from . import dxfile, DXFile
 from .dxfile import FILE_REQUEST_TIMEOUT
 from ..exceptions import DXError, DXFileError, DXPartLengthMismatchError, DXChecksumMismatchError, DXIncompleteReadsError, err_exit
-from ..compat import
+from ..compat import md5_hasher
 from ..utils import response_iterator
 import subprocess
 import concurrent.futures
@@ -26,7 +26,6 @@ import sys
 INTERACTIVE_CLI = sys.stdin and sys.stdin.isatty() and sys.stdout and sys.stdout.isatty()
 
 from ..exceptions import err_exit, default_expected_exceptions, DXError
-from ..compat import input
 
 def try_call_err_exit():
     err_exit(expected_exceptions=default_expected_exceptions + (DXError,))
@@ -36,13 +36,11 @@ from ..bindings.dxfile import DXFile
 from ..utils.resolver import resolve_existing_path, is_hashid, ResolutionError, resolve_path, check_folder_exists
 from ..utils.file_handle import as_handle
 from ..utils.describe import print_desc
-from ..compat import USING_PYTHON2
 from ..exceptions import (
     err_exit,
     PermissionDenied,
     InvalidInput,
     InvalidState,
-    ResourceNotFound,
     default_expected_exceptions,
 )
 
@@ -1633,10 +1631,7 @@ def validate_cohort_ids(descriptor, project, resp, ids):
     gpk_type = descriptor.model["entities"][entity_name]["fields"][field_name]["mapping"]["column_sql_type"]
     # Prepare a payload to find entries matching the input ids in the dataset
     if gpk_type in ["integer", "bigint"]:
-
-        lambda_for_list_conv = lambda a, b: a+[long(b)]
-    else:
-        lambda_for_list_conv = lambda a, b: a+[int(b)]
+        lambda_for_list_conv = lambda a, b: a+[int(b)]
    elif gpk_type in ["float", "double"]:
        lambda_for_list_conv = lambda a, b: a+[float(b)]
    elif gpk_type in ["string"]:
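A hypothetical illustration of how an accumulator lambda of this shape turns raw id strings into a typed list; the reduce-based driver and the sample ids are assumptions for the sketch, the diff only defines the per-type lambdas:

    from functools import reduce

    # Assumed driver: seed with an empty list and fold each raw id through the lambda.
    lambda_for_list_conv = lambda a, b: a + [int(b)]
    raw_ids = ["101", "102", "103"]
    typed_ids = reduce(lambda_for_list_conv, raw_ids, [])
    print(typed_ids)  # [101, 102, 103] -- always int now; the Python 2 long() branch is gone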
@@ -33,7 +33,7 @@ from ..utils.describe import (get_find_executions_string, get_ls_l_desc, get_ls_
 from ..utils.resolver import (parse_input_keyval, is_hashid, is_job_id, is_localjob_id, paginate_and_pick, pick,
                               resolve_existing_path, resolve_multiple_existing_paths, split_unescaped, is_analysis_id)
 from ..utils import OrderedDefaultdict
-from ..compat import
+from ..compat import basestring
 try:
     # Import gnureadline if installed for macOS
     import gnureadline as readline
@@ -604,8 +604,6 @@ class ExecutableInputs(object):
         try:
             parsed_input_value = json.loads(input_value, object_pairs_hook=collections.OrderedDict)
             immediate_types = {collections.OrderedDict, list, int, float}
-            if USING_PYTHON2:
-                immediate_types.add(long)  # noqa
             if type(parsed_input_value) not in immediate_types:
                 raise Exception()
         except:
@@ -20,7 +20,6 @@ the org-based commands of the dx command-line client.
 '''
 from __future__ import print_function, unicode_literals, division, absolute_import
 
-from ..compat import input
 import dxpy
 from . import try_call, prompt_for_yn, INTERACTIVE_CLI
 from .parsers import process_find_by_property_args, process_phi_param
@@ -0,0 +1,38 @@
+# Copyright (C) 2013-2016 DNAnexus, Inc.
+#
+# This file is part of dx-toolkit (DNAnexus platform client libraries).
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at
+#
+#   http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import print_function, unicode_literals, division, absolute_import
+
+import sys, locale, threading, hashlib
+
+try:
+    sys_encoding = locale.getdefaultlocale()[1] or "UTF-8"
+except Exception:
+    sys_encoding = "UTF-8"
+
+USING_PYTHON2 = True if sys.version_info < (3, 0) else False
+
+basestring = (str, bytes)
+
+THREAD_TIMEOUT_MAX = threading.TIMEOUT_MAX
+
+# Support FIPS enabled Python
+def md5_hasher():
+    try:
+        md5_hasher = hashlib.new('md5', usedforsecurity=False)
+    except:
+        md5_hasher = hashlib.new('md5')
+    return md5_hasher
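A short usage sketch of the md5_hasher() helper that the dxfile and dxdatabase bindings now import from dxpy.compat; the sample payload is made up, the point is only that the returned object is an ordinary hashlib MD5 (created with usedforsecurity=False where the Python build supports it, e.g. FIPS-enabled interpreters):

    from dxpy.compat import md5_hasher

    # Illustrative only: hashing a file part the way part checksums are computed.
    hasher = md5_hasher()
    hasher.update(b"example file part")
    print(hasher.hexdigest())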
@@ -21,7 +21,6 @@ import socket, json, time, os, logging
 from logging.handlers import SysLogHandler
 
 from dxpy.exceptions import DXError
-from dxpy.compat import USING_PYTHON2
 
 class DXLogHandler(SysLogHandler):
     '''
@@ -70,7 +69,7 @@ class DXLogHandler(SysLogHandler):
         return self.priority_names[self.priority_map.get(record.levelname, "warning")]
 
     def truncate_message(self, message):
-        msg_bytes = message
+        msg_bytes = message.encode('utf-8')
 
         if len(json.dumps(message)) <= 8015:
             return message
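A two-line sketch of why the truncation now works on the UTF-8 encoding rather than the text itself (the removed comment further down notes that wide characters make the byte length exceed the syslog budget even when the character count does not); the sample string is arbitrary:

    # A two-character message can occupy six bytes once UTF-8 encoded, which is
    # what the 8015-byte syslog budget actually has to account for.
    message = "\u65e5\u672c"
    print(len(message))                   # 2 characters
    print(len(message.encode("utf-8")))   # 6 bytes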
@@ -80,12 +79,9 @@ class DXLogHandler(SysLogHandler):
             msg_bytes = msg_bytes[:-1]
 
         message = _bytes2utf8(msg_bytes)
-        message = message.encode('utf-8') if USING_PYTHON2 else message
         return message + "... [truncated]"
 
     def is_resource_log(self, message):
-        if USING_PYTHON2:
-            return message.startswith(b"CPU: ")
         return message.startswith("CPU: ")
 
     def emit(self, record):
@@ -95,9 +91,6 @@ class DXLogHandler(SysLogHandler):
         # with the extra padding introduced by the log function, the
         # incoming message needs to be smaller - we truncate it to
         # at most 8015 bytes here.
-        # Note: we use Python 2 semantics here (byte strings). This
-        # script is not Python 3 ready. If *line* was a unicode string
-        # with wide chars, its byte length would exceed the limit.
         message = self.truncate_message(message)
 
         data = json.dumps({"source": self.source, "timestamp": int(round(time.time() * 1000)),
@@ -28,7 +28,6 @@ import socket
 from urllib3.exceptions import HTTPError
 
 import dxpy
-from .compat import USING_PYTHON2
 import urllib3
 import ssl
 
@@ -246,10 +245,8 @@ network_exceptions = (urllib3.exceptions.ProtocolError,
                       urllib3.exceptions.SSLError,
                       ssl.SSLError,
                       HTTPError,
-                      socket.error
-
-network_exceptions += (ConnectionResetError,)
-
+                      socket.error,
+                      ConnectionResetError,)
 
 try:
     json_exceptions = (json.decoder.JSONDecodeError,)
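With this change ConnectionResetError is part of the network_exceptions tuple itself rather than being appended afterwards. A hypothetical caller-side retry loop, assuming only that dxpy.exceptions.network_exceptions is the tuple defined above; the backoff policy here is illustrative:

    import time
    import dxpy.exceptions

    def call_with_retries(fn, attempts=3):
        # Retry any failure classified as a network error by dxpy.
        for attempt in range(1, attempts + 1):
            try:
                return fn()
            except dxpy.exceptions.network_exceptions:
                if attempt == attempts:
                    raise
                time.sleep(2 ** attempt)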
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzKXzG85y5b99vKK95b4vpKy",
+  "aws:eu-central-1": "record-GzKY0y04V0179X5Bz9K355k3",
+  "aws:eu-west-2-g": "record-GzKY3jBKXjZ413XPP0yv4fgy",
+  "aws:me-south-1": "record-GzKY5G132j8qz8yk710gjKJk",
+  "aws:us-east-1": "record-GzKXx300FZzQ73Q0ykjvQ1GV",
+  "azure:westeurope": "record-GzKY8zjB5PzvxVg2Gy3g77QK",
+  "azure:westus": "record-GzKYJf099Z602KYFxJzvJQj1"
+}
@@ -0,0 +1,9 @@
+{
+  "aws:ap-southeast-2": "record-GzKXFyj51PXF1f67BYjP54Px",
+  "aws:eu-central-1": "record-GzKXJgj4bPkjfBg634qfx8pF",
+  "aws:eu-west-2-g": "record-GzKXPJ2K5b2xPyZbyx3pZg7Y",
+  "aws:me-south-1": "record-GzKXQv93j7qB0fVgGXVf71Z9",
+  "aws:us-east-1": "record-GzKX8p00kP5q1f67BYjP54PZ",
+  "azure:westeurope": "record-GzKXZGjB6fJ6vb04b7VBV2Kx",
+  "azure:westus": "record-GzKXgVj9YqqxPyZbyx3pZg8G"
+}
@@ -5,7 +5,6 @@ from .nextflow_utils import (get_template_dir, get_source_file_name, get_resourc
 import json
 import os
 from dxpy import TOOLKIT_VERSION
-from dxpy.compat import USING_PYTHON2, sys_encoding
 
 
 def get_nextflow_dxapp(
@@ -94,11 +93,7 @@ def get_nextflow_src(custom_inputs=None, profile=None, resources_dir=None):
     src = src.replace("@@PROFILE_ARG@@", profile_arg)
     src = src.replace("@@EXCLUDE_INPUT_DOWNLOAD@@", exclude_input_download)
     src = src.replace("@@DXPY_BUILD_VERSION@@", TOOLKIT_VERSION)
-
-        src = src.replace("@@RESOURCES_SUBPATH@@",
-                          get_resources_subpath(resources_dir).encode(sys_encoding))
-    else:
-        src = src.replace("@@RESOURCES_SUBPATH@@",
+    src = src.replace("@@RESOURCES_SUBPATH@@",
                       get_resources_subpath(resources_dir))
 
     return src
@@ -25,11 +25,7 @@ import csv
 
 logging.basicConfig(level=logging.INFO)
 
-from ..compat import
-    unwrap_stream, sys_encoding)
-
-wrap_stdio_in_codecs()
-decode_command_line_args()
+from ..compat import sys_encoding, basestring
 
 import dxpy
 from dxpy.scripts import dx_build_app
@@ -311,8 +307,7 @@ def login(args):
     else:
         username = input('Username: ')
         dxpy.config.write("DX_USERNAME", username)
-
-    password = getpass.getpass()
+    password = getpass.getpass()
 
     otp = input('Verification code: ') if get_otp else None
     return dict(username=username, password=password, otp=otp)
@@ -416,9 +411,8 @@ def logout(args):
         print("Deleting credentials from {}...".format(authserver))
         token = dxpy.AUTH_HELPER.security_context["auth_token"]
         try:
-
-
-            token = token.encode(sys_encoding)
+            # python 3 requires conversion to bytes before hashing
+            token = token.encode(sys_encoding)
             token_sig = hashlib.sha256(token).hexdigest()
             response = dxpy.DXHTTPRequest(authserver + "/system/destroyAuthToken",
                                           dict(tokenSignature=token_sig),
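A minimal sketch of the token-signature computation shown above, assuming a placeholder token value and UTF-8 in place of the locale-derived sys_encoding used by the real code:

    import hashlib

    # The signature sent to /system/destroyAuthToken is the SHA-256 hex digest
    # of the encoded token; "example-token" is a stand-in, not a real credential.
    token = "example-token"
    token_sig = hashlib.sha256(token.encode("utf-8")).hexdigest()
    print(token_sig)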
@@ -2782,8 +2776,6 @@ def build(args):
     src_dir = args.src_dir
     if src_dir is None:
         src_dir = os.getcwd()
-        if USING_PYTHON2:
-            src_dir = src_dir.decode(sys.getfilesystemencoding())
     return src_dir
 
 def handle_arg_conflicts(args):
@@ -4170,12 +4162,8 @@ def generate_batch_inputs(args):
         batch_fname = "{}.{:04d}.tsv".format(args.output_prefix, i)
 
         # In python-3 we need to open the file in textual mode.
-
-
-            delimiter = '\t'.encode('ascii')
-        else:
-            write_mode = 'w'
-            delimiter = '\t'
+        write_mode = 'w'
+        delimiter = '\t'
 
         with open(batch_fname, write_mode) as csvfile:
             batchwriter = csv.writer(csvfile, delimiter=delimiter)
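A minimal sketch of the text-mode TSV write that remains after the Python 2 branch is dropped; the file name, header, and row are made-up batch inputs:

    import csv

    with open("batch.0000.tsv", "w", newline="") as csvfile:
        batchwriter = csv.writer(csvfile, delimiter="\t")
        batchwriter.writerow(["batch ID", "reads"])
        batchwriter.writerow(["batch-1", "file-xxxx"])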
@@ -4523,10 +4511,6 @@ class DXArgumentParser(argparse.ArgumentParser):
             msg += "\n\nDid you mean: " + BOLD("dx " + suggestion)
 
         err = argparse.ArgumentError(action, msg)
-        if USING_PYTHON2:
-            err.message = err.message.encode(sys_encoding)
-            if err.argument_name is not None:
-                err.argument_name = err.argument_name.encode(sys_encoding)
         raise err
 
     def exit(self, status=0, message=None):
@@ -4538,8 +4522,6 @@ class DXArgumentParser(argparse.ArgumentParser):
         sys.exit(status)
 
     def error(self, message):
-        if USING_PYTHON2:
-            message = message.decode(sys_encoding)
         self.exit(2, '{help}\n{prog}: error: {msg}\n'.format(help=self.format_help(),
                                                              prog=self.prog,
                                                              msg=message))
@@ -6837,10 +6819,7 @@ def main():
         import argcomplete
 
         # In python-3 we need to use a binary output stream
-
-            output_stream = sys.stdout
-        else:
-            output_stream = sys.stdout.buffer
+        output_stream = sys.stdout.buffer
         argcomplete.autocomplete(parser,
                                  always_complete_options=False,
                                  exclude=['gtable', 'export'],
@@ -31,8 +31,6 @@ from dxpy.utils.printing import fill, BOLD, UNDERLINE, DNANEXUS_LOGO, ENDC
 from dxpy.app_categories import APP_CATEGORIES
 from dxpy.utils.completer import InstanceTypesCompleter
 from dxpy.utils.pretty_print import format_table
-from dxpy.compat import wrap_stdio_in_codecs
-wrap_stdio_in_codecs()
 
 try:
     import colorama
@@ -46,10 +46,9 @@ from ..utils.completer import LocalCompleter
 from ..app_categories import APP_CATEGORIES
 from ..exceptions import err_exit
 from ..utils.printing import BOLD
-from ..compat import
+from ..compat import USING_PYTHON2, basestring
 from ..cli.parsers import process_extra_args
 
-decode_command_line_args()
 
 parser = argparse.ArgumentParser(description="Uploads a DNAnexus App.")
 
@@ -274,8 +273,6 @@ def _check_file_syntax(filename, temp_dir, override_lang=None, enforce=True):
     # problems.
     pyc_path = os.path.join(temp_dir, os.path.basename(filename) + ".pyc")
     try:
-        if USING_PYTHON2:
-            filename = filename.encode(sys.getfilesystemencoding())
         py_compile.compile(filename, cfile=pyc_path, doraise=True)
     finally:
         try:
@@ -300,8 +297,6 @@ def _check_file_syntax(filename, temp_dir, override_lang=None, enforce=True):
     # don't enforce and ignore if the shebang is ambiguous and we're not sure
     # that the file version is the same as the one we're running
     read_mode = "r"
-    if USING_PYTHON2:
-        read_mode = "rb"
     with open(filename, read_mode) as f:
         first_line = f.readline()
         if not (('python3' in first_line and not USING_PYTHON2) or
@@ -322,10 +317,7 @@ def _check_file_syntax(filename, temp_dir, override_lang=None, enforce=True):
         checker_fn(filename)
     except subprocess.CalledProcessError as e:
         print(filename + " has a syntax error! Interpreter output:", file=sys.stderr)
-
-            errmsg = e.output
-        else:
-            errmsg = _error_message_to_string(e, e.output)
+        errmsg = _error_message_to_string(e, e.output)
         for line in errmsg.strip("\n").split("\n"):
             print(" " + line.rstrip("\n"), file=sys.stderr)
         if enforce:
@@ -334,10 +326,7 @@ def _check_file_syntax(filename, temp_dir, override_lang=None, enforce=True):
         if python_unsure:
             print("Unsure if " + filename + " is using Python 2 or Python 3, the following error might not be relevant", file=sys.stderr)
         print(filename + " has a syntax error! Interpreter output:", file=sys.stderr)
-
-            errmsg = e.msg
-        else:
-            errmsg = _error_message_to_string(e, e.msg)
+        errmsg = _error_message_to_string(e, e.msg)
         print(" " + errmsg.strip(), file=sys.stderr)
         if enforce:
             raise DXSyntaxError(e.msg.strip())
@@ -0,0 +1 @@
+version = '0.395.0'
@@ -22,9 +22,10 @@ from __future__ import print_function, unicode_literals, division, absolute_impo
 
 import os, json, collections, concurrent.futures, traceback, sys, time, gc, platform
 from multiprocessing import cpu_count
+from collections.abc import Mapping
 import dateutil.parser
 from .. import logger
-from ..compat import basestring, THREAD_TIMEOUT_MAX
+from ..compat import basestring, THREAD_TIMEOUT_MAX
 from ..exceptions import DXError
 import numbers
 import binascii
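A small sketch of why the Mapping import comes from collections.abc (the rationale is an assumption: the collections.Mapping alias was removed in Python 3.10, so dict-like isinstance checks must target collections.abc.Mapping); the merge helper below is illustrative, not dxpy code:

    from collections.abc import Mapping

    def merge(defaults, overrides):
        # Reject anything that is not dict-like before merging.
        if not isinstance(overrides, Mapping):
            raise TypeError("expected a mapping, got {}".format(type(overrides)))
        merged = dict(defaults)
        merged.update(overrides)
        return merged

    print(merge({"region": "aws:us-east-1"}, {"priority": "high"}))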
@@ -26,8 +26,7 @@ import csv
 import dxpy
 import json
 
-from ..
-from ..exceptions import err_exit, DXError
+from ..exceptions import err_exit
 
 
 # Informational columns in the TSV file, which we want to ignore
@@ -139,8 +138,6 @@ def batch_launch_args(executable, input_json, batch_tsv_file):
     header_line = []
     lines = []
     read_mode = "r"
-    if USING_PYTHON2:
-        read_mode = "rb"
     with open(batch_tsv_file, read_mode) as f:
         reader = csv.reader(f, delimiter=str(u'\t'))
         header_line = next(reader)
@@ -28,7 +28,6 @@ import dxpy
 from .resolver import (get_first_pos_of_char, get_last_pos_of_char, clean_folder_path, resolve_path,
                        split_unescaped, ResolutionError)
 from .printing import fill
-from ..compat import str
 
 def startswith(text):
     return (lambda string: string.startswith(text))
|