dxpy 0.378.0.tar.gz → 0.380.0.tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (187)
  1. {dxpy-0.378.0 → dxpy-0.380.0}/PKG-INFO +3 -2
  2. {dxpy-0.378.0 → dxpy-0.380.0}/Readme.md +2 -1
  3. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/dx_build_app.py +1 -1
  4. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +93 -105
  5. dxpy-0.380.0/dxpy/toolkit_version.py +1 -0
  6. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/PKG-INFO +3 -2
  7. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/requires.txt +1 -0
  8. {dxpy-0.378.0 → dxpy-0.380.0}/requirements_test.txt +1 -0
  9. {dxpy-0.378.0 → dxpy-0.380.0}/setup.py +1 -1
  10. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxclient.py +31 -31
  11. dxpy-0.378.0/dxpy/toolkit_version.py +0 -1
  12. {dxpy-0.378.0 → dxpy-0.380.0}/MANIFEST.in +0 -0
  13. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/__init__.py +0 -0
  14. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/api.py +0 -0
  15. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/app_builder.py +0 -0
  16. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/app_categories.py +0 -0
  17. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/asset_builder.py +0 -0
  18. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/__init__.py +0 -0
  19. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/__init__.py +0 -0
  20. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
  21. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
  22. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/dataset.py +0 -0
  23. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/json_validation_by_schema.py +0 -0
  24. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
  25. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -0
  26. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +0 -0
  27. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +0 -0
  28. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/vizclient.py +0 -0
  29. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +0 -0
  30. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
  31. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/auth.py +0 -0
  32. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/download_all_inputs.py +0 -0
  33. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxanalysis.py +0 -0
  34. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxapp.py +0 -0
  35. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
  36. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxapplet.py +0 -0
  37. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxdatabase.py +0 -0
  38. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxdatabase_functions.py +0 -0
  39. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
  40. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxfile.py +0 -0
  41. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxfile_functions.py +0 -0
  42. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
  43. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxjob.py +0 -0
  44. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxproject.py +0 -0
  45. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxrecord.py +0 -0
  46. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/dxworkflow.py +0 -0
  47. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/mount_all_inputs.py +0 -0
  48. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/bindings/search.py +0 -0
  49. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/__init__.py +0 -0
  50. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/cp.py +0 -0
  51. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/dataset_utilities.py +0 -0
  52. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/download.py +0 -0
  53. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/exec_io.py +0 -0
  54. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/help_messages.py +0 -0
  55. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/org.py +0 -0
  56. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/output_handling.py +0 -0
  57. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/parsers.py +0 -0
  58. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/cli/workflow.py +0 -0
  59. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/compat.py +0 -0
  60. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
  61. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
  62. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
  63. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
  64. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/__init__.py +0 -0
  65. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
  66. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
  67. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
  68. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
  69. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
  70. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
  71. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
  72. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
  73. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
  74. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
  75. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
  76. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
  77. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
  78. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
  79. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
  80. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
  81. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/dxlog.py +0 -0
  82. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/exceptions.py +0 -0
  83. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/executable_builder.py +0 -0
  84. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/ImageRef.py +0 -0
  85. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
  86. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/__init__.py +0 -0
  87. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/awscli_assets.json +0 -0
  88. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
  89. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/collect_images.py +0 -0
  90. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextaur_assets.json +0 -0
  91. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextaur_assets.staging.json +0 -0
  92. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextflow_assets.json +0 -0
  93. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextflow_assets.staging.json +0 -0
  94. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextflow_builder.py +0 -0
  95. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextflow_templates.py +0 -0
  96. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/nextflow/nextflow_utils.py +0 -0
  97. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/packages/__init__.py +0 -0
  98. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/__init__.py +0 -0
  99. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/dx.py +0 -0
  100. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/dx_app_wizard.py +0 -0
  101. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/dx_build_applet.py +0 -0
  102. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/ssh_tunnel_app_support.py +0 -0
  103. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/system_requirements.py +0 -0
  104. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/__init__.py +0 -0
  105. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/bash.py +0 -0
  106. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/python.py +0 -0
  107. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/Readme.md +0 -0
  108. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
  109. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
  110. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
  111. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
  112. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
  113. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
  114. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
  115. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
  116. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
  117. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
  118. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
  119. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
  120. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
  121. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
  122. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
  123. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
  124. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/utils.py +0 -0
  125. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/__init__.py +0 -0
  126. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/batch_utils.py +0 -0
  127. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/completer.py +0 -0
  128. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/config.py +0 -0
  129. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/describe.py +0 -0
  130. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/exec_utils.py +0 -0
  131. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/executable_unbuilder.py +0 -0
  132. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/file_handle.py +0 -0
  133. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/file_load_utils.py +0 -0
  134. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/genomic_utils.py +0 -0
  135. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/job_log_client.py +0 -0
  136. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/local_exec_utils.py +0 -0
  137. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/pathmatch.py +0 -0
  138. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/pretty_print.py +0 -0
  139. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/printing.py +0 -0
  140. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/resolver.py +0 -0
  141. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/spelling_corrector.py +0 -0
  142. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/utils/version.py +0 -0
  143. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy/workflow_builder.py +0 -0
  144. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/SOURCES.txt +0 -0
  145. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/dependency_links.txt +0 -0
  146. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/entry_points.txt +0 -0
  147. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/not-zip-safe +0 -0
  148. {dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/top_level.txt +0 -0
  149. {dxpy-0.378.0 → dxpy-0.380.0}/requirements.txt +0 -0
  150. {dxpy-0.378.0 → dxpy-0.380.0}/requirements_setuptools.txt +0 -0
  151. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-clone-asset +0 -0
  152. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-docker +0 -0
  153. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-download-all-inputs +0 -0
  154. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-fetch-bundled-depends +0 -0
  155. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-generate-dxapp +0 -0
  156. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-add-output +0 -0
  157. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-dxlink +0 -0
  158. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-get-identity-token +0 -0
  159. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-new-job +0 -0
  160. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-parse-link +0 -0
  161. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-jobutil-report-error +0 -0
  162. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-log-stream +0 -0
  163. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-mount-all-inputs +0 -0
  164. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-notebook-reconnect +0 -0
  165. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-print-bash-vars +0 -0
  166. {dxpy-0.378.0 → dxpy-0.380.0}/scripts/dx-upload-all-outputs +0 -0
  167. {dxpy-0.378.0 → dxpy-0.380.0}/setup.cfg +0 -0
  168. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_batch.py +0 -0
  169. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_create_cohort.py +0 -0
  170. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_describe.py +0 -0
  171. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dx-docker.py +0 -0
  172. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dx_app_wizard.py +0 -0
  173. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dx_bash_helpers.py +0 -0
  174. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dx_completion.py +0 -0
  175. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dx_symlink.py +0 -0
  176. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxabs.py +0 -0
  177. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxasset.py +0 -0
  178. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxpy.py +0 -0
  179. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxpy_utils.py +0 -0
  180. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxunpack.py +0 -0
  181. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_extract_assay.py +0 -0
  182. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_extract_dataset.py +0 -0
  183. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_extract_expression.py +0 -0
  184. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_extract_somatic.py +0 -0
  185. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_nextflow.py +0 -0
  186. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_nextflow_ImageRef.py +0 -0
  187. {dxpy-0.378.0 → dxpy-0.380.0}/test/test_nextflow_ImageRefFactory.py +0 -0

{dxpy-0.378.0 → dxpy-0.380.0}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.378.0
+Version: 0.380.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -21,8 +21,9 @@ Provides-Extra: xattr
 
 dxpy: DNAnexus Python API
 =========================
+[DNAnexus Documentation](https://documentation.dnanexus.com/)
 
-[API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
+[dxpy API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
 
 Building
 --------

{dxpy-0.378.0 → dxpy-0.380.0}/Readme.md

@@ -1,7 +1,8 @@
 dxpy: DNAnexus Python API
 =========================
+[DNAnexus Documentation](https://documentation.dnanexus.com/)
 
-[API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
+[dxpy API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
 
 Building
 --------

{dxpy-0.378.0 → dxpy-0.380.0}/dxpy/scripts/dx_build_app.py

@@ -1033,7 +1033,7 @@ def get_project_to_check(destination, extra_args):
 
 def verify_nf_license(destination, extra_args):
     dest_project_to_check = get_project_to_check(destination, extra_args)
-    features = dxpy.DXHTTPRequest("/" + dest_project_to_check + "/checkFeatureAccess", {"features": ["dxNextflow"]}).get("features", {})
+    features = dxpy.DXHTTPRequest("/" + dest_project_to_check + "/checkFeatureAccess", {"features": ["dxNextflow"]}, always_retry=True).get("features", {})
     dx_nextflow_lic = features.get("dxNextflow", False)
     if not dx_nextflow_lic:
         raise dxpy.app_builder.AppBuilderException("PermissionDenied: billTo of the applet's destination project must have the dxNextflow feature enabled. For inquiries, please contact support@dnanexus.com")
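
The only functional change above is that the feature-access request is now marked with always_retry=True, so a transient connection failure during the check no longer aborts the build. For context only, the same checkFeatureAccess route can be exercised from the command line with dx api; this is an illustrative sketch with a placeholder project ID, not part of the diff:

    # Placeholder project ID; substitute the applet's destination project.
    PROJECT_ID="project-xxxx"

    # Same route verify_nf_license calls; per the call above, the response
    # carries a "features" map that the builder inspects for "dxNextflow".
    dx api "$PROJECT_ID" checkFeatureAccess '{"features": ["dxNextflow"]}'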

{dxpy-0.378.0 → dxpy-0.380.0}/dxpy/templating/templates/nextflow/src/nextflow.sh

@@ -35,9 +35,7 @@ main() {
         set -x
     fi
 
-    # If cache is used, it will be stored in the project at
-    DX_CACHEDIR=$DX_PROJECT_CONTEXT_ID:/.nextflow_cache_db
-    get_nextaur_version
+    detect_nextaur_plugin_version
 
     # unset properties
     cloned_job_properties=$(dx describe "$DX_JOB_ID" --json | jq -r '.properties | to_entries[] | select(.key | startswith("nextflow")) | .key')
@@ -46,11 +44,6 @@ main() {
     # check if all run opts provided by user are supported
     validate_run_opts
 
-    # Check if limit reached for Nextflow sessions preserved in this project's cache
-    if [[ $preserve_cache == true ]]; then
-        check_cache_db_storage
-    fi
-
     # set default NXF env constants
 
     # Disable use of newer flag --cpus when running Docker
@@ -88,36 +81,6 @@ main() {
     # get current executable name
    EXECUTABLE_NAME=$(jq -r .executableName /home/dnanexus/dnanexus-job.json)
 
-    # If resuming session, use resume id; otherwise create id for this session
-    if [[ -n $resume ]]; then
-        get_resume_session_id
-    else
-        NXF_UUID=$(uuidgen)
-    fi
-    export NXF_UUID
-
-    # Using the lenient mode to caching makes it possible to reuse working files for resume on the platform
-    export NXF_CACHE_MODE=LENIENT
-
-    if [[ $preserve_cache == true ]]; then
-        dx set_properties "$DX_JOB_ID" \
-            nextflow_executable="$EXECUTABLE_NAME" \
-            nextflow_session_id="$NXF_UUID" \
-            nextflow_preserve_cache="$preserve_cache"
-    fi
-
-    # check if there are any ongoing jobs resuming
-    # and generating new cache for the session to resume
-    if [[ $preserve_cache == true && -n $resume ]]; then
-        check_running_jobs
-    fi
-
-    # restore previous cache and create resume argument to nextflow run
-    RESUME_CMD=""
-    if [[ -n $resume ]]; then
-        restore_cache
-    fi
-
     # download default applet file type inputs
     dx-download-all-inputs --parallel @@EXCLUDE_INPUT_DOWNLOAD@@ 2>/dev/null 1>&2
     RUNTIME_CONFIG_CMD=''
@@ -140,7 +103,6 @@ main() {
         run @@RESOURCES_SUBPATH@@ \
         $profile_arg \
         -name ${DX_JOB_ID}${env_job_suffix} \
-        $RESUME_CMD \
        $nextflow_run_opts \
        $RUNTIME_PARAMS_FILE \
        $nextflow_pipeline_params)"
@@ -156,9 +118,23 @@ main() {
     aws_login
     aws_relogin_loop & AWS_RELOGIN_PID=$!
 
+    set_vars_session_and_cache
+
+    if [[ $preserve_cache == true ]]; then
+        set_job_properties_cache
+        check_cache_db_storage_limit
+        if [[ -n $resume ]]; then
+            check_no_concurrent_job_same_cache
+        fi
+    fi
+
+    RESUME_CMD=""
+    if [[ -n $resume ]]; then
+        restore_cache_and_set_resume_cmd
+    fi
+
     # Set Nextflow workdir based on S3 workdir / preserve_cache options
     setup_workdir
-    export NXF_WORK
 
     # set beginning timestamp
     BEGIN_TIME="$(date +"%Y-%m-%d %H:%M:%S")"
@@ -230,24 +206,9 @@ on_exit() {
         set -xe
     fi
 
-    # backup cache
     if [[ $preserve_cache == true ]]; then
         echo "=== Execution completed — caching current session to $DX_CACHEDIR/$NXF_UUID"
-
-        # wrap cache folder and upload cache.tar
-        if [[ -n "$(ls -A .nextflow)" ]]; then
-            tar -cf cache.tar .nextflow
-
-            CACHE_ID=$(dx upload "cache.tar" --path "$DX_CACHEDIR/$NXF_UUID/cache.tar" --no-progress --brief --wait -p -r) &&
-                echo "Upload cache of current session as file: $CACHE_ID" &&
-                rm -f cache.tar ||
-                echo "Failed to upload cache of current session $NXF_UUID"
-        else
-            echo "No cache is generated from this execution. Skip uploading cache."
-        fi
-
-        # preserve_cache is false
-        # clean up files of this session
+        upload_session_cache_file
     else
         echo "=== Execution completed — cache and working files will not be resumable"
     fi
@@ -378,7 +339,7 @@ docker_registry_login() {
 aws_login() {
     if [ -f "$AWS_ENV" ]; then
         source $AWS_ENV
-        detect_using_s3_workdir
+        detect_if_using_s3_workdir
 
         # aws env file example values:
         # "iamRoleArnToAssume", "jobTokenAudience", "jobTokenSubjectClaims", "awsRegion"
@@ -414,7 +375,7 @@ aws_relogin_loop() {
 # Helpers: workdir configuration
 # =========================================================
 
-detect_using_s3_workdir() {
+detect_if_using_s3_workdir() {
     if [[ -f "$AWS_ENV" ]]; then
         source $AWS_ENV
     fi
@@ -441,6 +402,8 @@ setup_workdir() {
         # Work dir on platform and not using cache, use workspace
         NXF_WORK="dx://$DX_WORKSPACE_ID:/work/"
     fi
+
+    export NXF_WORK
 }
 
 # =========================================================
@@ -454,7 +417,7 @@ validate_run_opts() {
     for opt in "${opts[@]}"; do
         case $opt in
         -w=* | -work-dir=* | -w | -work-dir)
-            dx-jobutil-report-error "Nextflow workDir is set as $DX_CACHEDIR/<session_id>/work/ if preserve_cache=true, or $DX_WORKSPACE_ID:/work/ if preserve_cache=false. Please remove workDir specification (-w|-work-dir path) in nextflow_run_opts and run again."
+            dx-jobutil-report-error "Please remove workDir specification (-w|-work-dir path) in nextflow_run_opts. For Nextflow runs on DNAnexus, the workdir will be located at 1) In the workspace container-xxx 2) In project-yyy:/.nextflow_cache_db if preserve_cache=true, or 3) on S3, if specified."
             ;;
         -profile | -profile=*)
            if [ -n "$profile_arg" ]; then
@@ -467,26 +430,7 @@ validate_run_opts() {
     done
 }
 
-dx_path() {
-    local str=${1#"dx://"}
-    local tmp=$(mktemp -t nf-XXXXXXXXXX)
-    case $str in
-    project-*)
-        dx download $str -o $tmp --no-progress --recursive -f
-        echo file://$tmp
-        ;;
-    container-*)
-        dx download $str -o $tmp --no-progress --recursive -f
-        echo file://$tmp
-        ;;
-    *)
-        echo "Invalid $2 path: $1"
-        return 1
-        ;;
-    esac
-}
-
-get_nextaur_version() {
+detect_nextaur_plugin_version() {
     executable=$(cat dnanexus-executable.json | jq -r .id )
     bundled_dependency=$(dx describe ${executable} --json | jq -r '.runSpec.bundledDepends[] | select(.name=="nextaur.tar.gz") | .id."$dnanexus_link"')
     asset_dependency=$(dx describe ${bundled_dependency} --json | jq -r .properties.AssetBundle)
@@ -513,6 +457,7 @@ log_context_info() {
     echo "=== NF projectDir : @@RESOURCES_SUBPATH@@"
     echo "=== NF session ID : ${NXF_UUID}"
     echo "=== NF log file : dx://${DX_JOB_OUTDIR%/}/${LOG_NAME}"
+    echo "=== NF workdir : ${NXF_WORK}"
     if [[ $preserve_cache == true ]]; then
         echo "=== NF cache folder : dx://${DX_CACHEDIR}/${NXF_UUID}/"
     fi
@@ -569,6 +514,22 @@ download_cmd_launcher_file() {
 # Helpers: run with preserve cache, resume
 # =========================================================
 
+set_vars_session_and_cache() {
+    # Path in project to store cached sessions
+    export DX_CACHEDIR="${DX_PROJECT_CONTEXT_ID}:/.nextflow_cache_db"
+
+    # Using the lenient mode to caching makes it possible to reuse working files for resume on the platform
+    export NXF_CACHE_MODE=LENIENT
+
+    # If resuming session, use resume id; otherwise create id for this session
+    if [[ -n $resume ]]; then
+        get_resume_session_id
+    else
+        NXF_UUID=$(uuidgen)
+    fi
+    export NXF_UUID
+}
+
 get_resume_session_id() {
     if [[ $resume == 'true' || $resume == 'last' ]]; then
         # find the latest job run by applet with the same name
@@ -603,7 +564,48 @@ get_resume_session_id() {
     NXF_UUID=$PREV_JOB_SESSION_ID
 }
 
-restore_cache() {
+set_job_properties_cache() {
+    # Set properties on job, which can be used to look up cached job later
+    dx set_properties "$DX_JOB_ID" \
+        nextflow_executable="$EXECUTABLE_NAME" \
+        nextflow_session_id="$NXF_UUID" \
+        nextflow_preserve_cache="$preserve_cache"
+}
+
+check_cache_db_storage_limit() {
+    # Enforce a limit on cached session workdirs stored in the DNAnexus project
+    # Removal must be manual, because applet can only upload, not delete project files
+    # Limit does not apply when the workdir is external (e.g. S3)
+
+    MAX_CACHE_STORAGE=20
+    existing_cache=$(dx ls $DX_CACHEDIR --folders 2>/dev/null | wc -l)
+    [[ $existing_cache -lt $MAX_CACHE_STORAGE || $USING_S3_WORKDIR == true ]] ||
+        dx-jobutil-report-error "The limit for preserved sessions in the project is $MAX_CACHE_STORAGE. Please remove folders from $DX_CACHEDIR to be under the limit, run without preserve_cache=true, or use S3 as workdir."
+}
+
+check_no_concurrent_job_same_cache() {
+    # Do not allow more than 1 concurrent run with the same session id,
+    # to prevent conflicting workdir contents
+
+    FIRST_RESUMED_JOB=$(
+        dx api system findExecutions \
+            '{"state":["idle", "waiting_on_input", "runnable", "running", "debug_hold", "waiting_on_output", "restartable", "terminating"],
+            "project":"'$DX_PROJECT_CONTEXT_ID'",
+            "includeSubjobs":false,
+            "properties":{
+                "nextflow_session_id":"'$NXF_UUID'",
+                "nextflow_preserve_cache":"true",
+                "nextflow_executable":"'$EXECUTABLE_NAME'"}}' 2>/dev/null |
+        jq -r '.results[-1].id // empty'
+    )
+
+    [[ -n $FIRST_RESUMED_JOB && $DX_JOB_ID == $FIRST_RESUMED_JOB ]] ||
+        dx-jobutil-report-error "There is at least one other non-terminal state job with the same sessionID $NXF_UUID.
+        Please wait until all other jobs sharing the same sessionID to enter their terminal state and rerun,
+        or run without preserve_cache set to true."
+}
+
+restore_cache_and_set_resume_cmd() {
     # download latest cache.tar from $DX_CACHEDIR/$PREV_JOB_SESSION_ID/
     PREV_JOB_CACHE_FILE=$(
         dx api system findDataObjects \
@@ -643,30 +645,16 @@ restore_cache() {
     dx tag "$DX_JOB_ID" "resumed"
 }
 
-# Have to ask user to empty the cache if limit exceeded because Nextflow only
-# has UPLOAD access to project
-check_cache_db_storage() {
-    MAX_CACHE_STORAGE=20
-    existing_cache=$(dx ls $DX_CACHEDIR --folders 2>/dev/null | wc -l)
-    [[ $existing_cache -le MAX_CACHE_STORAGE ]] ||
-        dx-jobutil-report-error "The number of preserved sessions is already at the limit ($MAX_CACHE_STORAGE) and preserve_cache is true. Please remove the folders in $DX_CACHEDIR to be under the limit, or run without preserve_cache set to true."
-}
-
-check_running_jobs() {
-    FIRST_RESUMED_JOB=$(
-        dx api system findExecutions \
-            '{"state":["idle", "waiting_on_input", "runnable", "running", "debug_hold", "waiting_on_output", "restartable", "terminating"],
-            "project":"'$DX_PROJECT_CONTEXT_ID'",
-            "includeSubjobs":false,
-            "properties":{
-                "nextflow_session_id":"'$NXF_UUID'",
-                "nextflow_preserve_cache":"true",
-                "nextflow_executable":"'$EXECUTABLE_NAME'"}}' 2>/dev/null |
-        jq -r '.results[-1].id // empty'
-    )
+upload_session_cache_file() {
+    # wrap cache folder and upload cache.tar
+    if [[ -n "$(ls -A .nextflow)" ]]; then
+        tar -cf cache.tar .nextflow
 
-    [[ -n $FIRST_RESUMED_JOB && $DX_JOB_ID == $FIRST_RESUMED_JOB ]] ||
-        dx-jobutil-report-error "There is at least one other non-terminal state job with the same sessionID $NXF_UUID.
-        Please wait until all other jobs sharing the same sessionID to enter their terminal state and rerun,
-        or run without preserve_cache set to true."
+        CACHE_ID=$(dx upload "cache.tar" --path "$DX_CACHEDIR/$NXF_UUID/cache.tar" --no-progress --brief --wait -p -r) &&
+            echo "Upload cache of current session as file: $CACHE_ID" &&
+            rm -f cache.tar ||
+            echo "Failed to upload cache of current session $NXF_UUID"
+    else
+        echo "No cache is generated from this execution. Skip uploading cache."
+    fi
 }
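
The nextflow.sh hunks above mostly regroup the preserve_cache/resume logic into named helpers (set_vars_session_and_cache, set_job_properties_cache, check_cache_db_storage_limit, check_no_concurrent_job_same_cache, restore_cache_and_set_resume_cmd, upload_session_cache_file) that are called from main() and on_exit(), and move the NXF_WORK export into setup_workdir. For orientation only, a hedged sketch of how such a Nextflow applet might be launched with the cache-related inputs this script reads; the applet ID is a placeholder and the input names are inferred from the shell variables above, not confirmed by the diff:

    # Illustrative invocation; applet-xxxx is a placeholder.
    # preserve_cache=true keeps the session cache under
    # <project>:/.nextflow_cache_db, and resume=last picks up the most
    # recent preserved session of the same executable.
    dx run applet-xxxx \
        -i preserve_cache=true \
        -i resume=last \
        -i nextflow_run_opts="-profile test"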

dxpy-0.380.0/dxpy/toolkit_version.py (new file)

@@ -0,0 +1 @@
+version = '0.380.0'

{dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.378.0
+Version: 0.380.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -21,8 +21,9 @@ Provides-Extra: xattr
 
 dxpy: DNAnexus Python API
 =========================
+[DNAnexus Documentation](https://documentation.dnanexus.com/)
 
-[API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
+[dxpy API Documentation](http://autodoc.dnanexus.com/bindings/python/current/)
 
 Building
 --------

{dxpy-0.378.0 → dxpy-0.380.0}/dxpy.egg-info/requires.txt

@@ -16,6 +16,7 @@ colorama<=0.4.6,>=0.4.4
 
 [pandas]
 pandas==1.3.5
+numpy<2.0.0
 
 [xattr]
 

{dxpy-0.378.0 → dxpy-0.380.0}/requirements_test.txt

@@ -6,4 +6,5 @@ pytest-timeout==2.1.0
 parameterized==0.8.1
 pandas==1.3.5; python_version>='3.7'
 pandas>=0.23.3,<=0.25.3; python_version>='3.5.3' and python_version<'3.7'
+numpy<2.0.0
 requests

{dxpy-0.378.0 → dxpy-0.380.0}/setup.py

@@ -100,7 +100,7 @@ setup(
     python_requires = '>=3.8',
     install_requires = dependencies,
     extras_require={
-        'pandas': ["pandas==1.3.5"],
+        'pandas': ["pandas==1.3.5", "numpy<2.0.0"],
         'xattr': ["xattr==0.10.1; sys_platform == 'linux2' or sys_platform == 'linux'"]
     },
     tests_require = test_dependencies,
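
The requirements_test.txt, requires.txt, and setup.py hunks all add the same numpy<2.0.0 pin alongside pandas==1.3.5 (pandas 1.3.x predates NumPy 2.0 and was not built against it). As a usage sketch only, the extra declared above would typically be installed like this; the command itself is illustrative and not part of the diff:

    # Installs dxpy with its optional pandas extra; pip resolves the
    # pandas==1.3.5 and numpy<2.0.0 pins declared in extras_require.
    pip install "dxpy[pandas]==0.380.0"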

{dxpy-0.378.0 → dxpy-0.380.0}/test/test_dxclient.py

@@ -320,7 +320,7 @@ class TestDXClient(DXTestCase):
         fd.close()
         run("dx api {p} describe --input {fn}".format(p=self.project, fn=fd.name))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_PROJ_INVITE_USER"])
     @unittest.skipUnless(testutil.TEST_NO_RATE_LIMITS,
                          'skipping tests that need rate limits to be disabled')
@@ -332,7 +332,7 @@ class TestDXClient(DXTestCase):
         with self.assertSubprocessFailure(stderr_regexp="invalid choice", exit_code=2):
             run(("dx invite alice.nonexistent : ПРОСМОТР").format(p=self.project))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_PROJ_REVOKE_USER_PERMISSIONS"])
     @unittest.skipUnless(testutil.TEST_NO_RATE_LIMITS,
                          'skipping tests that need rate limits to be disabled')
@@ -656,7 +656,7 @@ class TestDXClient(DXTestCase):
         self.assertEqual(run("dx find projects --brief --name {name}".format(name=project_name)), "")
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that requires presence of test user')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_PROJ_VIEW_SHAREES","DNA_API_PROJ_ADD_USERS"])
     def test_dx_project_invite_without_email(self):
         user_id = 'user-bob'
@@ -2273,7 +2273,7 @@ class TestDXClientDescribe(DXTestCaseBuildWorkflows):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_DELETE"])
     def test_describe_deleted_app(self):
         applet_id = dxpy.api.applet_new({"project": self.project,
@@ -5017,7 +5017,7 @@ class TestDXClientFind(DXTestCase):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that creates apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_PUBLISH"])
     def test_dx_find_apps(self):
         test_applet_id = dxpy.api.applet_new({"name": "my_find_applet",
@@ -5100,7 +5100,7 @@ class TestDXClientFind(DXTestCase):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that creates global workflows')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_WORKFLOW_LIST_AVAILABLE_WORKFLOWS_GLOBALWF"])
     def test_dx_find_globalworkflows(self):
         test_applet_id = dxpy.api.applet_new({"name": "my_find_applet",
@@ -5464,7 +5464,7 @@ class TestDXClientFind(DXTestCase):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that depends on a public project only defined in the nucleus integration tests')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_PROJ_VIEW_PUBLIC_PROJECTS"])
     def test_dx_find_public_projects(self):
         unique_project_name = 'dx find public projects test ' + str(time.time())
@@ -5917,7 +5917,7 @@ class TestDXClientFind(DXTestCase):
         self.assertEqual(get_ids(run("dx find jobs " + options3)), set([job_id, subjob_id]))
         self.assertEqual(get_ids(run("dx find analyses " + options3)), set([analysis_id]))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_LIST_ORGS"])
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that requires presence of test org')
@@ -6007,7 +6007,7 @@ class TestDXClientFindInOrg(DXTestCaseBuildApps):
         with self.assertSubprocessFailure(stderr_regexp='error: argument --level: expected one argument', exit_code=2):
             run("dx find org members org-piratelabs --level")
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_LIST_MEMBERS",
                                           "DNA_API_ORG_FIND_MEMBERS"])
     def test_dx_find_org_members(self):
@@ -6083,7 +6083,7 @@ class TestDXClientFindInOrg(DXTestCaseBuildApps):
         with self.assertSubprocessFailure(stderr_regexp='expected one argument', exit_code=2):
             run(cmd.format(opts="--phi"))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_LIST_PROJECTS",
                                           "DNA_API_ORG_FIND_PROJECTS"])
     def test_dx_find_org_projects(self):
@@ -6245,7 +6245,7 @@ class TestDXClientFindInOrg(DXTestCaseBuildApps):
         self.assertTrue(len(res) == 1, "Expected to find one project")
         self.assertEqual(res[0], project1_id)
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_APP_LIST_APPS_ORG",
                                           "DNA_API_ORG_FIND_APPS"])
     def test_dx_find_org_apps(self):
@@ -6311,7 +6311,7 @@ class TestDXClientOrg(DXTestCase):
                                            "error: argument --member-list-visibility: invalid choice"):
             run('dx new org --member-list-visibility NONE')
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_CREATE",
                                           "DNA_API_ORG_CREATE",
                                           "DNA_API_ORG_DESCRIBE"])
@@ -6481,7 +6481,7 @@ class TestDXClientOrg(DXTestCase):
         with self.assertSubprocessFailure(stderr_regexp="--project-transfer-ability.*invalid", exit_code=2):
             run("dx update org {o} --project-transfer-ability PUBLIC".format(o=self.org_id))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_UPDATE_INFORMATION"])
     def test_org_update(self):
         def get_name_and_policies(org_id=None):
@@ -6586,7 +6586,7 @@ class TestDXClientNewProject(DXTestCase):
         with self.assertRaisesRegex(subprocess.CalledProcessError, "InvalidInput"):
             run("dx new project --brief --region aws:not-a-region InvalidRegionProject")
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_PROJ_CREATE_NEW_PROJECT",
                                           "DNA_API_USR_MGMT_SET_BILLING_ACCOUNT",
                                           "DNA_API_ORG_ALLOW_BILLABLE_ACTIVITIES"])
@@ -6664,7 +6664,7 @@ class TestDXClientNewUser(DXTestCase):
     def tearDown(self):
         super(TestDXClientNewUser, self).tearDown()
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_USR_MGMT_NEW_USER"])
     def test_create_user_account_and_set_bill_to_negative(self):
         username, email = generate_unique_username_email()
@@ -6783,7 +6783,7 @@ class TestDXClientNewUser(DXTestCase):
                        "last": last,
                        "middle": middle})
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_ORG_ADD_USER"])
     def test_create_user_account_and_invite_to_org(self):
         # TODO: Test --no-email flag.
@@ -6954,7 +6954,7 @@ class TestDXClientMembership(DXTestCase):
         self._remove_user(self.user_id)
         super(TestDXClientMembership, self).tearDown()
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_ADD_MEMBER"])
     def test_add_membership_default(self):
         cmd = "dx add member {o} {u} --level {l}"
@@ -7023,7 +7023,7 @@ class TestDXClientMembership(DXTestCase):
         with self.assertRaisesRegex(subprocess.CalledProcessError, "DXCLIError"):
             run(" ".join([cmd, self.org_id, self.username, "--level ADMIN"]))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_REMOVE_MEMBER",
                                           "DNA_API_ORG_REMOVE_USER"])
     def test_remove_membership_default(self):
@@ -7146,7 +7146,7 @@ class TestDXClientMembership(DXTestCase):
         with self.assertRaises(subprocess.CalledProcessError):
             run(" ".join([cmd, invalid_opts]))
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_ORG_UPDATE_USER_MEMBERSHIP",
                                           "DNA_API_ORG_CHANGE_USER_PERMISSIONS"])
     def test_update_membership_positive(self):
@@ -7663,7 +7663,7 @@ class TestDXBuildWorkflow(DXTestCaseBuildWorkflows):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create global workflows')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_WORKFLOW_REMOVE_AUTHORIZED_USERS_GLOBALWF",
                                           "DNA_CLI_WORKFLOW_LIST_AUTHORIZED_USERS_GLOBALWF",
                                           "DNA_CLI_WORKFLOW_ADD_AUTHORIZED_USERS_GLOBALWF"])
@@ -7716,7 +7716,7 @@ class TestDXBuildWorkflow(DXTestCaseBuildWorkflows):
         run('dx remove users wf_test_dx_users nonexistentuser')
         run('dx remove users wf_test_dx_users piratelabs')
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_WORKFLOW_ADD_DEVELOPERS_GLOBALWF",
                                           "DNA_CLI_WORKFLOW_LIST_DEVELOPERS_GLOBALWF",
                                           "DNA_CLI_WORKFLOW_REMOVE_DEVELOPERS_GLOBALWF"])
@@ -7779,7 +7779,7 @@ class TestDXBuildWorkflow(DXTestCaseBuildWorkflows):
         run('dx remove developers wf_test_dx_developers piratelabs')
 
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_WORKFLOW_PUBLISH_GLOBALWF"])
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create global workflows')
@@ -8296,7 +8296,7 @@ class TestDXBuildApp(DXTestCaseBuildApps):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_DESCRIBE"])
     def test_build_single_region_app_without_regional_options(self):
         # Backwards-compatible.
@@ -8817,7 +8817,7 @@ class TestDXBuildApp(DXTestCaseBuildApps):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_CREATE"])
     def test_build_app_with_bill_to(self):
         alice_id = "user-alice"
@@ -8969,7 +8969,7 @@ class TestDXBuildApp(DXTestCaseBuildApps):
         self.assertEqual(json.loads(run("dx api " + app_id + " listCategories"))["categories"], ['B'])
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_LIST_AUTHORIZED_USERS","DNA_API_APP_ADD_AUTHORIZED_USER"])
     def test_update_app_authorized_users(self):
         app0_spec = dict(self.base_app_spec, name="update_app_authorized_users")
@@ -8991,7 +8991,7 @@ class TestDXBuildApp(DXTestCaseBuildApps):
         self.assertEqual(json.loads(run("dx api " + app_id +
                                         " listAuthorizedUsers"))["authorizedUsers"], ["user-eve"])
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_APP_ADD_AUTHORIZED_USERS_APP",
                                           "DNA_CLI_APP_LIST_AUTHORIZED_USERS_APP",
                                           "DNA_CLI_APP_REMOVE_AUTHORIZED_USERS_APP"])
@@ -9044,7 +9044,7 @@ class TestDXBuildApp(DXTestCaseBuildApps):
         run('dx remove users test_dx_users nonexistentuser')
         run('dx remove users test_dx_users piratelabs')
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_APP_ADD_DEVELOPERS_APP",
                                           "DNA_CLI_APP_LIST_DEVELOPERS_APP",
                                           "DNA_CLI_APP_REMOVE_DEVELOPERS_APP",
@@ -9980,7 +9980,7 @@ def main(in1):
 
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that would create app')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_APP_PUBLISH"])
     def test_dx_publish_app(self):
         app_name = "dx_publish_app"
@@ -10102,7 +10102,7 @@ class TestDXGetWorkflows(DXTestCaseBuildWorkflows):
         with self.assertSubprocessFailure(stderr_regexp='already exists', exit_code=3):
             run("dx get -o destdir_withfile get_workflow")
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_DATA_OBJ_DOWNLOAD_EXECUTABLE"])
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV, 'skipping test that would create global workflows')
     def test_get_global_workflow(self):
@@ -10699,7 +10699,7 @@ class TestDXGetAppsAndApplets(DXTestCaseBuildApps):
                 break
         self.assertTrue(seenResources)
 
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_CLI_APP_LIST_AVAILABLE_APPS",
                                           "DNA_CLI_APP_INSTALL_APP",
                                           "DNA_CLI_APP_UNINSTALL_APP",
@@ -11213,7 +11213,7 @@ class TestDXRun(DXTestCase):
 class TestDXUpdateApp(DXTestCaseBuildApps):
     @unittest.skipUnless(testutil.TEST_ISOLATED_ENV,
                          'skipping test that creates apps')
-    @pytest.mark.TRACEABILITY_MATRIX
+    @pytest.mark.TRACEABILITY_ISOLATED_ENV
     @testutil.update_traceability_matrix(["DNA_API_APP_UPDATE"])
     def test_update_app(self):
         # Build and publish app with initial version
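
Every hunk in test_dxclient.py is the same one-line change: the pytest marker TRACEABILITY_MATRIX is renamed to TRACEABILITY_ISOLATED_ENV on 31 tests. A hedged selection sketch follows; the test path is taken from the file list above, while registration of the marker (e.g. in pytest.ini) is assumed and the command is not part of the package:

    # Select only the renamed traceability tests; -m filters by pytest marker.
    python -m pytest -m TRACEABILITY_ISOLATED_ENV test/test_dxclient.py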

dxpy-0.378.0/dxpy/toolkit_version.py (removed)

@@ -1 +0,0 @@
-version = '0.378.0'

The remaining files listed above (+0 -0) are unchanged between 0.378.0 and 0.380.0.