dxpy 0.401.0__tar.gz → 0.402.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (193)
  1. {dxpy-0.401.0 → dxpy-0.402.0}/PKG-INFO +2 -1
  2. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxfile_functions.py +41 -60
  3. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxproject.py +5 -1
  4. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/download.py +1 -1
  5. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/parsers.py +18 -18
  6. dxpy-0.402.0/dxpy/nextflow/nextaur_assets.json +10 -0
  7. dxpy-0.402.0/dxpy/nextflow/nextaur_assets.staging.json +10 -0
  8. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/scripts/dx.py +2 -2
  9. dxpy-0.402.0/dxpy/toolkit_version.py +1 -0
  10. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/completer.py +37 -2
  11. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/PKG-INFO +2 -1
  12. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/SOURCES.txt +1 -0
  13. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/requires.txt +1 -0
  14. {dxpy-0.401.0 → dxpy-0.402.0}/requirements.txt +1 -0
  15. dxpy-0.402.0/test/test_dxfile_functions.py +71 -0
  16. dxpy-0.401.0/dxpy/nextflow/nextaur_assets.json +0 -10
  17. dxpy-0.401.0/dxpy/nextflow/nextaur_assets.staging.json +0 -10
  18. dxpy-0.401.0/dxpy/toolkit_version.py +0 -1
  19. {dxpy-0.401.0 → dxpy-0.402.0}/MANIFEST.in +0 -0
  20. {dxpy-0.401.0 → dxpy-0.402.0}/Readme.md +0 -0
  21. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/__init__.py +0 -0
  22. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/api.py +0 -0
  23. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/app_builder.py +0 -0
  24. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/app_categories.py +0 -0
  25. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/asset_builder.py +0 -0
  26. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/__init__.py +0 -0
  27. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/__init__.py +0 -0
  28. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
  29. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
  30. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/dataset.py +0 -0
  31. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/json_validation_by_schema.py +0 -0
  32. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
  33. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -0
  34. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +0 -0
  35. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +0 -0
  36. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/vizclient.py +0 -0
  37. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +0 -0
  38. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
  39. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/auth.py +0 -0
  40. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/download_all_inputs.py +0 -0
  41. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxanalysis.py +0 -0
  42. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxapp.py +0 -0
  43. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
  44. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxapplet.py +0 -0
  45. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxdatabase.py +0 -0
  46. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxdatabase_functions.py +0 -0
  47. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
  48. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxfile.py +0 -0
  49. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
  50. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxjob.py +0 -0
  51. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxrecord.py +0 -0
  52. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/dxworkflow.py +0 -0
  53. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/mount_all_inputs.py +0 -0
  54. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/bindings/search.py +0 -0
  55. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/__init__.py +0 -0
  56. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/cp.py +0 -0
  57. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/dataset_utilities.py +0 -0
  58. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/exec_io.py +0 -0
  59. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/help_messages.py +0 -0
  60. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/org.py +0 -0
  61. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/output_handling.py +0 -0
  62. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/cli/workflow.py +0 -0
  63. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/compat.py +0 -0
  64. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
  65. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
  66. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
  67. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
  68. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/__init__.py +0 -0
  69. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
  70. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
  71. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
  72. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
  73. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
  74. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
  75. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
  76. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
  77. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
  78. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
  79. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
  80. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
  81. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
  82. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
  83. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
  84. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
  85. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/dxlog.py +0 -0
  86. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/exceptions.py +0 -0
  87. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/executable_builder.py +0 -0
  88. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/ImageRef.py +0 -0
  89. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
  90. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/__init__.py +0 -0
  91. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/app_asset_projects_ids_prod.json +0 -0
  92. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/app_asset_projects_ids_staging.json +0 -0
  93. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/awscli_assets.json +0 -0
  94. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
  95. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/collect_images.py +0 -0
  96. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/default_nextflow_instance_types.json +0 -0
  97. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/nextflow_assets.json +0 -0
  98. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/nextflow_assets.staging.json +0 -0
  99. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/nextflow_builder.py +0 -0
  100. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/nextflow_templates.py +0 -0
  101. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/nextflow/nextflow_utils.py +0 -0
  102. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/packages/__init__.py +0 -0
  103. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/scripts/__init__.py +0 -0
  104. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/scripts/dx_app_wizard.py +0 -0
  105. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/scripts/dx_build_app.py +0 -0
  106. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/scripts/dx_build_applet.py +0 -0
  107. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/ssh_tunnel_app_support.py +0 -0
  108. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/system_requirements.py +0 -0
  109. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/__init__.py +0 -0
  110. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/bash.py +0 -0
  111. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/python.py +0 -0
  112. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/Readme.md +0 -0
  113. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
  114. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
  115. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
  116. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
  117. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
  118. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
  119. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
  120. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +0 -0
  121. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
  122. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
  123. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
  124. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
  125. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
  126. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
  127. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
  128. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
  129. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
  130. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/templating/utils.py +0 -0
  131. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/__init__.py +0 -0
  132. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/batch_utils.py +0 -0
  133. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/config.py +0 -0
  134. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/describe.py +0 -0
  135. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/exec_utils.py +0 -0
  136. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/executable_unbuilder.py +0 -0
  137. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/file_handle.py +0 -0
  138. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/file_load_utils.py +0 -0
  139. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/genomic_utils.py +0 -0
  140. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/job_log_client.py +0 -0
  141. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/local_exec_utils.py +0 -0
  142. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/pathmatch.py +0 -0
  143. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/pretty_print.py +0 -0
  144. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/printing.py +0 -0
  145. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/resolver.py +0 -0
  146. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/spelling_corrector.py +0 -0
  147. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/utils/version.py +0 -0
  148. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy/workflow_builder.py +0 -0
  149. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/dependency_links.txt +0 -0
  150. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/entry_points.txt +0 -0
  151. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/not-zip-safe +0 -0
  152. {dxpy-0.401.0 → dxpy-0.402.0}/dxpy.egg-info/top_level.txt +0 -0
  153. {dxpy-0.401.0 → dxpy-0.402.0}/requirements_setuptools.txt +0 -0
  154. {dxpy-0.401.0 → dxpy-0.402.0}/requirements_test.txt +0 -0
  155. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-clone-asset +0 -0
  156. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-docker +0 -0
  157. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-download-all-inputs +0 -0
  158. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-fetch-bundled-depends +0 -0
  159. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-generate-dxapp +0 -0
  160. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-add-output +0 -0
  161. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-dxlink +0 -0
  162. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-get-identity-token +0 -0
  163. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-new-job +0 -0
  164. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-parse-link +0 -0
  165. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-jobutil-report-error +0 -0
  166. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-log-stream +0 -0
  167. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-mount-all-inputs +0 -0
  168. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-notebook-reconnect +0 -0
  169. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-print-bash-vars +0 -0
  170. {dxpy-0.401.0 → dxpy-0.402.0}/scripts/dx-upload-all-outputs +0 -0
  171. {dxpy-0.401.0 → dxpy-0.402.0}/setup.cfg +0 -0
  172. {dxpy-0.401.0 → dxpy-0.402.0}/setup.py +0 -0
  173. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_batch.py +0 -0
  174. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_create_cohort.py +0 -0
  175. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_describe.py +0 -0
  176. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dx-docker.py +0 -0
  177. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dx_app_wizard.py +0 -0
  178. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dx_bash_helpers.py +0 -0
  179. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dx_completion.py +0 -0
  180. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dx_symlink.py +0 -0
  181. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxabs.py +0 -0
  182. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxasset.py +0 -0
  183. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxclient.py +0 -0
  184. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxpy.py +0 -0
  185. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxpy_utils.py +0 -0
  186. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_dxunpack.py +0 -0
  187. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_extract_assay.py +0 -0
  188. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_extract_dataset.py +0 -0
  189. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_extract_expression.py +0 -0
  190. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_extract_somatic.py +0 -0
  191. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_nextflow.py +0 -0
  192. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_nextflow_ImageRef.py +0 -0
  193. {dxpy-0.401.0 → dxpy-0.402.0}/test/test_nextflow_ImageRefFactory.py +0 -0
--- dxpy-0.401.0/PKG-INFO
+++ dxpy-0.402.0/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.401.0
+Version: 0.402.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -26,6 +26,7 @@ Requires-Dist: urllib3<2.2,>=1.25
 Requires-Dist: pyreadline3==3.4.1; sys_platform == "win32"
 Requires-Dist: colorama<=0.4.6,>=0.4.4; sys_platform == "win32"
 Requires-Dist: crc32c>=2.7.1
+Requires-Dist: awscrt>=0.23.4
 Provides-Extra: pandas
 Requires-Dist: pandas==1.3.5; extra == "pandas"
 Requires-Dist: numpy<2.0.0; extra == "pandas"
--- dxpy-0.401.0/dxpy/bindings/dxfile_functions.py
+++ dxpy-0.402.0/dxpy/bindings/dxfile_functions.py
@@ -31,6 +31,7 @@ import warnings
 from collections import defaultdict
 import multiprocessing
 import crc32c
+from awscrt import checksums
 import zlib
 import base64
 
@@ -227,70 +228,35 @@ def _download_symbolic_link(dxid, md5digest, project, dest_filename, symlink_max
     if md5digest is not None:
         _verify(dest_filename, md5digest)
 
-
-def _verify_per_part_checksum_on_downloaded_file(filename, dxfile_desc, show_progress=False):
-    parts = dxfile_desc["parts"]
-    parts_to_get = sorted(parts, key=int)
-    file_size = dxfile_desc.get("size")
-    per_part_checksum = dxfile_desc.get('perPartCheckSum')
-    _bytes = 0
-
-    if per_part_checksum is None:
+def _verify_checksum(parts, part_id, chunk_data, checksum_type, dxfile_id):
+    if checksum_type is None:
         return
-
-    offset = 0
-    for part_id in parts_to_get:
-        parts[part_id]["start"] = offset
-        offset += parts[part_id]["size"]
-
-    def read_chunk(filename, start, size, part_id):
-        with open(filename, 'rb') as f:
-            f.seek(start)
-            chunk = f.read(size)
-        return (chunk, part_id)
-
-    def process_file_in_parallel(filename):
-        chunks = []
-        for part_id in parts_to_get:
-            part_info = parts[part_id]
-            start = part_info["start"]
-            size = part_info["size"]
-            chunks.append((start, size, part_id))
-
-        with concurrent.futures.ThreadPoolExecutor() as executor:
-            futures = [executor.submit(read_chunk, filename, start, size, part_id) for start, size, part_id in chunks]
-            for future in concurrent.futures.as_completed(futures):
-                yield future.result()
-
-    for (chunk, part_id) in process_file_in_parallel(filename):
-        _verify_per_part_checksum(parts, part_id, chunk, per_part_checksum, dxfile_desc['id'])
-        if show_progress:
-            _bytes += parts[part_id]["size"]
-            _print_progress(_bytes, file_size, filename, action="Verified")
 
+    part = parts.get(part_id)
+    if part is None:
+        raise DXFileError("Part {} not found in {}".format(part_id, dxfile_id))
 
-def _verify_per_part_checksum(parts, part_id, chunk_data, per_part_checksum, dxfile_id):
-    if per_part_checksum is None:
-        return
-
-    part = parts[part_id]
     expected_checksum = part.get('checksum')
     verifiers = {
         'CRC32': lambda data: zlib.crc32(data).to_bytes(4, 'big'),
         'CRC32C': lambda data: crc32c.crc32c(data).to_bytes(4, 'big'),
         'SHA1': lambda data: hashlib.sha1(data).digest(),
-        'SHA256': lambda data: hashlib.sha256(data).digest()
+        'SHA256': lambda data: hashlib.sha256(data).digest(),
+        'CRC64NVME': lambda data: checksums.crc64nvme(data).to_bytes(8, 'big'),
     }
 
-    if per_part_checksum not in verifiers:
-        raise DXFileError("Unsupported per-part checksum type: {}".format(per_part_checksum))
+    if checksum_type not in verifiers:
+        raise DXFileError("Unsupported checksum type: {}".format(checksum_type))
     if expected_checksum is None:
-        raise DXFileError("{} checksum not found in part {}".format(per_part_checksum, part_id))
+        raise DXFileError("{} checksum not found in part {}".format(checksum_type, part_id))
 
     expected_checksum = base64.b64decode(expected_checksum)
-    got_checksum = verifiers[per_part_checksum](chunk_data)
+    got_checksum = verifiers[checksum_type](chunk_data)
+
     if got_checksum != expected_checksum:
-        raise DXChecksumMismatchError("Checksum mismatch in {} part {} (expected {}, got {})".format(dxfile_id, part_id, expected_checksum, got_checksum))
+        raise DXChecksumMismatchError("{} checksum mismatch in {} in part {} (expected {}, got {})".format(checksum_type, dxfile_id, part_id, expected_checksum, got_checksum))
+
+    return True
 
 
 def _print_progress(bytes_downloaded, file_size, filename, action="Downloaded"):
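
Note: the refactored _verify_checksum above is applied to downloaded chunks of drive-backed files; the expected value is stored base64-encoded in each part's "checksum" field, and the file's "checksumType" selects the algorithm. A minimal standalone sketch of the same comparison, using a hypothetical one-part file and the same awscrt call this diff introduces:

    import base64, hashlib, zlib
    import crc32c
    from awscrt import checksums

    data = b"fizzbuzz"  # stand-in for one downloaded part
    # Hypothetical part map shaped like dxfile_desc["parts"]: the expected
    # checksum is stored base64-encoded per part.
    parts = {"1": {"checksum": base64.b64encode(
        checksums.crc64nvme(data).to_bytes(8, "big")).decode()}}

    verifiers = {
        "CRC32": lambda d: zlib.crc32(d).to_bytes(4, "big"),
        "CRC32C": lambda d: crc32c.crc32c(d).to_bytes(4, "big"),
        "SHA1": lambda d: hashlib.sha1(d).digest(),
        "SHA256": lambda d: hashlib.sha256(d).digest(),
        "CRC64NVME": lambda d: checksums.crc64nvme(d).to_bytes(8, "big"),
    }

    expected = base64.b64decode(parts["1"]["checksum"])
    got = verifiers["CRC64NVME"](data)
    print(got == expected)  # True when the downloaded bytes are intact
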
@@ -352,12 +318,12 @@ def _download_dxfile(dxid, filename, part_retry_counter,
         else:
             md5 = None
         _download_symbolic_link(dxid, md5, project, filename, symlink_max_tries=symlink_max_tries)
-        _verify_per_part_checksum_on_downloaded_file(filename, dxfile_desc, show_progress)
         return True
 
     parts = dxfile_desc["parts"]
     parts_to_get = sorted(parts, key=int)
     file_size = dxfile_desc.get("size")
+    checksum_type = dxfile_desc.get("checksumType")
 
     offset = 0
     for part_id in parts_to_get:
@@ -375,8 +341,11 @@ def _download_dxfile(dxid, filename, part_retry_counter,
     if show_progress:
         _print_progress(0, None, filename)
 
-    def get_chunk(part_id_to_get, start, end):
+    def get_chunk(part_id_to_get, start, end, e_tag = None):
         url, headers = dxfile.get_download_url(project=project, **kwargs)
+        if e_tag is not None:
+            headers["If-Match"] = e_tag
+
         # If we're fetching the whole object in one shot, avoid setting the Range header to take advantage of gzip
         # transfer compression
         sub_range = False
@@ -385,14 +354,18 @@ def _download_dxfile(dxid, filename, part_retry_counter,
         data = dxpy._dxhttp_read_range(url, headers, start, end, FILE_REQUEST_TIMEOUT, sub_range)
         return part_id_to_get, data
 
-    def chunk_requests():
+    def chunk_requests(e_tag = None):
         for part_id_to_chunk in parts_to_get:
             part_info = parts[part_id_to_chunk]
             for chunk_start in range(part_info["start"], part_info["start"] + part_info["size"], chunksize):
                 chunk_end = min(chunk_start + chunksize, part_info["start"] + part_info["size"]) - 1
-                yield get_chunk, [part_id_to_chunk, chunk_start, chunk_end], {}
+                yield get_chunk, [part_id_to_chunk, chunk_start, chunk_end, e_tag], {}
+
+    def verify_part(_part_id, got_bytes, hasher, e_tag = None):
+        # If there's no md5 but we have an eTag, skip the client-side md5 check
+        if got_bytes is not None and "md5" not in parts[_part_id] and e_tag is not None:
+            return
 
-    def verify_part(_part_id, got_bytes, hasher):
         if got_bytes is not None and got_bytes != parts[_part_id]["size"]:
             msg = "Unexpected part data size in {} part {} (expected {}, got {})"
             msg = msg.format(dxfile.get_id(), _part_id, parts[_part_id]["size"], got_bytes)
@@ -400,10 +373,10 @@ def _download_dxfile(dxid, filename, part_retry_counter,
         if hasher is not None and "md5" not in parts[_part_id]:
             warnings.warn("Download of file {} is not being checked for integrity".format(dxfile.get_id()))
         elif hasher is not None and hasher.hexdigest() != parts[_part_id]["md5"]:
-            msg = "Checksum mismatch in {} part {} (expected {}, got {})"
+            msg = "md5sum mismatch in {} part {} (expected {}, got {})"
             msg = msg.format(dxfile.get_id(), _part_id, parts[_part_id]["md5"], hasher.hexdigest())
             raise DXChecksumMismatchError(msg)
-
+
 
 
     with fh:
@@ -427,6 +400,8 @@ def _download_dxfile(dxid, filename, part_retry_counter,
                         bytes_to_read -= max_verify_chunk_size
                     if hasher.hexdigest() != part_info["md5"]:
                         raise DXFileError("Checksum mismatch when verifying downloaded part {}".format(part_id))
+                    if dxfile_desc.get('drive') is not None:
+                        _verify_checksum(parts, part_id, chunk, checksum_type, dxfile.get_id())
                     else:
                         last_verified_part = part_id
                         last_verified_pos = fh.tell()
@@ -447,19 +422,25 @@ def _download_dxfile(dxid, filename, part_retry_counter,
             # Main loop. In parallel: download chunks, verify them, and write them to disk.
             get_first_chunk_sequentially = (file_size > 128 * 1024 and last_verified_pos == 0 and dxpy.JOB_ID)
             cur_part, got_bytes, hasher = None, None, None
-            for chunk_part, chunk_data in response_iterator(chunk_requests(),
+            e_tag = None
+            if describe_output and describe_output.get("symlinkTargetIdentifier"):
+                e_tag = describe_output["symlinkTargetIdentifier"].get("ETag")
+
+            for chunk_part, chunk_data in response_iterator(chunk_requests(e_tag),
                                                             dxfile._http_threadpool,
                                                             do_first_task_sequentially=get_first_chunk_sequentially):
                 if chunk_part != cur_part:
-                    verify_part(cur_part, got_bytes, hasher)
+                    verify_part(cur_part, got_bytes, hasher, e_tag)
                     cur_part, got_bytes, hasher = chunk_part, 0, md5_hasher()
+                if dxfile_desc.get('drive') is not None:
+                    _verify_checksum(parts, cur_part, chunk_data, checksum_type, dxfile.get_id())
                 got_bytes += len(chunk_data)
                 hasher.update(chunk_data)
                 fh.write(chunk_data)
                 if show_progress:
                     _bytes += len(chunk_data)
                     _print_progress(_bytes, file_size, filename)
-            verify_part(cur_part, got_bytes, hasher)
+            verify_part(cur_part, got_bytes, hasher, e_tag)
             if show_progress:
                 _print_progress(_bytes, file_size, filename, action="Completed")
         except DXFileError:
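
Note: the e_tag plumbing above applies when the describe output carries a symlinkTargetIdentifier with an ETag. Each ranged chunk request then sends If-Match, so the server refuses to serve bytes if the underlying object changed mid-download, and verify_part skips the client-side md5 check for parts that have no md5 but were fetched under an ETag. A rough sketch of the conditional-request idea, using the requests library and a placeholder presigned URL rather than the dxpy internals:

    import requests

    # Placeholder values; dxpy obtains the URL from get_download_url() and
    # the ETag from the file's describe output.
    url = "https://example.com/presigned-object-url"
    e_tag = '"9b2cf535f27731c974343645a3985328"'

    headers = {"Range": "bytes=0-1048575"}
    if e_tag is not None:
        # If the object has been replaced, the server answers
        # 412 Precondition Failed instead of returning bytes from a
        # different object version.
        headers["If-Match"] = e_tag

    resp = requests.get(url, headers=headers)
    if resp.status_code == 412:
        raise RuntimeError("remote object changed while downloading")
    chunk = resp.content
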
--- dxpy-0.401.0/dxpy/bindings/dxproject.py
+++ dxpy-0.402.0/dxpy/bindings/dxproject.py
@@ -285,7 +285,7 @@ class DXProject(DXContainer):
             restricted=None, download_restricted=None, contains_phi=None,
             tags=None, properties=None, bill_to=None, database_ui_view_only=None,
             external_upload_restricted=None, default_symlink=None,
-            database_results_restricted=None,
+            database_results_restricted=None, drive=None,
             **kwargs):
         """
         :param name: The name of the project
@@ -318,6 +318,8 @@ class DXProject(DXContainer):
         :type database_results_restricted: int
         :param default_symlink: If provided, the details needed to have writable symlinks in the project. Dict must include drive, container, and optional prefix.
         :type default_symlink: dict
+        :param drive: If provided, this drive will associated with the project.
+        :type drive: string
 
         Creates a new project. Initially only the user performing this action
         will be in the permissions/member list, with ADMINISTER access.
@@ -357,6 +359,8 @@ class DXProject(DXContainer):
             input_hash["properties"] = properties
         if default_symlink is not None:
             input_hash["defaultSymlink"] = default_symlink
+        if drive is not None:
+            input_hash["drive"] = drive
 
         self.set_id(dxpy.api.project_new(input_hash, **kwargs)["id"])
         self._desc = {}
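
Note: with the change above, DXProject.new() forwards an optional drive straight into the project/new input as input_hash["drive"]. A hedged usage sketch (the drive ID below is a placeholder, not a value from the source):

    import dxpy

    project = dxpy.DXProject()
    # "drive-xxxx" is a placeholder; a real drive ID comes from your account.
    project.new(name="scratch-project", drive="drive-xxxx")
    print(project.get_id())
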
--- dxpy-0.401.0/dxpy/cli/download.py
+++ dxpy-0.402.0/dxpy/cli/download.py
@@ -200,7 +200,7 @@ def download(args):
                 "size": True,
                 "drive": True,
                 "md5": True,
-                "perPartCheckSum": True
+                "checksumType": True
             }})
 
     project, folderpath, matching_files = try_call(resolve_existing_path, path, **resolver_kwargs)
--- dxpy-0.401.0/dxpy/cli/parsers.py
+++ dxpy-0.402.0/dxpy/cli/parsers.py
@@ -244,7 +244,7 @@ class PrintInstanceTypeHelp(argparse.Action):
                    'instance type to be used by each entry point.'))
         print(fill('A single instance type can be requested to be used by all entry points by providing the instance type name. Different instance types can also be requested for different entry points of an app or applet by providing a JSON string mapping from function names to instance types, e.g.'))
         print()
-        print(' {"main": "mem2_hdd2_v2_x2", "other_function": "mem1_ssd1_v2_x2"}')
+        print(' {"main": "mem2_ssd2_v3_x4", "other_function": "mem1_ssd2_v3_x2"}')
         if parser.prog == 'dx run':
             print()
             print(fill('When running a workflow, --instance-type lets you specify instance types for ' +
@@ -261,15 +261,15 @@ class PrintInstanceTypeHelp(argparse.Action):
                   ' "*": "mem1_ssd1_v2_x4"}\'')
             print()
             print(fill('2. Runs all entry points of the first stage with ' +
-                       'mem2_hdd2_v2_x2, the main entry point of the second stage with mem1_ssd1_v2_x4, ' +
+                       'mem2_ssd2_v3_x2, the main entry point of the second stage with mem1_ssd1_v2_x4, ' +
                        'the stage named "BWA" with mem1_ssd1_v2_x2, and all other ' +
-                       'stages with mem2_hdd2_v2_x4'))
+                       'stages with mem2_ssd2_v3_x4'))
             print()
             print(' dx run workflow-xxxx \\\n' +
-                  ' --instance-type 0=mem2_hdd2_v2_x2 \\\n' +
+                  ' --instance-type 0=mem2_ssd2_v3_x2 \\\n' +
                   ' --instance-type 1=\'{"main": "mem1_ssd1_v2_x4"}\' \\\n' +
                   ' --instance-type BWA=mem1_ssd1_v2_x2 \\\n' +
-                  ' --instance-type mem2_hdd2_v2_x4')
+                  ' --instance-type mem2_ssd2_v3_x4')
             print()
             print(fill('--instance-type-by-executable argument is a JSON string with a double mapping that ' +
                        'specifies instance types by app or applet id, then by entry point within the executable.' +
@@ -283,30 +283,30 @@ class PrintInstanceTypeHelp(argparse.Action):
                   ' dx run workflow-xxxx --instance-type-by-executable \'{"*": {"*": "mem2_ssd1_v2_x2"}}\'')
             print()
             print(fill(
-                '4. Force every job in the execution tree executing applet-xyz1 to use mem2_ssd1_v2_x2'))
+                '4. Force every job in the execution tree executing applet-xyz1 to use mem3_ssd2_v3_x2'))
             print()
             print(
-                ' dx run workflow-xxxx --instance-type-by-executable \'{"applet-xyz1":{"*": "mem2_ssd1_v2_x2"}}\'')
+                ' dx run workflow-xxxx --instance-type-by-executable \'{"applet-xyz1":{"*": "mem3_ssd2_v3_x2"}}\'')
             print()
-            print(fill('5. Force every job executing applet-xyz1 to use mem2_ssd1_v2_x4 ' +
-                       'for the main entry point and mem2_ssd1_v2_x2 for all other entry points.' +
-                       'Also force the collect entry point of all executables other than applet-xyz1 to use mem2_ssd1_v2_x8.' +
+            print(fill('5. Force every job executing applet-xyz1 to use mem1_ssd2_v3_x4 ' +
+                       'for the main entry point and mem1_ssd2_v3_x2 for all other entry points.' +
+                       'Also force the collect entry point of all executables other than applet-xyz1 to use mem2_ssd2_v3_x8.' +
                        'Other entry points of executable other than applet-xyz1 may be overridden by ' +
                        'lower-priority mechanisms'))
             print()
             print(' dx run workflow-xxxx --instance-type-by-executable \\\n' +
-                  ' \'{"applet-xyz1": {"main": "mem2_ssd1_v2_x4", "*": "mem2_ssd1_v2_x2"},\n' +
-                  ' "*": {"collect": "mem2_ssd1_v2_x8"}}\'')
+                  ' \'{"applet-xyz1": {"main": "mem1_ssd2_v3_x4", "*": "mem1_ssd2_v3_x2"},\n' +
+                  ' "*": {"collect": "mem2_ssd2_v3_x8"}}\'')
             print()
-            print(fill('6. Force every job executing applet-xxxx to use mem2_ssd1_v2_x2 for all entry points ' +
+            print(fill('6. Force every job executing applet-xxxx to use mem1_ssd2_v3_x2 for all entry points ' +
                        'in the entire execution tree. ' +
-                       'Also force stage 0 executable to run on mem2_ssd1_v2_x4, unless stage 0 invokes ' +
-                       'applet-xxxx, in which case applet-xxxx\'s jobs will use mem2_ssd1_v2_x2 as specified by ' +
+                       'Also force stage 0 executable to run on mem1_ssd2_v3_x4, unless stage 0 invokes ' +
+                       'applet-xxxx, in which case applet-xxxx\'s jobs will use mem1_ssd2_v3_x2 as specified by ' +
                        '--instance-type-by-executable.'))
             print()
             print(' dx run workflow-xxxx \\\n' +
-                  ' --instance-type-by-executable \'{"applet-xxxx": {"*": "mem2_ssd1_v2_x2"}}\' \\\n' +
-                  ' --instance-type 0=mem2_ssd1_v2_x4')
+                  ' --instance-type-by-executable \'{"applet-xxxx": {"*": "mem1_ssd2_v3_x2"}}\' \\\n' +
+                  ' --instance-type 0=mem1_ssd2_v3_x4')
             print()
             print(fill(
                 'See "Requesting Instance Types" in DNAnexus documentation for more details.'))
@@ -332,7 +332,7 @@ instance_type_arg.add_argument('--instance-type',
                                metavar='INSTANCE_TYPE_OR_MAPPING',
                                help=fill('''When running an app or applet, the mapping lists executable's entry points or "*" as keys, and instance types to use for these entry points as values.
 When running a workflow, the specified instance types can be prefixed by a stage name or stage index followed by "=" to apply to a specific stage, or apply to all workflow stages without such prefix.
-The instance type corresponding to the "*" key is applied to all entry points not explicitly mentioned in the --instance-type mapping. Specifying a single instance type is equivalent to using it for all entry points, so "--instance-type mem1_ssd1_v2_x2" is same as "--instance-type '{"*":"mem1_ssd1_v2_x2"}'.
+The instance type corresponding to the "*" key is applied to all entry points not explicitly mentioned in the --instance-type mapping. Specifying a single instance type is equivalent to using it for all entry points, so "--instance-type mem1_ssd2_v3_x2" is same as "--instance-type '{"*":"mem1_ssd2_v3_x2"}'.
 Note that "dx run" calls within the execution subtree may override the values specified at the root of the execution tree.
 See dx run --instance-type-help for details.
 ''', width_adjustment=-24, replace_whitespace=False),
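
Note: the parsers.py hunks above only refresh the help-text examples to reference currently offered v3 instance types; the mapping semantics are unchanged. The same per-entry-point mapping can also be passed programmatically, as in this hedged sketch that assumes a placeholder applet ID and input field and uses the instance_type argument accepted by dxpy's executable run bindings:

    import dxpy

    applet = dxpy.DXApplet("applet-xxxx")  # placeholder applet ID
    # Either a single instance type name or a mapping from entry point
    # (or "*") to instance type, mirroring the --instance-type examples above.
    job = applet.run({"reads": {"$dnanexus_link": "file-xxxx"}},
                     instance_type={"main": "mem2_ssd2_v3_x4",
                                    "*": "mem1_ssd2_v3_x2"})
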
--- /dev/null
+++ dxpy-0.402.0/dxpy/nextflow/nextaur_assets.json
@@ -0,0 +1,10 @@
+{
+  "aws:ap-southeast-2": "record-J4YYvjj5FVfkvq9259y58qGz",
+  "aws:eu-central-1": "record-J4YYqf048jzY7Pk1KV22YB42",
+  "aws:eu-west-2-g": "record-J4YYvgXKXGfY3b41gVzXPzQV",
+  "aws:me-south-1": "record-J4YYvbV38v58JVJzvG20FXQQ",
+  "aws:us-east-1": "record-J4YYp2j0v52gXXp42ZgJ4F6k",
+  "azure:westeurope": "record-J4YYy5QBJV21F607bX2qX58F",
+  "azure:westus": "record-J4YYyV892393KKj9vbj76KpY",
+  "oci:us-ashburn-1": "record-J4YYx6162FZB2kfvz6g263vP"
+}
--- /dev/null
+++ dxpy-0.402.0/dxpy/nextflow/nextaur_assets.staging.json
@@ -0,0 +1,10 @@
+{
+  "aws:ap-southeast-2": "record-J4YX1005k7GGX6g9xgg8yb91",
+  "aws:eu-central-1": "record-J4YX0Vj4X3z8fvFB1Jq16bv0",
+  "aws:eu-west-2-g": "record-J4YX0PXKfYb57JpGKZZ38f3p",
+  "aws:me-south-1": "record-J4YX0K139gXkGPkKf3yGz915",
+  "aws:us-east-1": "record-J4YVy580zkbzBBvyqgb90bQF",
+  "azure:westeurope": "record-J4YX2z8BVVBpgkJqFfz9p7gb",
+  "azure:westus": "record-J4YX2X09fJFY2VQb1Z1Xxjpq",
+  "oci:us-ashburn-1": "record-J4fk6yV6X487YP2gQPV93YZb"
+}
--- dxpy-0.401.0/dxpy/scripts/dx.py
+++ dxpy-0.402.0/dxpy/scripts/dx.py
@@ -6451,8 +6451,8 @@ EXAMPLES:
                                           parents=[all_arg],
                                           prog='dx unarchive')
 
-parser_unarchive.add_argument('--rate', help=fill('The speed at which all files in this request are unarchived.', width_adjustment=-24) + '\n'+ fill('- Azure regions: {Expedited, Standard}', width_adjustment=-24,initial_indent=' ') + '\n'+
-                              fill('- AWS regions: {Expedited, Standard, Bulk}', width_adjustment=-24,initial_indent=' '), choices=["Expedited", "Standard", "Bulk"], default="Standard")
+parser_unarchive.add_argument('--rate', help=fill('The speed at which all files in this request are unarchived.', width_adjustment=-24) + '\n'+ fill('- Azure regions: {Standard, Expedited}', width_adjustment=-24,initial_indent=' ') + '\n'+
+                              fill('- AWS regions: {Standard, Bulk}', width_adjustment=-24,initial_indent=' '), choices=["Expedited", "Standard", "Bulk"], default="Standard")
 
 parser_unarchive.add_argument('-q', '--quiet', help='Do not print extra info messages', action='store_true')
 parser_unarchive.add_argument(
--- /dev/null
+++ dxpy-0.402.0/dxpy/toolkit_version.py
@@ -0,0 +1 @@
+version = '0.402.0'
--- dxpy-0.401.0/dxpy/utils/completer.py
+++ dxpy-0.402.0/dxpy/utils/completer.py
@@ -423,6 +423,16 @@ class InstanceTypesCompleter():
               InstanceTypeSpec('mem1_ssd1_v2_x36', 36, 72.0, 900),
               InstanceTypeSpec('mem1_ssd1_v2_x72', 72, 144.0, 1800),
 
+              InstanceTypeSpec('mem1_ssd2_v3_x2', 2, 4.0, 118),
+              InstanceTypeSpec('mem1_ssd2_v3_x4', 4, 8.0, 237),
+              InstanceTypeSpec('mem1_ssd2_v3_x8', 8, 16.0, 474),
+              InstanceTypeSpec('mem1_ssd2_v3_x16', 16, 32.0, 950),
+              InstanceTypeSpec('mem1_ssd2_v3_x32', 32, 64.0, 1900),
+              InstanceTypeSpec('mem1_ssd2_v3_x48', 48, 96.0, 2850),
+              InstanceTypeSpec('mem1_ssd2_v3_x64', 64, 128.0, 3800),
+              InstanceTypeSpec('mem1_ssd2_v3_x96', 96, 192.0, 5700),
+              InstanceTypeSpec('mem1_ssd2_v3_x128', 128, 256.0, 7600),
+
               InstanceTypeSpec('mem1_ssd2_v2_x2', 2, 4.0, 160),
               InstanceTypeSpec('mem1_ssd2_v2_x4', 4, 8.0, 320),
               InstanceTypeSpec('mem1_ssd2_v2_x8', 8, 16.0, 640),
@@ -439,6 +449,16 @@ class InstanceTypesCompleter():
               InstanceTypeSpec('mem2_ssd1_v2_x64', 64, 256.0, 2400),
               InstanceTypeSpec('mem2_ssd1_v2_x96', 96, 384.0, 3600),
 
+              InstanceTypeSpec('mem2_ssd2_v3_x2', 2, 8.0, 118),
+              InstanceTypeSpec('mem2_ssd2_v3_x4', 4, 16.0, 237),
+              InstanceTypeSpec('mem2_ssd2_v3_x8', 8, 32.0, 474),
+              InstanceTypeSpec('mem2_ssd2_v3_x16', 16, 64.0, 950),
+              InstanceTypeSpec('mem2_ssd2_v3_x32', 32, 128.0, 1900),
+              InstanceTypeSpec('mem2_ssd2_v3_x48', 48, 192.0, 2850),
+              InstanceTypeSpec('mem2_ssd2_v3_x64', 64, 256.0, 3800),
+              InstanceTypeSpec('mem2_ssd2_v3_x96', 96, 384.0, 5700),
+              InstanceTypeSpec('mem2_ssd2_v3_x128', 128, 512.0, 7600),
+
               InstanceTypeSpec('mem2_ssd2_v2_x2', 2, 8.0, 160),
               InstanceTypeSpec('mem2_ssd2_v2_x4', 4, 16.0, 320),
               InstanceTypeSpec('mem2_ssd2_v2_x8', 8, 32.0, 640),
@@ -457,6 +477,16 @@ class InstanceTypesCompleter():
               InstanceTypeSpec('mem3_ssd1_v2_x64', 64, 512.0, 3200),
               InstanceTypeSpec('mem3_ssd1_v2_x96', 96, 768.0, 3600),
 
+              InstanceTypeSpec('mem3_ssd2_v3_x2', 2, 16.0, 118),
+              InstanceTypeSpec('mem3_ssd2_v3_x4', 4, 32.0, 237),
+              InstanceTypeSpec('mem3_ssd2_v3_x8', 8, 64.0, 474),
+              InstanceTypeSpec('mem3_ssd2_v3_x16', 16, 128.0, 950),
+              InstanceTypeSpec('mem3_ssd2_v3_x32', 32, 256.0, 1900),
+              InstanceTypeSpec('mem3_ssd2_v3_x48', 48, 384.0, 2850),
+              InstanceTypeSpec('mem3_ssd2_v3_x64', 64, 512.0, 3800),
+              InstanceTypeSpec('mem3_ssd2_v3_x96', 96, 768.0, 5700),
+              InstanceTypeSpec('mem3_ssd2_v3_x128', 128, 1024.0, 7600),
+
               InstanceTypeSpec('mem3_ssd2_v2_x2', 2, 15.25, 475),
               InstanceTypeSpec('mem3_ssd2_v2_x4', 4, 30.5, 950),
               InstanceTypeSpec('mem3_ssd2_v2_x8', 8, 61.0, 1900),
@@ -541,8 +571,8 @@ class InstanceTypesCompleter():
 
     fpga_instance_types = OrderedDict()
     for i in (FpgaInstanceTypeSpec('mem3_ssd2_fpga1_x24', 24, 256.0, 940, 1),
-              FpgaInstanceTypeSpec('mem3_ssd2_fpga1_x48', 48, 512.0, 1880, 2),
-              FpgaInstanceTypeSpec('mem3_ssd2_fpga1_x192', 192, 2048.0, 7520, 8)):
+              FpgaInstanceTypeSpec('mem3_ssd2_fpga2_x48', 48, 512.0, 1880, 2),
+              FpgaInstanceTypeSpec('mem3_ssd2_fpga8_x192', 192, 2048.0, 7520, 8)):
         fpga_instance_types[i.Name] = i
 
     aws_other_instance_types = OrderedDict()
@@ -551,6 +581,7 @@ class InstanceTypesCompleter():
               InstanceTypeSpec('mem1_ssd1_x8', 8, 15.0, 160),
               InstanceTypeSpec('mem1_ssd1_x16', 16, 30.0, 320),
               InstanceTypeSpec('mem1_ssd1_x32', 32, 60.0, 640),
+              InstanceTypeSpec('mem1_ssd1_x36', 36, 72.0, 900),
 
               InstanceTypeSpec('mem1_ssd2_x2', 2, 3.8, 160),
               InstanceTypeSpec('mem1_ssd2_x4', 4, 7.5, 320),
@@ -605,6 +636,10 @@ class InstanceTypesCompleter():
               InstanceTypeSpec('mem2_hdd2_v2_x2', 2, 8.0, 1000),
               InstanceTypeSpec('mem2_hdd2_v2_x4', 4, 16.0, 2000),
 
+              InstanceTypeSpec('mem3_hdd2_x2', 2, 17.1, 420),
+              InstanceTypeSpec('mem3_hdd2_x4', 4, 34.2, 850),
+              InstanceTypeSpec('mem3_hdd2_x8', 8, 68.4, 1680),
+
               InstanceTypeSpec('mem3_hdd2_v2_x2', 2, 16.0, 500),
               InstanceTypeSpec('mem3_hdd2_v2_x4', 4, 32.0, 1000),
               InstanceTypeSpec('mem3_hdd2_v2_x8', 8, 64.0, 2000)):
--- dxpy-0.401.0/dxpy.egg-info/PKG-INFO
+++ dxpy-0.402.0/dxpy.egg-info/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.401.0
+Version: 0.402.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
@@ -26,6 +26,7 @@ Requires-Dist: urllib3<2.2,>=1.25
 Requires-Dist: pyreadline3==3.4.1; sys_platform == "win32"
 Requires-Dist: colorama<=0.4.6,>=0.4.4; sys_platform == "win32"
 Requires-Dist: crc32c>=2.7.1
+Requires-Dist: awscrt>=0.23.4
 Provides-Extra: pandas
 Requires-Dist: pandas==1.3.5; extra == "pandas"
 Requires-Dist: numpy<2.0.0; extra == "pandas"
--- dxpy-0.401.0/dxpy.egg-info/SOURCES.txt
+++ dxpy-0.402.0/dxpy.egg-info/SOURCES.txt
@@ -192,6 +192,7 @@ test/test_dx_symlink.py
 test/test_dxabs.py
 test/test_dxasset.py
 test/test_dxclient.py
+test/test_dxfile_functions.py
 test/test_dxpy.py
 test/test_dxpy_utils.py
 test/test_dxunpack.py
--- dxpy-0.401.0/dxpy.egg-info/requires.txt
+++ dxpy-0.402.0/dxpy.egg-info/requires.txt
@@ -4,6 +4,7 @@ psutil>=5.9.3
 certifi>=2024.7.4
 urllib3<2.2,>=1.25
 crc32c>=2.7.1
+awscrt>=0.23.4
 
 [:python_version < "3.10"]
 argcomplete<2.0.0,>=1.9.4
--- dxpy-0.401.0/requirements.txt
+++ dxpy-0.402.0/requirements.txt
@@ -8,3 +8,4 @@ urllib3>=1.25,<2.2
 pyreadline3==3.4.1; sys_platform == "win32"
 colorama>=0.4.4,<=0.4.6; sys_platform == "win32"
 crc32c>=2.7.1
+awscrt>=0.23.4
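
Note: the awscrt pin appears in PKG-INFO, the egg-info requires.txt, and requirements.txt alike, so installing the release should pull in the CRC64NVME implementation used by the new checksum code. A quick sanity check, assuming a standard pip environment:

    pip install "dxpy==0.402.0"
    python -c "from awscrt import checksums; print(checksums.crc64nvme(b'abc'))"
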
--- /dev/null
+++ dxpy-0.402.0/test/test_dxfile_functions.py
@@ -0,0 +1,71 @@
+#!/usr/bin/env python3
+# -*- coding: utf-8 -*-
+#
+# Copyright (C) 2013-2019 DNAnexus, Inc.
+#
+# This file is part of dx-toolkit (DNAnexus platform client libraries).
+#
+# Licensed under the Apache License, Version 2.0 (the "License"); you may not
+# use this file except in compliance with the License. You may obtain a copy
+# of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
+# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
+# License for the specific language governing permissions and limitations
+# under the License.
+
+from __future__ import print_function, unicode_literals, division, absolute_import
+
+import unittest
+from dxpy.bindings.dxfile_functions import _verify_checksum
+from dxpy.exceptions import DXFileError, DXChecksumMismatchError
+
+class TestVerifyPerPartChecksum(unittest.TestCase):
+    def setUp(self):
+        self.valid_crc32_parts = {'1': {'checksum': '0kY1xw=='}}
+        self.valid_crc32c_parts = {'1': {'checksum': 'Hu2dEw=='}}
+        self.valid_sha1_parts = {'1': {'checksum': 'yl8CzxRPiurWmuYjQ3ySrSeaCAE='}}
+        self.valid_sha256_parts = {'1': {'checksum': 'TuNtcBmfMMIRMpTEjVWVnAVSG56/K+nL2nF2rYi67y0='}}
+        self.valid_crc64nvme_parts = {'1': {'checksum': '688cIX1wosY='}}
+        self.dx_file_id = 'file-xxxx'
+        self.chunk_data = 'fizzbuzz'.encode('utf-8')
+
+    def test_per_part_checksum_is_none(self):
+        assert _verify_checksum(self.valid_crc32_parts, '1', self.chunk_data, None, self.dx_file_id) == None
+
+    def test_part_id_is_invalid(self):
+        with self.assertRaisesRegex(DXFileError, 'Part 5 not found in file-xxxx'):
+            _verify_checksum(self.valid_crc32_parts, '5', self.chunk_data, 'CRC32', self.dx_file_id)
+
+    def test_invalid_checksum(self):
+        with self.assertRaisesRegex(DXFileError, 'Unsupported checksum type: ABC'):
+            _verify_checksum(self.valid_crc32_parts, '1', self.chunk_data, 'ABC', self.dx_file_id)
+
+    def test_checksum_not_found(self):
+        with self.assertRaisesRegex(DXFileError, 'checksum not found in part 1'):
+            _verify_checksum({'1': {}}, '1', self.chunk_data, 'CRC32', self.dx_file_id)
+
+    def test_valid_crc32_checksum(self):
+        assert _verify_checksum(self.valid_crc32_parts, '1', self.chunk_data, 'CRC32', self.dx_file_id) == True
+
+    def test_valid_crc32c_checksum(self):
+        assert _verify_checksum(self.valid_crc32c_parts, '1', self.chunk_data, 'CRC32C', self.dx_file_id) == True
+
+    def test_valid_sha1_checksum(self):
+        assert _verify_checksum(self.valid_sha1_parts, '1', self.chunk_data, 'SHA1', self.dx_file_id) == True
+
+    def test_valid_sha256_checksum(self):
+        assert _verify_checksum(self.valid_sha256_parts, '1', self.chunk_data, 'SHA256', self.dx_file_id) == True
+
+    def test_valid_crc64nvme_checksum(self):
+        assert _verify_checksum(self.valid_crc64nvme_parts, '1', self.chunk_data, 'CRC64NVME', self.dx_file_id) == True
+
+    def test_checksum_mismatch(self):
+        with self.assertRaisesRegex(DXChecksumMismatchError, '^CRC32 checksum mismatch in file-xxxx in part 1 '):
+            _verify_checksum(self.valid_crc32_parts, '1', 'foobar'.encode('utf-8'), 'CRC32', self.dx_file_id)
+
+if __name__ == '__main__':
+    unittest.main()
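
Note: these tests call _verify_checksum directly with fixed base64 checksums and need no platform connection, so they should run locally once dxpy and its requirements are installed, for example (assuming pytest is available):

    python -m pytest test/test_dxfile_functions.py -v
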
--- dxpy-0.401.0/dxpy/nextflow/nextaur_assets.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "aws:ap-southeast-2": "record-GzyP89850x72260z2yYyZQfJ",
-  "aws:eu-central-1": "record-GzyP7584vB9Yp0j7qfv5F134",
-  "aws:eu-west-2-g": "record-GzyP80XKPQ6K81G8XV89fyvZ",
-  "aws:me-south-1": "record-GzyP85V306B9xkQ4Pv1yf7Kz",
-  "aws:us-east-1": "record-GzyP6580KGjqV4gp6QXFGBzX",
-  "azure:westeurope": "record-GzyP9z0BzJx8zjf6bpvY6Yqv",
-  "azure:westus": "record-GzyP9vQ9zggPyfkj5GKKYVG5",
-  "oci:us-ashburn-1": "record-J30Z6396B086xYKfqQj5gG1j"
-}
--- dxpy-0.401.0/dxpy/nextflow/nextaur_assets.staging.json
+++ /dev/null
@@ -1,10 +0,0 @@
-{
-  "aws:ap-southeast-2": "record-GzyJ2F0506kK1651KKg3K1Xq",
-  "aws:eu-central-1": "record-GzyJ2Yj4f2xpzzypx8G484FB",
-  "aws:eu-west-2-g": "record-GzyJ28XKKGJ7758Yj62X97bV",
-  "aws:me-south-1": "record-GzyJ2GV311jBJYzx2XpgZXQ0",
-  "aws:us-east-1": "record-GzyJ08002j9B36YfkbQQxpkf",
-  "azure:westeurope": "record-GzyJ4B8B095jZ8F4BF5p2Ggz",
-  "azure:westus": "record-GzyJ3q89X1QBpvfvgx9bZzgP",
-  "oci:us-ashburn-1": "record-J30XbBV6j7q0jq6q1Jfz5650"
-}
--- dxpy-0.401.0/dxpy/toolkit_version.py
+++ /dev/null
@@ -1 +0,0 @@
-version = '0.401.0'