dxpy 0.397.0__tar.gz → 0.399.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (190)
  1. {dxpy-0.397.0 → dxpy-0.399.0}/PKG-INFO +4 -1
  2. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/__init__.py +1 -1
  3. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/dataset.py +10 -1
  4. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/json_validation_by_schema.py +2 -0
  5. dxpy-0.399.0/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +213 -0
  6. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py +8 -2
  7. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py +2 -1
  8. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py +93 -14
  9. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/dataset_utilities.py +57 -27
  10. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/parsers.py +12 -2
  11. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/scripts/dx.py +4 -4
  12. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/scripts/dx_app_wizard.py +2 -2
  13. dxpy-0.399.0/dxpy/toolkit_version.py +1 -0
  14. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/completer.py +206 -96
  15. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/PKG-INFO +4 -1
  16. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/SOURCES.txt +16 -16
  17. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/entry_points.txt +1 -0
  18. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dx_bash_helpers.py +2 -2
  19. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_extract_assay.py +22 -11
  20. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_extract_expression.py +285 -69
  21. dxpy-0.397.0/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py +0 -77
  22. dxpy-0.397.0/dxpy/toolkit_version.py +0 -1
  23. dxpy-0.397.0/scripts/dx-clone-asset +0 -220
  24. dxpy-0.397.0/scripts/dx-docker +0 -492
  25. dxpy-0.397.0/scripts/dx-download-all-inputs +0 -77
  26. dxpy-0.397.0/scripts/dx-fetch-bundled-depends +0 -44
  27. dxpy-0.397.0/scripts/dx-generate-dxapp +0 -341
  28. dxpy-0.397.0/scripts/dx-jobutil-add-output +0 -187
  29. dxpy-0.397.0/scripts/dx-jobutil-dxlink +0 -29
  30. dxpy-0.397.0/scripts/dx-jobutil-get-identity-token +0 -69
  31. dxpy-0.397.0/scripts/dx-jobutil-new-job +0 -104
  32. dxpy-0.397.0/scripts/dx-jobutil-parse-link +0 -39
  33. dxpy-0.397.0/scripts/dx-jobutil-report-error +0 -40
  34. dxpy-0.397.0/scripts/dx-log-stream +0 -69
  35. dxpy-0.397.0/scripts/dx-mount-all-inputs +0 -77
  36. dxpy-0.397.0/scripts/dx-notebook-reconnect +0 -41
  37. dxpy-0.397.0/scripts/dx-print-bash-vars +0 -47
  38. dxpy-0.397.0/scripts/dx-upload-all-outputs +0 -445
  39. {dxpy-0.397.0 → dxpy-0.399.0}/MANIFEST.in +0 -0
  40. {dxpy-0.397.0 → dxpy-0.399.0}/Readme.md +0 -0
  41. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/api.py +0 -0
  42. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/app_builder.py +0 -0
  43. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/app_categories.py +0 -0
  44. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/asset_builder.py +0 -0
  45. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/__init__.py +0 -0
  46. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/__init__.py +0 -0
  47. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/cmd_line_options_validator.py +0 -0
  48. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/data_transformations.py +0 -0
  49. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/schemas/__init__.py +0 -0
  50. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/vizclient.py +0 -0
  51. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/vizserver_payload_builder.py +0 -0
  52. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/auth.py +0 -0
  53. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/download_all_inputs.py +0 -0
  54. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxanalysis.py +0 -0
  55. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxapp.py +0 -0
  56. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxapp_container_functions.py +0 -0
  57. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxapplet.py +0 -0
  58. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxdatabase.py +0 -0
  59. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxdatabase_functions.py +0 -0
  60. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxdataobject_functions.py +0 -0
  61. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxfile.py +0 -0
  62. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxfile_functions.py +0 -0
  63. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxglobalworkflow.py +0 -0
  64. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxjob.py +0 -0
  65. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxproject.py +0 -0
  66. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxrecord.py +0 -0
  67. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/dxworkflow.py +0 -0
  68. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/mount_all_inputs.py +0 -0
  69. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/search.py +0 -0
  70. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/__init__.py +0 -0
  71. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/cp.py +0 -0
  72. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/download.py +0 -0
  73. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/exec_io.py +0 -0
  74. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/help_messages.py +0 -0
  75. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/org.py +0 -0
  76. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/output_handling.py +0 -0
  77. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/workflow.py +0 -0
  78. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/compat.py +0 -0
  79. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest.json +0 -0
  80. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging.json +0 -0
  81. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_staging_vep.json +0 -0
  82. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/Homo_sapiens_genes_manifest_vep.json +0 -0
  83. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/__init__.py +0 -0
  84. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/cohort_filter_payload.py +0 -0
  85. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/column_conditions.json +0 -0
  86. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/column_conversion.json +0 -0
  87. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/filter_to_payload.py +0 -0
  88. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/germline_utils.py +0 -0
  89. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/input_validation.py +0 -0
  90. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/input_validation_somatic.py +0 -0
  91. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/retrieve_allele_schema.json +0 -0
  92. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/retrieve_annotation_schema.json +0 -0
  93. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/retrieve_bins.py +0 -0
  94. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/retrieve_genotype_schema.json +0 -0
  95. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/return_columns_allele.json +0 -0
  96. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/return_columns_annotation.json +0 -0
  97. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/return_columns_genotype.json +0 -0
  98. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/return_columns_genotype_only.json +0 -0
  99. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dx_extract_utils/somatic_filter_payload.py +0 -0
  100. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/dxlog.py +0 -0
  101. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/exceptions.py +0 -0
  102. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/executable_builder.py +0 -0
  103. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/ImageRef.py +0 -0
  104. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/ImageRefFactory.py +0 -0
  105. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/__init__.py +0 -0
  106. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/app_asset_projects_ids_prod.json +0 -0
  107. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/app_asset_projects_ids_staging.json +0 -0
  108. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/awscli_assets.json +0 -0
  109. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/awscli_assets.staging.json +0 -0
  110. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/collect_images.py +0 -0
  111. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextaur_assets.json +0 -0
  112. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextaur_assets.staging.json +0 -0
  113. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextflow_assets.json +0 -0
  114. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextflow_assets.staging.json +0 -0
  115. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextflow_builder.py +0 -0
  116. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextflow_templates.py +0 -0
  117. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/nextflow/nextflow_utils.py +0 -0
  118. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/packages/__init__.py +0 -0
  119. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/scripts/__init__.py +0 -0
  120. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/scripts/dx_build_app.py +0 -0
  121. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/scripts/dx_build_applet.py +0 -0
  122. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/ssh_tunnel_app_support.py +0 -0
  123. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/system_requirements.py +0 -0
  124. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/__init__.py +0 -0
  125. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/bash.py +0 -0
  126. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/python.py +0 -0
  127. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/Readme.md +0 -0
  128. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/basic/dxapp.json +0 -0
  129. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/basic/src/code.sh +0 -0
  130. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/parallelized/dxapp.json +0 -0
  131. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/parallelized/src/code.sh +0 -0
  132. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/scatter-process-gather/dxapp.json +0 -0
  133. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/bash/scatter-process-gather/src/code.sh +0 -0
  134. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/nextflow/dxapp.json +0 -0
  135. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/nextflow/src/nextflow.sh +0 -0
  136. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/basic/dxapp.json +0 -0
  137. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/basic/src/code.py +0 -0
  138. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/basic/test/test.py +0 -0
  139. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/parallelized/dxapp.json +0 -0
  140. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/parallelized/src/code.py +0 -0
  141. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/parallelized/test/test.py +0 -0
  142. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/scatter-process-gather/dxapp.json +0 -0
  143. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/scatter-process-gather/src/code.py +0 -0
  144. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/templates/python/scatter-process-gather/test/test.py +0 -0
  145. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/templating/utils.py +0 -0
  146. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/__init__.py +0 -0
  147. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/batch_utils.py +0 -0
  148. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/config.py +0 -0
  149. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/describe.py +0 -0
  150. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/exec_utils.py +0 -0
  151. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/executable_unbuilder.py +0 -0
  152. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/file_handle.py +0 -0
  153. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/file_load_utils.py +0 -0
  154. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/genomic_utils.py +0 -0
  155. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/job_log_client.py +0 -0
  156. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/local_exec_utils.py +0 -0
  157. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/pathmatch.py +0 -0
  158. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/pretty_print.py +0 -0
  159. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/printing.py +0 -0
  160. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/resolver.py +0 -0
  161. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/spelling_corrector.py +0 -0
  162. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/utils/version.py +0 -0
  163. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy/workflow_builder.py +0 -0
  164. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/dependency_links.txt +0 -0
  165. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/not-zip-safe +0 -0
  166. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/requires.txt +6 -6
  167. {dxpy-0.397.0 → dxpy-0.399.0}/dxpy.egg-info/top_level.txt +0 -0
  168. {dxpy-0.397.0 → dxpy-0.399.0}/requirements.txt +0 -0
  169. {dxpy-0.397.0 → dxpy-0.399.0}/requirements_setuptools.txt +0 -0
  170. {dxpy-0.397.0 → dxpy-0.399.0}/requirements_test.txt +0 -0
  171. {dxpy-0.397.0 → dxpy-0.399.0}/setup.cfg +0 -0
  172. {dxpy-0.397.0 → dxpy-0.399.0}/setup.py +0 -0
  173. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_batch.py +0 -0
  174. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_create_cohort.py +0 -0
  175. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_describe.py +0 -0
  176. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dx-docker.py +0 -0
  177. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dx_app_wizard.py +0 -0
  178. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dx_completion.py +0 -0
  179. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dx_symlink.py +0 -0
  180. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxabs.py +0 -0
  181. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxasset.py +0 -0
  182. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxclient.py +0 -0
  183. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxpy.py +0 -0
  184. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxpy_utils.py +0 -0
  185. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_dxunpack.py +0 -0
  186. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_extract_dataset.py +0 -0
  187. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_extract_somatic.py +0 -0
  188. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_nextflow.py +0 -0
  189. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_nextflow_ImageRef.py +0 -0
  190. {dxpy-0.397.0 → dxpy-0.399.0}/test/test_nextflow_ImageRefFactory.py +0 -0
{dxpy-0.397.0 → dxpy-0.399.0}/PKG-INFO

@@ -1,11 +1,12 @@
 Metadata-Version: 2.1
 Name: dxpy
-Version: 0.397.0
+Version: 0.399.0
 Summary: DNAnexus Platform API bindings for Python
 Home-page: https://github.com/dnanexus/dx-toolkit
 Author: Aleksandra Zalcman, Andrey Kislyuk, Anurag Biyani, Geet Duggal, Katherine Lai, Kurt Jensen, Marek Hrvol, Ohad Rodeh, Phil Sung
 Author-email: support@dnanexus.com
 License: Apache Software License
+Platform: UNKNOWN
 Classifier: Environment :: Console
 Classifier: Intended Audience :: Developers
 Classifier: License :: OSI Approved :: Apache Software License
@@ -124,3 +125,5 @@ We have the following convention for these scripts:
 if __name__ == '__main__':
     main()
 ```
+
+
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/__init__.py

@@ -993,7 +993,7 @@ def get_auth_server_name(host_override=None, port_override=None, protocol='https
         return 'https://stagingauth.cn.dnanexus.com:7001'
     elif APISERVER_HOST == 'api.cn.dnanexus.com':
         return 'https://auth.cn.dnanexus.com:8001'
-    elif APISERVER_HOST == "localhost" or APISERVER_HOST == "127.0.0.1":
+    elif APISERVER_HOST == "localhost" or APISERVER_HOST == "127.0.0.1" or APISERVER_HOST == "apiserver":
        if "DX_AUTHSERVER_HOST" not in os.environ or "DX_AUTHSERVER_PORT" not in os.environ:
             err_msg = "Must set authserver env vars (DX_AUTHSERVER_HOST, DX_AUTHSERVER_PORT) if apiserver is {apiserver}."
             raise exceptions.DXError(err_msg.format(apiserver=APISERVER_HOST))
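
For context, a minimal standalone sketch (not the dxpy source) of what the changed branch does: the host alias "apiserver" (e.g. a service name in a containerized setup) now takes the same env-var-driven path as "localhost"/"127.0.0.1". The returned host:port concatenation below is an assumption for illustration, not dxpy's actual return value.

```
import os

def resolve_local_authserver(apiserver_host):
    # Mirrors the new condition: three local aliases require both env vars.
    if apiserver_host in ("localhost", "127.0.0.1", "apiserver"):
        if "DX_AUTHSERVER_HOST" not in os.environ or "DX_AUTHSERVER_PORT" not in os.environ:
            raise RuntimeError(
                "Must set authserver env vars (DX_AUTHSERVER_HOST, DX_AUTHSERVER_PORT) "
                "if apiserver is {}.".format(apiserver_host)
            )
        # Assumed URL shape, for illustration only.
        return "{}:{}".format(os.environ["DX_AUTHSERVER_HOST"], os.environ["DX_AUTHSERVER_PORT"])

os.environ.setdefault("DX_AUTHSERVER_HOST", "http://localhost")
os.environ.setdefault("DX_AUTHSERVER_PORT", "8001")
print(resolve_local_authserver("apiserver"))  # -> http://localhost:8001
```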
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/dataset.py

@@ -77,7 +77,7 @@ class Dataset(DXRecord):
     @property
     def version(self):
         return self.detail_describe.get("details").get("version")
-
+
     @property
     def detail_describe(self):
         if self._detail_describe is None:
@@ -98,6 +98,9 @@ class Dataset(DXRecord):
                 "index": index,
                 "uuid": assays[index]["uuid"],
                 "reference": assays[index].get("reference"),
+                "generalized_assay_model_version": assays[index].get(
+                    "generalized_assay_model_version"
+                ),
             }

             if model not in assays_info_dict.keys():
@@ -107,6 +110,12 @@ class Dataset(DXRecord):

         return assays_info_dict

+    def assay_info_dict(self, assay_uuid):
+        for assay_list in self.assays_info_dict.values():
+            for assay in assay_list:
+                if assay["uuid"] == assay_uuid:
+                    return assay
+
     def assay_names_list(self, assay_type):
         assay_names_list = []
         if self.assays_info_dict.get(assay_type):
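
The new Dataset.assay_info_dict(assay_uuid) helper is a plain linear lookup across the per-model assay lists. A self-contained sketch with made-up data:

```
# Sketch of the lookup the new helper performs; the records below are invented.
assays_info_dict = {
    "molecular_expression": [
        {"name": "assay_a", "uuid": "uuid-1", "generalized_assay_model_version": "1.0"},
        {"name": "assay_b", "uuid": "uuid-2", "generalized_assay_model_version": "1.1"},
    ]
}

def assay_info_dict(assay_uuid):
    for assay_list in assays_info_dict.values():
        for assay in assay_list:
            if assay["uuid"] == assay_uuid:
                return assay  # implicitly returns None when the uuid is absent

print(assay_info_dict("uuid-2")["generalized_assay_model_version"])  # -> 1.1
```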
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/json_validation_by_schema.py

@@ -99,8 +99,10 @@ class JSONValidator(object):

             # Check for incompatible/conflicting subkeys defined at the key-level
             if value.get("conflicting_keys"):
+                self.check_incompatible_subkeys(input_json, key)
                 self.check_incompatible_subkeys(input_json[key], key)

+
         self.check_incompatible_keys(input_json)
         self.check_dependent_key_combinations(input_json)
dxpy-0.399.0/dxpy/bindings/apollo/schemas/assay_filtering_conditions.py (new file)

@@ -0,0 +1,213 @@
+EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_0 = {
+    "version": "1.0",
+    "output_fields_mapping": {
+        "default": [
+            {"feature_id": "expression$feature_id"},
+            {"sample_id": "expression$sample_id"},
+            {"expression": "expression$value"},
+        ],
+        "additional": [
+            {"feature_name": "expr_annotation$gene_name"},
+            {"chrom": "expr_annotation$chr"},
+            {"start": "expr_annotation$start"},
+            {"end": "expr_annotation$end"},
+            {"strand": "expr_annotation$strand"},
+        ],
+    },
+    "filtering_conditions": {
+        "location": {
+            "items_combination_operator": "or",
+            "filters_combination_operator": "and",
+            "max_item_limit": 10,
+            "properties": [
+                {
+                    "key": "chromosome",
+                    "condition": "is",
+                    "table_column": "expr_annotation$chr",
+                },
+                {
+                    "keys": ["starting_position", "ending_position"],
+                    "condition": "genobin_partial_overlap",
+                    "max_range": "250",
+                    "table_column": {
+                        "starting_position": "expr_annotation$start",
+                        "ending_position": "expr_annotation$end",
+                    },
+                },
+            ],
+        },
+        "annotation": {
+            "properties": {
+                "feature_id": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expr_annotation$feature_id",
+                },
+                "feature_name": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expr_annotation$gene_name",
+                },
+            }
+        },
+        "expression": {
+            "filters_combination_operator": "and",
+            "properties": {
+                "min_value": {
+                    "condition": "greater-than-eq",
+                    "table_column": "expression$value",
+                },
+                "max_value": {
+                    "condition": "less-than-eq",
+                    "table_column": "expression$value",
+                },
+            },
+        },
+        "sample_id": {
+            "max_item_limit": 100,
+            "condition": "in",
+            "table_column": "expression$sample_id",
+        },
+    },
+    "filters_combination_operator": "and",
+    "order_by": [
+        {"feature_id": "asc"},
+        {"sample_id": "asc"},
+    ],
+}
+
+# EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1 does not handle optimization of location filters
+EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1 = {
+    "version": "1.1",
+    "output_fields_mapping": {
+        "default": [
+            {"feature_id": "expression_read_optimized$feature_id"},
+            {"sample_id": "expression_read_optimized$sample_id"},
+            {"expression": "expression_read_optimized$value"},
+        ],
+        "additional": [
+            {"feature_name": "expression_read_optimized$gene_name"},
+            {"chrom": "expression_read_optimized$CHROM"},
+            {"start": "expression_read_optimized$start"},
+            {"end": "expression_read_optimized$end"},
+            {"strand": "expression_read_optimized$strand"},
+        ],
+    },
+    "filtering_conditions": {
+        "annotation": {
+            "properties": {
+                "feature_id": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expression_read_optimized$feature_id",
+                },
+                "feature_name": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expression_read_optimized$gene_name",
+                },
+            }
+        },
+        "expression": {
+            "filters_combination_operator": "and",
+            "properties": {
+                "min_value": {
+                    "condition": "greater-than-eq",
+                    "table_column": "expression_read_optimized$value",
+                },
+                "max_value": {
+                    "condition": "less-than-eq",
+                    "table_column": "expression_read_optimized$value",
+                },
+            },
+        },
+        "sample_id": {
+            "max_item_limit": 100,
+            "condition": "in",
+            "table_column": "expression_read_optimized$sample_id",
+        },
+    },
+    "filters_combination_operator": "and",
+    "order_by": [
+        {"feature_id": "asc"},
+        {"sample_id": "asc"},
+    ],
+}
+
+EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized = {
+    "version": "1.1",
+    "output_fields_mapping": {
+        "default": [
+            {"feature_id": "expression$feature_id"},
+            {"sample_id": "expression$sample_id"},
+            {"expression": "expression$value"},
+        ],
+        "additional": [
+            {"feature_name": "expr_annotation$gene_name"},
+            {"chrom": "expr_annotation$CHROM"},
+            {"start": "expr_annotation$start"},
+            {"end": "expr_annotation$end"},
+            {"strand": "expr_annotation$strand"},
+        ],
+    },
+    "filtering_conditions": {
+        "location": {
+            "items_combination_operator": "or",
+            "filters_combination_operator": "and",
+            "max_item_limit": 10,
+            "properties": [
+                {
+                    "key": "chromosome",
+                    "condition": "is",
+                    "table_column": "expr_annotation$CHROM",
+                },
+                {
+                    "keys": ["starting_position", "ending_position"],
+                    "condition": "genobin_partial_overlap",
+                    "max_range": "250",
+                    "table_column": {
+                        "starting_position": "expr_annotation$start",
+                        "ending_position": "expr_annotation$end",
+                    },
+                },
+            ],
+        },
+        "annotation": {
+            "properties": {
+                "feature_id": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expr_annotation$feature_id",
+                },
+                "feature_name": {
+                    "max_item_limit": 100,
+                    "condition": "in",
+                    "table_column": "expr_annotation$gene_name",
+                },
+            }
+        },
+        "expression": {
+            "filters_combination_operator": "and",
+            "properties": {
+                "min_value": {
+                    "condition": "greater-than-eq",
+                    "table_column": "expression$value",
+                },
+                "max_value": {
+                    "condition": "less-than-eq",
+                    "table_column": "expression$value",
+                },
+            },
+        },
+        "sample_id": {
+            "max_item_limit": 100,
+            "condition": "in",
+            "table_column": "expression$sample_id",
+        },
+    },
+    "filters_combination_operator": "and",
+    "order_by": [
+        {"feature_id": "asc"},
+        {"sample_id": "asc"},
+    ],
+}
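
For orientation, an illustrative user filter JSON of the shape these conditions translate; the key names come from the schemas above, the values are made up. Absent a "location" key, a 1.1 assay can be served from the expression_read_optimized table:

```
# Illustrative input (values invented); key names match the filtering_conditions above.
user_filters_json = {
    "annotation": {"feature_name": ["BRCA2"]},             # "in" condition, max 100 items
    "expression": {"min_value": 25.6, "max_value": 100},   # greater-than-eq / less-than-eq on $value
    "sample_id": ["sample_1", "sample_2"],                 # "in" condition, max 100 items
}
```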
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/schemas/assay_filtering_json_schemas.py

@@ -9,7 +9,10 @@ EXTRACT_ASSAY_EXPRESSION_JSON_SCHEMA = {
     },
     "expression": {
         "type": dict,
-        "properties": {"min_value": {"type": (int, float)}, "max_value": {"type": (int, float)}},
+        "properties": {
+            "min_value": {"type": (int, float)},
+            "max_value": {"type": (int, float)},
+        },
     },
     "location": {
         "type": list,
@@ -26,5 +29,8 @@ EXTRACT_ASSAY_EXPRESSION_JSON_SCHEMA = {
         "type": list,
     },
     "conflicting_keys": [["location", "annotation"]],
-    "dependent_conditional_keys": {"expression": ["annotation", "location"]},
+    "dependent_conditional_keys": {
+        "expression": ["annotation", "location"],
+        "sample_id": ["expression", "annotation", "location"],
+    },
 }
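
The second addition makes a bare "sample_id" filter invalid on its own. A sketch of the assumed rule (each key in dependent_conditional_keys must be accompanied by at least one of its listed companion keys):

```
# Assumed semantics of dependent_conditional_keys; not the dxpy validator itself.
dependent_conditional_keys = {
    "expression": ["annotation", "location"],
    "sample_id": ["expression", "annotation", "location"],
}

def violated_dependencies(input_json):
    return [
        key
        for key, companions in dependent_conditional_keys.items()
        if key in input_json and not any(c in input_json for c in companions)
    ]

print(violated_dependencies({"sample_id": ["s1"]}))                    # -> ['sample_id']
print(violated_dependencies({"sample_id": ["s1"], "expression": {}}))  # -> []
```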
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/schemas/input_arguments_validation_schemas.py

@@ -59,11 +59,12 @@ EXTRACT_ASSAY_EXPRESSION_INPUT_ARGS_SCHEMA = {
             "filter_json_file",
             "json_help",
             "additional_fields_help",
+            "sql"
         ],
     },
     "condition": "with_at_least_one_required",
     "error_message": {
-        "message": 'The flag "--retrieve_expression" must be followed by "--filter-json", "--filter-json-file", "--json-help", or "--additional-fields-help".'
+        "message": 'The flag "--retrieve_expression" must be followed by "--sql", "--filter-json", "--filter-json-file", "--json-help", or "--additional-fields-help".'
     },
 },
 "5_json_help_exclusive_with_exceptions": {
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/bindings/apollo/vizserver_filters_from_json_parser.py

@@ -5,15 +5,16 @@ class JSONFiltersValidator(object):
     """
     A specialized class that parses user input JSON according to a schema to prepare vizserver-compliant compound filters.

-    See assay_filtering_conditions.py for current schemas.
+    See assay_filtering_conditions.py for current schemas, which include optimized schema EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1
+    and non-optimized EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized to handle location filters.

     Filters must be defined in schema["filtering_conditions"]

-    There are currently three ways to define filtering_conditions when version is 1.0:
+    There are currently three ways to define filtering_conditions when version is 1.1:
     1. Basic use-case: no "properties" are defined. Only "condition", "table_column" and optionally "max_item_limit" are defined.
        In this case, there are no sub-keys for the current key in input_json.
-       (see 'sample_id' in 'assay_filtering_conditions.EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS')
-    2. Properties defined as dict of dicts (see 'annotation' and 'expression' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS)
+       (see 'sample_id' in 'assay_filtering_conditions.EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1')
+    2. Properties defined as dict of dicts (see 'annotation' and 'expression' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1)
     2.1. If filters_combination_operator is not defined, the assumption is that there is only one sub-key in input_json

     3. Complex use-case: Properties defined as list of dicts (more advanced, special use-case with complex conditional logics that needs translation)
@@ -21,7 +22,7 @@ class JSONFiltersValidator(object):
        In this case, the schema must define "items_combination_operator" and "filters_combination_operator".
        items_combination_operator: how to combine filters for list items in input_json
        filters_combination_operator: how to combine filters within each item
-       (see 'location' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS)
+       (see 'location' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized)

     Within 'properties' if "key" is defined, then a generic one-key filter is built.
     If "keys" is defined, then more complex use-cases are handled via special conditions that are defined in condition_function_mapping.
@@ -66,14 +67,21 @@ class JSONFiltersValidator(object):

     def parse(self):
         self.is_valid_json(self.schema)
-        if self.get_schema_version(self.schema) == "1.0":
-            return self.parse_v1()
+        if self.get_schema_version(self.schema).startswith("1."):
+            return self.parse_v1_1()
         else:
             raise NotImplementedError

+    def parse_v1_1(self):
+        """
+        Parse input_json according to schema version 1.1 (optimized), and build vizserver compound filters. In this version,
+        compound filters are built in one hash.
+        """
+        return self.merge_duplicate_filters(self.parse_v1())
+
     def parse_v1(self):
         """
-        Parse input_json according to schema version 1.0, and build vizserver compound filters.
+        Parse input_json according to schema version 1.0 (non-optimized), and build vizserver compound filters.
         """

         try:
@@ -85,8 +93,8 @@ class JSONFiltersValidator(object):

         # Go through the input_json (iterate through keys and values in user input JSON)
         for filter_key, filter_values in self.input_json.items():
-            # Example: if schema is EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS
-            # Then input JSON would probably contain location or annotation
+            # Example: if schema is EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized
+            # Then input JSON would contain location and maybe others such as annotation
             # In this case:
             # filter_key -> location
             # filter_values -> list of dicts (where each dict contains "chromosome", "starting_position", "ending_position")
@@ -111,7 +119,7 @@ class JSONFiltersValidator(object):
             self.validate_max_item_limit(current_filters, filter_values, filter_key)

             # There are several ways filtering_conditions can be defined
-            # 1. Basic use-case: no properties, just condition (see 'sample_id' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS')
+            # 1. Basic use-case: no properties, just condition (see 'sample_id' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1')
             # 2. Properties defined as dict of dicts (see 'annotation' and 'expression')
             # 3. Properties defined as list of dicts (more advanced, special use-case with complex conditional logics that needs translation)
             # For more information see the docstring of the class
@@ -146,7 +154,7 @@ class JSONFiltersValidator(object):
                 # no properties, so just apply conditions
                 # In other words .get("properties") returns None
                 # Therefore we are dealing with a basic use-case scenario
-                # (See 'sample_id' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS' for an example)
+                # (See 'sample_id' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1' for an example)
                 filters = self.build_one_key_generic_filter(
                     current_filters.get("table_column"),
                     current_filters.get("condition"),
@@ -262,7 +270,7 @@ class JSONFiltersValidator(object):

         # check if there are two filtering conditions that need to be applied on the same table_column
         # check if both of those are defined in input_json
-        # an example of such a case is 'expression' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS
+        # an example of such a case is 'expression' in EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1
        # where we might have a `max_value` and a `min_value`
         # however providing both is not mandatory

@@ -311,7 +319,7 @@ class JSONFiltersValidator(object):

         else:
             # There's no filtering logic, in other words, filters_combination_operator is not defined at this level
-            # (see 'annotation' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS' for an example)
+            # (see 'annotation' in 'EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1' for an example)
             if len(current_properties) > 1:
                 if len(filter_values) > 1:
                     # if there are also more than 1 in input_json
@@ -521,3 +529,74 @@ class JSONFiltersValidator(object):
         # }

         return filter_structure
+
+    def merge_duplicate_filters(self, compound_block):
+        """
+        Recursively walk the compound tree and merge duplicate 'filters' blocks
+        at the same compound level.
+
+        Example usage:
+        # For
+        compound_block =
+        {
+            "logic": "and",
+            "compound": [
+                {
+                    "filters": {
+                        "expression_read_optimized$value": [
+                            {"condition": "greater-than-eq", "values": 2000}
+                        ]
+                    }
+                },
+                {
+                    "filters": {
+                        "expression_read_optimized$gene_name": [
+                            {"condition": "in", "values": ["FAM229B", "TRAIP", "PLEC"]}
+                        ]
+                    }
+                },
+            ],
+        }
+        merged_block = merge_duplicate_filters(compound_block)
+
+        # merged_block will be:
+        {
+            "logic": "and",
+            "compound": [
+                {
+                    "filters": {
+                        "expression_read_optimized$value": [
+                            {"condition": "greater-than-eq", "values": 2000}
+                        ],
+                        "expression_read_optimized$gene_name": [
+                            {"condition": "in", "values": ["FAM229B", "TRAIP", "PLEC"]}
+                        ],
+                    }
+                }
+            ],
+        }
+        """
+        if "compound" in compound_block:
+            new_compound = []
+            merged_filters = {}
+
+            for item in compound_block["compound"]:
+                self.merge_duplicate_filters(item)
+
+                if "filters" in item and len(item) == 1:
+                    # Merge filters into the accumulating dict
+                    for key, conditions in item["filters"].items():
+                        merged_filters[key] = []
+                        merged_filters[key].extend(conditions)
+                else:
+                    if merged_filters:
+                        new_compound.append({"filters": merged_filters})
+                        merged_filters = {}
+                    new_compound.append(item)
+
+            if merged_filters:
+                new_compound.append({"filters": merged_filters})
+
+            compound_block["compound"] = new_compound
+
+        return compound_block
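
The docstring's example can be checked standalone; the body below is the hunk's logic lifted out of the class:

```
# merge_duplicate_filters lifted out of the class, run on the docstring's input.
def merge_duplicate_filters(compound_block):
    if "compound" in compound_block:
        new_compound = []
        merged_filters = {}
        for item in compound_block["compound"]:
            merge_duplicate_filters(item)
            if "filters" in item and len(item) == 1:
                for key, conditions in item["filters"].items():
                    merged_filters[key] = []
                    merged_filters[key].extend(conditions)
            else:
                if merged_filters:
                    new_compound.append({"filters": merged_filters})
                    merged_filters = {}
                new_compound.append(item)
        if merged_filters:
            new_compound.append({"filters": merged_filters})
        compound_block["compound"] = new_compound
    return compound_block

block = {
    "logic": "and",
    "compound": [
        {"filters": {"expression_read_optimized$value": [{"condition": "greater-than-eq", "values": 2000}]}},
        {"filters": {"expression_read_optimized$gene_name": [{"condition": "in", "values": ["FAM229B", "TRAIP", "PLEC"]}]}},
    ],
}
assert len(merge_duplicate_filters(block)["compound"]) == 1  # both columns share one "filters" dict
```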
{dxpy-0.397.0 → dxpy-0.399.0}/dxpy/cli/dataset_utilities.py

@@ -71,7 +71,7 @@ from ..bindings.apollo.json_validation_by_schema import JSONValidator

 from ..bindings.apollo.schemas.input_arguments_validation_schemas import EXTRACT_ASSAY_EXPRESSION_INPUT_ARGS_SCHEMA
 from ..bindings.apollo.schemas.assay_filtering_json_schemas import EXTRACT_ASSAY_EXPRESSION_JSON_SCHEMA
-from ..bindings.apollo.schemas.assay_filtering_conditions import EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS
+from ..bindings.apollo.schemas.assay_filtering_conditions import EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_0, EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1, EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized

 from ..bindings.apollo.vizserver_filters_from_json_parser import JSONFiltersValidator
 from ..bindings.apollo.vizserver_payload_builder import VizPayloadBuilder

@@ -178,8 +178,6 @@ def raw_api_call(resp, payload, sql_message=True):
         err_message = "Insufficient permissions due to the project policy.\n" + resp_raw["error"]["message"]
     elif sql_message and resp_raw["error"]["type"] == "QueryTimeOut":
         err_message = "Please consider using `--sql` option to generate the SQL query and query via a private compute cluster.\n" + resp_raw["error"]["message"]
-    elif resp_raw["error"]["type"] == "QueryBuilderError" and resp_raw["error"]["details"] == "rsid exists in request filters without rsid entries in rsid_lookup_table.":
-        err_message = "At least one rsID provided in the filter is not present in the provided dataset or cohort"
     elif resp_raw["error"]["type"] == "DxApiError":
         err_message = resp_raw["error"]["message"]
     else:
@@ -1432,7 +1430,14 @@ def extract_assay_expression(args):
     except Exception as e:
         err_exit(str(e))

-    if user_filters_json == {}:
+    # If --sql flag is provided, filter JSON is optional. If not present, all data must be returned
+    if args.sql and not args.filter_json and not args.filter_json_file:
+        return_all_data = True
+        user_filters_json = {}
+    else:
+        return_all_data = False
+
+    if not user_filters_json and not return_all_data:
         err_exit(
             "No filter JSON is passed with --retrieve-expression or input JSON for --retrieve-expression does not contain valid filter information."
         )
@@ -1460,18 +1465,51 @@ def extract_assay_expression(args):
             "ending_position"
         ]["type"] = unicode

-    # Validate filters JSON provided by the user according to a predefined schema
-    input_json_validator = JSONValidator(
-        schema=EXTRACT_ASSAY_EXPRESSION_JSON_SCHEMA, error_handler=err_exit
-    )
-    input_json_validator.validate(input_json=user_filters_json)
+    # In case --sql flag is provided but no input json, function should return all data
+    if user_filters_json:
+        # Validate filters JSON provided by the user according to a predefined schema
+        input_json_validator = JSONValidator(
+            schema=EXTRACT_ASSAY_EXPRESSION_JSON_SCHEMA, error_handler=err_exit
+        )
+        input_json_validator.validate(input_json=user_filters_json)
+
+
+    if args.assay_name:
+        # Assumption: assay names are unique in a dataset descriptor
+        # i.e. there are never two assays of the same type with the same name in the same dataset
+        for molecular_assay in dataset.assays_info_dict["molecular_expression"]:
+            if molecular_assay["name"] == args.assay_name:
+                ASSAY_ID = molecular_assay["uuid"]
+                break
+    else:
+        ASSAY_ID = dataset.assays_info_dict["molecular_expression"][0]["uuid"]
+
+    # Getting generalized_assay_model_version to match filter schema
+    conditions_mapping = {
+        "1.0": EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_0,
+        "1.1": EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1,
+    }
+    generalized_assay_model_version = dataset.assay_info_dict(ASSAY_ID).get("generalized_assay_model_version")
+    filter_schema = conditions_mapping.get(generalized_assay_model_version)
+
+    # Genomic range limits must be applied. However, when using --sql limits may be ignored.
+    if generalized_assay_model_version == "1.1":
+        if "location" in user_filters_json:
+            filter_schema = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized
+        else:
+            filter_schema = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1
+
+    # When location filter is used and version is 1.1, or all data must be returned queries should not use optimized table
+    # Genomic range limits must be applied. However, when using --sql limits may be ignored.
+    if "location" in user_filters_json or return_all_data:

-    if "location" in user_filters_json:
+        if generalized_assay_model_version == "1.1":
+            filter_schema = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized
+
         if args.sql:
-            EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS["filtering_conditions"][
+            filter_schema["filtering_conditions"][
                 "location"
             ]["max_item_limit"] = None
-
         else:
             # Genomic range adding together across multiple contigs should be smaller than 250 Mbps
             input_json_validator.are_list_items_within_range(
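
Condensed, the schema selection this hunk introduces (constants imported as in the import hunk at the top of this file's diff; this restatement is not the dxpy source):

```
from dxpy.bindings.apollo.schemas.assay_filtering_conditions import (
    EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_0,
    EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1,
    EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized,
)

def select_filter_schema(version, user_filters_json, return_all_data):
    # The assay's generalized_assay_model_version picks the base schema; a "location"
    # filter or a return-all --sql query on a 1.1 assay falls back to the
    # non-optimized schema (the read-optimized table cannot serve location filters).
    conditions_mapping = {
        "1.0": EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_0,
        "1.1": EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1,
    }
    filter_schema = conditions_mapping.get(version)
    if version == "1.1" and ("location" in user_filters_json or return_all_data):
        filter_schema = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS_1_1_non_optimized
    return filter_schema
```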
@@ -1482,18 +1520,18 @@ def extract_assay_expression(args):
                 window_width=250000000,
                 check_each_separately=False,
             )
-
     input_json_parser = JSONFiltersValidator(
         input_json=user_filters_json,
-        schema=EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS,
+        schema=filter_schema,
         error_handler=err_exit,
     )
     vizserver_raw_filters = input_json_parser.parse()

-    _db_columns_list = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS[
+    _db_columns_list = filter_schema[
         "output_fields_mapping"
     ].get("default")

+
     if args.additional_fields:
         # All three of the following should work:
         # --additional_fields field1 field2
@@ -1507,9 +1545,10 @@ def extract_assay_expression(args):
             field = [x.strip() for x in item.split(",") if x.strip()]
             additional_fields.extend(field)

-        all_additional_cols = EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS[
+        all_additional_cols = filter_schema[
             "output_fields_mapping"
         ].get("additional")
+
         incorrect_cols = set(additional_fields) - set(
             {k for d in all_additional_cols for k in d.keys()}
         )
@@ -1526,28 +1565,19 @@ def extract_assay_expression(args):
         project_context=project,
         output_fields_mapping=_db_columns_list,
         filters={"filters": COHORT_FILTERS} if IS_COHORT else None,
-        order_by=EXTRACT_ASSAY_EXPRESSION_FILTERING_CONDITIONS["order_by"],
+        order_by=filter_schema["order_by"],
         limit=None,
         base_sql=BASE_SQL,
         is_cohort=IS_COHORT,
         error_handler=err_exit,
     )

+
     DATASET_DESCRIPTOR = dataset.descriptor_file_dict
     ASSAY_NAME = (
         args.assay_name if args.assay_name else dataset.assay_names_list("molecular_expression")[0]
     )

-    if args.assay_name:
-        # Assumption: assay names are unique in a dataset descriptor
-        # i.e. there are never two assays of the same type with the same name in the same dataset
-        for molecular_assay in dataset.assays_info_dict["molecular_expression"]:
-            if molecular_assay["name"] == args.assay_name:
-                ASSAY_ID = molecular_assay["uuid"]
-                break
-    else:
-        ASSAY_ID = dataset.assays_info_dict["molecular_expression"][0]["uuid"]
-
     viz.assemble_assay_raw_filters(
         assay_name=ASSAY_NAME, assay_id=ASSAY_ID, filters=vizserver_raw_filters
     )
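
Finally, the assay resolution this last hunk removes from the tail of the function now runs earlier, before schema selection (see the @@ -1460 hunk above). Restated as a helper; unlike the original, this sketch adds an explicit fallback when the requested name is not found:

```
# Restatement of the hoisted assay resolution (not the dxpy source). The original
# leaves ASSAY_ID unset when --assay-name matches nothing; this sketch falls back
# to the first molecular_expression assay instead.
def resolve_assay_id(assays_info_dict, assay_name=None):
    assays = assays_info_dict["molecular_expression"]
    if assay_name:
        for molecular_assay in assays:
            if molecular_assay["name"] == assay_name:  # assumes unique assay names
                return molecular_assay["uuid"]
    return assays[0]["uuid"]
```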