datacontract-cli 0.10.27__tar.gz → 0.10.28__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (212)
  1. {datacontract_cli-0.10.27/datacontract_cli.egg-info → datacontract_cli-0.10.28}/PKG-INFO +81 -35
  2. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/README.md +76 -32
  3. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/cli.py +10 -3
  4. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/data_contract.py +122 -29
  5. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/data_contract_checks.py +2 -0
  6. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/html_exporter.py +28 -23
  7. datacontract_cli-0.10.28/datacontract/export/mermaid_exporter.py +97 -0
  8. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/odcs_v3_exporter.py +7 -9
  9. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/excel_importer.py +5 -2
  10. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/importer.py +10 -1
  11. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/odcs_importer.py +2 -2
  12. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/odcs_v3_importer.py +9 -9
  13. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/spark_importer.py +38 -16
  14. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/sql_importer.py +4 -2
  15. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/unity_importer.py +77 -37
  16. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/integration/datamesh_manager.py +16 -2
  17. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/resolve.py +60 -6
  18. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/datacontract.html +4 -0
  19. datacontract_cli-0.10.28/datacontract/templates/datacontract_odcs.html +666 -0
  20. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/index.html +2 -0
  21. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/server.html +2 -0
  22. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/style/output.css +319 -145
  23. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28/datacontract_cli.egg-info}/PKG-INFO +81 -35
  24. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract_cli.egg-info/SOURCES.txt +1 -0
  25. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract_cli.egg-info/requires.txt +4 -2
  26. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/pyproject.toml +6 -4
  27. datacontract_cli-0.10.28/tests/test_import_spark.py +165 -0
  28. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_unity_file.py +5 -2
  29. datacontract_cli-0.10.27/datacontract/export/mermaid_exporter.py +0 -32
  30. datacontract_cli-0.10.27/tests/test_import_spark.py +0 -240
  31. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/LICENSE +0 -0
  32. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/MANIFEST.in +0 -0
  33. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/__init__.py +0 -0
  34. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/api.py +0 -0
  35. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/breaking/breaking.py +0 -0
  36. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/breaking/breaking_change.py +0 -0
  37. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/breaking/breaking_rules.py +0 -0
  38. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/catalog/catalog.py +0 -0
  39. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/__init__.py +0 -0
  40. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/data_contract_test.py +0 -0
  41. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +0 -0
  42. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/datacontract/check_that_datacontract_file_exists.py +0 -0
  43. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/fastjsonschema/check_jsonschema.py +0 -0
  44. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/fastjsonschema/s3/s3_read_files.py +0 -0
  45. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/__init__.py +0 -0
  46. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/check_soda_execute.py +0 -0
  47. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/bigquery.py +0 -0
  48. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/databricks.py +0 -0
  49. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/duckdb_connection.py +0 -0
  50. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/kafka.py +0 -0
  51. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/postgres.py +0 -0
  52. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/snowflake.py +0 -0
  53. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/sqlserver.py +0 -0
  54. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/engines/soda/connections/trino.py +0 -0
  55. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/__init__.py +0 -0
  56. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/avro_converter.py +0 -0
  57. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/avro_idl_converter.py +0 -0
  58. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/bigquery_converter.py +0 -0
  59. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/custom_converter.py +0 -0
  60. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/data_caterer_converter.py +0 -0
  61. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/dbml_converter.py +0 -0
  62. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/dbt_converter.py +0 -0
  63. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/dcs_exporter.py +0 -0
  64. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/duckdb_type_converter.py +0 -0
  65. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/exporter.py +0 -0
  66. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/exporter_factory.py +0 -0
  67. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/go_converter.py +0 -0
  68. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/great_expectations_converter.py +0 -0
  69. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/iceberg_converter.py +0 -0
  70. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/jsonschema_converter.py +0 -0
  71. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/markdown_converter.py +0 -0
  72. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/pandas_type_converter.py +0 -0
  73. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/protobuf_converter.py +0 -0
  74. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/pydantic_converter.py +0 -0
  75. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/rdf_converter.py +0 -0
  76. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/sodacl_converter.py +0 -0
  77. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/spark_converter.py +0 -0
  78. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/sql_converter.py +0 -0
  79. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/sql_type_converter.py +0 -0
  80. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/sqlalchemy_converter.py +0 -0
  81. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/export/terraform_converter.py +0 -0
  82. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/avro_importer.py +0 -0
  83. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/bigquery_importer.py +0 -0
  84. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/csv_importer.py +0 -0
  85. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/dbml_importer.py +0 -0
  86. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/dbt_importer.py +0 -0
  87. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/glue_importer.py +0 -0
  88. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/iceberg_importer.py +0 -0
  89. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/importer_factory.py +0 -0
  90. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/jsonschema_importer.py +0 -0
  91. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/parquet_importer.py +0 -0
  92. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/imports/protobuf_importer.py +0 -0
  93. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/init/init_template.py +0 -0
  94. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/files.py +0 -0
  95. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/lint.py +0 -0
  96. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/__init__.py +0 -0
  97. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/description_linter.py +0 -0
  98. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/field_pattern_linter.py +0 -0
  99. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/field_reference_linter.py +0 -0
  100. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/notice_period_linter.py +0 -0
  101. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/linters/valid_constraints_linter.py +0 -0
  102. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/resources.py +0 -0
  103. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/schema.py +0 -0
  104. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/lint/urls.py +0 -0
  105. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/model/data_contract_specification/__init__.py +0 -0
  106. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/model/exceptions.py +0 -0
  107. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/model/odcs.py +0 -0
  108. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/model/run.py +0 -0
  109. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/output/__init__.py +0 -0
  110. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/output/junit_test_results.py +0 -0
  111. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/output/output_format.py +0 -0
  112. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/output/test_results_writer.py +0 -0
  113. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/py.typed +0 -0
  114. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/schemas/datacontract-1.1.0.init.yaml +0 -0
  115. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/schemas/datacontract-1.1.0.schema.json +0 -0
  116. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/schemas/odcs-3.0.1.schema.json +0 -0
  117. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/datacontract_information.html +0 -0
  118. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/datacontract_servicelevels.html +0 -0
  119. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/datacontract_terms.html +0 -0
  120. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/definition.html +0 -0
  121. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/example.html +0 -0
  122. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/model_field.html +0 -0
  123. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract/templates/partials/quality.html +0 -0
  124. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract_cli.egg-info/dependency_links.txt +0 -0
  125. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract_cli.egg-info/entry_points.txt +0 -0
  126. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/datacontract_cli.egg-info/top_level.txt +0 -0
  127. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/setup.cfg +0 -0
  128. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_api.py +0 -0
  129. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_breaking.py +0 -0
  130. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_catalog.py +0 -0
  131. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_changelog.py +0 -0
  132. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_cli.py +0 -0
  133. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_data_contract_checks.py +0 -0
  134. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_data_contract_specification.py +0 -0
  135. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_description_linter.py +0 -0
  136. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_documentation_linter.py +0 -0
  137. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_download_datacontract_file.py +0 -0
  138. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_duckdb_json.py +0 -0
  139. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_avro.py +0 -0
  140. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_avro_idl.py +0 -0
  141. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_bigquery.py +0 -0
  142. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_complex_data_contract.py +0 -0
  143. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_custom.py +0 -0
  144. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_custom_exporter.py +0 -0
  145. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_data_caterer.py +0 -0
  146. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_dbml.py +0 -0
  147. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_dbt_models.py +0 -0
  148. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_dbt_sources.py +0 -0
  149. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_dbt_staging_sql.py +0 -0
  150. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_go.py +0 -0
  151. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_great_expectations.py +0 -0
  152. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_html.py +0 -0
  153. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_iceberg.py +0 -0
  154. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_jsonschema.py +0 -0
  155. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_markdown.py +0 -0
  156. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_mermaid.py +0 -0
  157. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_odcs_v3.py +0 -0
  158. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_protobuf.py +0 -0
  159. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_pydantic.py +0 -0
  160. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_rdf.py +0 -0
  161. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_sodacl.py +0 -0
  162. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_spark.py +0 -0
  163. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_sql.py +0 -0
  164. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_sql_query.py +0 -0
  165. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_sqlalchemy.py +0 -0
  166. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_export_terraform.py +0 -0
  167. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_field_constraint_linter.py +0 -0
  168. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_field_pattern_linter.py +0 -0
  169. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_field_reference_linter.py +0 -0
  170. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_avro.py +0 -0
  171. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_bigquery.py +0 -0
  172. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_csv.py +0 -0
  173. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_dbml.py +0 -0
  174. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_dbt.py +0 -0
  175. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_excel.py +0 -0
  176. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_glue.py +0 -0
  177. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_iceberg.py +0 -0
  178. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_jsonschema.py +0 -0
  179. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_odcs_v3.py +0 -0
  180. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_parquet.py +0 -0
  181. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_protobuf.py +0 -0
  182. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_sql_postgres.py +0 -0
  183. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_import_sql_sqlserver.py +0 -0
  184. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_integration_datameshmanager.py +0 -0
  185. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_lint.py +0 -0
  186. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_notice_period_linter.py +0 -0
  187. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_resolve.py +0 -0
  188. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_roundtrip_jsonschema.py +0 -0
  189. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_spec_fields_field.py +0 -0
  190. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_spec_ref.py +0 -0
  191. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_azure_remote.py +0 -0
  192. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_bigquery.py +0 -0
  193. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_databricks.py +0 -0
  194. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_dataframe.py +0 -0
  195. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_delta.py +0 -0
  196. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_gcs_json_remote.py +0 -0
  197. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_kafka.py +0 -0
  198. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_kafka_remote.py +0 -0
  199. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_local_json.py +0 -0
  200. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_output_junit.py +0 -0
  201. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_parquet.py +0 -0
  202. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_postgres.py +0 -0
  203. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_quality.py +0 -0
  204. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_csv.py +0 -0
  205. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_delta.py +0 -0
  206. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_json.py +0 -0
  207. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_json_complex.py +0 -0
  208. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_json_multiple_models.py +0 -0
  209. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_s3_json_remote.py +0 -0
  210. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_snowflake.py +0 -0
  211. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_sqlserver.py +0 -0
  212. {datacontract_cli-0.10.27 → datacontract_cli-0.10.28}/tests/test_test_trino.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacontract-cli
- Version: 0.10.27
+ Version: 0.10.28
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  License-Expression: MIT
@@ -42,12 +42,14 @@ Provides-Extra: databricks
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
  Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
  Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
- Requires-Dist: databricks-sdk<0.51.0; extra == "databricks"
+ Requires-Dist: databricks-sdk<0.55.0; extra == "databricks"
+ Requires-Dist: pyspark==3.5.5; extra == "databricks"
  Provides-Extra: iceberg
  Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
  Provides-Extra: kafka
  Requires-Dist: datacontract-cli[avro]; extra == "kafka"
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "kafka"
+ Requires-Dist: pyspark==3.5.5; extra == "kafka"
  Provides-Extra: postgres
  Requires-Dist: soda-core-postgres<3.6.0,>=3.3.20; extra == "postgres"
  Provides-Extra: s3
@@ -79,7 +81,7 @@ Provides-Extra: dev
  Requires-Dist: datacontract-cli[all]; extra == "dev"
  Requires-Dist: httpx==0.28.1; extra == "dev"
  Requires-Dist: kafka-python; extra == "dev"
- Requires-Dist: moto==5.1.4; extra == "dev"
+ Requires-Dist: moto==5.1.5; extra == "dev"
  Requires-Dist: pandas>=2.1.0; extra == "dev"
  Requires-Dist: pre-commit<4.3.0,>=3.7.1; extra == "dev"
  Requires-Dist: pytest; extra == "dev"
@@ -399,30 +401,38 @@ Commands
  │ [default: datacontract.yaml] │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
  ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
- │ --schema TEXT The location (url or path) of the Data
- Contract Specification JSON Schema
- [default: None]
- --server TEXT The server configuration to run the
- schema and quality tests. Use the key of
- the server object in the data contract
- yaml file to refer to a server, e.g.,
- `production`, or `all` for all servers
- (default).
- [default: all]
- --publish TEXT The url to publish the results after the
- test
- [default: None]
- --output PATH Specify the file path where the test
- results should be written to (e.g.,
- './test-results/TEST-datacontract.xml').
- [default: None]
- --output-format [junit] The target format for the test results.
- [default: None]
- │ --logs --no-logs Print logs [default: no-logs]
- --ssl-verification --no-ssl-verification SSL verification when publishing the
- data contract.
- [default: ssl-verification]
- --help Show this message and exit.
+ │ --schema TEXT The location (url or path) of
+ the Data Contract Specification
+ JSON Schema
+ [default: None]
+ --server TEXT The server configuration to run
+ the schema and quality tests.
+ Use the key of the server object
+ in the data contract yaml file
+ to refer to a server, e.g.,
+ `production`, or `all` for all
+ servers (default).
+ [default: all]
+ --publish-test-results --no-publish-test-results Publish the results after the
+ test
+ [default:
+ no-publish-test-results]
+ --publish TEXT DEPRECATED. The url to publish
+ the results after the test.
+ [default: None]
+ │ --output PATH Specify the file path where the
+ test results should be written
+ to (e.g.,
+ './test-results/TEST-datacontra…
+ [default: None]
+ │ --output-format [junit] The target format for the test │
+ │ results. │
+ │ [default: None] │
+ │ --logs --no-logs Print logs [default: no-logs] │
+ │ --ssl-verification --no-ssl-verification SSL verification when publishing │
+ │ the data contract. │
+ │ [default: ssl-verification] │
+ │ --help Show this message and exit. │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯

  ```
@@ -996,6 +1006,21 @@ Available export options:
  | `custom` | Export to Custom format with Jinja | ✅ |
  | Missing something? | Please create an issue on GitHub | TBD |

+ #### SQL
+
+ The `export` function converts a given data contract into a SQL data definition language (DDL).
+
+ ```shell
+ datacontract export datacontract.yaml --format sql --output output.sql
+ ```
+
+ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs with `variant` columns set the following properties.
+
+ ```shell
+ spark.conf.set(“spark.databricks.delta.schema.typeCheck.enabled”, “false”)
+ from datacontract.model import data_contract_specification
+ data_contract_specification.DATACONTRACT_TYPES.append(“variant”)
+ ```

  #### Great Expectations

@@ -1003,7 +1028,7 @@ The `export` function transforms a specified data contract into a comprehensive
  If the contract includes multiple models, you need to specify the names of the model you wish to export.

  ```shell
- datacontract export datacontract.yaml --format great-expectations --model orders
+ datacontract export datacontract.yaml --format great-expectations --model orders
  ```

  The export creates a list of expectations by utilizing:
@@ -1028,7 +1053,7 @@ To further customize the export, the following optional arguments are available:

  #### RDF

- The export function converts a given data contract into a RDF representation. You have the option to
+ The `export` function converts a given data contract into a RDF representation. You have the option to
  add a base_url which will be used as the default prefix to resolve relative IRIs inside the document.

  ```shell
@@ -1273,6 +1298,10 @@ FROM
  │ --source TEXT The path to the file that │
  │ should be imported. │
  │ [default: None] │
+ │ --spec [datacontract_specification|od The format of the data │
+ │ cs] contract to import. │
+ │ [default: │
+ │ datacontract_specification] │
  │ --dialect TEXT The SQL dialect to use when │
  │ importing SQL files, e.g., │
  │ postgres, tsql, bigquery. │
@@ -1361,11 +1390,11 @@ Available import options:
  | `jsonschema` | Import from JSON Schemas | ✅ |
  | `odcs` | Import from Open Data Contract Standard (ODCS) | ✅ |
  | `parquet` | Import from Parquet File Metadata | ✅ |
- | `protobuf` | Import from Protobuf schemas | ✅ |
- | `spark` | Import from Spark StructTypes | ✅ |
+ | `protobuf` | Import from Protobuf schemas | ✅ |
+ | `spark` | Import from Spark StructTypes, Variant | ✅ |
  | `sql` | Import from SQL DDL | ✅ |
  | `unity` | Import from Databricks Unity Catalog | partial |
- | Missing something? | Please create an issue on GitHub | TBD |
+ | Missing something? | Please create an issue on GitHub | TBD |


  #### ODCS
@@ -1467,14 +1496,31 @@ datacontract import --format glue --source <database_name>

  #### Spark

- Importing from Spark table or view these must be created or accessible in the Spark context. Specify tables list in `source` parameter.
-
- Example:
+ Importing from Spark table or view these must be created or accessible in the Spark context. Specify tables list in `source` parameter. If the `source` tables are registered as tables in Databricks, and they have a table-level descriptions they will also be added to the Data Contract Specification.

  ```bash
+ # Example: Import Spark table(s) from Spark context
  datacontract import --format spark --source "users,orders"
  ```

+ ```bash
+ # Example: Import Spark table
+ DataContract().import_from_source("spark", "users")
+ DataContract().import_from_source(format = "spark", source = "users")
+
+ # Example: Import Spark dataframe
+ DataContract().import_from_source("spark", "users", dataframe = df_user)
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user)
+
+ # Example: Import Spark table + table description
+ DataContract().import_from_source("spark", "users", description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", description = "description")
+
+ # Example: Import Spark dataframe + table description
+ DataContract().import_from_source("spark", "users", dataframe = df_user, description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user, description = "description")
+ ```
+
  #### DBML

  Importing from DBML Documents.
@@ -307,30 +307,38 @@ Commands
  │ [default: datacontract.yaml] │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
  ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
- │ --schema TEXT The location (url or path) of the Data
- Contract Specification JSON Schema
- [default: None]
- --server TEXT The server configuration to run the
- schema and quality tests. Use the key of
- the server object in the data contract
- yaml file to refer to a server, e.g.,
- `production`, or `all` for all servers
- (default).
- [default: all]
- --publish TEXT The url to publish the results after the
- test
- [default: None]
- --output PATH Specify the file path where the test
- results should be written to (e.g.,
- './test-results/TEST-datacontract.xml').
- [default: None]
- --output-format [junit] The target format for the test results.
- [default: None]
- │ --logs --no-logs Print logs [default: no-logs]
- --ssl-verification --no-ssl-verification SSL verification when publishing the
- data contract.
- [default: ssl-verification]
- --help Show this message and exit.
+ │ --schema TEXT The location (url or path) of
+ the Data Contract Specification
+ JSON Schema
+ [default: None]
+ --server TEXT The server configuration to run
+ the schema and quality tests.
+ Use the key of the server object
+ in the data contract yaml file
+ to refer to a server, e.g.,
+ `production`, or `all` for all
+ servers (default).
+ [default: all]
+ --publish-test-results --no-publish-test-results Publish the results after the
+ test
+ [default:
+ no-publish-test-results]
+ --publish TEXT DEPRECATED. The url to publish
+ the results after the test.
+ [default: None]
+ │ --output PATH Specify the file path where the
+ test results should be written
+ to (e.g.,
+ './test-results/TEST-datacontra…
+ [default: None]
+ │ --output-format [junit] The target format for the test │
+ │ results. │
+ │ [default: None] │
+ │ --logs --no-logs Print logs [default: no-logs] │
+ │ --ssl-verification --no-ssl-verification SSL verification when publishing │
+ │ the data contract. │
+ │ [default: ssl-verification] │
+ │ --help Show this message and exit. │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯

  ```
@@ -904,6 +912,21 @@ Available export options:
  | `custom` | Export to Custom format with Jinja | ✅ |
  | Missing something? | Please create an issue on GitHub | TBD |

+ #### SQL
+
+ The `export` function converts a given data contract into a SQL data definition language (DDL).
+
+ ```shell
+ datacontract export datacontract.yaml --format sql --output output.sql
+ ```
+
+ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs with `variant` columns set the following properties.
+
+ ```shell
+ spark.conf.set(“spark.databricks.delta.schema.typeCheck.enabled”, “false”)
+ from datacontract.model import data_contract_specification
+ data_contract_specification.DATACONTRACT_TYPES.append(“variant”)
+ ```

  #### Great Expectations

@@ -911,7 +934,7 @@ The `export` function transforms a specified data contract into a comprehensive
  If the contract includes multiple models, you need to specify the names of the model you wish to export.

  ```shell
- datacontract export datacontract.yaml --format great-expectations --model orders
+ datacontract export datacontract.yaml --format great-expectations --model orders
  ```

  The export creates a list of expectations by utilizing:
@@ -936,7 +959,7 @@ To further customize the export, the following optional arguments are available:

  #### RDF

- The export function converts a given data contract into a RDF representation. You have the option to
+ The `export` function converts a given data contract into a RDF representation. You have the option to
  add a base_url which will be used as the default prefix to resolve relative IRIs inside the document.

  ```shell
@@ -1181,6 +1204,10 @@ FROM
  │ --source TEXT The path to the file that │
  │ should be imported. │
  │ [default: None] │
+ │ --spec [datacontract_specification|od The format of the data │
+ │ cs] contract to import. │
+ │ [default: │
+ │ datacontract_specification] │
  │ --dialect TEXT The SQL dialect to use when │
  │ importing SQL files, e.g., │
  │ postgres, tsql, bigquery. │
@@ -1269,11 +1296,11 @@ Available import options:
  | `jsonschema` | Import from JSON Schemas | ✅ |
  | `odcs` | Import from Open Data Contract Standard (ODCS) | ✅ |
  | `parquet` | Import from Parquet File Metadata | ✅ |
- | `protobuf` | Import from Protobuf schemas | ✅ |
- | `spark` | Import from Spark StructTypes | ✅ |
+ | `protobuf` | Import from Protobuf schemas | ✅ |
+ | `spark` | Import from Spark StructTypes, Variant | ✅ |
  | `sql` | Import from SQL DDL | ✅ |
  | `unity` | Import from Databricks Unity Catalog | partial |
- | Missing something? | Please create an issue on GitHub | TBD |
+ | Missing something? | Please create an issue on GitHub | TBD |


  #### ODCS
@@ -1375,14 +1402,31 @@ datacontract import --format glue --source <database_name>

  #### Spark

- Importing from Spark table or view these must be created or accessible in the Spark context. Specify tables list in `source` parameter.
-
- Example:
+ Importing from Spark table or view these must be created or accessible in the Spark context. Specify tables list in `source` parameter. If the `source` tables are registered as tables in Databricks, and they have a table-level descriptions they will also be added to the Data Contract Specification.

  ```bash
+ # Example: Import Spark table(s) from Spark context
  datacontract import --format spark --source "users,orders"
  ```

+ ```bash
+ # Example: Import Spark table
+ DataContract().import_from_source("spark", "users")
+ DataContract().import_from_source(format = "spark", source = "users")
+
+ # Example: Import Spark dataframe
+ DataContract().import_from_source("spark", "users", dataframe = df_user)
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user)
+
+ # Example: Import Spark table + table description
+ DataContract().import_from_source("spark", "users", description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", description = "description")
+
+ # Example: Import Spark dataframe + table description
+ DataContract().import_from_source("spark", "users", dataframe = df_user, description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user, description = "description")
+ ```
+
  #### DBML

  Importing from DBML Documents.
@@ -11,7 +11,7 @@ from typing_extensions import Annotated

  from datacontract.catalog.catalog import create_data_contract_html, create_index_html
  from datacontract.data_contract import DataContract, ExportFormat
- from datacontract.imports.importer import ImportFormat
+ from datacontract.imports.importer import ImportFormat, Spec
  from datacontract.init.init_template import get_init_template
  from datacontract.integration.datamesh_manager import (
  publish_data_contract_to_datamesh_manager,
@@ -126,7 +126,8 @@ def test(
  "servers (default)."
  ),
  ] = "all",
- publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
+ publish_test_results: Annotated[bool, typer.Option(help="Publish the results after the test")] = False,
+ publish: Annotated[str, typer.Option(help="DEPRECATED. The url to publish the results after the test.")] = None,
  output: Annotated[
  Path,
  typer.Option(
@@ -149,6 +150,7 @@ def test(
  run = DataContract(
  data_contract_file=location,
  schema_location=schema,
+ publish_test_results=publish_test_results,
  publish_url=publish,
  server=server,
  ssl_verification=ssl_verification,
@@ -246,6 +248,10 @@ def import_(
  Optional[str],
  typer.Option(help="The path to the file that should be imported."),
  ] = None,
+ spec: Annotated[
+ Spec,
+ typer.Option(help="The format of the data contract to import. "),
+ ] = Spec.datacontract_specification,
  dialect: Annotated[
  Optional[str],
  typer.Option(help="The SQL dialect to use when importing SQL files, e.g., postgres, tsql, bigquery."),
@@ -265,7 +271,7 @@ def import_(
  ),
  ] = None,
  unity_table_full_name: Annotated[
- Optional[str], typer.Option(help="Full name of a table in the unity catalog")
+ Optional[List[str]], typer.Option(help="Full name of a table in the unity catalog")
  ] = None,
  dbt_model: Annotated[
  Optional[List[str]],
@@ -312,6 +318,7 @@ def import_(
  result = DataContract().import_from_source(
  format=format,
  source=source,
+ spec=spec,
  template=template,
  schema=schema,
  dialect=dialect,
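The cli.py hunks above add a `--publish-test-results` flag to `datacontract test` (the URL-based `--publish` option is now marked deprecated) and a `--spec` option to `datacontract import`, both forwarded to the `DataContract` library API. A minimal sketch of the equivalent library call, assuming the constructor argument shown in this diff and the existing `test()` / `run.has_passed()` helpers from the project's README:

```python
# Sketch only: the datacontract.yaml path and the "production" server key are placeholders.
from datacontract.data_contract import DataContract

data_contract = DataContract(
    data_contract_file="datacontract.yaml",
    server="production",
    publish_test_results=True,  # new in 0.10.28; replaces passing a publish URL
)
run = data_contract.test()
print(run.has_passed())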
@@ -1,6 +1,12 @@
  import logging
  import typing

+ from open_data_contract_standard.model import CustomProperty, OpenDataContractStandard
+
+ from datacontract.export.odcs_v3_exporter import to_odcs_v3
+ from datacontract.imports.importer import Spec
+ from datacontract.imports.odcs_v3_importer import import_from_odcs
+
  if typing.TYPE_CHECKING:
  from pyspark.sql import SparkSession

@@ -44,6 +50,7 @@ class DataContract:
  inline_definitions: bool = True,
  inline_quality: bool = True,
  ssl_verification: bool = True,
+ publish_test_results: bool = False,
  ):
  self._data_contract_file = data_contract_file
  self._data_contract_str = data_contract_str
@@ -51,6 +58,7 @@ class DataContract:
  self._schema_location = schema_location
  self._server = server
  self._publish_url = publish_url
+ self._publish_test_results = publish_test_results
  self._spark = spark
  self._duckdb_connection = duckdb_connection
  self._inline_definitions = inline_definitions
@@ -178,7 +186,7 @@ class DataContract:

  run.finish()

- if self._publish_url is not None:
+ if self._publish_url is not None or self._publish_test_results:
  publish_test_results_to_datamesh_manager(run, self._publish_url, self._ssl_verification)

  return run
@@ -243,43 +251,128 @@ class DataContract:
  )

  def export(self, export_format: ExportFormat, model: str = "all", sql_server_type: str = "auto", **kwargs) -> str:
- data_contract = resolve.resolve_data_contract(
- self._data_contract_file,
- self._data_contract_str,
- self._data_contract,
- schema_location=self._schema_location,
- inline_definitions=self._inline_definitions,
- inline_quality=self._inline_quality,
- )
+ if export_format == ExportFormat.html or export_format == ExportFormat.mermaid:
+ data_contract = resolve.resolve_data_contract_v2(
+ self._data_contract_file,
+ self._data_contract_str,
+ self._data_contract,
+ schema_location=self._schema_location,
+ inline_definitions=self._inline_definitions,
+ inline_quality=self._inline_quality,
+ )

- return exporter_factory.create(export_format).export(
- data_contract=data_contract,
- model=model,
- server=self._server,
- sql_server_type=sql_server_type,
- export_args=kwargs,
- )
+ return exporter_factory.create(export_format).export(
+ data_contract=data_contract,
+ model=model,
+ server=self._server,
+ sql_server_type=sql_server_type,
+ export_args=kwargs,
+ )
+ else:
+ data_contract = resolve.resolve_data_contract(
+ self._data_contract_file,
+ self._data_contract_str,
+ self._data_contract,
+ schema_location=self._schema_location,
+ inline_definitions=self._inline_definitions,
+ inline_quality=self._inline_quality,
+ )
+
+ return exporter_factory.create(export_format).export(
+ data_contract=data_contract,
+ model=model,
+ server=self._server,
+ sql_server_type=sql_server_type,
+ export_args=kwargs,
+ )

+ # REFACTOR THIS
+ # could be a class method, not using anything from the instance
  def import_from_source(
  self,
  format: str,
  source: typing.Optional[str] = None,
  template: typing.Optional[str] = None,
  schema: typing.Optional[str] = None,
+ spec: Spec = Spec.datacontract_specification,
  **kwargs,
- ) -> DataContractSpecification:
- data_contract_specification_initial = DataContract.init(template=template, schema=schema)
+ ) -> DataContractSpecification | OpenDataContractStandard:
+ id = kwargs.get("id")
+ owner = kwargs.get("owner")

- imported_data_contract_specification = importer_factory.create(format).import_source(
- data_contract_specification=data_contract_specification_initial, source=source, import_args=kwargs
- )
+ if spec == Spec.odcs:
+ data_contract_specification_initial = DataContract.init(template=template, schema=schema)
+
+ odcs_imported = importer_factory.create(format).import_source(
+ data_contract_specification=data_contract_specification_initial, source=source, import_args=kwargs
+ )
+
+ if isinstance(odcs_imported, DataContractSpecification):
+ # convert automatically
+ odcs_imported = to_odcs_v3(odcs_imported)
+
+ self._overwrite_id_in_odcs(odcs_imported, id)
+ self._overwrite_owner_in_odcs(odcs_imported, owner)
+
+ return odcs_imported
+ elif spec == Spec.datacontract_specification:
+ data_contract_specification_initial = DataContract.init(template=template, schema=schema)
+
+ data_contract_specification_imported = importer_factory.create(format).import_source(
+ data_contract_specification=data_contract_specification_initial, source=source, import_args=kwargs
+ )
+
+ if isinstance(data_contract_specification_imported, OpenDataContractStandard):
+ # convert automatically
+ data_contract_specification_imported = import_from_odcs(
+ data_contract_specification_initial, data_contract_specification_imported
+ )
+
+ self._overwrite_id_in_data_contract_specification(data_contract_specification_imported, id)
+ self._overwrite_owner_in_data_contract_specification(data_contract_specification_imported, owner)
+
+ return data_contract_specification_imported
+ else:
+ raise DataContractException(
+ type="general",
+ result=ResultEnum.error,
+ name="Import Data Contract",
+ reason=f"Unsupported data contract format: {spec}",
+ engine="datacontract",
+ )
+
+ def _overwrite_id_in_data_contract_specification(
+ self, data_contract_specification: DataContractSpecification, id: str | None
+ ):
+ if not id:
+ return
+
+ data_contract_specification.id = id
+
+ def _overwrite_owner_in_data_contract_specification(
+ self, data_contract_specification: DataContractSpecification, owner: str | None
+ ):
+ if not owner:
+ return
+
+ if data_contract_specification.info is None:
+ data_contract_specification.info = Info()
+ data_contract_specification.info.owner = owner
+
+ def _overwrite_owner_in_odcs(self, odcs: OpenDataContractStandard, owner: str | None):
+ if not owner:
+ return
+
+ if odcs.customProperties is None:
+ odcs.customProperties = []
+ for customProperty in odcs.customProperties:
+ if customProperty.name == "owner":
+ customProperty.value = owner
+ return
+ odcs.customProperties.append(CustomProperty(property="owner", value=owner))

- # Set id and owner if provided
- if kwargs.get("id"):
- data_contract_specification_initial.id = kwargs["id"]
- if kwargs.get("owner"):
- if data_contract_specification_initial.info is None:
- data_contract_specification_initial.info = Info()
- data_contract_specification_initial.info.owner = kwargs["owner"]
+ def _overwrite_id_in_odcs(self, odcs: OpenDataContractStandard, id: str | None):
+ if not id:
+ return

- return imported_data_contract_specification
+ odcs.id = id
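The data_contract.py changes above give `import_from_source` a `spec` parameter, so an import can now return either a `DataContractSpecification` (the default) or an `OpenDataContractStandard` document, with `id` and `owner` overrides applied to whichever type comes back. A rough usage sketch, assuming the `Spec` enum from `datacontract.imports.importer` and a hypothetical local `orders.sql` DDL file:

```python
# Sketch only: the source file, dialect, and owner values are placeholders.
from datacontract.data_contract import DataContract
from datacontract.imports.importer import Spec

# Default: returns a DataContractSpecification object.
dcs = DataContract().import_from_source(
    format="sql", source="orders.sql", dialect="postgres", owner="checkout-team"
)

# New in 0.10.28: request an Open Data Contract Standard (ODCS) document instead.
odcs = DataContract().import_from_source(
    format="sql", source="orders.sql", dialect="postgres", spec=Spec.odcs, owner="checkout-team"
)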
@@ -502,11 +502,13 @@ def prepare_query(quality: Quality, model_name: str, field_name: str = None) ->
  query = quality.query

  query = query.replace("{model}", model_name)
+ query = query.replace("{schema}", model_name)
  query = query.replace("{table}", model_name)

  if field_name is not None:
  query = query.replace("{field}", field_name)
  query = query.replace("{column}", field_name)
+ query = query.replace("{property}", field_name)

  return query
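The two lines added to `prepare_query` mean quality-check SQL templates can now use the ODCS-style placeholders `{schema}` and `{property}` alongside the existing `{model}`/`{table}` and `{field}`/`{column}`. An illustrative substitution, mirroring the replace calls above with made-up names:

```python
# Illustrative only; the model and field names are placeholders.
query = "SELECT COUNT(*) FROM {schema} WHERE {property} IS NULL"
model_name, field_name = "orders", "order_id"

for placeholder in ("{model}", "{schema}", "{table}"):
    query = query.replace(placeholder, model_name)
for placeholder in ("{field}", "{column}", "{property}"):
    query = query.replace(placeholder, field_name)

print(query)  # SELECT COUNT(*) FROM orders WHERE order_id IS NULL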