datacontract-cli 0.10.22__tar.gz → 0.10.23__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (203)
  1. {datacontract_cli-0.10.22/datacontract_cli.egg-info → datacontract_cli-0.10.23}/PKG-INFO +54 -50
  2. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/README.md +48 -44
  3. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/cli.py +20 -72
  4. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/data_contract_test.py +22 -6
  5. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +2 -3
  6. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/check_soda_execute.py +1 -1
  7. datacontract_cli-0.10.22/datacontract/engines/soda/connections/duckdb.py → datacontract_cli-0.10.23/datacontract/engines/soda/connections/duckdb_connection.py +6 -5
  8. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/avro_converter.py +2 -2
  9. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/dbt_converter.py +13 -10
  10. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/odcs_v3_exporter.py +22 -3
  11. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/odcs_v3_importer.py +1 -1
  12. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/resolve.py +17 -4
  13. datacontract_cli-0.10.23/datacontract/output/junit_test_results.py +135 -0
  14. datacontract_cli-0.10.23/datacontract/output/output_format.py +10 -0
  15. datacontract_cli-0.10.23/datacontract/output/test_results_writer.py +79 -0
  16. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23/datacontract_cli.egg-info}/PKG-INFO +54 -50
  17. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract_cli.egg-info/SOURCES.txt +5 -1
  18. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract_cli.egg-info/requires.txt +5 -5
  19. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/pyproject.toml +6 -6
  20. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_dbt_models.py +57 -0
  21. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_odcs_v3.py +6 -6
  22. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_resolve.py +37 -0
  23. datacontract_cli-0.10.23/tests/test_test_output_junit.py +22 -0
  24. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/LICENSE +0 -0
  25. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/MANIFEST.in +0 -0
  26. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/__init__.py +0 -0
  27. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/api.py +0 -0
  28. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/breaking/breaking.py +0 -0
  29. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/breaking/breaking_change.py +0 -0
  30. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/breaking/breaking_rules.py +0 -0
  31. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/catalog/catalog.py +0 -0
  32. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/data_contract.py +0 -0
  33. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/__init__.py +0 -0
  34. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/data_contract_checks.py +0 -0
  35. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/datacontract/check_that_datacontract_file_exists.py +0 -0
  36. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/fastjsonschema/check_jsonschema.py +0 -0
  37. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/fastjsonschema/s3/s3_read_files.py +0 -0
  38. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/__init__.py +0 -0
  39. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/bigquery.py +0 -0
  40. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/databricks.py +0 -0
  41. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/kafka.py +0 -0
  42. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/postgres.py +0 -0
  43. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/snowflake.py +0 -0
  44. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/sqlserver.py +0 -0
  45. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/engines/soda/connections/trino.py +0 -0
  46. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/__init__.py +0 -0
  47. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/avro_idl_converter.py +0 -0
  48. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/bigquery_converter.py +0 -0
  49. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/csv_type_converter.py +0 -0
  50. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/custom_converter.py +0 -0
  51. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/data_caterer_converter.py +0 -0
  52. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/dbml_converter.py +0 -0
  53. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/dcs_exporter.py +0 -0
  54. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/exporter.py +0 -0
  55. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/exporter_factory.py +0 -0
  56. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/go_converter.py +0 -0
  57. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/great_expectations_converter.py +0 -0
  58. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/html_export.py +0 -0
  59. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/iceberg_converter.py +0 -0
  60. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/jsonschema_converter.py +0 -0
  61. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/markdown_converter.py +0 -0
  62. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/pandas_type_converter.py +0 -0
  63. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/protobuf_converter.py +0 -0
  64. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/pydantic_converter.py +0 -0
  65. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/rdf_converter.py +0 -0
  66. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/sodacl_converter.py +0 -0
  67. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/spark_converter.py +0 -0
  68. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/sql_converter.py +0 -0
  69. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/sql_type_converter.py +0 -0
  70. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/sqlalchemy_converter.py +0 -0
  71. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/export/terraform_converter.py +0 -0
  72. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/avro_importer.py +0 -0
  73. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/bigquery_importer.py +0 -0
  74. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/csv_importer.py +0 -0
  75. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/dbml_importer.py +0 -0
  76. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/dbt_importer.py +0 -0
  77. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/glue_importer.py +0 -0
  78. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/iceberg_importer.py +0 -0
  79. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/importer.py +0 -0
  80. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/importer_factory.py +0 -0
  81. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/jsonschema_importer.py +0 -0
  82. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/odcs_importer.py +0 -0
  83. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/parquet_importer.py +0 -0
  84. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/spark_importer.py +0 -0
  85. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/sql_importer.py +0 -0
  86. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/imports/unity_importer.py +0 -0
  87. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/init/init_template.py +0 -0
  88. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/integration/datamesh_manager.py +0 -0
  89. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/files.py +0 -0
  90. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/lint.py +0 -0
  91. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/__init__.py +0 -0
  92. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/description_linter.py +0 -0
  93. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/field_pattern_linter.py +0 -0
  94. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/field_reference_linter.py +0 -0
  95. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/notice_period_linter.py +0 -0
  96. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/quality_schema_linter.py +0 -0
  97. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/linters/valid_constraints_linter.py +0 -0
  98. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/resources.py +0 -0
  99. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/schema.py +0 -0
  100. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/lint/urls.py +0 -0
  101. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/model/data_contract_specification.py +0 -0
  102. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/model/exceptions.py +0 -0
  103. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/model/odcs.py +0 -0
  104. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/model/run.py +0 -0
  105. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/py.typed +0 -0
  106. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/schemas/datacontract-1.1.0.init.yaml +0 -0
  107. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/schemas/datacontract-1.1.0.schema.json +0 -0
  108. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/schemas/odcs-3.0.1.schema.json +0 -0
  109. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/datacontract.html +0 -0
  110. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/index.html +0 -0
  111. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/datacontract_information.html +0 -0
  112. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/datacontract_servicelevels.html +0 -0
  113. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/datacontract_terms.html +0 -0
  114. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/definition.html +0 -0
  115. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/example.html +0 -0
  116. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/model_field.html +0 -0
  117. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/quality.html +0 -0
  118. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/partials/server.html +0 -0
  119. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract/templates/style/output.css +0 -0
  120. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract_cli.egg-info/dependency_links.txt +0 -0
  121. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract_cli.egg-info/entry_points.txt +0 -0
  122. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/datacontract_cli.egg-info/top_level.txt +0 -0
  123. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/setup.cfg +0 -0
  124. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_api.py +0 -0
  125. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_breaking.py +0 -0
  126. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_catalog.py +0 -0
  127. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_changelog.py +0 -0
  128. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_cli.py +0 -0
  129. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_data_contract_checks.py +0 -0
  130. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_data_contract_specification.py +0 -0
  131. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_description_linter.py +0 -0
  132. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_documentation_linter.py +0 -0
  133. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_download_datacontract_file.py +0 -0
  134. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_avro.py +0 -0
  135. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_avro_idl.py +0 -0
  136. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_bigquery.py +0 -0
  137. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_complex_data_contract.py +0 -0
  138. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_custom.py +0 -0
  139. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_custom_exporter.py +0 -0
  140. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_data_caterer.py +0 -0
  141. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_dbml.py +0 -0
  142. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_dbt_sources.py +0 -0
  143. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_dbt_staging_sql.py +0 -0
  144. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_go.py +0 -0
  145. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_great_expectations.py +0 -0
  146. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_html.py +0 -0
  147. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_iceberg.py +0 -0
  148. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_jsonschema.py +0 -0
  149. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_markdown.py +0 -0
  150. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_protobuf.py +0 -0
  151. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_pydantic.py +0 -0
  152. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_rdf.py +0 -0
  153. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_sodacl.py +0 -0
  154. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_spark.py +0 -0
  155. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_sql.py +0 -0
  156. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_sql_query.py +0 -0
  157. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_sqlalchemy.py +0 -0
  158. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_export_terraform.py +0 -0
  159. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_field_constraint_linter.py +0 -0
  160. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_field_pattern_linter.py +0 -0
  161. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_field_reference_linter.py +0 -0
  162. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_avro.py +0 -0
  163. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_bigquery.py +0 -0
  164. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_csv.py +0 -0
  165. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_dbml.py +0 -0
  166. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_dbt.py +0 -0
  167. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_glue.py +0 -0
  168. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_iceberg.py +0 -0
  169. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_jsonschema.py +0 -0
  170. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_odcs_v3.py +0 -0
  171. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_parquet.py +0 -0
  172. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_spark.py +0 -0
  173. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_sql_postgres.py +0 -0
  174. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_sql_sqlserver.py +0 -0
  175. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_import_unity_file.py +0 -0
  176. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_integration_datameshmanager.py +0 -0
  177. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_lint.py +0 -0
  178. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_notice_period_linter.py +0 -0
  179. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_quality_schema_linter.py +0 -0
  180. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_roundtrip_jsonschema.py +0 -0
  181. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_spec_fields_field.py +0 -0
  182. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_spec_ref.py +0 -0
  183. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_azure_remote.py +0 -0
  184. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_bigquery.py +0 -0
  185. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_databricks.py +0 -0
  186. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_dataframe.py +0 -0
  187. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_delta.py +0 -0
  188. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_gcs_json_remote.py +0 -0
  189. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_kafka.py +0 -0
  190. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_kafka_remote.py +0 -0
  191. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_local_json.py +0 -0
  192. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_parquet.py +0 -0
  193. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_postgres.py +0 -0
  194. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_quality.py +0 -0
  195. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_csv.py +0 -0
  196. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_delta.py +0 -0
  197. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_json.py +0 -0
  198. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_json_complex.py +0 -0
  199. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_json_multiple_models.py +0 -0
  200. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_s3_json_remote.py +0 -0
  201. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_snowflake.py +0 -0
  202. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_sqlserver.py +0 -0
  203. {datacontract_cli-0.10.22 → datacontract_cli-0.10.23}/tests/test_test_trino.py +0 -0
PKG-INFO
@@ -1,6 +1,6 @@
  Metadata-Version: 2.2
  Name: datacontract-cli
- Version: 0.10.22
+ Version: 0.10.23
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  Project-URL: Homepage, https://cli.datacontract.com
@@ -16,12 +16,12 @@ Requires-Dist: pydantic<2.11.0,>=2.8.2
  Requires-Dist: pyyaml~=6.0.1
  Requires-Dist: requests<2.33,>=2.31
  Requires-Dist: fastjsonschema<2.22.0,>=2.19.1
- Requires-Dist: fastparquet==2024.11.0
+ Requires-Dist: fastparquet<2025.0.0,>=2024.5.0
  Requires-Dist: numpy<2.0.0,>=1.26.4
  Requires-Dist: python-multipart==0.0.20
  Requires-Dist: rich<13.10,>=13.7
  Requires-Dist: sqlglot<27.0.0,>=26.6.0
- Requires-Dist: duckdb==1.1.2
+ Requires-Dist: duckdb<2.0.0,>=1.0.0
  Requires-Dist: soda-core-duckdb<3.5.0,>=3.3.20
  Requires-Dist: setuptools>=60
  Requires-Dist: python-dotenv~=1.0.0
@@ -39,7 +39,7 @@ Provides-Extra: databricks
  Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "databricks"
  Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.20; extra == "databricks"
  Requires-Dist: databricks-sql-connector<3.8.0,>=3.7.0; extra == "databricks"
- Requires-Dist: databricks-sdk<0.41.0; extra == "databricks"
+ Requires-Dist: databricks-sdk<0.45.0; extra == "databricks"
  Provides-Extra: iceberg
  Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
  Provides-Extra: kafka
@@ -48,7 +48,7 @@ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "kafka"
  Provides-Extra: postgres
  Requires-Dist: soda-core-postgres<3.4.0,>=3.3.20; extra == "postgres"
  Provides-Extra: s3
- Requires-Dist: s3fs==2024.12.0; extra == "s3"
+ Requires-Dist: s3fs==2025.2.0; extra == "s3"
  Requires-Dist: aiobotocore<2.20.0,>=2.17.0; extra == "s3"
  Provides-Extra: snowflake
  Requires-Dist: snowflake-connector-python[pandas]<3.14,>=3.6; extra == "snowflake"
@@ -66,7 +66,7 @@ Requires-Dist: pyarrow>=18.1.0; extra == "parquet"
  Provides-Extra: rdf
  Requires-Dist: rdflib==7.0.0; extra == "rdf"
  Provides-Extra: api
- Requires-Dist: fastapi==0.115.6; extra == "api"
+ Requires-Dist: fastapi==0.115.8; extra == "api"
  Requires-Dist: uvicorn==0.34.0; extra == "api"
  Provides-Extra: all
  Requires-Dist: datacontract-cli[api,bigquery,csv,databricks,dbml,dbt,iceberg,kafka,parquet,postgres,rdf,s3,snowflake,sqlserver,trino]; extra == "all"
@@ -230,6 +230,12 @@ if not run.has_passed():
  # Abort pipeline, alert, or take corrective actions...
  ```
  
+ ## How to
+ 
+ - [How to integrate Data Contract CLI in your CI/CD pipeline as a GitHub Action](https://github.com/datacontract/datacontract-action/)
+ - [How to run the Data Contract CLI API to test data contracts with POST requests](https://cli.datacontract.com/API)
+ - [How to run Data Contract CLI in a Databricks pipeline](https://www.datamesh-architecture.com/howto/build-a-dataproduct-with-databricks#test-the-data-product)
+ 
  
  ## Installation
  
@@ -378,49 +384,41 @@ Commands
  
  ### test
  ```
-
- Usage: datacontract test [OPTIONS] [LOCATION]
-
- Run schema and quality tests on configured servers.
-
- ╭─ Arguments ──────────────────────────────────────────────────────────────────╮
- │ location [LOCATION] The location (url or path) of the data contract
- │ yaml.
- │ [default: datacontract.yaml] │
- ╰──────────────────────────────────────────────────────────────────────────────╯
- ╭─ Options ────────────────────────────────────────────────────────────────────╮
- --schema TEXT The location (url or
- path) of the Data
- Contract Specification
- JSON Schema
- [default:
- https://datacontract.c…
- --server TEXT The server
- configuration to run
- the schema and quality
- tests. Use the key of
- the server object in
- the data contract yaml
- file to refer to a
- server, e.g.,
- `production`, or `all`
- for all servers
- (default).
- [default: all]
- │ --publish TEXT The url to publish the
- results after the test
- [default: None]
- --logs --no-logs Print logs
- [default: no-logs]
- │ --ssl-verification --no-ssl-verification SSL verification when │
- │ publishing the data │
- │ contract. │
- │ [default: │
- │ ssl-verification] │
- │ --help Show this message and │
- │ exit. │
- ╰──────────────────────────────────────────────────────────────────────────────╯
-
+
+ Usage: datacontract test [OPTIONS] [LOCATION]
+
+ Run schema and quality tests on configured servers.
+
+ ╭─ Arguments ──────────────────────────────────────────────────────────────────────────────────────╮
+ │ location [LOCATION] The location (url or path) of the data contract yaml.
+ [default: datacontract.yaml]
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
+ ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
+ --schema TEXT The location (url or path) of the Data │
+ Contract Specification JSON Schema
+ [default: None]
+ --server TEXT The server configuration to run the
+ schema and quality tests. Use the key of
+ the server object in the data contract
+ yaml file to refer to a server, e.g.,
+ `production`, or `all` for all servers
+ (default).
+ [default: all]
+ --publish TEXT The url to publish the results after the
+ test
+ [default: None]
+ --output PATH Specify the file path where the test
+ results should be written to (e.g.,
+ './test-results/TEST-datacontract.xml').
+ [default: None]
+ --output-format [junit] The target format for the test results.
+ [default: None]
+ │ --logs --no-logs Print logs [default: no-logs]
+ --ssl-verification --no-ssl-verification SSL verification when publishing the
+ data contract.
+ [default: ssl-verification]
+ --help Show this message and exit.
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
  ```
  
  Data Contract CLI connects to a data source and runs schema and quality tests to verify that the data contract is valid.
@@ -1037,6 +1035,12 @@ The export function converts the logical data types of the datacontract into the
  if a server is selected via the `--server` option (based on the `type` of that server). If no server is selected, the
  logical data types are exported.
  
+ #### DBT & DBT-SOURCES
+ 
+ The export funciton converts the datacontract to dbt models in YAML format, with support for SQL dialects.
+ If a server is selected via the `--server` option (based on the `type` of that server) then the DBT column `data_types` match the expected data types of the server.
+ If no server is selected, then it defaults to `snowflake`.
+ 
  
  #### Spark
  The export function converts the data contract specification into a StructType Spark schema. The returned value is a Python code picture of the model schemas.
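For illustration, here is a minimal Python sketch of the dbt export behavior described in the README addition above. It is not part of the release: constructing a DataContractSpecification directly from parsed YAML and using "snowflake" as the adapter value are assumptions for the example; the CLI itself resolves contracts through its lint/resolve helpers.

```python
# Sketch only: calls the updated to_dbt_models_yaml(spec, server=...) shown in
# the dbt_converter.py diff further below. The server argument is forwarded as
# the dbt adapter type, which drives the column data_types; omitting it falls
# back to snowflake typing per the README note above.
import yaml

from datacontract.export.dbt_converter import to_dbt_models_yaml
from datacontract.model.data_contract_specification import DataContractSpecification

with open("datacontract.yaml", encoding="utf-8") as f:
    # Assumption: the YAML keys map directly onto the pydantic model fields.
    spec = DataContractSpecification(**yaml.safe_load(f))

print(to_dbt_models_yaml(spec, server="snowflake"))
```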
@@ -1902,7 +1906,7 @@ Python base interpreter should be 3.11.x (unless working on 3.12 release candida
  
  ```bash
  # create venv
- python3 -m venv venv
+ python3.11 -m venv venv
  source venv/bin/activate
  
  # Install Requirements
README.md
@@ -144,6 +144,12 @@ if not run.has_passed():
  # Abort pipeline, alert, or take corrective actions...
  ```
  
+ ## How to
+ 
+ - [How to integrate Data Contract CLI in your CI/CD pipeline as a GitHub Action](https://github.com/datacontract/datacontract-action/)
+ - [How to run the Data Contract CLI API to test data contracts with POST requests](https://cli.datacontract.com/API)
+ - [How to run Data Contract CLI in a Databricks pipeline](https://www.datamesh-architecture.com/howto/build-a-dataproduct-with-databricks#test-the-data-product)
+ 
  
  ## Installation
  
@@ -292,49 +298,41 @@ Commands
  
  ### test
  ```
-
- Usage: datacontract test [OPTIONS] [LOCATION]
-
- Run schema and quality tests on configured servers.
-
- ╭─ Arguments ──────────────────────────────────────────────────────────────────╮
- │ location [LOCATION] The location (url or path) of the data contract
- │ yaml.
- │ [default: datacontract.yaml] │
- ╰──────────────────────────────────────────────────────────────────────────────╯
- ╭─ Options ────────────────────────────────────────────────────────────────────╮
- --schema TEXT The location (url or
- path) of the Data
- Contract Specification
- JSON Schema
- [default:
- https://datacontract.c…
- --server TEXT The server
- configuration to run
- the schema and quality
- tests. Use the key of
- the server object in
- the data contract yaml
- file to refer to a
- server, e.g.,
- `production`, or `all`
- for all servers
- (default).
- [default: all]
- │ --publish TEXT The url to publish the
- results after the test
- [default: None]
- --logs --no-logs Print logs
- [default: no-logs]
- │ --ssl-verification --no-ssl-verification SSL verification when │
- │ publishing the data │
- │ contract. │
- │ [default: │
- │ ssl-verification] │
- │ --help Show this message and │
- │ exit. │
- ╰──────────────────────────────────────────────────────────────────────────────╯
-
+
+ Usage: datacontract test [OPTIONS] [LOCATION]
+
+ Run schema and quality tests on configured servers.
+
+ ╭─ Arguments ──────────────────────────────────────────────────────────────────────────────────────╮
+ │ location [LOCATION] The location (url or path) of the data contract yaml.
+ [default: datacontract.yaml]
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
+ ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
+ --schema TEXT The location (url or path) of the Data │
+ Contract Specification JSON Schema
+ [default: None]
+ --server TEXT The server configuration to run the
+ schema and quality tests. Use the key of
+ the server object in the data contract
+ yaml file to refer to a server, e.g.,
+ `production`, or `all` for all servers
+ (default).
+ [default: all]
+ --publish TEXT The url to publish the results after the
+ test
+ [default: None]
+ --output PATH Specify the file path where the test
+ results should be written to (e.g.,
+ './test-results/TEST-datacontract.xml').
+ [default: None]
+ --output-format [junit] The target format for the test results.
+ [default: None]
+ │ --logs --no-logs Print logs [default: no-logs]
+ --ssl-verification --no-ssl-verification SSL verification when publishing the
+ data contract.
+ [default: ssl-verification]
+ --help Show this message and exit.
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
  ```
  
  Data Contract CLI connects to a data source and runs schema and quality tests to verify that the data contract is valid.
@@ -951,6 +949,12 @@ The export function converts the logical data types of the datacontract into the
  if a server is selected via the `--server` option (based on the `type` of that server). If no server is selected, the
  logical data types are exported.
  
+ #### DBT & DBT-SOURCES
+ 
+ The export funciton converts the datacontract to dbt models in YAML format, with support for SQL dialects.
+ If a server is selected via the `--server` option (based on the `type` of that server) then the DBT column `data_types` match the expected data types of the server.
+ If no server is selected, then it defaults to `snowflake`.
+ 
  
  #### Spark
  The export function converts the data contract specification into a StructType Spark schema. The returned value is a Python code picture of the model schemas.
@@ -1816,7 +1820,7 @@ Python base interpreter should be 3.11.x (unless working on 3.12 release candida
  
  ```bash
  # create venv
- python3 -m venv venv
+ python3.11 -m venv venv
  source venv/bin/activate
  
  # Install Requirements
datacontract/cli.py
@@ -5,9 +5,7 @@ from typing import Iterable, List, Optional
  
  import typer
  from click import Context
- from rich import box
  from rich.console import Console
- from rich.table import Table
  from typer.core import TyperGroup
  from typing_extensions import Annotated
  
@@ -19,6 +17,8 @@ from datacontract.integration.datamesh_manager import (
  publish_data_contract_to_datamesh_manager,
  )
  from datacontract.lint.resolve import resolve_data_contract_dict
+ from datacontract.output.output_format import OutputFormat
+ from datacontract.output.test_results_writer import write_test_result
  
  console = Console()
  
@@ -92,12 +92,19 @@ def lint(
  str,
  typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema"),
  ] = None,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Specify the file path where the test results should be written to (e.g., './test-results/TEST-datacontract.xml'). If no path is provided, the output will be printed to stdout."
+ ),
+ ] = None,
+ output_format: Annotated[OutputFormat, typer.Option(help="The target format for the test results.")] = None,
  ):
  """
  Validate that the datacontract.yaml is correctly formatted.
  """
  run = DataContract(data_contract_file=location, schema_location=schema).lint()
- _handle_result(run)
+ write_test_result(run, console, output_format, output)
  
  
  @app.command()
@@ -120,6 +127,13 @@ def test(
  ),
  ] = "all",
  publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
+ output: Annotated[
+ Path,
+ typer.Option(
+ help="Specify the file path where the test results should be written to (e.g., './test-results/TEST-datacontract.xml')."
+ ),
+ ] = None,
+ output_format: Annotated[OutputFormat, typer.Option(help="The target format for the test results.")] = None,
  logs: Annotated[bool, typer.Option(help="Print logs")] = False,
  ssl_verification: Annotated[
  bool,
@@ -141,7 +155,7 @@
  ).test()
  if logs:
  _print_logs(run)
- _handle_result(run)
+ write_test_result(run, console, output_format, output)
  
  
  @app.command()
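To show how the new options above fit together, the following is a minimal sketch of writing a JUnit report from a test run through the Python API. It is a sketch only: the OutputFormat.junit member name is assumed from the `--output-format [junit]` choice in the help text, and write_test_result presumably also prints the result table to the console and exits non-zero on failure, as _handle_result did.

```python
# Sketch only: the programmatic equivalent of
#   datacontract test --output ./test-results/TEST-datacontract.xml --output-format junit
# using the helpers introduced under datacontract/output/.
from pathlib import Path

from rich.console import Console

from datacontract.data_contract import DataContract
from datacontract.output.output_format import OutputFormat
from datacontract.output.test_results_writer import write_test_result

run = DataContract(data_contract_file="datacontract.yaml").test()

# OutputFormat.junit is an assumption based on the `[junit]` choice shown in the help text.
write_test_result(run, Console(), OutputFormat.junit, Path("./test-results/TEST-datacontract.xml"))
```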
@@ -214,7 +228,7 @@ def export(
  if output is None:
  console.print(result, markup=False, soft_wrap=True)
  else:
- with output.open("w") as f:
+ with output.open(mode="w", encoding="utf-8") as f:
  f.write(result)
  console.print(f"Written result to {output}")
  
@@ -306,7 +320,7 @@ def import_(
  if output is None:
  console.print(result.to_yaml(), markup=False, soft_wrap=True)
  else:
- with output.open("w") as f:
+ with output.open(mode="w", encoding="utf-8") as f:
  f.write(result.to_yaml())
  console.print(f"Written result to {output}")
  
@@ -467,77 +481,11 @@ def api(
  uvicorn.run(app="datacontract.api:app", port=port, host=host, reload=True, log_config=LOGGING_CONFIG)
  
  
- def _handle_result(run):
- _print_table(run)
- if run.result == "passed":
- console.print(
- f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
- )
- elif run.result == "warning":
- console.print("🟠 data contract has warnings. Found the following warnings:")
- i = 1
- for check in run.checks:
- if check.result != "passed":
- field = to_field(run, check)
- if field:
- field = field + " "
- else:
- field = ""
- console.print(f"{i}) {field}{check.name}: {check.reason}")
- i += 1
- else:
- console.print("🔴 data contract is invalid, found the following errors:")
- i = 1
- for check in run.checks:
- if check.result != "passed":
- field = to_field(run, check)
- if field:
- field = field + " "
- else:
- field = ""
- console.print(f"{i}) {field}{check.name}: {check.reason}")
- i += 1
- raise typer.Exit(code=1)
-
-
- def _print_table(run):
- table = Table(box=box.ROUNDED)
- table.add_column("Result", no_wrap=True)
- table.add_column("Check", max_width=100)
- table.add_column("Field", max_width=32)
- table.add_column("Details", max_width=50)
- for check in sorted(run.checks, key=lambda c: (c.result or "", c.model or "", c.field or "")):
- table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
- console.print(table)
-
-
- def to_field(run, check):
- models = [c.model for c in run.checks]
- if len(set(models)) > 1:
- if check.field is None:
- return check.model
- return check.model + "." + check.field
- else:
- return check.field
-
-
  def _print_logs(run):
  console.print("\nLogs:")
  for log in run.logs:
  console.print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
  
  
- def with_markup(result):
- if result == "passed":
- return "[green]passed[/green]"
- if result == "warning":
- return "[yellow]warning[/yellow]"
- if result == "failed":
- return "[red]failed[/red]"
- if result == "error":
- return "[red]error[/red]"
- return result
-
-
  if __name__ == "__main__":
  app()
datacontract/engines/data_contract_test.py
@@ -29,12 +29,7 @@ def execute_data_contract_test(
  reason="Models block is missing. Skip executing tests.",
  engine="datacontract",
  )
- check_that_datacontract_contains_valid_server_configuration(run, data_contract_specification, server_name)
- if server_name:
- server = data_contract_specification.servers.get(server_name)
- else:
- server_name = list(data_contract_specification.servers.keys())[0]
- server = data_contract_specification.servers.get(server_name)
+ server = get_server(data_contract_specification, server_name)
  run.log_info(f"Running tests for data contract {data_contract_specification.id} with server {server_name}")
  run.dataContractId = data_contract_specification.id
  run.dataContractVersion = data_contract_specification.info.version
@@ -49,3 +44,24 @@
  if server.format == "json" and server.type != "kafka":
  check_jsonschema(run, data_contract_specification, server)
  check_soda_execute(run, data_contract_specification, server, spark)
+
+
+ def get_server(data_contract_specification: DataContractSpecification, server_name: str = None):
+ """Get the server configuration from the data contract specification.
+
+ Args:
+ data_contract_specification: The data contract specification
+ server_name: Optional name of the server to use. If not provided, uses the first server.
+
+ Returns:
+ The selected server configuration
+ """
+
+ check_that_datacontract_contains_valid_server_configuration(data_contract_specification, server_name)
+
+ if server_name:
+ server = data_contract_specification.servers.get(server_name)
+ else:
+ server_name = list(data_contract_specification.servers.keys())[0]
+ server = data_contract_specification.servers.get(server_name)
+ return server
datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py
@@ -1,12 +1,11 @@
  from datacontract.model.data_contract_specification import DataContractSpecification
  from datacontract.model.exceptions import DataContractException
- from datacontract.model.run import Run
  
  
  def check_that_datacontract_contains_valid_server_configuration(
- run: Run, data_contract: DataContractSpecification, server_name: str
+ data_contract: DataContractSpecification, server_name: str | None
  ):
- if data_contract.servers is None:
+ if data_contract.servers is None or len(data_contract.servers) == 0:
  raise DataContractException(
  type="lint",
  name="Check that data contract contains valid server configuration",
datacontract/engines/soda/check_soda_execute.py
@@ -3,7 +3,7 @@ import uuid
  
  from datacontract.engines.soda.connections.bigquery import to_bigquery_soda_configuration
  from datacontract.engines.soda.connections.databricks import to_databricks_soda_configuration
- from datacontract.engines.soda.connections.duckdb import get_duckdb_connection
+ from datacontract.engines.soda.connections.duckdb_connection import get_duckdb_connection
  from datacontract.engines.soda.connections.kafka import create_spark_session, read_kafka_topic
  from datacontract.engines.soda.connections.postgres import to_postgres_soda_configuration
  from datacontract.engines.soda.connections.snowflake import to_snowflake_soda_configuration
datacontract/engines/soda/connections/duckdb_connection.py (renamed from duckdb.py)
@@ -1,4 +1,5 @@
  import os
+ from typing import Any
  
  import duckdb
  
@@ -27,13 +28,13 @@ def get_duckdb_connection(data_contract, server, run: Run):
  run.log_info(f"Creating table {model_name} for {model_path}")
  
  if server.format == "json":
- format = "auto"
+ json_format = "auto"
  if server.delimiter == "new_line":
- format = "newline_delimited"
+ json_format = "newline_delimited"
  elif server.delimiter == "array":
- format = "array"
+ json_format = "array"
  con.sql(f"""
- CREATE VIEW "{model_name}" AS SELECT * FROM read_json_auto('{model_path}', format='{format}', hive_partitioning=1);
+ CREATE VIEW "{model_name}" AS SELECT * FROM read_json_auto('{model_path}', format='{json_format}', hive_partitioning=1);
  """)
  elif server.format == "parquet":
  con.sql(f"""
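For context, a standalone sketch of the DuckDB call this connection helper issues for newline-delimited JSON; the file path and view name are hypothetical.

```python
# Sketch only: the server delimiter maps to DuckDB's JSON reader format as in
# get_duckdb_connection above ("new_line" -> 'newline_delimited',
# "array" -> 'array', otherwise 'auto').
import duckdb

con = duckdb.connect(database=":memory:")
con.sql("""
    CREATE VIEW "orders" AS
    SELECT * FROM read_json_auto('data/orders.jsonl', format='newline_delimited', hive_partitioning=1);
""")
print(con.sql('SELECT count(*) FROM "orders"').fetchall())
```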
@@ -56,7 +57,7 @@ def get_duckdb_connection(data_contract, server, run: Run):
  return con
  
  
- def to_csv_types(model) -> dict:
+ def to_csv_types(model) -> dict[Any, str | None] | None:
  if model is None:
  return None
  columns = {}
datacontract/export/avro_converter.py
@@ -108,8 +108,8 @@ def to_avro_type(field: Field, field_name: str) -> str | dict:
  elif field.type in ["time"]:
  return "long"
  elif field.type in ["object", "record", "struct"]:
- if field.config is not None and 'namespace' in field.config:
- return to_avro_record(field_name ,field.fields ,field.description ,field.config['namespace'])
+ if field.config is not None and "namespace" in field.config:
+ return to_avro_record(field_name, field.fields, field.description, field.config["namespace"])
  return to_avro_record(field_name, field.fields, field.description, None)
  elif field.type in ["binary"]:
  return "bytes"
datacontract/export/dbt_converter.py
@@ -9,7 +9,7 @@ from datacontract.model.data_contract_specification import DataContractSpecifica
  
  class DbtExporter(Exporter):
  def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
- return to_dbt_models_yaml(data_contract)
+ return to_dbt_models_yaml(data_contract, server)
  
  
  class DbtSourceExporter(Exporter):
@@ -27,15 +27,16 @@ class DbtStageExporter(Exporter):
  )
  
  
- def to_dbt_models_yaml(data_contract_spec: DataContractSpecification):
+ def to_dbt_models_yaml(data_contract_spec: DataContractSpecification, server: str = None):
  dbt = {
  "version": 2,
  "models": [],
  }
+
  for model_key, model_value in data_contract_spec.models.items():
- dbt_model = _to_dbt_model(model_key, model_value, data_contract_spec)
+ dbt_model = _to_dbt_model(model_key, model_value, data_contract_spec, adapter_type=server)
  dbt["models"].append(dbt_model)
- return yaml.dump(dbt, indent=2, sort_keys=False, allow_unicode=True)
+ return yaml.safe_dump(dbt, indent=2, sort_keys=False, allow_unicode=True)
  
  
  def to_dbt_staging_sql(data_contract_spec: DataContractSpecification, model_name: str, model_value: Model) -> str:
@@ -60,7 +61,7 @@ def to_dbt_sources_yaml(data_contract_spec: DataContractSpecification, server: s
  if data_contract_spec.info.owner is not None:
  source["meta"] = {"owner": data_contract_spec.info.owner}
  if data_contract_spec.info.description is not None:
- source["description"] = data_contract_spec.info.description
+ source["description"] = data_contract_spec.info.description.strip().replace("\n", " ")
  found_server = data_contract_spec.servers.get(server)
  adapter_type = None
  if found_server is not None:
@@ -87,14 +88,16 @@ def _to_dbt_source_table(
  }
  
  if model_value.description is not None:
- dbt_model["description"] = model_value.description
+ dbt_model["description"] = model_value.description.strip().replace("\n", " ")
  columns = _to_columns(data_contract_spec, model_value.fields, False, adapter_type)
  if columns:
  dbt_model["columns"] = columns
  return dbt_model
  
  
- def _to_dbt_model(model_key, model_value: Model, data_contract_spec: DataContractSpecification) -> dict:
+ def _to_dbt_model(
+ model_key, model_value: Model, data_contract_spec: DataContractSpecification, adapter_type: Optional[str]
+ ) -> dict:
  dbt_model = {
  "name": model_key,
  }
@@ -108,8 +111,8 @@ def _to_dbt_model(model_key, model_value: Model, data_contract_spec: DataContrac
  if _supports_constraints(model_type):
  dbt_model["config"]["contract"] = {"enforced": True}
  if model_value.description is not None:
- dbt_model["description"] = model_value.description
- columns = _to_columns(data_contract_spec, model_value.fields, _supports_constraints(model_type), None)
+ dbt_model["description"] = model_value.description.strip().replace("\n", " ")
+ columns = _to_columns(data_contract_spec, model_value.fields, _supports_constraints(model_type), adapter_type)
  if columns:
  dbt_model["columns"] = columns
  return dbt_model
@@ -171,7 +174,7 @@ def _to_column(
  {"dbt_expectations.dbt_expectations.expect_column_values_to_be_of_type": {"column_type": dbt_type}}
  )
  if field.description is not None:
- column["description"] = field.description
+ column["description"] = field.description.strip().replace("\n", " ")
  if field.required:
  if supports_constraints:
  column.setdefault("constraints", []).append({"type": "not_null"})