datacontract-cli 0.10.27__tar.gz → 0.10.29__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (216)
  1. {datacontract_cli-0.10.27/datacontract_cli.egg-info → datacontract_cli-0.10.29}/PKG-INFO +98 -62
  2. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/README.md +86 -52
  3. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/api.py +1 -1
  4. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/cli.py +37 -5
  5. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/data_contract.py +122 -29
  6. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/data_contract_checks.py +2 -0
  7. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/duckdb_connection.py +1 -1
  8. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/html_exporter.py +28 -23
  9. datacontract_cli-0.10.29/datacontract/export/mermaid_exporter.py +97 -0
  10. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/odcs_v3_exporter.py +7 -9
  11. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/rdf_converter.py +2 -2
  12. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/sql_type_converter.py +2 -2
  13. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/excel_importer.py +7 -2
  14. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/importer.py +11 -1
  15. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/importer_factory.py +7 -0
  16. datacontract_cli-0.10.29/datacontract/imports/json_importer.py +325 -0
  17. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/odcs_importer.py +2 -2
  18. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/odcs_v3_importer.py +9 -9
  19. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/spark_importer.py +38 -16
  20. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/sql_importer.py +4 -2
  21. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/unity_importer.py +77 -37
  22. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/init/init_template.py +1 -1
  23. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/integration/datamesh_manager.py +16 -2
  24. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/resolve.py +61 -7
  25. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/schema.py +1 -1
  26. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/schemas/datacontract-1.1.0.init.yaml +1 -1
  27. datacontract_cli-0.10.29/datacontract/schemas/datacontract-1.2.0.init.yaml +91 -0
  28. datacontract_cli-0.10.29/datacontract/schemas/datacontract-1.2.0.schema.json +2029 -0
  29. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/datacontract.html +4 -0
  30. datacontract_cli-0.10.29/datacontract/templates/datacontract_odcs.html +666 -0
  31. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/index.html +2 -0
  32. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/server.html +2 -0
  33. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/style/output.css +319 -145
  34. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29/datacontract_cli.egg-info}/PKG-INFO +98 -62
  35. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract_cli.egg-info/SOURCES.txt +5 -0
  36. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract_cli.egg-info/requires.txt +11 -9
  37. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/pyproject.toml +13 -11
  38. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_api.py +1 -1
  39. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_duckdb_json.py +1 -1
  40. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_complex_data_contract.py +1 -1
  41. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_rdf.py +6 -6
  42. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_sodacl.py +1 -1
  43. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_sql.py +2 -2
  44. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_avro.py +5 -5
  45. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_csv.py +1 -1
  46. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_dbt.py +4 -4
  47. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_iceberg.py +1 -1
  48. datacontract_cli-0.10.29/tests/test_import_json.py +151 -0
  49. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_parquet.py +1 -1
  50. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_protobuf.py +1 -1
  51. datacontract_cli-0.10.29/tests/test_import_spark.py +165 -0
  52. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_sql_postgres.py +2 -2
  53. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_sql_sqlserver.py +1 -1
  54. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_unity_file.py +5 -2
  55. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_resolve.py +7 -7
  56. datacontract_cli-0.10.27/datacontract/export/mermaid_exporter.py +0 -32
  57. datacontract_cli-0.10.27/tests/test_import_spark.py +0 -240
  58. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/LICENSE +0 -0
  59. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/MANIFEST.in +0 -0
  60. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/__init__.py +0 -0
  61. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/breaking/breaking.py +0 -0
  62. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/breaking/breaking_change.py +0 -0
  63. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/breaking/breaking_rules.py +0 -0
  64. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/catalog/catalog.py +0 -0
  65. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/__init__.py +0 -0
  66. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/data_contract_test.py +0 -0
  67. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +0 -0
  68. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/datacontract/check_that_datacontract_file_exists.py +0 -0
  69. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/fastjsonschema/check_jsonschema.py +0 -0
  70. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/fastjsonschema/s3/s3_read_files.py +0 -0
  71. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/__init__.py +0 -0
  72. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/check_soda_execute.py +0 -0
  73. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/bigquery.py +0 -0
  74. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/databricks.py +0 -0
  75. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/kafka.py +0 -0
  76. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/postgres.py +0 -0
  77. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/snowflake.py +0 -0
  78. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/sqlserver.py +0 -0
  79. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/engines/soda/connections/trino.py +0 -0
  80. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/__init__.py +0 -0
  81. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/avro_converter.py +0 -0
  82. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/avro_idl_converter.py +0 -0
  83. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/bigquery_converter.py +0 -0
  84. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/custom_converter.py +0 -0
  85. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/data_caterer_converter.py +0 -0
  86. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/dbml_converter.py +0 -0
  87. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/dbt_converter.py +0 -0
  88. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/dcs_exporter.py +0 -0
  89. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/duckdb_type_converter.py +0 -0
  90. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/exporter.py +0 -0
  91. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/exporter_factory.py +0 -0
  92. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/go_converter.py +0 -0
  93. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/great_expectations_converter.py +0 -0
  94. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/iceberg_converter.py +0 -0
  95. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/jsonschema_converter.py +0 -0
  96. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/markdown_converter.py +0 -0
  97. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/pandas_type_converter.py +0 -0
  98. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/protobuf_converter.py +0 -0
  99. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/pydantic_converter.py +0 -0
  100. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/sodacl_converter.py +0 -0
  101. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/spark_converter.py +0 -0
  102. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/sql_converter.py +0 -0
  103. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/sqlalchemy_converter.py +0 -0
  104. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/export/terraform_converter.py +0 -0
  105. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/avro_importer.py +0 -0
  106. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/bigquery_importer.py +0 -0
  107. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/csv_importer.py +0 -0
  108. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/dbml_importer.py +0 -0
  109. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/dbt_importer.py +0 -0
  110. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/glue_importer.py +0 -0
  111. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/iceberg_importer.py +0 -0
  112. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/jsonschema_importer.py +0 -0
  113. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/parquet_importer.py +0 -0
  114. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/imports/protobuf_importer.py +0 -0
  115. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/files.py +0 -0
  116. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/lint.py +0 -0
  117. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/__init__.py +0 -0
  118. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/description_linter.py +0 -0
  119. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/field_pattern_linter.py +0 -0
  120. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/field_reference_linter.py +0 -0
  121. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/notice_period_linter.py +0 -0
  122. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/linters/valid_constraints_linter.py +0 -0
  123. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/resources.py +0 -0
  124. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/lint/urls.py +0 -0
  125. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/model/data_contract_specification/__init__.py +0 -0
  126. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/model/exceptions.py +0 -0
  127. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/model/odcs.py +0 -0
  128. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/model/run.py +0 -0
  129. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/output/__init__.py +0 -0
  130. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/output/junit_test_results.py +0 -0
  131. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/output/output_format.py +0 -0
  132. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/output/test_results_writer.py +0 -0
  133. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/py.typed +0 -0
  134. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/schemas/datacontract-1.1.0.schema.json +0 -0
  135. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/schemas/odcs-3.0.1.schema.json +0 -0
  136. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/datacontract_information.html +0 -0
  137. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/datacontract_servicelevels.html +0 -0
  138. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/datacontract_terms.html +0 -0
  139. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/definition.html +0 -0
  140. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/example.html +0 -0
  141. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/model_field.html +0 -0
  142. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract/templates/partials/quality.html +0 -0
  143. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract_cli.egg-info/dependency_links.txt +0 -0
  144. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract_cli.egg-info/entry_points.txt +0 -0
  145. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/datacontract_cli.egg-info/top_level.txt +0 -0
  146. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/setup.cfg +0 -0
  147. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_breaking.py +0 -0
  148. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_catalog.py +0 -0
  149. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_changelog.py +0 -0
  150. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_cli.py +0 -0
  151. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_data_contract_checks.py +0 -0
  152. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_data_contract_specification.py +0 -0
  153. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_description_linter.py +0 -0
  154. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_documentation_linter.py +0 -0
  155. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_download_datacontract_file.py +0 -0
  156. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_avro.py +0 -0
  157. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_avro_idl.py +0 -0
  158. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_bigquery.py +0 -0
  159. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_custom.py +0 -0
  160. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_custom_exporter.py +0 -0
  161. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_data_caterer.py +0 -0
  162. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_dbml.py +0 -0
  163. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_dbt_models.py +0 -0
  164. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_dbt_sources.py +0 -0
  165. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_dbt_staging_sql.py +0 -0
  166. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_go.py +0 -0
  167. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_great_expectations.py +0 -0
  168. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_html.py +0 -0
  169. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_iceberg.py +0 -0
  170. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_jsonschema.py +0 -0
  171. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_markdown.py +0 -0
  172. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_mermaid.py +0 -0
  173. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_odcs_v3.py +0 -0
  174. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_protobuf.py +0 -0
  175. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_pydantic.py +0 -0
  176. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_spark.py +0 -0
  177. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_sql_query.py +0 -0
  178. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_sqlalchemy.py +0 -0
  179. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_export_terraform.py +0 -0
  180. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_field_constraint_linter.py +0 -0
  181. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_field_pattern_linter.py +0 -0
  182. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_field_reference_linter.py +0 -0
  183. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_bigquery.py +0 -0
  184. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_dbml.py +0 -0
  185. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_excel.py +0 -0
  186. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_glue.py +0 -0
  187. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_jsonschema.py +0 -0
  188. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_import_odcs_v3.py +0 -0
  189. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_integration_datameshmanager.py +0 -0
  190. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_lint.py +0 -0
  191. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_notice_period_linter.py +0 -0
  192. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_roundtrip_jsonschema.py +0 -0
  193. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_spec_fields_field.py +0 -0
  194. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_spec_ref.py +0 -0
  195. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_azure_remote.py +0 -0
  196. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_bigquery.py +0 -0
  197. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_databricks.py +0 -0
  198. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_dataframe.py +0 -0
  199. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_delta.py +0 -0
  200. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_gcs_json_remote.py +0 -0
  201. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_kafka.py +0 -0
  202. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_kafka_remote.py +0 -0
  203. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_local_json.py +0 -0
  204. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_output_junit.py +0 -0
  205. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_parquet.py +0 -0
  206. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_postgres.py +0 -0
  207. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_quality.py +0 -0
  208. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_csv.py +0 -0
  209. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_delta.py +0 -0
  210. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_json.py +0 -0
  211. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_json_complex.py +0 -0
  212. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_json_multiple_models.py +0 -0
  213. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_s3_json_remote.py +0 -0
  214. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_snowflake.py +0 -0
  215. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_sqlserver.py +0 -0
  216. {datacontract_cli-0.10.27 → datacontract_cli-0.10.29}/tests/test_test_trino.py +0 -0
@@ -1,6 +1,6 @@
  Metadata-Version: 2.4
  Name: datacontract-cli
- Version: 0.10.27
+ Version: 0.10.29
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  License-Expression: MIT
@@ -28,7 +28,7 @@ Requires-Dist: python-dotenv<2.0.0,>=1.0.0
  Requires-Dist: boto3<2.0.0,>=1.34.41
  Requires-Dist: Jinja2<4.0.0,>=3.1.5
  Requires-Dist: jinja_partials<1.0.0,>=0.2.1
- Requires-Dist: datacontract-specification<2.0.0,>=1.1.1
+ Requires-Dist: datacontract-specification<2.0.0,>=1.2.0
  Requires-Dist: open-data-contract-standard<4.0.0,>=3.0.4
  Provides-Extra: avro
  Requires-Dist: avro==1.12.0; extra == "avro"
@@ -42,19 +42,21 @@ Provides-Extra: databricks
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
  Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
  Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
- Requires-Dist: databricks-sdk<0.51.0; extra == "databricks"
+ Requires-Dist: databricks-sdk<0.58.0; extra == "databricks"
+ Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
  Provides-Extra: iceberg
- Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
+ Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
  Provides-Extra: kafka
  Requires-Dist: datacontract-cli[avro]; extra == "kafka"
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "kafka"
+ Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "kafka"
  Provides-Extra: postgres
  Requires-Dist: soda-core-postgres<3.6.0,>=3.3.20; extra == "postgres"
  Provides-Extra: s3
  Requires-Dist: s3fs<2026.0.0,>=2025.2.0; extra == "s3"
  Requires-Dist: aiobotocore<2.23.0,>=2.17.0; extra == "s3"
  Provides-Extra: snowflake
- Requires-Dist: snowflake-connector-python[pandas]<3.15,>=3.6; extra == "snowflake"
+ Requires-Dist: snowflake-connector-python[pandas]<3.16,>=3.6; extra == "snowflake"
  Requires-Dist: soda-core-snowflake<3.6.0,>=3.3.20; extra == "snowflake"
  Provides-Extra: sqlserver
  Requires-Dist: soda-core-sqlserver<3.6.0,>=3.3.20; extra == "sqlserver"
@@ -69,8 +71,8 @@ Requires-Dist: pyarrow>=18.1.0; extra == "parquet"
  Provides-Extra: rdf
  Requires-Dist: rdflib==7.0.0; extra == "rdf"
  Provides-Extra: api
- Requires-Dist: fastapi==0.115.12; extra == "api"
- Requires-Dist: uvicorn==0.34.2; extra == "api"
+ Requires-Dist: fastapi==0.115.14; extra == "api"
+ Requires-Dist: uvicorn==0.35.0; extra == "api"
  Provides-Extra: protobuf
  Requires-Dist: grpcio-tools>=1.53; extra == "protobuf"
  Provides-Extra: all
@@ -79,15 +81,15 @@ Provides-Extra: dev
  Requires-Dist: datacontract-cli[all]; extra == "dev"
  Requires-Dist: httpx==0.28.1; extra == "dev"
  Requires-Dist: kafka-python; extra == "dev"
- Requires-Dist: moto==5.1.4; extra == "dev"
+ Requires-Dist: moto==5.1.6; extra == "dev"
  Requires-Dist: pandas>=2.1.0; extra == "dev"
  Requires-Dist: pre-commit<4.3.0,>=3.7.1; extra == "dev"
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-xdist; extra == "dev"
- Requires-Dist: pymssql==2.3.4; extra == "dev"
+ Requires-Dist: pymssql==2.3.6; extra == "dev"
  Requires-Dist: ruff; extra == "dev"
  Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.10.0; extra == "dev"
- Requires-Dist: trino==0.333.0; extra == "dev"
+ Requires-Dist: trino==0.335.0; extra == "dev"
  Dynamic: license-file

  # Data Contract CLI
@@ -399,30 +401,38 @@ Commands
  │ [default: datacontract.yaml] │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
  ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
- │ --schema TEXT The location (url or path) of the Data
- Contract Specification JSON Schema
- [default: None]
- --server TEXT The server configuration to run the
- schema and quality tests. Use the key of
- the server object in the data contract
- yaml file to refer to a server, e.g.,
- `production`, or `all` for all servers
- (default).
- [default: all]
- --publish TEXT The url to publish the results after the
- test
- [default: None]
- --output PATH Specify the file path where the test
- results should be written to (e.g.,
- './test-results/TEST-datacontract.xml').
- [default: None]
- --output-format [junit] The target format for the test results.
- [default: None]
- │ --logs --no-logs Print logs [default: no-logs]
- --ssl-verification --no-ssl-verification SSL verification when publishing the
- data contract.
- [default: ssl-verification]
- --help Show this message and exit.
+ │ --schema TEXT The location (url or path) of
+ the Data Contract Specification
+ JSON Schema
+ [default: None]
+ --server TEXT The server configuration to run
+ the schema and quality tests.
+ Use the key of the server object
+ in the data contract yaml file
+ to refer to a server, e.g.,
+ `production`, or `all` for all
+ servers (default).
+ [default: all]
+ --publish-test-results --no-publish-test-results Publish the results after the
+ test
+ [default:
+ no-publish-test-results]
+ --publish TEXT DEPRECATED. The url to publish
+ the results after the test.
+ [default: None]
+ │ --output PATH Specify the file path where the
+ test results should be written
+ to (e.g.,
+ './test-results/TEST-datacontra…
+ [default: None]
+ │ --output-format [junit] The target format for the test │
+ │ results. │
+ │ [default: None] │
+ │ --logs --no-logs Print logs [default: no-logs] │
+ │ --ssl-verification --no-ssl-verification SSL verification when publishing │
+ │ the data contract. │
+ │ [default: ssl-verification] │
+ │ --help Show this message and exit. │
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯

  ```
@@ -996,6 +1006,21 @@ Available export options:
  | `custom` | Export to Custom format with Jinja | ✅ |
  | Missing something? | Please create an issue on GitHub | TBD |

+ #### SQL
+
+ The `export` function converts a given data contract into a SQL data definition language (DDL).
+
+ ```shell
+ datacontract export datacontract.yaml --format sql --output output.sql
+ ```
+
+ If you use Databricks and an error is thrown when deploying the SQL DDLs with `variant` columns, set the following properties.
+
+ ```python
+ spark.conf.set("spark.databricks.delta.schema.typeCheck.enabled", "false")
+ from datacontract.model import data_contract_specification
+ data_contract_specification.DATACONTRACT_TYPES.append("variant")
+ ```

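For context, the workaround in the added snippet amounts to the following minimal sketch, assuming it runs somewhere an active `SparkSession` is available (for example a Databricks notebook). The helper function name is illustrative; only the two statements themselves come from the snippet above.

```python
# Hedged sketch of the Databricks `variant` workaround documented above.
# `spark` is assumed to be an active SparkSession provided by the environment.
from datacontract.model import data_contract_specification


def enable_variant_workaround(spark) -> None:
    # Relax Delta's schema type check so DDLs with `variant` columns can be deployed.
    spark.conf.set("spark.databricks.delta.schema.typeCheck.enabled", "false")
    # Register "variant" as an accepted data contract type before deploying the exported DDL.
    data_contract_specification.DATACONTRACT_TYPES.append("variant")
```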
  #### Great Expectations

@@ -1003,7 +1028,7 @@ The `export` function transforms a specified data contract into a comprehensive
  If the contract includes multiple models, you need to specify the names of the model you wish to export.

  ```shell
- datacontract export datacontract.yaml --format great-expectations --model orders
+ datacontract export datacontract.yaml --format great-expectations --model orders
  ```

  The export creates a list of expectations by utilizing:
@@ -1028,7 +1053,7 @@ To further customize the export, the following optional arguments are available:

  #### RDF

- The export function converts a given data contract into a RDF representation. You have the option to
+ The `export` function converts a given data contract into a RDF representation. You have the option to
  add a base_url which will be used as the default prefix to resolve relative IRIs inside the document.

  ```shell
@@ -1261,9 +1286,9 @@ FROM

  ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
  │ * --format [sql|avro|dbt|dbml|glue|jsonsc The format of the source file. │
- │ hema|bigquery|odcs|unity|spark [default: None] │
- │ |iceberg|parquet|csv|protobuf| [required] │
- │ excel]
+ │ hema|json|bigquery|odcs|unity| [default: None] │
+ spark|iceberg|parquet|csv|prot [required] │
+ obuf|excel]
  │ --output PATH Specify the file path where │
  │ the Data Contract will be │
  │ saved. If no path is provided, │
@@ -1273,6 +1298,10 @@ FROM
  │ --source TEXT The path to the file that │
  │ should be imported. │
  │ [default: None] │
+ │ --spec [datacontract_specification|od The format of the data │
+ │ cs] contract to import. │
+ │ [default: │
+ │ datacontract_specification] │
  │ --dialect TEXT The SQL dialect to use when │
  │ importing SQL files, e.g., │
  │ postgres, tsql, bigquery. │
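The format list above now includes `json` (backed by the new `datacontract/imports/json_importer.py`), and the new `--spec` option selects the data contract format to import. A hedged sketch of the equivalent Python call, with an illustrative file name; the string form of `format` and the `Spec` enum member mirror the CLI wiring shown later in this diff.

```python
# Hedged sketch (illustrative file name): use the new `json` import format and
# the documented `--spec` default through the Python API.
from datacontract.data_contract import DataContract
from datacontract.imports.importer import Spec

data_contract = DataContract().import_from_source(
    format="json",
    source="orders.json",
    spec=Spec.datacontract_specification,  # documented default of --spec
)
# The returned object is the imported data contract specification.
print(data_contract)
```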
@@ -1361,11 +1390,11 @@ Available import options:
  | `jsonschema` | Import from JSON Schemas | ✅ |
  | `odcs` | Import from Open Data Contract Standard (ODCS) | ✅ |
  | `parquet` | Import from Parquet File Metadata | ✅ |
- | `protobuf` | Import from Protobuf schemas | ✅ |
- | `spark` | Import from Spark StructTypes | ✅ |
+ | `protobuf` | Import from Protobuf schemas | ✅ |
+ | `spark` | Import from Spark StructTypes, Variant | ✅ |
  | `sql` | Import from SQL DDL | ✅ |
  | `unity` | Import from Databricks Unity Catalog | partial |
- | Missing something? | Please create an issue on GitHub | TBD |
+ | Missing something? | Please create an issue on GitHub | TBD |


  #### ODCS
@@ -1467,14 +1496,31 @@ datacontract import --format glue --source <database_name>

  #### Spark

- Importing from Spark table or view these must be created or accessible in the Spark context. Specify tables list in `source` parameter.
-
- Example:
+ When importing from a Spark table or view, it must be created or accessible in the Spark context. Specify the list of tables in the `source` parameter. If the `source` tables are registered as tables in Databricks and have table-level descriptions, those descriptions are also added to the Data Contract Specification.

  ```bash
+ # Example: Import Spark table(s) from Spark context
  datacontract import --format spark --source "users,orders"
  ```

+ ```python
+ # Example: Import Spark table
+ DataContract().import_from_source("spark", "users")
+ DataContract().import_from_source(format = "spark", source = "users")
+
+ # Example: Import Spark dataframe
+ DataContract().import_from_source("spark", "users", dataframe = df_user)
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user)
+
+ # Example: Import Spark table + table description
+ DataContract().import_from_source("spark", "users", description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", description = "description")
+
+ # Example: Import Spark dataframe + table description
+ DataContract().import_from_source("spark", "users", dataframe = df_user, description = "description")
+ DataContract().import_from_source(format = "spark", source = "users", dataframe = df_user, description = "description")
+ ```
+
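To make the dataframe-based variant above concrete, here is a minimal, hedged sketch. It assumes a local PySpark installation; the dataframe contents and the "User accounts" description are illustrative.

```python
# Hedged sketch of importing a Spark dataframe as a data contract model.
from pyspark.sql import SparkSession

from datacontract.data_contract import DataContract

spark = SparkSession.builder.appName("datacontract-import-example").getOrCreate()

# Illustrative dataframe standing in for the `users` table.
df_users = spark.createDataFrame(
    [(1, "alice"), (2, "bob")],
    schema="id INT, username STRING",
)

# Import the dataframe's schema as a model named "users" with a table-level description.
result = DataContract().import_from_source(
    format="spark",
    source="users",
    dataframe=df_users,
    description="User accounts",
)
# The returned object is the imported data contract specification.
print(result)
```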
  #### DBML

  Importing from DBML Documents.
@@ -1666,6 +1712,8 @@ datacontract catalog --files "*.odcs.yaml"
  information.
  To connect to servers (such as a Snowflake data source), set the credentials as environment
  variables as documented in https://cli.datacontract.com/#test
+ It is possible to run the API with extra arguments for `uvicorn.run()` as keyword arguments, e.g.:
+ `datacontract api --port 1234 --root_path /datacontract`.

  ╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
  │ --port INTEGER Bind socket to this port. [default: 4242] │
@@ -1932,7 +1980,7 @@ if __name__ == "__main__":
  Output

  ```yaml
- dataContractSpecification: 1.1.0
+ dataContractSpecification: 1.2.0
  id: uuid-custom
  info:
    title: my_custom_imported_data
@@ -1951,22 +1999,9 @@ models:
  ```
  ## Development Setup

- Python base interpreter should be 3.11.x (unless working on 3.12 release candidate).
-
- ```bash
- # create venv
- python3.11 -m venv venv
- source venv/bin/activate
-
- # Install Requirements
- pip install --upgrade pip setuptools wheel
- pip install -e '.[dev]'
- pre-commit install
- pre-commit run --all-files
- pytest
- ```
-
- ### Use uv (recommended)
+ - Install [uv](https://docs.astral.sh/uv/)
+ - Python base interpreter should be 3.11.x.
+ - Docker engine must be running to execute the tests.

  ```bash
  # make sure uv is installed
@@ -2048,6 +2083,7 @@ We are happy to receive your contributions. Propose your change in an issue or d
  - [INNOQ](https://innoq.com)
  - [Data Catering](https://data.catering/)
  - [Oliver Wyman](https://www.oliverwyman.com/)
+ - [dmTECH](https://www.dmtech.tech/de)
  - And many more. To add your company, please create a pull request.

  ## Related Tools
README.md: the diff of README.md repeats the long-description changes shown in the PKG-INFO diff above (the reworked `test` options with the new `--publish-test-results` flag and the deprecated `--publish` url, the new SQL export and Spark import sections, the `json` import format and `--spec` option, the `uvicorn.run()` keyword-argument note for `datacontract api`, the `dataContractSpecification: 1.2.0` bump, the uv-based development setup, and the dmTECH supporter entry).
datacontract/api.py:

@@ -10,7 +10,7 @@ from fastapi.security.api_key import APIKeyHeader
  from datacontract.data_contract import DataContract, ExportFormat
  from datacontract.model.run import Run

- DATA_CONTRACT_EXAMPLE_PAYLOAD = """dataContractSpecification: 1.1.0
+ DATA_CONTRACT_EXAMPLE_PAYLOAD = """dataContractSpecification: 1.2.0
  id: urn:datacontract:checkout:orders-latest
  info:
    title: Orders Latest
datacontract/cli.py:

@@ -11,7 +11,7 @@ from typing_extensions import Annotated

  from datacontract.catalog.catalog import create_data_contract_html, create_index_html
  from datacontract.data_contract import DataContract, ExportFormat
- from datacontract.imports.importer import ImportFormat
+ from datacontract.imports.importer import ImportFormat, Spec
  from datacontract.init.init_template import get_init_template
  from datacontract.integration.datamesh_manager import (
      publish_data_contract_to_datamesh_manager,
@@ -126,7 +126,8 @@ def test(
              "servers (default)."
          ),
      ] = "all",
-     publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
+     publish_test_results: Annotated[bool, typer.Option(help="Publish the results after the test")] = False,
+     publish: Annotated[str, typer.Option(help="DEPRECATED. The url to publish the results after the test.")] = None,
      output: Annotated[
          Path,
          typer.Option(
@@ -149,6 +150,7 @@ def test(
      run = DataContract(
          data_contract_file=location,
          schema_location=schema,
+         publish_test_results=publish_test_results,
          publish_url=publish,
          server=server,
          ssl_verification=ssl_verification,
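A hedged sketch of the new `publish_test_results` argument used through the Python API, mirroring the constructor call above; the `datacontract.yaml` file name and the `test()` call are assumptions based on the library's documented usage rather than lines in this diff.

```python
# Hedged sketch: run the contract tests and publish the results.
from datacontract.data_contract import DataContract

data_contract = DataContract(
    data_contract_file="datacontract.yaml",  # illustrative file name
    publish_test_results=True,               # new flag; supersedes the deprecated --publish <url>
)
run = data_contract.test()  # assumed entry point for executing schema and quality checks
print(run)  # inspect the Run object for the individual check results
```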
@@ -246,6 +248,10 @@ def import_(
          Optional[str],
          typer.Option(help="The path to the file that should be imported."),
      ] = None,
+     spec: Annotated[
+         Spec,
+         typer.Option(help="The format of the data contract to import. "),
+     ] = Spec.datacontract_specification,
      dialect: Annotated[
          Optional[str],
          typer.Option(help="The SQL dialect to use when importing SQL files, e.g., postgres, tsql, bigquery."),
@@ -265,7 +271,7 @@ def import_(
          ),
      ] = None,
      unity_table_full_name: Annotated[
-         Optional[str], typer.Option(help="Full name of a table in the unity catalog")
+         Optional[List[str]], typer.Option(help="Full name of a table in the unity catalog")
      ] = None,
      dbt_model: Annotated[
          Optional[List[str]],
@@ -312,6 +318,7 @@ def import_(
      result = DataContract().import_from_source(
          format=format,
          source=source,
+         spec=spec,
          template=template,
          schema=schema,
          dialect=dialect,
@@ -462,8 +469,26 @@ def diff(
      console.print(result.changelog_str())


- @app.command()
+ def _get_uvicorn_arguments(port: int, host: str, context: typer.Context) -> dict:
+     """
+     Take the default datacontract uvicorn arguments and merge them with the
+     extra arguments passed to the command to start the API.
+     """
+     default_args = {
+         "app": "datacontract.api:app",
+         "port": port,
+         "host": host,
+         "reload": True,
+     }
+
+     # Create a list of the extra arguments, remove the leading -- from the cli arguments
+     trimmed_keys = list(map(lambda x : str(x).replace("--", ""),context.args[::2]))
+     # Merge the two dicts and return them as one dict
+     return default_args | dict(zip(trimmed_keys, context.args[1::2]))
+
+ @app.command(context_settings={"allow_extra_args": True, "ignore_unknown_options": True})
  def api(
+     ctx: Annotated[typer.Context, typer.Option(help="Extra arguments to pass to uvicorn.run().")],
      port: Annotated[int, typer.Option(help="Bind socket to this port.")] = 4242,
      host: Annotated[
          str, typer.Option(help="Bind socket to this host. Hint: For running in docker, set it to 0.0.0.0")
@@ -481,6 +506,9 @@ def api(

      To connect to servers (such as a Snowflake data source), set the credentials as environment variables as documented in
      https://cli.datacontract.com/#test
+
+     It is possible to run the API with extra arguments for `uvicorn.run()` as keyword arguments, e.g.:
+     `datacontract api --port 1234 --root_path /datacontract`.
      """
      import uvicorn
      from uvicorn.config import LOGGING_CONFIG
@@ -488,7 +516,11 @@ def api(
      log_config = LOGGING_CONFIG
      log_config["root"] = {"level": "INFO"}

-     uvicorn.run(app="datacontract.api:app", port=port, host=host, reload=True, log_config=LOGGING_CONFIG)
+     uvicorn_args = _get_uvicorn_arguments(port, host, ctx)
+     # Add the log config
+     uvicorn_args["log_config"] = log_config
+     # Run uvicorn
+     uvicorn.run(**uvicorn_args)


  def _print_logs(run):
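To illustrate the merging in `_get_uvicorn_arguments` above: even-indexed extra CLI tokens become keyword names (with the leading `--` stripped) and odd-indexed tokens become their values, merged over the defaults. A small standalone sketch with an illustrative argument list:

```python
# Standalone illustration of the argument merging shown above (no typer needed).
default_args = {
    "app": "datacontract.api:app",
    "port": 1234,
    "host": "127.0.0.1",
    "reload": True,
}

# What the CLI would collect for: datacontract api --port 1234 --root_path /datacontract
extra_args = ["--root_path", "/datacontract"]

# Strip the leading "--" from the option names and zip them with their values.
trimmed_keys = [str(arg).replace("--", "") for arg in extra_args[::2]]
uvicorn_kwargs = default_args | dict(zip(trimmed_keys, extra_args[1::2]))

print(uvicorn_kwargs)
# {'app': 'datacontract.api:app', 'port': 1234, 'host': '127.0.0.1', 'reload': True, 'root_path': '/datacontract'}
```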