datacontract-cli 0.10.30__tar.gz → 0.10.32__tar.gz

This diff compares two publicly available versions of the package as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions exactly as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli has been flagged as potentially problematic.

Files changed (215)
  1. {datacontract_cli-0.10.30/datacontract_cli.egg-info → datacontract_cli-0.10.32}/PKG-INFO +2 -5
  2. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/README.md +0 -3
  3. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/spark_converter.py +3 -1
  4. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/excel_importer.py +261 -13
  5. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/sql_importer.py +2 -0
  6. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/unity_importer.py +2 -1
  7. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/output/junit_test_results.py +3 -3
  8. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32/datacontract_cli.egg-info}/PKG-INFO +2 -5
  9. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract_cli.egg-info/requires.txt +1 -1
  10. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/pyproject.toml +2 -2
  11. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_spark.py +1 -1
  12. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/LICENSE +0 -0
  13. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/MANIFEST.in +0 -0
  14. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/__init__.py +0 -0
  15. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/api.py +0 -0
  16. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/breaking/breaking.py +0 -0
  17. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/breaking/breaking_change.py +0 -0
  18. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/breaking/breaking_rules.py +0 -0
  19. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/catalog/catalog.py +0 -0
  20. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/cli.py +0 -0
  21. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/data_contract.py +0 -0
  22. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/__init__.py +0 -0
  23. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/data_contract_checks.py +0 -0
  24. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/data_contract_test.py +0 -0
  25. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +0 -0
  26. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/datacontract/check_that_datacontract_file_exists.py +0 -0
  27. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/fastjsonschema/check_jsonschema.py +0 -0
  28. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/fastjsonschema/s3/s3_read_files.py +0 -0
  29. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/__init__.py +0 -0
  30. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/check_soda_execute.py +0 -0
  31. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/bigquery.py +0 -0
  32. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/databricks.py +0 -0
  33. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/duckdb_connection.py +0 -0
  34. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/kafka.py +0 -0
  35. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/postgres.py +0 -0
  36. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/snowflake.py +0 -0
  37. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/sqlserver.py +0 -0
  38. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/engines/soda/connections/trino.py +0 -0
  39. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/__init__.py +0 -0
  40. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/avro_converter.py +0 -0
  41. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/avro_idl_converter.py +0 -0
  42. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/bigquery_converter.py +0 -0
  43. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/custom_converter.py +0 -0
  44. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/data_caterer_converter.py +0 -0
  45. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/dbml_converter.py +0 -0
  46. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/dbt_converter.py +0 -0
  47. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/dcs_exporter.py +0 -0
  48. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/duckdb_type_converter.py +0 -0
  49. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/exporter.py +0 -0
  50. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/exporter_factory.py +0 -0
  51. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/go_converter.py +0 -0
  52. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/great_expectations_converter.py +0 -0
  53. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/html_exporter.py +0 -0
  54. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/iceberg_converter.py +0 -0
  55. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/jsonschema_converter.py +0 -0
  56. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/markdown_converter.py +0 -0
  57. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/mermaid_exporter.py +0 -0
  58. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/odcs_v3_exporter.py +0 -0
  59. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/pandas_type_converter.py +0 -0
  60. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/protobuf_converter.py +0 -0
  61. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/pydantic_converter.py +0 -0
  62. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/rdf_converter.py +0 -0
  63. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/sodacl_converter.py +0 -0
  64. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/sql_converter.py +0 -0
  65. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/sql_type_converter.py +0 -0
  66. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/sqlalchemy_converter.py +0 -0
  67. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/export/terraform_converter.py +0 -0
  68. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/avro_importer.py +0 -0
  69. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/bigquery_importer.py +0 -0
  70. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/csv_importer.py +0 -0
  71. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/dbml_importer.py +0 -0
  72. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/dbt_importer.py +0 -0
  73. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/glue_importer.py +0 -0
  74. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/iceberg_importer.py +0 -0
  75. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/importer.py +0 -0
  76. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/importer_factory.py +0 -0
  77. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/json_importer.py +0 -0
  78. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/jsonschema_importer.py +0 -0
  79. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/odcs_importer.py +0 -0
  80. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/odcs_v3_importer.py +0 -0
  81. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/parquet_importer.py +0 -0
  82. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/protobuf_importer.py +0 -0
  83. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/imports/spark_importer.py +0 -0
  84. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/init/init_template.py +0 -0
  85. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/integration/datamesh_manager.py +0 -0
  86. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/files.py +0 -0
  87. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/lint.py +0 -0
  88. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/__init__.py +0 -0
  89. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/description_linter.py +0 -0
  90. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/field_pattern_linter.py +0 -0
  91. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/field_reference_linter.py +0 -0
  92. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/notice_period_linter.py +0 -0
  93. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/linters/valid_constraints_linter.py +0 -0
  94. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/resolve.py +0 -0
  95. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/resources.py +0 -0
  96. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/schema.py +0 -0
  97. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/lint/urls.py +0 -0
  98. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/model/data_contract_specification/__init__.py +0 -0
  99. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/model/exceptions.py +0 -0
  100. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/model/odcs.py +0 -0
  101. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/model/run.py +0 -0
  102. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/output/__init__.py +0 -0
  103. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/output/output_format.py +0 -0
  104. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/output/test_results_writer.py +0 -0
  105. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/py.typed +0 -0
  106. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/schemas/datacontract-1.1.0.init.yaml +0 -0
  107. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/schemas/datacontract-1.1.0.schema.json +0 -0
  108. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/schemas/datacontract-1.2.0.init.yaml +0 -0
  109. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/schemas/datacontract-1.2.0.schema.json +0 -0
  110. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/schemas/odcs-3.0.1.schema.json +0 -0
  111. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/datacontract.html +0 -0
  112. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/datacontract_odcs.html +0 -0
  113. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/index.html +0 -0
  114. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/datacontract_information.html +0 -0
  115. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/datacontract_servicelevels.html +0 -0
  116. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/datacontract_terms.html +0 -0
  117. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/definition.html +0 -0
  118. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/example.html +0 -0
  119. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/model_field.html +0 -0
  120. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/quality.html +0 -0
  121. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/partials/server.html +0 -0
  122. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract/templates/style/output.css +0 -0
  123. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract_cli.egg-info/SOURCES.txt +0 -0
  124. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract_cli.egg-info/dependency_links.txt +0 -0
  125. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract_cli.egg-info/entry_points.txt +0 -0
  126. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/datacontract_cli.egg-info/top_level.txt +0 -0
  127. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/setup.cfg +0 -0
  128. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_api.py +0 -0
  129. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_breaking.py +0 -0
  130. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_catalog.py +0 -0
  131. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_changelog.py +0 -0
  132. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_cli.py +0 -0
  133. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_data_contract_checks.py +0 -0
  134. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_data_contract_specification.py +0 -0
  135. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_description_linter.py +0 -0
  136. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_documentation_linter.py +0 -0
  137. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_download_datacontract_file.py +0 -0
  138. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_duckdb_json.py +0 -0
  139. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_avro.py +0 -0
  140. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_avro_idl.py +0 -0
  141. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_bigquery.py +0 -0
  142. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_complex_data_contract.py +0 -0
  143. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_custom.py +0 -0
  144. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_custom_exporter.py +0 -0
  145. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_data_caterer.py +0 -0
  146. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_dbml.py +0 -0
  147. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_dbt_models.py +0 -0
  148. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_dbt_sources.py +0 -0
  149. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_dbt_staging_sql.py +0 -0
  150. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_go.py +0 -0
  151. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_great_expectations.py +0 -0
  152. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_html.py +0 -0
  153. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_iceberg.py +0 -0
  154. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_jsonschema.py +0 -0
  155. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_markdown.py +0 -0
  156. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_mermaid.py +0 -0
  157. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_odcs_v3.py +0 -0
  158. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_protobuf.py +0 -0
  159. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_pydantic.py +0 -0
  160. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_rdf.py +0 -0
  161. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_sodacl.py +0 -0
  162. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_spark.py +0 -0
  163. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_sql.py +0 -0
  164. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_sql_query.py +0 -0
  165. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_sqlalchemy.py +0 -0
  166. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_export_terraform.py +0 -0
  167. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_field_constraint_linter.py +0 -0
  168. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_field_pattern_linter.py +0 -0
  169. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_field_reference_linter.py +0 -0
  170. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_avro.py +0 -0
  171. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_bigquery.py +0 -0
  172. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_csv.py +0 -0
  173. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_dbml.py +0 -0
  174. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_dbt.py +0 -0
  175. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_excel.py +0 -0
  176. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_glue.py +0 -0
  177. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_iceberg.py +0 -0
  178. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_json.py +0 -0
  179. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_jsonschema.py +0 -0
  180. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_odcs_v3.py +0 -0
  181. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_parquet.py +0 -0
  182. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_protobuf.py +0 -0
  183. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_sql_postgres.py +0 -0
  184. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_sql_sqlserver.py +0 -0
  185. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_import_unity_file.py +0 -0
  186. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_integration_datameshmanager.py +0 -0
  187. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_lint.py +0 -0
  188. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_notice_period_linter.py +0 -0
  189. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_resolve.py +0 -0
  190. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_roundtrip_jsonschema.py +0 -0
  191. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_spec_fields_field.py +0 -0
  192. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_spec_ref.py +0 -0
  193. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_azure_remote.py +0 -0
  194. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_bigquery.py +0 -0
  195. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_databricks.py +0 -0
  196. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_dataframe.py +0 -0
  197. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_delta.py +0 -0
  198. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_gcs_csv_remote.py +0 -0
  199. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_gcs_json_remote.py +0 -0
  200. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_kafka.py +0 -0
  201. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_kafka_remote.py +0 -0
  202. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_local_json.py +0 -0
  203. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_output_junit.py +0 -0
  204. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_parquet.py +0 -0
  205. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_postgres.py +0 -0
  206. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_quality.py +0 -0
  207. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_csv.py +0 -0
  208. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_delta.py +0 -0
  209. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_json.py +0 -0
  210. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_json_complex.py +0 -0
  211. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_json_multiple_models.py +0 -0
  212. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_s3_json_remote.py +0 -0
  213. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_snowflake.py +0 -0
  214. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_sqlserver.py +0 -0
  215. {datacontract_cli-0.10.30 → datacontract_cli-0.10.32}/tests/test_test_trino.py +0 -0

{datacontract_cli-0.10.30/datacontract_cli.egg-info → datacontract_cli-0.10.32}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datacontract-cli
-Version: 0.10.30
+Version: 0.10.32
 Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
 Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
 License-Expression: MIT
@@ -42,7 +42,7 @@ Provides-Extra: databricks
 Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
-Requires-Dist: databricks-sdk<0.59.0; extra == "databricks"
+Requires-Dist: databricks-sdk<0.60.0; extra == "databricks"
 Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
 Provides-Extra: iceberg
 Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
@@ -1018,8 +1018,6 @@ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs w
 
 ```shell
 spark.conf.set("spark.databricks.delta.schema.typeCheck.enabled", "false")
-from datacontract.model import data_contract_specification
-data_contract_specification.DATACONTRACT_TYPES.append("variant")
 ```
 
 #### Great Expectations
@@ -2083,7 +2081,6 @@ We are happy to receive your contributions. Propose your change in an issue or d
 - [INNOQ](https://innoq.com)
 - [Data Catering](https://data.catering/)
 - [Oliver Wyman](https://www.oliverwyman.com/)
-- [dmTECH](https://www.dmtech.tech/de)
 - And many more. To add your company, please create a pull request.
 
 ## Related Tools

README.md
@@ -924,8 +924,6 @@ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs w
 
 ```shell
 spark.conf.set("spark.databricks.delta.schema.typeCheck.enabled", "false")
-from datacontract.model import data_contract_specification
-data_contract_specification.DATACONTRACT_TYPES.append("variant")
 ```
 
 #### Great Expectations
@@ -1989,7 +1987,6 @@ We are happy to receive your contributions. Propose your change in an issue or d
 - [INNOQ](https://innoq.com)
 - [Data Catering](https://data.catering/)
 - [Oliver Wyman](https://www.oliverwyman.com/)
-- [dmTECH](https://www.dmtech.tech/de)
 - And many more. To add your company, please create a pull request.
 
 ## Related Tools

datacontract/export/spark_converter.py
@@ -126,6 +126,8 @@ def to_spark_data_type(field: Field) -> types.DataType:
         return types.StructType(to_struct_type(field.fields))
     if field_type == "map":
         return types.MapType(to_spark_data_type(field.keys), to_spark_data_type(field.values))
+    if field_type == "variant":
+        return types.VariantType()
     if field_type in ["string", "varchar", "text"]:
         return types.StringType()
     if field_type in ["number", "decimal", "numeric"]:
@@ -150,7 +152,7 @@ def to_spark_data_type(field: Field) -> types.DataType:
         return types.DateType()
     if field_type == "bytes":
         return types.BinaryType()
-    return types.BinaryType()
+    return types.StringType()  # default if no condition is met


 def print_schema(dtype: types.DataType) -> str:
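
For context, a minimal sketch of what this mapping change means for the Spark exporter. It is not part of the diff; it assumes the `Field` model and `to_spark_data_type` shown above, and a PySpark build that actually ships `types.VariantType`:

```python
# Illustrative only: Field and to_spark_data_type are the objects shown in the diff above;
# the "geometry" type is a made-up example of an unmapped type.
from datacontract.export.spark_converter import to_spark_data_type
from datacontract.model.data_contract_specification import Field

print(to_spark_data_type(Field(type="variant")))   # now VariantType() instead of the fallback
print(to_spark_data_type(Field(type="geometry")))  # unmapped types now fall back to StringType() (was BinaryType())
```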

datacontract/imports/excel_importer.py
@@ -1,5 +1,6 @@
 import logging
 import os
+from decimal import Decimal
 from typing import Any, Dict, List, Optional
 
 import openpyxl
@@ -80,8 +81,16 @@ def import_excel_as_odcs(excel_file_path: str) -> OpenDataContractStandard:
     if tags_str:
         tags = [tag.strip() for tag in tags_str.split(",") if tag.strip()]
 
-    # Import other components
+    # Import quality data first (standalone from schemas)
+    quality_map = import_quality(workbook)
+
+    # Import schemas
     schemas = import_schemas(workbook)
+
+    # Attach quality to schemas and properties
+    schemas_with_quality = attach_quality_to_schemas(schemas, quality_map)
+
+    # Import other components
     support = import_support(workbook)
     team = import_team(workbook)
     roles = import_roles(workbook)
@@ -103,7 +112,7 @@ def import_excel_as_odcs(excel_file_path: str) -> OpenDataContractStandard:
         tenant=get_cell_value_by_name(workbook, "tenant"),
         description=description,
         tags=tags,
-        schema=schemas,
+        schema=schemas_with_quality,
         support=support,
         price=price,
         team=team,
@@ -150,7 +159,7 @@ def import_schemas(workbook) -> Optional[List[SchemaObject]]:
             dataGranularityDescription=get_cell_value_by_name_in_sheet(sheet, "schema.dataGranularityDescription"),
             authoritativeDefinitions=None,
             properties=import_properties(sheet),
-            quality=None,
+            quality=None,  # Quality will be attached later
            customProperties=None,
            tags=None,
        )
@@ -230,16 +239,8 @@ def import_properties(sheet) -> Optional[List[SchemaProperty]]:
            )
        ]
 
-        # Quality
-        quality_type = get_cell_value(row, headers.get("quality type"))
-        quality_description = get_cell_value(row, headers.get("quality description"))
-        if quality_type and quality_description:
-            property_obj.quality = [
-                DataQuality(
-                    type=quality_type,
-                    description=quality_description,
-                )
-            ]
+        # Quality will be attached later via quality_map
+        property_obj.quality = None
 
        # Transform sources
        transform_sources = get_cell_value(row, headers.get("transform sources"))
@@ -853,3 +854,250 @@ def parse_property_value(value: str) -> Any:
     except (ValueError, TypeError, AttributeError):
         # If conversion fails, return original string
         return value
+
+
+def import_quality(workbook: Workbook) -> Dict[str, List[DataQuality]]:
+    """
+    Import quality data from Quality sheet and organize by schema.property key
+
+    Returns:
+        Dictionary mapping schema.property keys to lists of DataQuality objects
+    """
+    try:
+        quality_sheet = workbook["Quality"]
+        if not quality_sheet:
+            return {}
+    except KeyError:
+        logger.warning("Quality sheet not found")
+        return {}
+
+    try:
+        quality_range = get_range_by_name_in_workbook(workbook, "quality")
+        if not quality_range:
+            logger.warning("Quality range not found")
+            return {}
+
+        quality_header_row_index = quality_range[0] - 1
+        headers = get_headers_from_header_row(quality_sheet, quality_header_row_index)
+
+        quality_map = {}
+
+        for row_idx in range(quality_range[0], quality_range[1]):
+            if len(list(quality_sheet.rows)) < row_idx + 1:
+                break
+            row = list(quality_sheet.rows)[row_idx]
+
+            # Extract quality fields from row
+            schema_name = get_cell_value(row, headers.get("schema"))
+            property_name = get_cell_value(row, headers.get("property"))
+            quality_type = get_cell_value(row, headers.get("quality type"))
+            description = get_cell_value(row, headers.get("description"))
+            rule = get_cell_value(row, headers.get("rule (library)"))
+            query = get_cell_value(row, headers.get("query (sql)"))
+            engine = get_cell_value(row, headers.get("quality engine (custom)"))
+            implementation = get_cell_value(row, headers.get("implementation (custom)"))
+            severity = get_cell_value(row, headers.get("severity"))
+            scheduler = get_cell_value(row, headers.get("scheduler"))
+            schedule = get_cell_value(row, headers.get("schedule"))
+            threshold_operator = get_cell_value(row, headers.get("threshold operator"))
+            threshold_value = get_cell_value(row, headers.get("threshold value"))
+
+            # Skip if no schema name or insufficient quality data
+            if not schema_name or (not quality_type and not description and not rule):
+                continue
+
+            # Parse threshold values based on operator
+            threshold_dict = parse_threshold_values(threshold_operator, threshold_value)
+
+            # Create DataQuality object with parsed thresholds
+            quality = DataQuality(
+                name=None,
+                description=description,
+                type=quality_type,
+                rule=rule,
+                unit=None,
+                validValues=None,
+                query=query,
+                engine=engine,
+                implementation=implementation,
+                dimension=None,
+                method=None,
+                severity=severity,
+                businessImpact=None,
+                customProperties=None,
+                authoritativeDefinitions=None,
+                tags=None,
+                scheduler=scheduler,
+                schedule=schedule,
+                **threshold_dict,  # Unpack threshold values
+            )
+
+            # Create key for mapping - use schema.property format
+            key = schema_name if not property_name else f"{schema_name}.{property_name}"
+
+            if key not in quality_map:
+                quality_map[key] = []
+            quality_map[key].append(quality)
+
+    except Exception as e:
+        logger.warning(f"Error importing quality: {str(e)}")
+        return {}
+
+    return quality_map
+
+
+def parse_threshold_values(threshold_operator: str, threshold_value: str) -> Dict[str, Any]:
+    """
+    Parse threshold operator and value into DataQuality threshold fields
+
+    Args:
+        threshold_operator: The threshold operator (e.g., "mustBe", "mustBeBetween")
+        threshold_value: The threshold value (string representation)
+
+    Returns:
+        Dictionary with appropriate threshold fields set
+    """
+    threshold_dict = {}
+
+    if not threshold_operator or not threshold_value:
+        return threshold_dict
+
+    # Parse threshold values based on operator
+    if threshold_operator in ["mustBeBetween", "mustNotBeBetween"]:
+        # Parse "[value1, value2]" format
+        if threshold_value.startswith("[") and threshold_value.endswith("]"):
+            content = threshold_value[1:-1]  # Remove brackets
+            try:
+                values = [Decimal(v.strip()) for v in content.split(",") if v.strip()]
+                if len(values) >= 2:
+                    threshold_dict[threshold_operator] = values[:2]  # Take first two values
+            except (ValueError, TypeError) as e:
+                logger.warning(f"Failed to parse between values: {threshold_value}, error: {e}")
+    else:
+        # Single value for other operators
+        try:
+            # Try to parse as number
+            if threshold_value.replace(".", "").replace("-", "").isdigit():
+                value = Decimal(threshold_value)
+                threshold_dict[threshold_operator] = value
+        except (ValueError, TypeError) as e:
+            logger.warning(f"Failed to parse threshold value: {threshold_value}, error: {e}")
+
+    return threshold_dict
+
+
+def attach_quality_to_schemas(
+    schemas: Optional[List[SchemaObject]], quality_map: Dict[str, List[DataQuality]]
+) -> Optional[List[SchemaObject]]:
+    """
+    Attach quality attributes to schemas and their properties based on quality_map
+
+    Args:
+        schemas: List of schema objects
+        quality_map: Dictionary mapping schema.property keys to quality lists
+
+    Returns:
+        List of schema objects with quality attached
+    """
+    if not schemas:
+        return None
+
+    updated_schemas = []
+
+    for schema in schemas:
+        schema_name = schema.name
+        if not schema_name:
+            updated_schemas.append(schema)
+            continue
+
+        # Get schema-level quality attributes
+        schema_quality = quality_map.get(schema_name)
+        if schema_quality:
+            schema.quality = schema_quality
+
+        # Attach quality to properties
+        if schema.properties:
+            schema.properties = attach_quality_to_properties(schema.properties, schema_name, quality_map)
+
+        updated_schemas.append(schema)
+
+    return updated_schemas
+
+
+def attach_quality_to_properties(
+    properties: List[SchemaProperty], schema_name: str, quality_map: Dict[str, List[DataQuality]], prefix: str = ""
+) -> List[SchemaProperty]:
+    """
+    Recursively attach quality attributes to properties and nested properties
+
+    Args:
+        properties: List of property objects
+        schema_name: Name of the parent schema
+        quality_map: Dictionary mapping schema.property keys to quality lists
+        prefix: Current property path prefix for nested properties
+
+    Returns:
+        List of property objects with quality attached
+    """
+    updated_properties = []
+
+    for prop in properties:
+        property_name = prop.name
+        if not property_name:
+            updated_properties.append(prop)
+            continue
+
+        # Build full property path
+        full_property_name = f"{prefix}.{property_name}" if prefix else property_name
+        quality_key = f"{schema_name}.{full_property_name}"
+
+        # Get quality for this property
+        property_quality = quality_map.get(quality_key)
+        if property_quality:
+            prop.quality = property_quality
+
+        # Handle nested properties
+        if prop.properties:
+            prop.properties = attach_quality_to_properties(
+                prop.properties, schema_name, quality_map, full_property_name
+            )
+
+        # Handle array items
+        if prop.items:
+            items_quality_key = f"{schema_name}.{full_property_name}.items"
+            items_quality = quality_map.get(items_quality_key)
+            if items_quality:
+                prop.items.quality = items_quality
+
+            # Handle nested properties in array items
+            if prop.items.properties:
+                prop.items.properties = attach_quality_to_properties(
+                    prop.items.properties, schema_name, quality_map, f"{full_property_name}.items"
+                )
+
+        updated_properties.append(prop)
+
+    return updated_properties
+
+
+def get_headers_from_header_row(sheet: Worksheet, header_row_index: int) -> Dict[str, int]:
+    """
+    Get headers from the first row and map them to column indices
+
+    Args:
+        sheet: The worksheet
+        header_row_index: 0-based row index of the header row
+
+    Returns:
+        Dictionary mapping header names (lowercase) to column indices
+    """
+    headers = {}
+    try:
+        header_row = list(sheet.rows)[header_row_index]
+        for i, cell in enumerate(header_row):
+            if cell.value:
+                headers[str(cell.value).lower().strip()] = i
+    except (IndexError, AttributeError) as e:
+        logger.warning(f"Error getting headers from row {header_row_index}: {e}")
+
+    return headers
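
To make the new "schema" / "schema.property" keying scheme concrete, here is a small illustrative sketch (not part of the diff). The import path for the ODCS model classes and the specific quality fields used below are assumptions; they follow the `DataQuality`, `SchemaObject`, and `SchemaProperty` names the importer already works with:

```python
# Hypothetical usage sketch of attach_quality_to_schemas; the model import path,
# example values, and field choices are assumptions, not taken from the diff.
from open_data_contract_standard.model import DataQuality, SchemaObject, SchemaProperty

from datacontract.imports.excel_importer import attach_quality_to_schemas

# Keys follow the "schema" or "schema.property" convention built by import_quality()
quality_map = {
    "orders": [DataQuality(type="sql", query="SELECT COUNT(*) FROM orders", mustBeGreaterThan=0)],
    "orders.order_id": [DataQuality(type="library", rule="duplicateCount", mustBe=0)],
}

schemas = [SchemaObject(name="orders", properties=[SchemaProperty(name="order_id")])]
schemas = attach_quality_to_schemas(schemas, quality_map)

print(schemas[0].quality)                # schema-level checks ("orders")
print(schemas[0].properties[0].quality)  # property-level checks ("orders.order_id")
```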

datacontract/imports/sql_importer.py
@@ -230,6 +230,8 @@ def map_type_from_sql(sql_type: str) -> str | None:
         return "int"
     elif sql_type_normed.startswith("float"):
         return "float"
+    elif sql_type_normed.startswith("double"):
+        return "double"
     elif sql_type_normed.startswith("decimal"):
         return "decimal"
     elif sql_type_normed.startswith("numeric"):
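
A short illustrative sketch of the new branch (assuming, as the surrounding elif chain suggests, that `map_type_from_sql` lowercases the SQL type before matching):

```python
# Illustrative only; the exact normalization inside map_type_from_sql is assumed.
from datacontract.imports.sql_importer import map_type_from_sql

print(map_type_from_sql("DOUBLE"))            # "double" (new in this release)
print(map_type_from_sql("double precision"))  # "double", since the check is startswith("double")
print(map_type_from_sql("FLOAT"))             # still "float" via the existing branch
```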

datacontract/imports/unity_importer.py
@@ -200,7 +200,8 @@ def import_table_fields(columns: List[ColumnInfo]) -> dict[str, Field]:
 
 def _to_field(column: ColumnInfo) -> Field:
     field = Field()
-    if column.type_name is not None:
+    # The second condition evaluates for complex types (e.g. variant)
+    if column.type_name is not None or (column.type_name is None and column.type_text is not None):
         sql_type = str(column.type_text)
         field.type = map_type_from_sql(sql_type)
         physical_type_key = to_physical_type_key("databricks")
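
For illustration, a sketch of the case the relaxed condition now covers: a Unity Catalog column whose `type_name` is unset but whose `type_text` is present. Constructing `ColumnInfo` by hand and calling the internal helper `_to_field` directly is only for demonstration; in practice these objects come from the Databricks SDK:

```python
# Hypothetical demonstration; the example column and its output are assumptions.
from databricks.sdk.service.catalog import ColumnInfo

from datacontract.imports.unity_importer import _to_field

col = ColumnInfo(name="payload", type_name=None, type_text="variant")
field = _to_field(col)
print(field.type)  # previously left unset because type_name was None; now derived from type_text
```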

datacontract/output/junit_test_results.py
@@ -56,19 +56,19 @@ def write_junit_test_results(run: Run, console, output_path: Path):
                 type=check.category if check.category else "General",
             )
             error.text = to_failure_text(check)
-        elif check.result is ResultEnum.warning:
+        elif check.result == ResultEnum.warning:
             skipped = ET.SubElement(
                 testcase,
                 "skipped",
                 message=check.reason if check.reason else "Warning",
                 type=check.category if check.category else "General",
             )
-            skipped.skipped = to_failure_text(check)
+            skipped.text = to_failure_text(check)
         else:
             ET.SubElement(
                 testcase,
                 "skipped",
-                message=check.reason if check.reason else "None",
+                message=check.reason if check.reason else "Skipped",
                 type=check.category if check.category else "General",
             )
 
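
A brief standalone sketch of why the `skipped.text` assignment matters: ElementTree serializes an element's `.text` as its body, so with this fix the failure text actually appears inside the `<skipped>` element of the JUnit report. The element names below mirror the `ET.SubElement` calls in the diff; the example check name is made up:

```python
# Minimal illustration of the serialization behavior relied on above (standard library only).
import xml.etree.ElementTree as ET

testcase = ET.Element("testcase", name="orders_row_count")
skipped = ET.SubElement(testcase, "skipped", message="Warning", type="General")
skipped.text = "check returned a warning"  # .text becomes the element body in the XML output

print(ET.tostring(testcase, encoding="unicode"))
# <testcase name="orders_row_count"><skipped message="Warning" type="General">check returned a warning</skipped></testcase>
```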

{datacontract_cli-0.10.30 → datacontract_cli-0.10.32/datacontract_cli.egg-info}/PKG-INFO
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datacontract-cli
-Version: 0.10.30
+Version: 0.10.32
 Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
 Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
 License-Expression: MIT
@@ -42,7 +42,7 @@ Provides-Extra: databricks
 Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
-Requires-Dist: databricks-sdk<0.59.0; extra == "databricks"
+Requires-Dist: databricks-sdk<0.60.0; extra == "databricks"
 Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
 Provides-Extra: iceberg
 Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
@@ -1018,8 +1018,6 @@ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs w
 
 ```shell
 spark.conf.set("spark.databricks.delta.schema.typeCheck.enabled", "false")
-from datacontract.model import data_contract_specification
-data_contract_specification.DATACONTRACT_TYPES.append("variant")
 ```
 
 #### Great Expectations
@@ -2083,7 +2081,6 @@ We are happy to receive your contributions. Propose your change in an issue or d
 - [INNOQ](https://innoq.com)
 - [Data Catering](https://data.catering/)
 - [Oliver Wyman](https://www.oliverwyman.com/)
-- [dmTECH](https://www.dmtech.tech/de)
 - And many more. To add your company, please create a pull request.
 
 ## Related Tools

datacontract_cli.egg-info/requires.txt
@@ -38,7 +38,7 @@ pandas>=2.0.0
 soda-core-spark-df<3.6.0,>=3.3.20
 soda-core-spark[databricks]<3.6.0,>=3.3.20
 databricks-sql-connector<4.1.0,>=3.7.0
-databricks-sdk<0.59.0
+databricks-sdk<0.60.0
 pyspark<4.0.0,>=3.5.5
 
 [dbml]

pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "datacontract-cli"
-version = "0.10.30"
+version = "0.10.32"
 description = "The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library."
 license = "MIT"
 readme = "README.md"
@@ -60,7 +60,7 @@ databricks = [
     "soda-core-spark-df>=3.3.20,<3.6.0",
     "soda-core-spark[databricks]>=3.3.20,<3.6.0",
     "databricks-sql-connector>=3.7.0,<4.1.0",
-    "databricks-sdk<0.59.0",
+    "databricks-sdk<0.60.0",
     "pyspark>=3.5.5,<4.0.0",
 ]
 

tests/test_import_spark.py
@@ -162,4 +162,4 @@ def test_prog(spark: SparkSession, df_user, user_datacontract_no_desc, user_data
 
     # does include a table level description (dataframe object method)
     result4 = DataContract().import_from_source("spark", "users", dataframe=df_user, description="description")
-    assert yaml.safe_load(result4.to_yaml()) == yaml.safe_load(expected_desc)
+    assert yaml.safe_load(result4.to_yaml()) == yaml.safe_load(expected_desc)