datacontract-cli 0.10.15__tar.gz → 0.10.16__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic; see the package registry's advisory page for more details.

Files changed (202)
  1. {datacontract_cli-0.10.15/datacontract_cli.egg-info → datacontract_cli-0.10.16}/PKG-INFO +71 -13
  2. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/README.md +59 -1
  3. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/breaking/breaking.py +3 -3
  4. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/cli.py +2 -2
  5. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/check_soda_execute.py +4 -4
  6. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/dbt_converter.py +43 -36
  7. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/exporter.py +1 -0
  8. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/exporter_factory.py +4 -0
  9. datacontract_cli-0.10.16/datacontract/export/iceberg_converter.py +188 -0
  10. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/odcs_v3_exporter.py +43 -29
  11. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/sodacl_converter.py +3 -2
  12. datacontract_cli-0.10.16/datacontract/imports/dbt_importer.py +158 -0
  13. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/iceberg_importer.py +12 -1
  14. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/odcs_v3_importer.py +5 -0
  15. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/resolve.py +6 -2
  16. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/model/data_contract_specification.py +3 -2
  17. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16/datacontract_cli.egg-info}/PKG-INFO +71 -13
  18. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract_cli.egg-info/SOURCES.txt +2 -0
  19. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract_cli.egg-info/requires.txt +11 -11
  20. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/pyproject.toml +12 -12
  21. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_dbt_models.py +6 -6
  22. datacontract_cli-0.10.16/tests/test_export_dbt_sources.py +134 -0
  23. datacontract_cli-0.10.16/tests/test_export_iceberg.py +254 -0
  24. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_odcs_v3.py +7 -0
  25. datacontract_cli-0.10.16/tests/test_export_sodacl.py +91 -0
  26. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_dbt.py +183 -2
  27. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_iceberg.py +1 -0
  28. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_postgres.py +1 -1
  29. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_quality.py +3 -3
  30. datacontract_cli-0.10.15/datacontract/imports/dbt_importer.py +0 -94
  31. datacontract_cli-0.10.15/tests/test_export_dbt_sources.py +0 -74
  32. datacontract_cli-0.10.15/tests/test_export_sodacl.py +0 -96
  33. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/LICENSE +0 -0
  34. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/MANIFEST.in +0 -0
  35. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/__init__.py +0 -0
  36. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/breaking/breaking_rules.py +0 -0
  37. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/catalog/catalog.py +0 -0
  38. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/data_contract.py +0 -0
  39. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/__init__.py +0 -0
  40. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +0 -0
  41. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/datacontract/check_that_datacontract_file_exists.py +0 -0
  42. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/fastjsonschema/check_jsonschema.py +0 -0
  43. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/fastjsonschema/s3/s3_read_files.py +0 -0
  44. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/__init__.py +0 -0
  45. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/bigquery.py +0 -0
  46. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/dask.py +0 -0
  47. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/databricks.py +0 -0
  48. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/duckdb.py +0 -0
  49. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/kafka.py +0 -0
  50. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/postgres.py +0 -0
  51. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/snowflake.py +0 -0
  52. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/sqlserver.py +0 -0
  53. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/engines/soda/connections/trino.py +0 -0
  54. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/__init__.py +0 -0
  55. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/avro_converter.py +0 -0
  56. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/avro_idl_converter.py +0 -0
  57. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/bigquery_converter.py +0 -0
  58. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/csv_type_converter.py +0 -0
  59. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/data_caterer_converter.py +0 -0
  60. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/dbml_converter.py +0 -0
  61. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/dcs_exporter.py +0 -0
  62. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/go_converter.py +0 -0
  63. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/great_expectations_converter.py +0 -0
  64. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/html_export.py +0 -0
  65. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/jsonschema_converter.py +0 -0
  66. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/odcs_v2_exporter.py +0 -0
  67. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/pandas_type_converter.py +0 -0
  68. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/protobuf_converter.py +0 -0
  69. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/pydantic_converter.py +0 -0
  70. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/rdf_converter.py +0 -0
  71. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/spark_converter.py +0 -0
  72. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/sql_converter.py +0 -0
  73. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/sql_type_converter.py +0 -0
  74. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/sqlalchemy_converter.py +0 -0
  75. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/export/terraform_converter.py +0 -0
  76. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/avro_importer.py +0 -0
  77. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/bigquery_importer.py +0 -0
  78. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/dbml_importer.py +0 -0
  79. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/glue_importer.py +0 -0
  80. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/importer.py +0 -0
  81. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/importer_factory.py +0 -0
  82. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/jsonschema_importer.py +0 -0
  83. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/odcs_importer.py +0 -0
  84. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/odcs_v2_importer.py +0 -0
  85. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/parquet_importer.py +0 -0
  86. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/spark_importer.py +0 -0
  87. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/sql_importer.py +0 -0
  88. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/imports/unity_importer.py +0 -0
  89. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/init/download_datacontract_file.py +0 -0
  90. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/integration/datamesh_manager.py +0 -0
  91. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/integration/opentelemetry.py +0 -0
  92. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/files.py +0 -0
  93. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/lint.py +0 -0
  94. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/__init__.py +0 -0
  95. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/description_linter.py +0 -0
  96. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/example_model_linter.py +0 -0
  97. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/field_pattern_linter.py +0 -0
  98. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/field_reference_linter.py +0 -0
  99. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/notice_period_linter.py +0 -0
  100. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/quality_schema_linter.py +0 -0
  101. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/linters/valid_constraints_linter.py +0 -0
  102. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/resources.py +0 -0
  103. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/schema.py +0 -0
  104. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/lint/urls.py +0 -0
  105. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/model/breaking_change.py +0 -0
  106. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/model/exceptions.py +0 -0
  107. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/model/odcs.py +0 -0
  108. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/model/run.py +0 -0
  109. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/py.typed +0 -0
  110. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/datacontract.html +0 -0
  111. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/index.html +0 -0
  112. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/datacontract_information.html +0 -0
  113. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/datacontract_servicelevels.html +0 -0
  114. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/datacontract_terms.html +0 -0
  115. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/definition.html +0 -0
  116. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/example.html +0 -0
  117. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/model_field.html +0 -0
  118. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/partials/server.html +0 -0
  119. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/templates/style/output.css +0 -0
  120. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract/web.py +0 -0
  121. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract_cli.egg-info/dependency_links.txt +0 -0
  122. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract_cli.egg-info/entry_points.txt +0 -0
  123. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/datacontract_cli.egg-info/top_level.txt +0 -0
  124. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/setup.cfg +0 -0
  125. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_breaking.py +0 -0
  126. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_catalog.py +0 -0
  127. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_changelog.py +0 -0
  128. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_cli.py +0 -0
  129. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_data_contract_specification.py +0 -0
  130. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_description_linter.py +0 -0
  131. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_documentation_linter.py +0 -0
  132. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_download_datacontract_file.py +0 -0
  133. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_example_model_linter.py +0 -0
  134. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_avro.py +0 -0
  135. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_avro_idl.py +0 -0
  136. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_bigquery.py +0 -0
  137. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_complex_data_contract.py +0 -0
  138. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_custom_exporter.py +0 -0
  139. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_data_caterer.py +0 -0
  140. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_dbml.py +0 -0
  141. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_dbt_staging_sql.py +0 -0
  142. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_go.py +0 -0
  143. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_great_expectations.py +0 -0
  144. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_html.py +0 -0
  145. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_jsonschema.py +0 -0
  146. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_odcs_v2.py +0 -0
  147. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_protobuf.py +0 -0
  148. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_pydantic.py +0 -0
  149. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_rdf.py +0 -0
  150. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_spark.py +0 -0
  151. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_sql.py +0 -0
  152. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_sql_query.py +0 -0
  153. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_sqlalchemy.py +0 -0
  154. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_export_terraform.py +0 -0
  155. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_field_constraint_linter.py +0 -0
  156. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_field_pattern_linter.py +0 -0
  157. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_field_reference_linter.py +0 -0
  158. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_avro.py +0 -0
  159. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_bigquery.py +0 -0
  160. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_dbml.py +0 -0
  161. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_glue.py +0 -0
  162. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_jsonschema.py +0 -0
  163. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_odcs_v2.py +0 -0
  164. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_odcs_v3.py +0 -0
  165. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_parquet.py +0 -0
  166. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_spark.py +0 -0
  167. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_sql.py +0 -0
  168. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_import_unity_file.py +0 -0
  169. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_integration_datameshmanager.py +0 -0
  170. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_integration_opentelemetry.py +0 -0
  171. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_lint.py +0 -0
  172. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_notice_period_linter.py +0 -0
  173. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_quality_schema_linter.py +0 -0
  174. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_resolve.py +0 -0
  175. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_roundtrip_jsonschema.py +0 -0
  176. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_spec_fields_field.py +0 -0
  177. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_spec_ref.py +0 -0
  178. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_azure_parquet_remote.py +0 -0
  179. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_bigquery.py +0 -0
  180. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_databricks.py +0 -0
  181. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_dataframe.py +0 -0
  182. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_delta.py +0 -0
  183. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_examples_csv.py +0 -0
  184. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_examples_formats_valid.py +0 -0
  185. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_examples_inline.py +0 -0
  186. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_examples_json.py +0 -0
  187. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_examples_missing.py +0 -0
  188. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_gcs_json_remote.py +0 -0
  189. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_kafka.py +0 -0
  190. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_kafka_remote.py +0 -0
  191. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_local_json.py +0 -0
  192. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_parquet.py +0 -0
  193. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_csv.py +0 -0
  194. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_delta.py +0 -0
  195. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_json.py +0 -0
  196. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_json_complex.py +0 -0
  197. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_json_multiple_models.py +0 -0
  198. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_s3_json_remote.py +0 -0
  199. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_snowflake.py +0 -0
  200. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_sqlserver.py +0 -0
  201. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_test_trino.py +0 -0
  202. {datacontract_cli-0.10.15 → datacontract_cli-0.10.16}/tests/test_web.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datacontract-cli
3
- Version: 0.10.15
3
+ Version: 0.10.16
4
4
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
5
5
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
6
6
  Project-URL: Homepage, https://cli.datacontract.com
@@ -11,15 +11,15 @@ Classifier: Operating System :: OS Independent
11
11
  Requires-Python: >=3.10
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
- Requires-Dist: typer<0.14,>=0.12
15
- Requires-Dist: pydantic<2.10.0,>=2.8.2
14
+ Requires-Dist: typer<0.15,>=0.12
15
+ Requires-Dist: pydantic<2.11.0,>=2.8.2
16
16
  Requires-Dist: pyyaml~=6.0.1
17
17
  Requires-Dist: requests<2.33,>=2.31
18
- Requires-Dist: fastapi==0.115.5
18
+ Requires-Dist: fastapi==0.115.6
19
19
  Requires-Dist: uvicorn==0.32.1
20
- Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
21
- Requires-Dist: fastparquet==2024.5.0
22
- Requires-Dist: python-multipart==0.0.12
20
+ Requires-Dist: fastjsonschema<2.22.0,>=2.19.1
21
+ Requires-Dist: fastparquet==2024.11.0
22
+ Requires-Dist: python-multipart==0.0.19
23
23
  Requires-Dist: rich<13.10,>=13.7
24
24
  Requires-Dist: simple-ddl-parser==1.7.1
25
25
  Requires-Dist: duckdb==1.1.2
@@ -41,7 +41,7 @@ Requires-Dist: databricks-sql-connector<3.6.0,>=3.1.2; extra == "databricks"
41
41
  Requires-Dist: databricks-sdk<0.39.0,>=0.32.0; extra == "databricks"
42
42
  Requires-Dist: soda-core-spark[databricks]<3.5.0,>=3.3.1; extra == "databricks"
43
43
  Provides-Extra: iceberg
44
- Requires-Dist: pyiceberg==0.7.1; extra == "iceberg"
44
+ Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
45
45
  Provides-Extra: kafka
46
46
  Requires-Dist: datacontract-cli[avro]; extra == "kafka"
47
47
  Requires-Dist: soda-core-spark-df<3.5.0,>=3.3.1; extra == "kafka"
@@ -66,16 +66,16 @@ Provides-Extra: all
66
66
  Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,iceberg,kafka,parquet,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
67
67
  Provides-Extra: dev
68
68
  Requires-Dist: datacontract-cli[all]; extra == "dev"
69
- Requires-Dist: httpx==0.27.2; extra == "dev"
69
+ Requires-Dist: httpx==0.28.1; extra == "dev"
70
70
  Requires-Dist: kafka-python; extra == "dev"
71
- Requires-Dist: moto==5.0.18; extra == "dev"
71
+ Requires-Dist: moto==5.0.22; extra == "dev"
72
72
  Requires-Dist: pandas>=2.1.0; extra == "dev"
73
73
  Requires-Dist: pre-commit<4.1.0,>=3.7.1; extra == "dev"
74
74
  Requires-Dist: pytest; extra == "dev"
75
75
  Requires-Dist: pytest-xdist; extra == "dev"
76
- Requires-Dist: pymssql==2.3.1; extra == "dev"
76
+ Requires-Dist: pymssql==2.3.2; extra == "dev"
77
77
  Requires-Dist: ruff; extra == "dev"
78
- Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.8.2; extra == "dev"
78
+ Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.9.0; extra == "dev"
79
79
  Requires-Dist: trino==0.330.0; extra == "dev"
80
80
 
81
81
  # Data Contract CLI
@@ -841,7 +841,7 @@ models:
841
841
  │ t-staging-sql|odcs|odcs_v2|odcs_v3|rdf|avro|protobuf │
842
842
  │ |great-expectations|terraform|avro-idl|sql|sql-query │
843
843
  │ |html|go|bigquery|dbml|spark|sqlalchemy|data-caterer │
844
- │ |dcs] │
844
+ │ |dcs|iceberg] │
845
845
  │ --output PATH Specify the file path where the exported data will be │
846
846
  │ saved. If no path is provided, the output will be │
847
847
  │ printed to stdout. │
@@ -902,6 +902,7 @@ Available export options:
902
902
  | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
903
903
  | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
904
904
  | `dcs` | Export to Data Contract Specification in YAML format | ✅ |
905
+ | `iceberg` | Export to an Iceberg JSON Schema Definition | partial |
905
906
  | Missing something? | Please create an issue on GitHub | TBD |
906
907
 
907
908
 
@@ -1025,6 +1026,63 @@ models:
1025
1026
  - **avroLogicalType**: Specifies the logical type of the field in Avro. In this example, it is `local-timestamp-micros`.
1026
1027
  - **avroDefault**: Specifies the default value for the field in Avro. In this example, it is 1672534861000000 which corresponds to ` 2023-01-01 01:01:01 UTC`.
1027
1028
 
1029
+ #### Iceberg
1030
+
1031
+ Exports to an [Iceberg Table Json Schema Definition](https://iceberg.apache.org/spec/#appendix-c-json-serialization).
1032
+
1033
+ This export only supports a single model export at a time because Iceberg's schema definition is for a single table and the exporter maps 1 model to 1 table, use the `--model` flag
1034
+ to limit your contract export to a single model.
1035
+
1036
+ ```bash
1037
+ $ datacontract export --format iceberg --model orders https://datacontract.com/examples/orders-latest/datacontract.yaml --output /tmp/orders_iceberg.json
1038
+
1039
+ $ cat /tmp/orders_iceberg.json | jq '.'
1040
+ {
1041
+ "type": "struct",
1042
+ "fields": [
1043
+ {
1044
+ "id": 1,
1045
+ "name": "order_id",
1046
+ "type": "string",
1047
+ "required": true
1048
+ },
1049
+ {
1050
+ "id": 2,
1051
+ "name": "order_timestamp",
1052
+ "type": "timestamptz",
1053
+ "required": true
1054
+ },
1055
+ {
1056
+ "id": 3,
1057
+ "name": "order_total",
1058
+ "type": "long",
1059
+ "required": true
1060
+ },
1061
+ {
1062
+ "id": 4,
1063
+ "name": "customer_id",
1064
+ "type": "string",
1065
+ "required": false
1066
+ },
1067
+ {
1068
+ "id": 5,
1069
+ "name": "customer_email_address",
1070
+ "type": "string",
1071
+ "required": true
1072
+ },
1073
+ {
1074
+ "id": 6,
1075
+ "name": "processed_timestamp",
1076
+ "type": "timestamptz",
1077
+ "required": true
1078
+ }
1079
+ ],
1080
+ "schema-id": 0,
1081
+ "identifier-field-ids": [
1082
+ 1
1083
+ ]
1084
+ }
1085
+ ```
1028
1086
 
1029
1087
  ### import
1030
1088
 
@@ -761,7 +761,7 @@ models:
761
761
  │ t-staging-sql|odcs|odcs_v2|odcs_v3|rdf|avro|protobuf │
762
762
  │ |great-expectations|terraform|avro-idl|sql|sql-query │
763
763
  │ |html|go|bigquery|dbml|spark|sqlalchemy|data-caterer │
764
- │ |dcs] │
764
+ │ |dcs|iceberg] │
765
765
  │ --output PATH Specify the file path where the exported data will be │
766
766
  │ saved. If no path is provided, the output will be │
767
767
  │ printed to stdout. │
@@ -822,6 +822,7 @@ Available export options:
822
822
  | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
823
823
  | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
824
824
  | `dcs` | Export to Data Contract Specification in YAML format | ✅ |
825
+ | `iceberg` | Export to an Iceberg JSON Schema Definition | partial |
825
826
  | Missing something? | Please create an issue on GitHub | TBD |
826
827
 
827
828
 
@@ -945,6 +946,63 @@ models:
945
946
  - **avroLogicalType**: Specifies the logical type of the field in Avro. In this example, it is `local-timestamp-micros`.
946
947
  - **avroDefault**: Specifies the default value for the field in Avro. In this example, it is 1672534861000000 which corresponds to ` 2023-01-01 01:01:01 UTC`.
947
948
 
949
+ #### Iceberg
950
+
951
+ Exports to an [Iceberg Table Json Schema Definition](https://iceberg.apache.org/spec/#appendix-c-json-serialization).
952
+
953
+ This export only supports a single model export at a time because Iceberg's schema definition is for a single table and the exporter maps 1 model to 1 table, use the `--model` flag
954
+ to limit your contract export to a single model.
955
+
956
+ ```bash
957
+ $ datacontract export --format iceberg --model orders https://datacontract.com/examples/orders-latest/datacontract.yaml --output /tmp/orders_iceberg.json
958
+
959
+ $ cat /tmp/orders_iceberg.json | jq '.'
960
+ {
961
+ "type": "struct",
962
+ "fields": [
963
+ {
964
+ "id": 1,
965
+ "name": "order_id",
966
+ "type": "string",
967
+ "required": true
968
+ },
969
+ {
970
+ "id": 2,
971
+ "name": "order_timestamp",
972
+ "type": "timestamptz",
973
+ "required": true
974
+ },
975
+ {
976
+ "id": 3,
977
+ "name": "order_total",
978
+ "type": "long",
979
+ "required": true
980
+ },
981
+ {
982
+ "id": 4,
983
+ "name": "customer_id",
984
+ "type": "string",
985
+ "required": false
986
+ },
987
+ {
988
+ "id": 5,
989
+ "name": "customer_email_address",
990
+ "type": "string",
991
+ "required": true
992
+ },
993
+ {
994
+ "id": 6,
995
+ "name": "processed_timestamp",
996
+ "type": "timestamptz",
997
+ "required": true
998
+ }
999
+ ],
1000
+ "schema-id": 0,
1001
+ "identifier-field-ids": [
1002
+ 1
1003
+ ]
1004
+ }
1005
+ ```
948
1006
 
949
1007
  ### import
950
1008
 
@@ -1,6 +1,6 @@
1
1
  from datacontract.breaking.breaking_rules import BreakingRules
2
2
  from datacontract.model.breaking_change import BreakingChange, Location, Severity
3
- from datacontract.model.data_contract_specification import Contact, Field, Info, Model, Quality, Terms
3
+ from datacontract.model.data_contract_specification import Contact, DeprecatedQuality, Field, Info, Model, Terms
4
4
 
5
5
 
6
6
  def info_breaking_changes(
@@ -216,8 +216,8 @@ def terms_breaking_changes(
216
216
 
217
217
 
218
218
  def quality_breaking_changes(
219
- old_quality: Quality,
220
- new_quality: Quality,
219
+ old_quality: DeprecatedQuality,
220
+ new_quality: DeprecatedQuality,
221
221
  new_path: str,
222
222
  include_severities: [Severity],
223
223
  ) -> list[BreakingChange]:
@@ -221,7 +221,7 @@ def export(
221
221
  )
222
222
  # Don't interpret console markup in output.
223
223
  if output is None:
224
- console.print(result, markup=False)
224
+ console.print(result, markup=False, soft_wrap=True)
225
225
  else:
226
226
  with output.open("w") as f:
227
227
  f.write(result)
@@ -298,7 +298,7 @@ def import_(
298
298
  iceberg_table=iceberg_table,
299
299
  )
300
300
  if output is None:
301
- console.print(result.to_yaml())
301
+ console.print(result.to_yaml(), markup=False, soft_wrap=True)
302
302
  else:
303
303
  with output.open("w") as f:
304
304
  f.write(result.to_yaml())
@@ -12,7 +12,7 @@ from datacontract.engines.soda.connections.sqlserver import to_sqlserver_soda_co
12
12
  from datacontract.engines.soda.connections.trino import to_trino_soda_configuration
13
13
  from datacontract.export.sodacl_converter import to_sodacl_yaml
14
14
  from datacontract.model.data_contract_specification import DataContractSpecification, Server
15
- from datacontract.model.run import Check, Log, Run
15
+ from datacontract.model.run import Check, Log, ResultEnum, Run
16
16
 
17
17
 
18
18
  def check_soda_execute(run: Run, data_contract: DataContractSpecification, server: Server, spark, tmp_dir):
@@ -33,7 +33,7 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
33
33
  Check(
34
34
  type="general",
35
35
  name="Check that format is supported",
36
- result="warning",
36
+ result=ResultEnum.warning,
37
37
  reason=f"Format {server.format} not yet supported by datacontract CLI",
38
38
  engine="datacontract",
39
39
  )
@@ -93,7 +93,7 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
93
93
  Check(
94
94
  type="general",
95
95
  name="Check that server type is supported",
96
- result="warning",
96
+ result=ResultEnum.warning,
97
97
  reason=f"Server type {server.type} not yet supported by datacontract CLI",
98
98
  engine="datacontract-cli",
99
99
  )
@@ -182,5 +182,5 @@ def update_reason(check, c):
182
182
  check.reason = diagnostics_text_split[1].strip()
183
183
  # print(check.reason)
184
184
  break # Exit the loop once the desired block is found
185
- if c["diagnostics"]["fail"] is not None:
185
+ if "fail" in c["diagnostics"]:
186
186
  check.reason = f"Got: {c['diagnostics']['value']} Expected: {c['diagnostics']['fail']}"
@@ -1,4 +1,4 @@
1
- from typing import Dict
1
+ from typing import Dict, Optional
2
2
 
3
3
  import yaml
4
4
 
@@ -52,14 +52,14 @@ def to_dbt_staging_sql(data_contract_spec: DataContractSpecification, model_name
52
52
  # TODO escape SQL reserved key words, probably dependent on server type
53
53
  columns.append(field_name)
54
54
  return f"""
55
- select
55
+ select
56
56
  {", ".join(columns)}
57
57
  from {{{{ source('{id}', '{model_name}') }}}}
58
58
  """
59
59
 
60
60
 
61
61
  def to_dbt_sources_yaml(data_contract_spec: DataContractSpecification, server: str = None):
62
- source = {"name": data_contract_spec.id, "tables": []}
62
+ source = {"name": data_contract_spec.id}
63
63
  dbt = {
64
64
  "version": 2,
65
65
  "sources": [source],
@@ -69,24 +69,31 @@ def to_dbt_sources_yaml(data_contract_spec: DataContractSpecification, server: s
69
69
  if data_contract_spec.info.description is not None:
70
70
  source["description"] = data_contract_spec.info.description
71
71
  found_server = data_contract_spec.servers.get(server)
72
+ adapter_type = None
72
73
  if found_server is not None:
73
- source["database"] = found_server.database
74
- source["schema"] = found_server.schema_
74
+ adapter_type = found_server.type
75
+ if adapter_type == "bigquery":
76
+ source["database"] = found_server.project
77
+ source["schema"] = found_server.dataset
78
+ else:
79
+ source["database"] = found_server.database
80
+ source["schema"] = found_server.schema_
75
81
 
82
+ source["tables"] = []
76
83
  for model_key, model_value in data_contract_spec.models.items():
77
- dbt_model = _to_dbt_source_table(model_key, model_value)
84
+ dbt_model = _to_dbt_source_table(model_key, model_value, adapter_type)
78
85
  source["tables"].append(dbt_model)
79
86
  return yaml.dump(dbt, indent=2, sort_keys=False, allow_unicode=True)
80
87
 
81
88
 
82
- def _to_dbt_source_table(model_key, model_value: Model) -> dict:
89
+ def _to_dbt_source_table(model_key, model_value: Model, adapter_type: Optional[str]) -> dict:
83
90
  dbt_model = {
84
91
  "name": model_key,
85
92
  }
86
93
 
87
94
  if model_value.description is not None:
88
95
  dbt_model["description"] = model_value.description
89
- columns = _to_columns(model_value.fields, False, False)
96
+ columns = _to_columns(model_value.fields, False, adapter_type)
90
97
  if columns:
91
98
  dbt_model["columns"] = columns
92
99
  return dbt_model
@@ -107,7 +114,7 @@ def _to_dbt_model(model_key, model_value: Model, data_contract_spec: DataContrac
107
114
  dbt_model["config"]["contract"] = {"enforced": True}
108
115
  if model_value.description is not None:
109
116
  dbt_model["description"] = model_value.description
110
- columns = _to_columns(model_value.fields, _supports_constraints(model_type), True)
117
+ columns = _to_columns(model_value.fields, _supports_constraints(model_type), None)
111
118
  if columns:
112
119
  dbt_model["columns"] = columns
113
120
  return dbt_model
@@ -130,48 +137,47 @@ def _supports_constraints(model_type):
130
137
  return model_type == "table" or model_type == "incremental"
131
138
 
132
139
 
133
- def _to_columns(fields: Dict[str, Field], supports_constraints: bool, supports_datatype: bool) -> list:
140
+ def _to_columns(fields: Dict[str, Field], supports_constraints: bool, adapter_type: Optional[str]) -> list:
134
141
  columns = []
135
142
  for field_name, field in fields.items():
136
- column = _to_column(field, supports_constraints, supports_datatype)
137
- column["name"] = field_name
143
+ column = _to_column(field_name, field, supports_constraints, adapter_type)
138
144
  columns.append(column)
139
145
  return columns
140
146
 
141
147
 
142
- def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool) -> dict:
143
- column = {}
144
- dbt_type = convert_to_sql_type(field, "snowflake")
148
+ def _to_column(field_name: str, field: Field, supports_constraints: bool, adapter_type: Optional[str]) -> dict:
149
+ column = {"name": field_name}
150
+ adapter_type = adapter_type or "snowflake"
151
+ dbt_type = convert_to_sql_type(field, adapter_type)
152
+
153
+ column["data_tests"] = []
145
154
  if dbt_type is not None:
146
- if supports_datatype:
147
- column["data_type"] = dbt_type
148
- else:
149
- column.setdefault("tests", []).append(
150
- {"dbt_expectations.dbt_expectations.expect_column_values_to_be_of_type": {"column_type": dbt_type}}
151
- )
155
+ column["data_type"] = dbt_type
156
+ else:
157
+ column["data_tests"].append(
158
+ {"dbt_expectations.dbt_expectations.expect_column_values_to_be_of_type": {"column_type": dbt_type}}
159
+ )
152
160
  if field.description is not None:
153
161
  column["description"] = field.description
154
162
  if field.required:
155
163
  if supports_constraints:
156
164
  column.setdefault("constraints", []).append({"type": "not_null"})
157
165
  else:
158
- column.setdefault("tests", []).append("not_null")
166
+ column["data_tests"].append("not_null")
159
167
  if field.unique:
160
168
  if supports_constraints:
161
169
  column.setdefault("constraints", []).append({"type": "unique"})
162
170
  else:
163
- column.setdefault("tests", []).append("unique")
171
+ column["data_tests"].append("unique")
164
172
  if field.enum is not None and len(field.enum) > 0:
165
- column.setdefault("tests", []).append({"accepted_values": {"values": field.enum}})
173
+ column["data_tests"].append({"accepted_values": {"values": field.enum}})
166
174
  if field.minLength is not None or field.maxLength is not None:
167
175
  length_test = {}
168
176
  if field.minLength is not None:
169
177
  length_test["min_value"] = field.minLength
170
178
  if field.maxLength is not None:
171
179
  length_test["max_value"] = field.maxLength
172
- column.setdefault("tests", []).append(
173
- {"dbt_expectations.expect_column_value_lengths_to_be_between": length_test}
174
- )
180
+ column["data_tests"].append({"dbt_expectations.expect_column_value_lengths_to_be_between": length_test})
175
181
  if field.pii is not None:
176
182
  column.setdefault("meta", {})["pii"] = field.pii
177
183
  if field.classification is not None:
@@ -180,9 +186,7 @@ def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool
180
186
  column.setdefault("tags", []).extend(field.tags)
181
187
  if field.pattern is not None:
182
188
  # Beware, the data contract pattern is a regex, not a like pattern
183
- column.setdefault("tests", []).append(
184
- {"dbt_expectations.expect_column_values_to_match_regex": {"regex": field.pattern}}
185
- )
189
+ column["data_tests"].append({"dbt_expectations.expect_column_values_to_match_regex": {"regex": field.pattern}})
186
190
  if (
187
191
  field.minimum is not None
188
192
  or field.maximum is not None
@@ -194,7 +198,7 @@ def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool
194
198
  range_test["min_value"] = field.minimum
195
199
  if field.maximum is not None:
196
200
  range_test["max_value"] = field.maximum
197
- column.setdefault("tests", []).append({"dbt_expectations.expect_column_values_to_be_between": range_test})
201
+ column["data_tests"].append({"dbt_expectations.expect_column_values_to_be_between": range_test})
198
202
  elif (
199
203
  field.exclusiveMinimum is not None
200
204
  or field.exclusiveMaximum is not None
@@ -207,18 +211,18 @@ def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool
207
211
  if field.exclusiveMaximum is not None:
208
212
  range_test["max_value"] = field.exclusiveMaximum
209
213
  range_test["strictly"] = True
210
- column.setdefault("tests", []).append({"dbt_expectations.expect_column_values_to_be_between": range_test})
214
+ column["data_tests"].append({"dbt_expectations.expect_column_values_to_be_between": range_test})
211
215
  else:
212
216
  if field.minimum is not None:
213
- column.setdefault("tests", []).append(
217
+ column["data_tests"].append(
214
218
  {"dbt_expectations.expect_column_values_to_be_between": {"min_value": field.minimum}}
215
219
  )
216
220
  if field.maximum is not None:
217
- column.setdefault("tests", []).append(
221
+ column["data_tests"].append(
218
222
  {"dbt_expectations.expect_column_values_to_be_between": {"max_value": field.maximum}}
219
223
  )
220
224
  if field.exclusiveMinimum is not None:
221
- column.setdefault("tests", []).append(
225
+ column["data_tests"].append(
222
226
  {
223
227
  "dbt_expectations.expect_column_values_to_be_between": {
224
228
  "min_value": field.exclusiveMinimum,
@@ -227,7 +231,7 @@ def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool
227
231
  }
228
232
  )
229
233
  if field.exclusiveMaximum is not None:
230
- column.setdefault("tests", []).append(
234
+ column["data_tests"].append(
231
235
  {
232
236
  "dbt_expectations.expect_column_values_to_be_between": {
233
237
  "max_value": field.exclusiveMaximum,
@@ -236,5 +240,8 @@ def _to_column(field: Field, supports_constraints: bool, supports_datatype: bool
236
240
  }
237
241
  )
238
242
 
243
+ if not column["data_tests"]:
244
+ column.pop("data_tests")
245
+
239
246
  # TODO: all constraints
240
247
  return column
@@ -40,6 +40,7 @@ class ExportFormat(str, Enum):
40
40
  sqlalchemy = "sqlalchemy"
41
41
  data_caterer = "data-caterer"
42
42
  dcs = "dcs"
43
+ iceberg = "iceberg"
43
44
 
44
45
  @classmethod
45
46
  def get_supported_formats(cls):
@@ -168,3 +168,7 @@ exporter_factory.register_lazy_exporter(
168
168
  exporter_factory.register_lazy_exporter(
169
169
  name=ExportFormat.dcs, module_path="datacontract.export.dcs_exporter", class_name="DcsExporter"
170
170
  )
171
+
172
+ exporter_factory.register_lazy_exporter(
173
+ name=ExportFormat.iceberg, module_path="datacontract.export.iceberg_converter", class_name="IcebergExporter"
174
+ )