datacontract-cli 0.10.13__py3-none-any.whl → 0.10.15__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic. Click here for more details.

Files changed (77) hide show
  1. datacontract/breaking/breaking.py +227 -9
  2. datacontract/breaking/breaking_rules.py +24 -0
  3. datacontract/catalog/catalog.py +1 -1
  4. datacontract/cli.py +104 -32
  5. datacontract/data_contract.py +35 -5
  6. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +1 -1
  7. datacontract/engines/fastjsonschema/check_jsonschema.py +114 -22
  8. datacontract/engines/soda/check_soda_execute.py +5 -3
  9. datacontract/engines/soda/connections/duckdb.py +1 -0
  10. datacontract/engines/soda/connections/kafka.py +38 -17
  11. datacontract/export/avro_converter.py +8 -1
  12. datacontract/export/avro_idl_converter.py +2 -2
  13. datacontract/export/bigquery_converter.py +4 -3
  14. datacontract/export/data_caterer_converter.py +1 -1
  15. datacontract/export/dbml_converter.py +2 -4
  16. datacontract/export/dbt_converter.py +2 -3
  17. datacontract/export/dcs_exporter.py +6 -0
  18. datacontract/export/exporter.py +5 -2
  19. datacontract/export/exporter_factory.py +16 -3
  20. datacontract/export/go_converter.py +3 -2
  21. datacontract/export/great_expectations_converter.py +202 -40
  22. datacontract/export/html_export.py +1 -1
  23. datacontract/export/jsonschema_converter.py +3 -2
  24. datacontract/export/{odcs_converter.py → odcs_v2_exporter.py} +5 -5
  25. datacontract/export/odcs_v3_exporter.py +294 -0
  26. datacontract/export/pandas_type_converter.py +40 -0
  27. datacontract/export/protobuf_converter.py +1 -1
  28. datacontract/export/rdf_converter.py +4 -5
  29. datacontract/export/sodacl_converter.py +86 -2
  30. datacontract/export/spark_converter.py +10 -7
  31. datacontract/export/sql_converter.py +1 -2
  32. datacontract/export/sql_type_converter.py +55 -11
  33. datacontract/export/sqlalchemy_converter.py +1 -2
  34. datacontract/export/terraform_converter.py +1 -1
  35. datacontract/imports/avro_importer.py +1 -1
  36. datacontract/imports/bigquery_importer.py +1 -1
  37. datacontract/imports/dbml_importer.py +2 -2
  38. datacontract/imports/dbt_importer.py +3 -2
  39. datacontract/imports/glue_importer.py +5 -3
  40. datacontract/imports/iceberg_importer.py +161 -0
  41. datacontract/imports/importer.py +2 -0
  42. datacontract/imports/importer_factory.py +12 -1
  43. datacontract/imports/jsonschema_importer.py +3 -2
  44. datacontract/imports/odcs_importer.py +25 -168
  45. datacontract/imports/odcs_v2_importer.py +177 -0
  46. datacontract/imports/odcs_v3_importer.py +309 -0
  47. datacontract/imports/parquet_importer.py +81 -0
  48. datacontract/imports/spark_importer.py +2 -1
  49. datacontract/imports/sql_importer.py +1 -1
  50. datacontract/imports/unity_importer.py +3 -3
  51. datacontract/integration/datamesh_manager.py +1 -1
  52. datacontract/integration/opentelemetry.py +0 -1
  53. datacontract/lint/lint.py +2 -1
  54. datacontract/lint/linters/description_linter.py +1 -0
  55. datacontract/lint/linters/example_model_linter.py +1 -0
  56. datacontract/lint/linters/field_pattern_linter.py +1 -0
  57. datacontract/lint/linters/field_reference_linter.py +1 -0
  58. datacontract/lint/linters/notice_period_linter.py +1 -0
  59. datacontract/lint/linters/quality_schema_linter.py +1 -0
  60. datacontract/lint/linters/valid_constraints_linter.py +1 -0
  61. datacontract/lint/resolve.py +14 -9
  62. datacontract/lint/resources.py +21 -0
  63. datacontract/lint/schema.py +1 -1
  64. datacontract/lint/urls.py +4 -2
  65. datacontract/model/data_contract_specification.py +83 -13
  66. datacontract/model/odcs.py +11 -0
  67. datacontract/model/run.py +21 -12
  68. datacontract/templates/index.html +6 -6
  69. datacontract/web.py +2 -3
  70. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/METADATA +176 -93
  71. datacontract_cli-0.10.15.dist-info/RECORD +105 -0
  72. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/WHEEL +1 -1
  73. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +0 -48
  74. datacontract_cli-0.10.13.dist-info/RECORD +0 -97
  75. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/LICENSE +0 -0
  76. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/entry_points.txt +0 -0
  77. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/top_level.txt +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datacontract-cli
3
- Version: 0.10.13
3
+ Version: 0.10.15
4
4
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
5
5
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
6
6
  Project-URL: Homepage, https://cli.datacontract.com
@@ -11,69 +11,72 @@ Classifier: Operating System :: OS Independent
11
11
  Requires-Python: >=3.10
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
- Requires-Dist: typer<0.13,>=0.12
14
+ Requires-Dist: typer<0.14,>=0.12
15
15
  Requires-Dist: pydantic<2.10.0,>=2.8.2
16
16
  Requires-Dist: pyyaml~=6.0.1
17
17
  Requires-Dist: requests<2.33,>=2.31
18
- Requires-Dist: fastapi==0.114.2
19
- Requires-Dist: uvicorn==0.30.6
18
+ Requires-Dist: fastapi==0.115.5
19
+ Requires-Dist: uvicorn==0.32.1
20
20
  Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
21
21
  Requires-Dist: fastparquet==2024.5.0
22
- Requires-Dist: python-multipart==0.0.9
23
- Requires-Dist: rich<13.9,>=13.7
24
- Requires-Dist: simple-ddl-parser==1.6.1
25
- Requires-Dist: duckdb==1.0.0
26
- Requires-Dist: soda-core-duckdb<3.4.0,>=3.3.1
22
+ Requires-Dist: python-multipart==0.0.12
23
+ Requires-Dist: rich<13.10,>=13.7
24
+ Requires-Dist: simple-ddl-parser==1.7.1
25
+ Requires-Dist: duckdb==1.1.2
26
+ Requires-Dist: soda-core-duckdb<3.5.0,>=3.3.1
27
27
  Requires-Dist: setuptools>=60
28
28
  Requires-Dist: python-dotenv~=1.0.0
29
29
  Requires-Dist: rdflib==7.0.0
30
30
  Requires-Dist: opentelemetry-exporter-otlp-proto-grpc~=1.16
31
31
  Requires-Dist: opentelemetry-exporter-otlp-proto-http~=1.16
32
- Requires-Dist: boto3<1.35.20,>=1.34.41
33
- Requires-Dist: jinja-partials>=0.2.1
34
- Provides-Extra: all
35
- Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,kafka,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
32
+ Requires-Dist: boto3<1.35.69,>=1.34.41
33
+ Requires-Dist: jinja_partials>=0.2.1
36
34
  Provides-Extra: avro
37
35
  Requires-Dist: avro==1.12.0; extra == "avro"
38
36
  Provides-Extra: bigquery
39
- Requires-Dist: soda-core-bigquery<3.4.0,>=3.3.1; extra == "bigquery"
37
+ Requires-Dist: soda-core-bigquery<3.5.0,>=3.3.1; extra == "bigquery"
40
38
  Provides-Extra: databricks
41
- Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "databricks"
42
- Requires-Dist: databricks-sql-connector<3.5.0,>=3.1.2; extra == "databricks"
43
- Requires-Dist: databricks-sdk<0.33.0,>=0.32.0; extra == "databricks"
44
- Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.1; extra == "databricks"
45
- Provides-Extra: dbml
46
- Requires-Dist: pydbml>=1.1.1; extra == "dbml"
39
+ Requires-Dist: soda-core-spark-df<3.5.0,>=3.3.1; extra == "databricks"
40
+ Requires-Dist: databricks-sql-connector<3.6.0,>=3.1.2; extra == "databricks"
41
+ Requires-Dist: databricks-sdk<0.39.0,>=0.32.0; extra == "databricks"
42
+ Requires-Dist: soda-core-spark[databricks]<3.5.0,>=3.3.1; extra == "databricks"
43
+ Provides-Extra: iceberg
44
+ Requires-Dist: pyiceberg==0.7.1; extra == "iceberg"
45
+ Provides-Extra: kafka
46
+ Requires-Dist: datacontract-cli[avro]; extra == "kafka"
47
+ Requires-Dist: soda-core-spark-df<3.5.0,>=3.3.1; extra == "kafka"
48
+ Provides-Extra: postgres
49
+ Requires-Dist: soda-core-postgres<3.5.0,>=3.3.1; extra == "postgres"
50
+ Provides-Extra: s3
51
+ Requires-Dist: s3fs==2024.10.0; extra == "s3"
52
+ Provides-Extra: snowflake
53
+ Requires-Dist: snowflake-connector-python[pandas]<3.13,>=3.6; extra == "snowflake"
54
+ Requires-Dist: soda-core-snowflake<3.5.0,>=3.3.1; extra == "snowflake"
55
+ Provides-Extra: sqlserver
56
+ Requires-Dist: soda-core-sqlserver<3.5.0,>=3.3.1; extra == "sqlserver"
57
+ Provides-Extra: trino
58
+ Requires-Dist: soda-core-trino<3.5.0,>=3.3.1; extra == "trino"
47
59
  Provides-Extra: dbt
48
60
  Requires-Dist: dbt-core>=1.8.0; extra == "dbt"
61
+ Provides-Extra: dbml
62
+ Requires-Dist: pydbml>=1.1.1; extra == "dbml"
63
+ Provides-Extra: parquet
64
+ Requires-Dist: pyarrow>=12.0.0; extra == "parquet"
65
+ Provides-Extra: all
66
+ Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,iceberg,kafka,parquet,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
49
67
  Provides-Extra: dev
50
68
  Requires-Dist: datacontract-cli[all]; extra == "dev"
51
69
  Requires-Dist: httpx==0.27.2; extra == "dev"
52
70
  Requires-Dist: kafka-python; extra == "dev"
53
- Requires-Dist: moto==5.0.14; extra == "dev"
71
+ Requires-Dist: moto==5.0.18; extra == "dev"
54
72
  Requires-Dist: pandas>=2.1.0; extra == "dev"
55
- Requires-Dist: pre-commit<3.9.0,>=3.7.1; extra == "dev"
56
- Requires-Dist: pyarrow>=12.0.0; extra == "dev"
73
+ Requires-Dist: pre-commit<4.1.0,>=3.7.1; extra == "dev"
57
74
  Requires-Dist: pytest; extra == "dev"
58
75
  Requires-Dist: pytest-xdist; extra == "dev"
59
76
  Requires-Dist: pymssql==2.3.1; extra == "dev"
60
77
  Requires-Dist: ruff; extra == "dev"
61
- Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.8.1; extra == "dev"
62
- Requires-Dist: trino==0.329.0; extra == "dev"
63
- Provides-Extra: kafka
64
- Requires-Dist: datacontract-cli[avro]; extra == "kafka"
65
- Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "kafka"
66
- Provides-Extra: postgres
67
- Requires-Dist: soda-core-postgres<3.4.0,>=3.3.1; extra == "postgres"
68
- Provides-Extra: s3
69
- Requires-Dist: s3fs==2024.9.0; extra == "s3"
70
- Provides-Extra: snowflake
71
- Requires-Dist: snowflake-connector-python[pandas]<3.13,>=3.6; extra == "snowflake"
72
- Requires-Dist: soda-core-snowflake<3.4.0,>=3.3.1; extra == "snowflake"
73
- Provides-Extra: sqlserver
74
- Requires-Dist: soda-core-sqlserver<3.4.0,>=3.3.1; extra == "sqlserver"
75
- Provides-Extra: trino
76
- Requires-Dist: soda-core-trino<3.4.0,>=3.3.1; extra == "trino"
78
+ Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.8.2; extra == "dev"
79
+ Requires-Dist: trino==0.330.0; extra == "dev"
77
80
 
78
81
  # Data Contract CLI
79
82
 
@@ -85,8 +88,8 @@ Requires-Dist: soda-core-trino<3.4.0,>=3.3.1; extra == "trino"
85
88
  <a href="https://datacontract.com/slack" rel="nofollow"><img src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" alt="Slack Status" data-canonical-src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" style="max-width: 100%;"></a>
86
89
  </p>
87
90
 
88
- The `datacontract` CLI is an open source command-line tool for working with [Data Contracts](https://datacontract.com/).
89
- It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
91
+ The `datacontract` CLI is an open-source command-line tool for working with data contracts.
92
+ It uses data contract YAML files as [Data Contract Specification](https://datacontract.com/) or [ODCS](https://bitol-io.github.io/open-data-contract-standard/latest/) to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
90
93
 
91
94
  ![Main features of the Data Contract CLI](datacontractcli.png)
92
95
 
@@ -98,15 +101,15 @@ Let's look at this data contract:
98
101
 
99
102
  We have a _servers_ section with endpoint details to the S3 bucket, _models_ for the structure of the data, _servicelevels_ and _quality_ attributes that describe the expected freshness and number of rows.
100
103
 
101
- This data contract contains all information to connect to S3 and check that the actual data meets the defined schema and quality requirements. We can use this information to test if the actual data set in S3 is compliant to the data contract.
104
+ This data contract contains all information to connect to S3 and check that the actual data meets the defined schema and quality requirements. We can use this information to test if the actual data product in S3 is compliant with the data contract.
102
105
 
103
- Let's use [pip](https://pip.pypa.io/en/stable/getting-started/) to install the CLI (or use the [Docker image](#docker), if you prefer).
106
+ Let's use [pip](https://pip.pypa.io/en/stable/getting-started/) to install the CLI (or use the [Docker image](#docker)),
104
107
  ```bash
105
108
  $ python3 -m pip install datacontract-cli[all]
106
109
  ```
107
110
 
108
111
 
109
- We run the tests:
112
+ now, let's run the tests:
110
113
 
111
114
  ```bash
112
115
  $ datacontract test https://datacontract.com/examples/orders-latest/datacontract.yaml
@@ -144,7 +147,7 @@ Testing https://datacontract.com/examples/orders-latest/datacontract.yaml
144
147
 
145
148
  Voilà, the CLI tested that the _datacontract.yaml_ itself is valid, all records comply with the schema, and all quality attributes are met.
146
149
 
147
- We can also use the datacontract.yaml to export in many [formats](#format), e.g., to SQL:
150
+ We can also use the datacontract.yaml to export in many [formats](#format), e.g., to generate a SQL DDL:
148
151
 
149
152
  ```bash
150
153
  $ datacontract export --format sql https://datacontract.com/examples/orders-latest/datacontract.yaml
@@ -191,7 +194,7 @@ $ datacontract test datacontract.yaml
191
194
  # execute schema and quality checks on the examples within the contract
192
195
  $ datacontract test --examples datacontract.yaml
193
196
 
194
- # export data contract as html (other formats: avro, dbt, dbt-sources, dbt-staging-sql, jsonschema, odcs, rdf, sql, sodacl, terraform, ...)
197
+ # export data contract as html (other formats: avro, dbt, dbt-sources, dbt-staging-sql, jsonschema, odcs_v2, odcs_v3, rdf, sql, sodacl, terraform, ...)
195
198
  $ datacontract export --format html datacontract.yaml > datacontract.html
196
199
 
197
200
  # import avro (other formats: sql, glue, bigquery...)
@@ -224,8 +227,7 @@ if not run.has_passed():
224
227
  Choose the most appropriate installation method for your needs:
225
228
 
226
229
  ### pip
227
- Python 3.11 recommended.
228
- Python 3.12 available as pre-release release candidate for 0.9.3
230
+ Python 3.10, 3.11, and 3.12 are supported. We recommend using Python 3.11.
229
231
 
230
232
  ```bash
231
233
  python3 -m pip install datacontract-cli[all]
@@ -239,17 +241,22 @@ pipx install datacontract-cli[all]
239
241
 
240
242
  ### Docker
241
243
 
244
+ You can also use our Docker image to run the CLI tool. It is also convenient for CI/CD pipelines.
245
+
242
246
  ```bash
243
247
  docker pull datacontract/cli
244
248
  docker run --rm -v ${PWD}:/home/datacontract datacontract/cli
245
249
  ```
246
250
 
247
- Or via an alias that automatically uses the latest version:
251
+ You can create an alias for the Docker command to make it easier to use:
248
252
 
249
253
  ```bash
250
254
  alias datacontract='docker run --rm -v "${PWD}:/home/datacontract" datacontract/cli:latest'
251
255
  ```
252
256
 
257
+ _Note:_ The output of Docker command line messages is limited to 80 columns and may include line breaks. Don't pipe docker output to files if you want to export code. Use the `--output` option instead.
258
+
259
+
253
260
 
254
261
  ## Optional Dependencies
255
262
 
@@ -267,6 +274,7 @@ A list of available extras:
267
274
  | Avro Support | `pip install datacontract-cli[avro]` |
268
275
  | Google BigQuery | `pip install datacontract-cli[bigquery]` |
269
276
  | Databricks Integration | `pip install datacontract-cli[databricks]` |
277
+ | Iceberg | `pip install datacontract-cli[iceberg]` |
270
278
  | Kafka Integration | `pip install datacontract-cli[kafka]` |
271
279
  | PostgreSQL Integration | `pip install datacontract-cli[postgres]` |
272
280
  | S3 Integration | `pip install datacontract-cli[s3]` |
@@ -274,6 +282,8 @@ A list of available extras:
274
282
  | Microsoft SQL Server | `pip install datacontract-cli[sqlserver]` |
275
283
  | Trino | `pip install datacontract-cli[trino]` |
276
284
  | Dbt | `pip install datacontract-cli[dbt]` |
285
+ | Dbml | `pip install datacontract-cli[dbml]` |
286
+ | Parquet | `pip install datacontract-cli[parquet]` |
277
287
 
278
288
 
279
289
 
@@ -742,10 +752,11 @@ servers:
742
752
 
743
753
  #### Environment Variables
744
754
 
745
- | Environment Variable | Example | Description |
746
- |------------------------------------|---------|-----------------------------|
747
- | `DATACONTRACT_KAFKA_SASL_USERNAME` | `xxx` | The SASL username (key). |
748
- | `DATACONTRACT_KAFKA_SASL_PASSWORD` | `xxx` | The SASL password (secret). |
755
+ | Environment Variable | Example | Description |
756
+ |-------------------------------------|---------|----------------------------------------------------------------------------------|
757
+ | `DATACONTRACT_KAFKA_SASL_USERNAME` | `xxx` | The SASL username (key). |
758
+ | `DATACONTRACT_KAFKA_SASL_PASSWORD` | `xxx` | The SASL password (secret). |
759
+ | `DATACONTRACT_KAFKA_SASL_MECHANISM` | `PLAIN` | Default `PLAIN`. Other supported mechanisms: `SCRAM-SHA-256` and `SCRAM-SHA-512` |
749
760
 
750
761
 
751
762
  ### Postgres
@@ -800,6 +811,10 @@ models:
800
811
  fields:
801
812
  my_column_1: # corresponds to a column
802
813
  type: varchar
814
+ my_column_2: # corresponds to a column with custom trino type
815
+ type: object
816
+ config:
817
+ trinoType: row(en_us varchar, pt_br varchar)
803
818
  ```
804
819
 
805
820
  #### Environment Variables
@@ -814,19 +829,19 @@ models:
814
829
  ### export
815
830
 
816
831
  ```
817
-
818
- Usage: datacontract export [OPTIONS] [LOCATION]
819
-
820
- Convert data contract to a specific format. Prints to stdout or to the specified output file.
821
-
832
+ Usage: datacontract export [OPTIONS] [LOCATION]
833
+
834
+ Convert data contract to a specific format. Saves to file specified by `output` option if present, otherwise prints to stdout.
835
+
822
836
  ╭─ Arguments ────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
823
837
  │ location [LOCATION] The location (url or path) of the data contract yaml. [default: datacontract.yaml] │
824
838
  ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
825
839
  ╭─ Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
826
840
  │ * --format [jsonschema|pydantic-model|sodacl|dbt|dbt-sources|db The export format. [default: None] [required] │
827
- │ t-staging-sql|odcs|rdf|avro|protobuf|great-expectati
828
- ons|terraform|avro-idl|sql|sql-query|html|go|bigquer
829
- y|dbml|spark|sqlalchemy|data-caterer]
841
+ │ t-staging-sql|odcs|odcs_v2|odcs_v3|rdf|avro|protobuf │
842
+ │ |great-expectations|terraform|avro-idl|sql|sql-query │
843
+ │ |html|go|bigquery|dbml|spark|sqlalchemy|data-caterer
844
+ │ |dcs] │
830
845
  │ --output PATH Specify the file path where the exported data will be │
831
846
  │ saved. If no path is provided, the output will be │
832
847
  │ printed to stdout. │
@@ -836,6 +851,12 @@ models:
836
851
  │ file to refer to a model, e.g., `orders`, or `all` │
837
852
  │ for all models (default). │
838
853
  │ [default: all] │
854
+ │ --schema TEXT The location (url or path) of the Data Contract │
855
+ │ Specification JSON Schema │
856
+ │ [default: │
857
+ │ https://datacontract.com/datacontract.schema.json] │
858
+ │ --engine TEXT [engine] The engine used for the Great Expectations run. │
859
+ │ [default: None] │
839
860
  │ --help Show this message and exit. │
840
861
  ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
841
862
  ╭─ RDF Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
@@ -846,44 +867,47 @@ models:
846
867
  │ detect the sql dialect via the specified servers in the data contract. │
847
868
  │ [default: auto] │
848
869
  ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
849
-
850
870
  ```
851
871
 
852
872
  ```bash
853
873
  # Example export data contract as HTML
854
- datacontract export --format html > datacontract.html
874
+ datacontract export --format html --output datacontract.html
855
875
  ```
856
876
 
857
877
  Available export options:
858
878
 
859
879
  | Type | Description | Status |
860
880
  |----------------------|---------------------------------------------------------|--------|
861
- | `html` | Export to HTML | ✅ |
862
- | `jsonschema` | Export to JSON Schema | ✅ |
863
- | `odcs` | Export to Open Data Contract Standard (ODCS) | ✅ |
864
- | `sodacl` | Export to SodaCL quality checks in YAML format | ✅ |
865
- | `dbt` | Export to dbt models in YAML format | ✅ |
866
- | `dbt-sources` | Export to dbt sources in YAML format | ✅ |
867
- | `dbt-staging-sql` | Export to dbt staging SQL models | ✅ |
868
- | `rdf` | Export data contract to RDF representation in N3 format | ✅ |
869
- | `avro` | Export to AVRO models | ✅ |
870
- | `protobuf` | Export to Protobuf | ✅ |
871
- | `terraform` | Export to terraform resources | ✅ |
872
- | `sql` | Export to SQL DDL | ✅ |
873
- | `sql-query` | Export to SQL Query | ✅ |
874
- | `great-expectations` | Export to Great Expectations Suites in JSON Format | ✅ |
875
- | `bigquery` | Export to BigQuery Schemas | ✅ |
876
- | `go` | Export to Go types | ✅ |
877
- | `pydantic-model` | Export to pydantic models | ✅ |
878
- | `DBML` | Export to a DBML Diagram description | ✅ |
879
- | `spark` | Export to a Spark StructType | ✅ |
880
- | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
881
- | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
881
+ | `html` | Export to HTML | ✅ |
882
+ | `jsonschema` | Export to JSON Schema | ✅ |
883
+ | `odcs_v2` | Export to Open Data Contract Standard (ODCS) V2 | ✅ |
884
+ | `odcs_v3` | Export to Open Data Contract Standard (ODCS) V3 | ✅ |
885
+ | `odcs` | Export to Open Data Contract Standard (ODCS) V3 | ✅ |
886
+ | `sodacl` | Export to SodaCL quality checks in YAML format | ✅ |
887
+ | `dbt` | Export to dbt models in YAML format | ✅ |
888
+ | `dbt-sources` | Export to dbt sources in YAML format | ✅ |
889
+ | `dbt-staging-sql` | Export to dbt staging SQL models | ✅ |
890
+ | `rdf` | Export data contract to RDF representation in N3 format | ✅ |
891
+ | `avro` | Export to AVRO models | ✅ |
892
+ | `protobuf` | Export to Protobuf | ✅ |
893
+ | `terraform` | Export to terraform resources | ✅ |
894
+ | `sql` | Export to SQL DDL | ✅ |
895
+ | `sql-query` | Export to SQL Query | ✅ |
896
+ | `great-expectations` | Export to Great Expectations Suites in JSON Format | ✅ |
897
+ | `bigquery` | Export to BigQuery Schemas | ✅ |
898
+ | `go` | Export to Go types | ✅ |
899
+ | `pydantic-model` | Export to pydantic models | ✅ |
900
+ | `DBML` | Export to a DBML Diagram description | ✅ |
901
+ | `spark` | Export to a Spark StructType | ✅ |
902
+ | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
903
+ | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
904
+ | `dcs` | Export to Data Contract Specification in YAML format | ✅ |
882
905
  | Missing something? | Please create an issue on GitHub | TBD |
883
906
 
907
+
884
908
  #### Great Expectations
885
909
 
886
- The export function transforms a specified data contract into a comprehensive Great Expectations JSON suite.
910
+ The `export` function transforms a specified data contract into a comprehensive Great Expectations JSON suite.
887
911
  If the contract includes multiple models, you need to specify the names of the model you wish to export.
888
912
 
889
913
  ```shell
@@ -893,7 +917,22 @@ datacontract export datacontract.yaml --format great-expectations --model order
893
917
  The export creates a list of expectations by utilizing:
894
918
 
895
919
  - The data from the Model definition with a fixed mapping
896
- - The expectations provided in the quality field for each model (find here the expectations gallery https://greatexpectations.io/expectations/)
920
+ - The expectations provided in the quality field for each model (see the expectations gallery: [Great Expectations Gallery](https://greatexpectations.io/expectations/))
921
+
922
+ ### Additional Arguments
923
+
924
+ To further customize the export, the following optional arguments are available:
925
+
926
+ - **`suite_name`**: The name of the expectation suite. This suite groups all generated expectations and provides a convenient identifier within Great Expectations. If not provided, a default suite name will be generated based on the model name(s).
927
+
928
+ - **`engine`**: Specifies the engine used to run Great Expectations checks. Accepted values are:
929
+ - `pandas` — Use this when working with in-memory data frames through the Pandas library.
930
+ - `spark` — Use this for working with Spark dataframes.
931
+ - `sql` — Use this for working with SQL databases.
932
+
933
+ - **`sql_server_type`**: Specifies the type of SQL server to connect with when `engine` is set to `sql`.
934
+
935
+ Providing `sql_server_type` ensures that the appropriate SQL dialect and connection settings are applied during the expectation validation.
897
936
 
898
937
  #### RDF
899
938
 
@@ -990,14 +1029,20 @@ models:
990
1029
  ### import
991
1030
 
992
1031
  ```
993
- Usage: datacontract import [OPTIONS]
994
-
995
- Create a data contract from the given source location. Prints to stdout.
1032
+ Usage: datacontract import [OPTIONS]
1033
+
1034
+ Create a data contract from the given source location. Saves to file specified by `output` option if present, otherwise
1035
+ prints to stdout.
996
1036
 
997
1037
  ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
998
- │ * --format [sql|avro|dbt|glue|jsonschema|bigquery|odcs The format of the source file. │
999
- │ |unity|spark] [default: None] │
1038
+ │ * --format [sql|avro|dbt|dbml|glue|jsonschema|bigquery The format of the source file. │
1039
+ │ |odcs|unity|spark|iceberg|parquet] [default: None] │
1000
1040
  │ [required] │
1041
+ │ --output PATH Specify the file path where the Data │
1042
+ │ Contract will be saved. If no path is │
1043
+ │ provided, the output will be printed to │
1044
+ │ stdout. │
1045
+ │ [default: None] │
1001
1046
  │ --source TEXT The path to the file or Glue Database that │
1002
1047
  │ should be imported. │
1003
1048
  │ [default: None] │
@@ -1026,6 +1071,9 @@ models:
1026
1071
  │ file (repeat for multiple table names, leave │
1027
1072
  │ empty for all tables in the file). │
1028
1073
  │ [default: None] │
1074
+ │ --iceberg-table TEXT Table name to assign to the model created │
1075
+ │ from the Iceberg schema. │
1076
+ │ [default: None] │
1029
1077
  │ --help Show this message and exit. │
1030
1078
  ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
1031
1079
  ```
@@ -1034,6 +1082,8 @@ Example:
1034
1082
  ```bash
1035
1083
  # Example import from SQL DDL
1036
1084
  datacontract import --format sql --source my_ddl.sql
1085
+ # To save to file
1086
+ datacontract import --format sql --source my_ddl.sql --output datacontract.yaml
1037
1087
  ```
1038
1088
 
1039
1089
  Available import options:
@@ -1051,12 +1101,26 @@ Available import options:
1051
1101
  | `spark` | Import from Spark StructTypes | ✅ |
1052
1102
  | `dbml` | Import from DBML models | ✅ |
1053
1103
  | `protobuf` | Import from Protobuf schemas | TBD |
1104
+ | `iceberg` | Import from an Iceberg JSON Schema Definition | partial |
1105
+ | `parquet` | Import from Parquet File Metadata | ✅ |
1054
1106
  | Missing something? | Please create an issue on GitHub | TBD |
1055
1107
 
1056
1108
 
1109
+ #### ODCS
1110
+
1111
+ Import from Open Data Contract Standard (ODCS) v2 or v3.
1112
+ The importer automatically detects the ODCS version and imports the data contract.
1113
+
1114
+ Examples:
1115
+
1116
+ ```bash
1117
+ # Example import from ODCS
1118
+ datacontract import --format odcs --source my_data_contract.odcs.yaml
1119
+ ```
1120
+
1057
1121
  #### BigQuery
1058
1122
 
1059
- Bigquery data can either be imported off of JSON Files generated from the table descriptions or directly from the Bigquery API. In case you want to use JSON Files, specify the `source` parameter with a path to the JSON File.
1123
+ BigQuery data can either be imported off of JSON Files generated from the table descriptions or directly from the BigQuery API. In case you want to use JSON Files, specify the `source` parameter with a path to the JSON File.
1060
1124
 
1061
1125
  To import from the Bigquery API, you have to _omit_ `source` and instead need to provide `bigquery-project` and `bigquery-dataset`. Additionally you may specify `bigquery-table` to enumerate the tables that should be imported. If no tables are given, _all_ available tables of the dataset will be imported.
1062
1126
 
@@ -1168,6 +1232,15 @@ datacontract import --format dbml --source <file_path> --dbml-table <table_name_
1168
1232
  datacontract import --format dbml --source <file_path> --dbml-table <table_name_1> --dbml-schema <schema_1>
1169
1233
  ```
1170
1234
 
1235
+ #### Iceberg
1236
+
1237
+ Importing from an [Iceberg Table JSON Schema Definition](https://iceberg.apache.org/spec/#appendix-c-json-serialization). Specify the location of JSON files using the `source` parameter.
1238
+
1239
+ Examples:
1240
+
1241
+ ```bash
1242
+ datacontract import --format iceberg --source ./tests/fixtures/iceberg/simple_schema.json --iceberg-table test-table
1243
+ ```
1171
1244
 
1172
1245
  ### breaking
1173
1246
 
@@ -1232,6 +1305,16 @@ datacontract import --format dbml --source <file_path> --dbml-table <table_name_
1232
1305
  ╰──────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
1233
1306
  ```
1234
1307
 
1308
+ Examples:
1309
+
1310
+ ```
1311
+ # create a catalog right in the current folder
1312
+ datacontract catalog --output "."
1313
+
1314
+ # Create a catalog based on a filename convention
1315
+ datacontract catalog --files "*.odcs.yaml"
1316
+ ```
1317
+
1235
1318
  ### Publish
1236
1319
 
1237
1320
  ```
@@ -1549,7 +1632,7 @@ if __name__ == "__main__":
1549
1632
  Output
1550
1633
 
1551
1634
  ```yaml
1552
- dataContractSpecification: 0.9.3
1635
+ dataContractSpecification: 1.1.0
1553
1636
  id: uuid-custom
1554
1637
  info:
1555
1638
  title: my_custom_imported_data
@@ -0,0 +1,105 @@
1
+ datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
+ datacontract/cli.py,sha256=F3MKhqNMW0pFh0B54ik4DeMECWJANAQ1mEm5a7yAtSc,16506
3
+ datacontract/data_contract.py,sha256=rw9klvbIoaOwtBflpspqDeOXs3YY4qlQXsuOBROAJT0,14669
4
+ datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
+ datacontract/web.py,sha256=MePB-XJvTZObMzrk3tIW8-bAvk-QcEEt767RVos3Zoc,2105
6
+ datacontract/breaking/breaking.py,sha256=jCYKEH410p9E9MUi1UQJhtIQWjPEHqYy9dC8Z1viYnY,20398
7
+ datacontract/breaking/breaking_rules.py,sha256=OPCBtUHd5erAvsdC8KChTHIAItJ5GZZqwT2KXmuIA1A,3914
8
+ datacontract/catalog/catalog.py,sha256=wmv_2BBxHhNBlilAmQHHhNe4tK14DowkyIOVaQW2DWU,2691
9
+ datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
10
+ datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=TXO47ON3NjwYI4Y2eBYklMOCo7vAtYzqLPAhZhii6dg,1565
11
+ datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
12
+ datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=PxDvH2T2bUbS-V6euENfwTIa_R-CMTZFxiRnyxvOTp4,9820
13
+ datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=vuz_hLF2VD8LR_prjQpPLBU8Is-iHLAvqp4KwclOv9I,1157
14
+ datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
15
+ datacontract/engines/soda/check_soda_execute.py,sha256=nDmbI2luc8u68PyfBL7eZqI_TK4EwDnKnwrhpeSQQQo,7869
16
+ datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
17
+ datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
18
+ datacontract/engines/soda/connections/databricks.py,sha256=lpMju-o_TzLZeF0EEVwePPr8JahqvFnj5xRYjF15fc8,561
19
+ datacontract/engines/soda/connections/duckdb.py,sha256=c9Fq8uqkvuyc5YsnErlwFUALE4SvlO7cToMMpxzVadY,6272
20
+ datacontract/engines/soda/connections/kafka.py,sha256=bSC-TmZPmQvOr1rMaDtankcfYni4hmfbQooFaSlC_uc,8452
21
+ datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
22
+ datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
23
+ datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
24
+ datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
25
+ datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
26
+ datacontract/export/avro_converter.py,sha256=xNRo0eXR3n--EA8SziarWqhlvOYtQWkWJtEiVK5QKXI,4451
27
+ datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF6dUU9roVqnFo,9878
28
+ datacontract/export/bigquery_converter.py,sha256=VSBdVGWrlar18ETzgNArxDzk8Zt5JcAc_wKjfwLpG_A,4734
29
+ datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
30
+ datacontract/export/data_caterer_converter.py,sha256=MYUhoRjKSTpQFUZjc0CG6daPRpvm5j51wS0NUTSzQNU,5324
31
+ datacontract/export/dbml_converter.py,sha256=Tqsjicvh-NyEnpfn2Lnrpbhn9SQBD8uGlZY6Sb0VBV8,4737
32
+ datacontract/export/dbt_converter.py,sha256=0i6EPOdAnzQgV7MtQxHjxZlM4CdgJI3iygG9BvPuuy8,9389
33
+ datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
34
+ datacontract/export/exporter.py,sha256=62ZO4Sn0V_QCxoShvjqZOBx2q0JLrsvoSfx37JhS2IQ,2979
35
+ datacontract/export/exporter_factory.py,sha256=cdygE42dQeXik1rk0BZ8RbsjJwEJcMDFjgkRT9hn434,5569
36
+ datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
37
+ datacontract/export/great_expectations_converter.py,sha256=zMaHaj5DLj_Q_q-iFEa7EZHW-qHdFMxWL4MiMIFKV80,10505
38
+ datacontract/export/html_export.py,sha256=ojazWrb0AwSc7Vr72M_otMo-3PA8mfi8tfIy9BCXk9o,2578
39
+ datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
40
+ datacontract/export/odcs_v2_exporter.py,sha256=0nMI-zTENNs94bllm_Qv3V-8-QyS8jnBW1Be9fEJCmU,4679
41
+ datacontract/export/odcs_v3_exporter.py,sha256=C1Ax4BYeIu_uet_gYYws-sZanE_voGLiuKNHHRjQaV4,12093
42
+ datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
43
+ datacontract/export/protobuf_converter.py,sha256=9K0fzBGbqlj9AhQumw2oq53hyn_QDCT3UlyH2uXJdC0,3192
44
+ datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
45
+ datacontract/export/rdf_converter.py,sha256=4gnKus37Geth4MJ3Ruc8AbnpD_Ll9OCx8oTIEKScvh8,6435
46
+ datacontract/export/sodacl_converter.py,sha256=zLfQ2VVWYIcvpja3-amMeqUPtfaioX14-skhPteu3vc,10600
47
+ datacontract/export/spark_converter.py,sha256=-6P2_VRFqGfSF7n_lJcD-fuY9Pv8qoH-ud6g8Zimpz4,7190
48
+ datacontract/export/sql_converter.py,sha256=trQV5M76rZ4EpGj-0jkzr9PhGDmMC_RaxDFIAmx1BaQ,4801
49
+ datacontract/export/sql_type_converter.py,sha256=MGTH1hXC90TLMEap1v_Fkahf4N6Ju8yf01pCT7wJrpg,11984
50
+ datacontract/export/sqlalchemy_converter.py,sha256=Cx1_7IFaduDWcTYytboHuKli_ve8imf9LW0_5F471No,6429
51
+ datacontract/export/terraform_converter.py,sha256=ExFoEvErVk-gBnWJiqC38SxDUmUEydpACWc917l5RyM,2163
52
+ datacontract/imports/avro_importer.py,sha256=hpGvO6uv2zcupJC8-wC-c-vbjNb83IQ560a5F3MsEFA,9937
53
+ datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056sxLfDDh3vjo8E,8419
54
+ datacontract/imports/dbml_importer.py,sha256=PhEurAkqPerX1FR6zsks8RsMUpJJWWJojn2Msmcm60Y,3869
55
+ datacontract/imports/dbt_importer.py,sha256=kzQsm_HrKybTr4SSyhZ5n9dHPbd9mg9ojdkRChywM0g,3237
56
+ datacontract/imports/glue_importer.py,sha256=fiJPkvfwOCsaKKCGW19-JM5CCGXZ2mkNrVtUzp2iw6g,8370
57
+ datacontract/imports/iceberg_importer.py,sha256=OFSUgjWn3LQ9QKSAJprIKXAh8CLdKAR6U7QC8KB6fcE,5396
58
+ datacontract/imports/importer.py,sha256=oi_cybcOimEZ4Wc4EJW_2OQoAyErbadPo0foHdVeOmE,860
59
+ datacontract/imports/importer_factory.py,sha256=QBW2tIuQ5Do56-Gtyh7fWLELcCeItYbK2FYq89K5oLw,3486
60
+ datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
61
+ datacontract/imports/odcs_importer.py,sha256=w1TumifVSDgSwsxWV2VmbdzHkb_3vNBKt6mEVE-P284,2042
62
+ datacontract/imports/odcs_v2_importer.py,sha256=s6-WjEToeTvHxSCdkEsiPVGz7ZPahtVY90z7c56SI7A,7151
63
+ datacontract/imports/odcs_v3_importer.py,sha256=TXutHthpl7DvMCS-HZ49Pvs1HKRLT_VWquXaXB8xIR8,12814
64
+ datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
65
+ datacontract/imports/spark_importer.py,sha256=h2na1YtdJYu9Oz07tSvwx8L4RX6aLCCDVkAv-RTKyVA,5100
66
+ datacontract/imports/sql_importer.py,sha256=0zZ7eHXg7xj843DZ14FBgUg2vzdJvP2he2ThSoaDtko,2890
67
+ datacontract/imports/unity_importer.py,sha256=UcPYABhLZaWNl5IkCazwAuMoVDdujsu_QteuV_Q9hgI,6737
68
+ datacontract/init/download_datacontract_file.py,sha256=pj_4mhWKlEtfueWohDgkb1nyuG5ERDipUDszxKwpZUs,413
69
+ datacontract/integration/datamesh_manager.py,sha256=RWIrOKXtyeiDSTRthsLOXV1PxTEXFFasYp1F2ZCw77s,2749
70
+ datacontract/integration/opentelemetry.py,sha256=oTIhP066qM2By9Evttq9LDUgFq3sr8In0ENdS1TIsz4,3827
71
+ datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
72
+ datacontract/lint/lint.py,sha256=Ew0n3ooXxmCVnUxJ_cDoacsD82QdMZYnKrxnG9J0sWQ,5077
73
+ datacontract/lint/resolve.py,sha256=fzHcz5UolXvleSWJfctjvl4wGNTIgOuw9nz8gCLTtGM,9607
74
+ datacontract/lint/resources.py,sha256=nfeZmORh1aP7EKpMKCmfbS04Te8pQ0nz64vJVkHOq3c,647
75
+ datacontract/lint/schema.py,sha256=W7MJWWQk0qxtp5q4X3waDKUnGJVXVMBkbxXDG-A1-aw,1469
76
+ datacontract/lint/urls.py,sha256=W7Edcd8Iw4NjokVtuSshEfzsXshQpuh_tpO73aM9dsM,2294
77
+ datacontract/lint/linters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
78
+ datacontract/lint/linters/description_linter.py,sha256=7fla7FQwDa-1UrLFCFKFoeUzkR91e4o9W6ySKSW6_U8,1555
79
+ datacontract/lint/linters/example_model_linter.py,sha256=tmgxGxC-GzMUxFumTOuuASdz9ZlncBZHasPGJsWnDT8,3973
80
+ datacontract/lint/linters/field_pattern_linter.py,sha256=lreGvOW3v_Glah_SriVe9ejZ7EuR6_gJsdr2tEORB_8,1084
81
+ datacontract/lint/linters/field_reference_linter.py,sha256=PSwLfct-x97on1dSGdZ3u7MGqy1oexVc6maRZeh7gIA,1551
82
+ datacontract/lint/linters/notice_period_linter.py,sha256=6r413aEVOVHWJHb33-68ecVTAUNzbxL4me6ebmPcgpE,2130
83
+ datacontract/lint/linters/quality_schema_linter.py,sha256=ZXFHlMLFV1GZejizbUdfW6-msffFECoDGNsdynaPnog,2182
84
+ datacontract/lint/linters/valid_constraints_linter.py,sha256=qTFh1X3I9wOtAxuXlvbGesCQ3GQ6iWc-MT_ttIybRsw,4916
85
+ datacontract/model/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
86
+ datacontract/model/data_contract_specification.py,sha256=O5nraoORyUwEx-HZeBcVaBna3ZtfhFTV2Z_-U7vaE8g,7733
87
+ datacontract/model/exceptions.py,sha256=zW9NoyzwsND-c9UqgyTVuezUVGEc6KK1Uc2zl12loyo,1178
88
+ datacontract/model/odcs.py,sha256=9PXwm72FASjNwteF1Jn591iP3-St0aq16Cpsk0PkEW8,389
89
+ datacontract/model/run.py,sha256=NMPCSwjnICOjEycgYDgBUUXoj8lWfFp2DVxotzOvWv8,2809
90
+ datacontract/templates/datacontract.html,sha256=w9xGnjYFtFmmsU90_I1NHVLNcuS22flPw51oIAn56WY,13442
91
+ datacontract/templates/index.html,sha256=nyi9nrEsSPuOrXszKzqkqwZGveXsdy1PLJp2g9oDw0A,12517
92
+ datacontract/templates/partials/datacontract_information.html,sha256=7ZBxgEgi2XndKBypeOpe03oCSRPOujC6NVlN7zexGNM,6221
93
+ datacontract/templates/partials/datacontract_servicelevels.html,sha256=ed3QgB11B0Qq2h_NwaroGZ4pQMBPEhfeQaoS-qEipqY,11401
94
+ datacontract/templates/partials/datacontract_terms.html,sha256=1cnJcOTpxwot2BCuZmkLF_SPfiVloLs3c8mj9WfE4sc,1865
95
+ datacontract/templates/partials/definition.html,sha256=jEvjThHghHT2bo4oMAfe3H0lzc4ZuqDsJFOpN9w3Nho,7085
96
+ datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4xX5j6suasXHNM88,1204
97
+ datacontract/templates/partials/model_field.html,sha256=kh_ZIqJuayyxN-zDNIUPIoXOZeehGxXQxiImYB6G5qY,6946
98
+ datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
99
+ datacontract/templates/style/output.css,sha256=F3oEhUpuv8kA_dWr4pJymBS_Ju6huIIZdLMkJzPzMmU,25647
100
+ datacontract_cli-0.10.15.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
101
+ datacontract_cli-0.10.15.dist-info/METADATA,sha256=Bj38D_3c0H1jVgDCweF0Poalj5iuFE5y3Xq7GJIEQ_I,95349
102
+ datacontract_cli-0.10.15.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
103
+ datacontract_cli-0.10.15.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
104
+ datacontract_cli-0.10.15.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
105
+ datacontract_cli-0.10.15.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (75.1.0)
2
+ Generator: setuptools (75.6.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5