datacontract-cli 0.10.11__py3-none-any.whl → 0.10.12__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (35)
  1. datacontract/cli.py +19 -3
  2. datacontract/data_contract.py +5 -10
  3. datacontract/engines/fastjsonschema/check_jsonschema.py +11 -0
  4. datacontract/engines/fastjsonschema/s3/s3_read_files.py +2 -0
  5. datacontract/engines/soda/check_soda_execute.py +2 -8
  6. datacontract/engines/soda/connections/duckdb.py +23 -24
  7. datacontract/engines/soda/connections/kafka.py +81 -23
  8. datacontract/export/avro_converter.py +12 -2
  9. datacontract/export/dbml_converter.py +3 -2
  10. datacontract/export/exporter.py +1 -0
  11. datacontract/export/exporter_factory.py +6 -0
  12. datacontract/export/spark_converter.py +4 -0
  13. datacontract/export/sql_type_converter.py +64 -29
  14. datacontract/export/sqlalchemy_converter.py +169 -0
  15. datacontract/imports/avro_importer.py +1 -0
  16. datacontract/imports/bigquery_importer.py +2 -2
  17. datacontract/imports/dbml_importer.py +112 -0
  18. datacontract/imports/dbt_importer.py +67 -91
  19. datacontract/imports/glue_importer.py +62 -58
  20. datacontract/imports/importer.py +2 -1
  21. datacontract/imports/importer_factory.py +5 -0
  22. datacontract/imports/odcs_importer.py +1 -1
  23. datacontract/imports/spark_importer.py +29 -10
  24. datacontract/imports/sql_importer.py +1 -1
  25. datacontract/imports/unity_importer.py +1 -1
  26. datacontract/integration/{publish_datamesh_manager.py → datamesh_manager.py} +33 -5
  27. datacontract/integration/{publish_opentelemetry.py → opentelemetry.py} +1 -1
  28. datacontract/model/data_contract_specification.py +6 -2
  29. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/METADATA +103 -28
  30. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/RECORD +34 -33
  31. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/WHEEL +1 -1
  32. datacontract/publish/publish.py +0 -32
  33. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/LICENSE +0 -0
  34. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/entry_points.txt +0 -0
  35. {datacontract_cli-0.10.11.dist-info → datacontract_cli-0.10.12.dist-info}/top_level.txt +0 -0
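For reference, picking up this release with pip follows the usual upgrade pattern (a sketch; the pinned version matches this diff):

```bash
pip install --upgrade datacontract-cli==0.10.12
```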
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: datacontract-cli
- Version: 0.10.11
+ Version: 0.10.12
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  Project-URL: Homepage, https://cli.datacontract.com
@@ -11,55 +11,54 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: typer[all]<0.13,>=0.9
+ Requires-Dist: typer<0.13,>=0.12
  Requires-Dist: pydantic<2.9.0,>=2.8.2
  Requires-Dist: pyyaml~=6.0.1
  Requires-Dist: requests<2.33,>=2.31
- Requires-Dist: fastapi==0.111.1
+ Requires-Dist: fastapi==0.112.0
+ Requires-Dist: uvicorn==0.30.5
+ Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
  Requires-Dist: fastparquet==2024.5.0
  Requires-Dist: python-multipart==0.0.9
  Requires-Dist: rich~=13.7.0
- Requires-Dist: simple-ddl-parser==1.5.2
+ Requires-Dist: simple-ddl-parser==1.6.0
+ Requires-Dist: duckdb==1.0.0
  Requires-Dist: soda-core-duckdb<3.4.0,>=3.3.1
  Requires-Dist: setuptools>=60
- Requires-Dist: duckdb==1.0.0
- Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
  Requires-Dist: python-dotenv~=1.0.0
  Requires-Dist: rdflib==7.0.0
  Requires-Dist: opentelemetry-exporter-otlp-proto-grpc~=1.16
  Requires-Dist: opentelemetry-exporter-otlp-proto-http~=1.16
- Requires-Dist: boto3<1.34.137,>=1.34.41
- Requires-Dist: botocore<1.34.137,>=1.34.41
+ Requires-Dist: boto3<1.35.6,>=1.34.41
  Requires-Dist: jinja-partials>=0.2.1
  Provides-Extra: all
- Requires-Dist: datacontract-cli[bigquery,databricks,deltalake,kafka,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
+ Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,kafka,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
  Provides-Extra: avro
- Requires-Dist: avro==1.11.3; extra == "avro"
+ Requires-Dist: avro==1.12.0; extra == "avro"
  Provides-Extra: bigquery
  Requires-Dist: soda-core-bigquery<3.4.0,>=3.3.1; extra == "bigquery"
  Provides-Extra: databricks
  Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "databricks"
  Requires-Dist: databricks-sql-connector<3.4.0,>=3.1.2; extra == "databricks"
  Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.1; extra == "databricks"
- Provides-Extra: deltalake
- Requires-Dist: deltalake<0.19,>=0.17; extra == "deltalake"
+ Provides-Extra: dbml
+ Requires-Dist: pydbml>=1.1.1; extra == "dbml"
+ Provides-Extra: dbt
+ Requires-Dist: dbt-core>=1.8.0; extra == "dbt"
  Provides-Extra: dev
  Requires-Dist: datacontract-cli[all]; extra == "dev"
- Requires-Dist: httpx==0.27.0; extra == "dev"
- Requires-Dist: ruff; extra == "dev"
+ Requires-Dist: httpx==0.27.2; extra == "dev"
+ Requires-Dist: kafka-python; extra == "dev"
+ Requires-Dist: moto==5.0.13; extra == "dev"
+ Requires-Dist: pandas>=2.1.0; extra == "dev"
  Requires-Dist: pre-commit<3.9.0,>=3.7.1; extra == "dev"
+ Requires-Dist: pyarrow>=12.0.0; extra == "dev"
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-xdist; extra == "dev"
- Requires-Dist: moto==5.0.11; extra == "dev"
- Requires-Dist: pymssql==2.3.0; extra == "dev"
- Requires-Dist: kafka-python; extra == "dev"
+ Requires-Dist: pymssql==2.3.1; extra == "dev"
+ Requires-Dist: ruff; extra == "dev"
+ Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.8.1; extra == "dev"
  Requires-Dist: trino==0.329.0; extra == "dev"
- Requires-Dist: testcontainers==4.7.2; extra == "dev"
- Requires-Dist: testcontainers[core]==4.7.2; extra == "dev"
- Requires-Dist: testcontainers[minio]==4.7.2; extra == "dev"
- Requires-Dist: testcontainers[postgres]==4.7.2; extra == "dev"
- Requires-Dist: testcontainers[kafka]==4.7.2; extra == "dev"
- Requires-Dist: testcontainers[mssql]==4.7.2; extra == "dev"
  Provides-Extra: kafka
  Requires-Dist: datacontract-cli[avro]; extra == "kafka"
  Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "kafka"
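Note that the `all` extra above swaps the dropped `deltalake` extra for the new `dbml` and `dbt` extras. Installing just the new ones would look like this sketch:

```bash
pip install "datacontract-cli[dbml,dbt]"
```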
@@ -68,7 +67,7 @@ Requires-Dist: soda-core-postgres<3.4.0,>=3.3.1; extra == "postgres"
  Provides-Extra: s3
  Requires-Dist: s3fs==2024.6.1; extra == "s3"
  Provides-Extra: snowflake
- Requires-Dist: snowflake-connector-python[pandas]<3.12,>=3.6; extra == "snowflake"
+ Requires-Dist: snowflake-connector-python[pandas]<3.13,>=3.6; extra == "snowflake"
  Requires-Dist: soda-core-snowflake<3.4.0,>=3.3.1; extra == "snowflake"
  Provides-Extra: sqlserver
  Requires-Dist: soda-core-sqlserver<3.4.0,>=3.3.1; extra == "sqlserver"
@@ -82,7 +81,7 @@ Requires-Dist: soda-core-trino<3.4.0,>=3.3.1; extra == "trino"
  <img alt="Test Workflow" src="https://img.shields.io/github/actions/workflow/status/datacontract/datacontract-cli/ci.yaml?branch=main"></a>
  <a href="https://github.com/datacontract/datacontract-cli">
  <img alt="Stars" src="https://img.shields.io/github/stars/datacontract/datacontract-cli" /></a>
- <a href="https://datacontract.com/slack" rel="nofollow"><img src="https://camo.githubusercontent.com/5ade1fd1e76a6ab860802cdd2941fe2501e2ca2cb534e5d8968dbf864c13d33d/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f736c61636b2d6a6f696e5f636861742d77686974652e7376673f6c6f676f3d736c61636b267374796c653d736f6369616c" alt="Slack Status" data-canonical-src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" style="max-width: 100%;"></a>
+ <a href="https://datacontract.com/slack" rel="nofollow"><img src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" alt="Slack Status" data-canonical-src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" style="max-width: 100%;"></a>
  </p>

  The `datacontract` CLI is an open source command-line tool for working with [Data Contracts](https://datacontract.com/).
@@ -267,13 +266,13 @@ A list of available extras:
  | Avro Support | `pip install datacontract-cli[avro]` |
  | Google BigQuery | `pip install datacontract-cli[bigquery]` |
  | Databricks Integration | `pip install datacontract-cli[databricks]` |
- | Deltalake Integration | `pip install datacontract-cli[deltalake]` |
  | Kafka Integration | `pip install datacontract-cli[kafka]` |
  | PostgreSQL Integration | `pip install datacontract-cli[postgres]` |
  | S3 Integration | `pip install datacontract-cli[s3]` |
  | Snowflake Integration | `pip install datacontract-cli[snowflake]` |
  | Microsoft SQL Server | `pip install datacontract-cli[sqlserver]` |
  | Trino | `pip install datacontract-cli[trino]` |
+ | Dbt | `pip install datacontract-cli[dbt]` |

@@ -406,6 +405,12 @@ Feel free to create an [issue](https://github.com/datacontract/datacontract-cli/
  Data Contract CLI can test data that is stored in S3 buckets or any S3-compliant endpoints in various formats.

+ - CSV
+ - JSON
+ - Delta
+ - Parquet
+ - Iceberg (coming soon)
+
  #### Examples

  ##### JSON
@@ -444,6 +449,32 @@ servers:
+ ### Google Cloud Storage (GCS)
+
+ The [S3](#S3) integration also works with files on Google Cloud Storage through its [interoperability](https://cloud.google.com/storage/docs/interoperability).
+ Use `https://storage.googleapis.com` as the endpoint URL.
+
+ #### Example
+
+ datacontract.yaml
+ ```yaml
+ servers:
+   production:
+     type: s3
+     endpointUrl: https://storage.googleapis.com
+     location: s3://bucket-name/path/*/*.json # use the s3:// scheme instead of gs://
+     format: json
+     delimiter: new_line # new_line, array, or none
+ ```
+
+ #### Environment Variables
+
+ | Environment Variable | Example | Description |
+ |-------------------------------------|----------------|-------------------------------------------------------------------------------------------|
+ | `DATACONTRACT_S3_ACCESS_KEY_ID` | `GOOG1EZZZ...` | The GCS [HMAC Key](https://cloud.google.com/storage/docs/authentication/hmackeys) Key ID |
+ | `DATACONTRACT_S3_SECRET_ACCESS_KEY` | `PDWWpb...` | The GCS [HMAC Key](https://cloud.google.com/storage/docs/authentication/hmackeys) Secret |
+
+
  ### BigQuery

  We support authentication to BigQuery using Service Account Key. The used Service Account should include the roles:
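Tying the new GCS section together: with an HMAC key pair, a test run against GCS would look like the following sketch (bucket path and key values are placeholders; the env var names come from the table above):

```bash
# Placeholder HMAC credentials; substitute your own key pair
export DATACONTRACT_S3_ACCESS_KEY_ID="GOOG1EZZZ..."
export DATACONTRACT_S3_SECRET_ACCESS_KEY="PDWWpb..."
datacontract test datacontract.yaml
```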
@@ -794,7 +825,7 @@ models:
  │ * --format [jsonschema|pydantic-model|sodacl|dbt|dbt-sources|db The export format. [default: None] [required] │
  │ t-staging-sql|odcs|rdf|avro|protobuf|great-expectati │
  │ ons|terraform|avro-idl|sql|sql-query|html|go|bigquer │
- │ y|dbml|spark]
+ │ y|dbml|spark|sqlalchemy]
  │ --output PATH Specify the file path where the exported data will be │
  │ saved. If no path is provided, the output will be │
  │ printed to stdout. │
@@ -845,6 +876,7 @@ Available export options:
  | `pydantic-model` | Export to pydantic models | ✅ |
  | `DBML` | Export to a DBML Diagram description | ✅ |
  | `spark` | Export to a Spark StructType | ✅ |
+ | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
  | Missing something? | Please create an issue on GitHub | TBD |

  #### Great Expectations
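An invocation of the newly added `sqlalchemy` exporter would look like this sketch (file names are placeholders; `--format` and `--output` appear in the help text above):

```bash
# Export the contract's models as SQLAlchemy model classes
datacontract export --format sqlalchemy --output models.py datacontract.yaml
```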
@@ -918,6 +950,7 @@ models:
  description: Example for AVRO with Timestamp (microsecond precision) https://avro.apache.org/docs/current/spec.html#Local+timestamp+%28microsecond+precision%29
  type: long
  example: 1672534861000000 # Equivalent to 2023-01-01 01:01:01 in microseconds
+ required: true
  config:
    avroLogicalType: local-timestamp-micros
    avroDefault: 1672534861000000
@@ -932,6 +965,7 @@ models:
  - **description**: A textual description of the field.
  - **type**: The data type of the field. In this example, it is `long`.
  - **example**: An example value for the field.
+ - **required**: Whether this field is required (as opposed to optional/nullable).
  - **config**: Section to specify custom Avro properties.
    - **avroLogicalType**: Specifies the logical type of the field in Avro. In this example, it is `local-timestamp-micros`.
    - **avroDefault**: Specifies the default value for the field in Avro. In this example, it is 1672534861000000, which corresponds to `2023-01-01 01:01:01 UTC`.
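This diff does not spell out how `required` maps to Avro, but the conventional mapping (which the reworked avro_converter.py presumably follows) represents optional fields as a union with null, e.g. `["null", "long"]`, while `required: true` keeps the plain type; verify against the exporter's actual output.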
@@ -968,6 +1002,14 @@ models:
  │ names, leave empty for all models in the │
  │ dataset). │
  │ [default: None] │
+ │ --dbml-schema TEXT List of schema names to import from the DBML │
+ │ file (repeat for multiple schema names, │
+ │ leave empty for all tables in the file). │
+ │ [default: None] │
+ │ --dbml-table TEXT List of table names to import from the DBML │
+ │ file (repeat for multiple table names, leave │
+ │ empty for all tables in the file). │
+ │ [default: None] │
  │ --help Show this message and exit. │
  ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
  ```
@@ -991,6 +1033,7 @@ Available import options:
  | `dbt` | Import from dbt models | ✅ |
  | `odcs` | Import from Open Data Contract Standard (ODCS) | ✅ |
  | `spark` | Import from Spark StructTypes | ✅ |
+ | `dbml` | Import from DBML models | ✅ |
  | `protobuf` | Import from Protobuf schemas | TBD |
  | Missing something? | Please create an issue on GitHub | TBD |

@@ -1078,6 +1121,38 @@ Example:
  datacontract import --format spark --source "users,orders"
  ```

+ #### DBML
+
+ Importing from DBML documents.
+ **NOTE:** Since DBML does _not_ have strict requirements on column types, this import _may_ create invalid data contracts, as not all field types can be mapped cleanly. In that case, you will have to adapt the generated document manually.
+ We also assume that the descriptions for models and fields are stored in a Note within the DBML model.
+
+ You may pass the `dbml-table` or `dbml-schema` parameter to enumerate the tables or schemas that should be imported.
+ If no tables are given, _all_ available tables of the source will be imported. Likewise, if no schema is given, _all_ schemas are imported.
+
+ Examples:
+
+ ```bash
+ # Example import from DBML file, importing everything
+ datacontract import --format dbml --source <file_path>
+ ```
+
+ ```bash
+ # Example import from DBML file, filtering for tables from specific schemas
+ datacontract import --format dbml --source <file_path> --dbml-schema <schema_1> --dbml-schema <schema_2>
+ ```
+
+ ```bash
+ # Example import from DBML file, filtering for tables with specific names
+ datacontract import --format dbml --source <file_path> --dbml-table <table_name_1> --dbml-table <table_name_2>
+ ```
+
+ ```bash
+ # Example import from DBML file, filtering for tables with specific names from a specific schema
+ datacontract import --format dbml --source <file_path> --dbml-table <table_name_1> --dbml-schema <schema_1>
+ ```
+
+
  ### breaking

  ```
@@ -1427,7 +1502,7 @@ if __name__ == "__main__":
  "models": [
  {
  "name": "model1",
- "desctiption": "model description from app",
+ "description": "model description from app",
  "columns": [
  {
  "name": "columnA",
@@ -1,6 +1,6 @@
  datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datacontract/cli.py,sha256=LZYYECXjCk-AZmPaihuRJkknEAuM8OxvGoK40WpAeAs,14261
- datacontract/data_contract.py,sha256=yLisbPzA_GJdQjh2rUeIVG0R_EjFBqqHxxXb2PAq_ec,14075
+ datacontract/cli.py,sha256=IFwIjs3q2SLEmBq02njzSGYUxGNPrJa15U3JTfThnn4,14931
+ datacontract/data_contract.py,sha256=zW2Mfju4cv5NzjWvfy9EFZHFYxvZaTWs4ILB7mS66wg,13883
  datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datacontract/web.py,sha256=Ov50NJVbQiaxjHXnVOvRw9moMkKHHcxFhdzwbA13QOw,2134
  datacontract/breaking/breaking.py,sha256=l0ZwUX8G4QK_0nLRLJHArpwgTWTD7WQYMuAp3l_y1bY,12184
@@ -10,28 +10,28 @@ datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
  datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=TXO47ON3NjwYI4Y2eBYklMOCo7vAtYzqLPAhZhii6dg,1565
  datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=XHvxX6BrTdZk2wN55zd6ER4k7ILDyGzjbCbZxQAJ2iE,665
  datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py,sha256=CsxFGyInjpsylmoofByV1b-wpFhG1jtRWxSUpp-BXk8,1525
- datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=LU-RF6uXgUDvXCB3CQuZJiOenQk4kG5pA6hf6A61pzA,5891
- datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=HS1dgwkUp3CwuI6VtZnRpOUnTd04E3yMgLbkK5jcW7o,1057
+ datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=tI5mfdxilAn-zmq02s8t6pNww9-uJYTOjooO0HimwQo,6321
+ datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=vuz_hLF2VD8LR_prjQpPLBU8Is-iHLAvqp4KwclOv9I,1157
  datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datacontract/engines/soda/check_soda_execute.py,sha256=mqCqVdBFTuJwOiQBf15KSj48ccQgbewFPSMzrAWF_Hk,7820
+ datacontract/engines/soda/check_soda_execute.py,sha256=BcPPq2kjOk5dMMjlVC-cspU3a1rpRfvRJ8XoXeBu79Y,7724
  datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
  datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
  datacontract/engines/soda/connections/databricks.py,sha256=lpMju-o_TzLZeF0EEVwePPr8JahqvFnj5xRYjF15fc8,561
- datacontract/engines/soda/connections/duckdb.py,sha256=ZZf_ApeMO4FK2SyPI1otjgH92OfxwsZayBF_jjPLs9s,6788
- datacontract/engines/soda/connections/kafka.py,sha256=vh7z-4ZsmpXiYcogf3oTkagrAPcq6HG2SccnxNwFeVQ,5635
+ datacontract/engines/soda/connections/duckdb.py,sha256=OVaxDO9HwfD4M6FYDIDS4zwnUJ20uAM1jz_Fy3EkClo,6271
+ datacontract/engines/soda/connections/kafka.py,sha256=VPbH0GfJO_YuBGTyQ3xehjm-sVzSGn4y0bt-fZqPcok,7481
  datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
  datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
  datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
  datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
  datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datacontract/export/avro_converter.py,sha256=CbrGsGQCi-RNrQ7FfxnuGv3oeT9tHrkSl8dhFbLZyjU,3788
+ datacontract/export/avro_converter.py,sha256=n14GtP88c5lpcxyxF5W86k7xYGiwc6j0PIedElbeWjM,4171
  datacontract/export/avro_idl_converter.py,sha256=-zZxV0OAC76tMkAA9hXbEtp2wtpildKJb0OIbPhgoJE,9794
  datacontract/export/bigquery_converter.py,sha256=chAxgbYybyLG5v6SwqIweK9PePvE-36FStR7fyA_F_4,4577
  datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
- datacontract/export/dbml_converter.py,sha256=4gNNx78bQOrMrPhe-xw-7Hvv0xOt3Y7OX0zhlG1TxFY,4737
+ datacontract/export/dbml_converter.py,sha256=Lmg0so_YP652UvDiNYyhAzzxtYvaHQBWFMUHzWadSDA,4739
  datacontract/export/dbt_converter.py,sha256=QfapVeunwab7bWn_B9-J_1U8fm1wdsPBW_Ws2b99o1E,9390
- datacontract/export/exporter.py,sha256=28fpxKxLCFAI0nuIBc0knccjj60Who3XIiyBmMO4Ggk,2845
- datacontract/export/exporter_factory.py,sha256=CgYTx5m11Ec0vQV_-5tHxilTIH3qpRvUDvifsOybiZk,4756
+ datacontract/export/exporter.py,sha256=BKUAUT6vifrBn0ktIZ7V0HHAMWhmQMohM2v-TBoHOlA,2875
+ datacontract/export/exporter_factory.py,sha256=cKyoHolVw3HrUtaqMKyh5NppvlNd_v54xJFC5bOD0gI,4931
  datacontract/export/go_converter.py,sha256=oWkEjOXBIGN8emTaxeJD8xpB3VYP4sVaNzKX-Oeqgw0,3330
  datacontract/export/great_expectations_converter.py,sha256=LIuWHn3h7xRrDDs1YoibHOI8NVF2Izh5IZOQFLeWN7k,5302
  datacontract/export/html_export.py,sha256=i7jckL80osA8incF0YoJdq3NaYmV1_3OU0cZBfPdNDs,2578
@@ -41,24 +41,26 @@ datacontract/export/protobuf_converter.py,sha256=Baqq2uvRg0AHFo6gR-hEYdLyiYRWziC
  datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
  datacontract/export/rdf_converter.py,sha256=nNYVKJ5bQEq-rRz3yHwpvbsNbvjoOMwkCfkev8Y11H0,6436
  datacontract/export/sodacl_converter.py,sha256=Uv3BOsq20YKCHmSU6mN-6L2hPPF4V9fPxDoJ42okfHs,7280
- datacontract/export/spark_converter.py,sha256=aVOlVUAcQ0wexqUohXUckU8A4WQh8NdhPnKQSGpb-Mg,6733
+ datacontract/export/spark_converter.py,sha256=wyJLxsyE6lQx_JyMsPtkN67ogggNUef0TE4kcIicZaA,6990
  datacontract/export/sql_converter.py,sha256=grDTj3MGhVc5BbJbZ__LFawJtrsnv4ApmOMX4ohPnCQ,4802
- datacontract/export/sql_type_converter.py,sha256=aUPsHCOm7m9X4NdZ0C_FguOZwnTzu-1pyz9DVlFkniE,10052
+ datacontract/export/sql_type_converter.py,sha256=hxIpkCZ3Isi_8Hj06D6PP0r92091J3lSy1QzSQdfeLw,10854
+ datacontract/export/sqlalchemy_converter.py,sha256=x2_MVvDMfqgf_c5yfWimSC0um718EYYUpFiCe1-HN4g,6469
  datacontract/export/terraform_converter.py,sha256=IFwn-6BWPVkfwDQbj_Gl0GwbJdMKiwKgXCXz5A64cEs,2163
- datacontract/imports/avro_importer.py,sha256=ZlqBuWl1CIOtccm_EHyycRdIUJCS9r9lV6IHfpx9tsI,9886
- datacontract/imports/bigquery_importer.py,sha256=frBs5a9GuNVtNWF3DtEvsOiIemGI3dfDOEH475l1_84,8399
- datacontract/imports/dbt_importer.py,sha256=IPfMIsV1qzKEyfodgGmZRcF0nqN-oW1VixOoPJe-d6k,3390
- datacontract/imports/glue_importer.py,sha256=4WluEbPOhseBDkyR8C95vJmIZW0Ka4aUZvljl149gQU,8335
- datacontract/imports/importer.py,sha256=kr2bkHiciJ35pPpLARoYLA5J7XLGDg2gp_unbZ2V1bg,773
- datacontract/imports/importer_factory.py,sha256=xfVyEKY2HXIqKVS9GtWnc5xYrmcJZ-WEpVtExLudyYQ,2999
+ datacontract/imports/avro_importer.py,sha256=HkzaMCa9A20D3Z0O_7I67TRcqyNV9XGYX_0JAF51qF8,9937
+ datacontract/imports/bigquery_importer.py,sha256=zQU-eMOcY3ZRDXwyA4RYMorLKrSK0kgGRekuEPthh-A,8419
+ datacontract/imports/dbml_importer.py,sha256=iWX8EmCRqGfVCtpI2oojVCQQA7oTahtsMUUm_3gPRcE,3869
+ datacontract/imports/dbt_importer.py,sha256=f96-eGbdkDMh8B6iKhWy4qfAskE-b4dpWYHfenjTX8Y,3236
+ datacontract/imports/glue_importer.py,sha256=QuMvuga4rfiYm59AopFrscGPaiJkgrjtbu0-09_uF_A,8368
+ datacontract/imports/importer.py,sha256=FE9YsrcNunLiEpNGXd6PAHb_Zs8_dVUQPPBqOVAMBcE,812
+ datacontract/imports/importer_factory.py,sha256=0D8imN7WvQyvASkYHN8JTvNO9VLBllwHlyTeZNUD9Js,3155
  datacontract/imports/jsonschema_importer.py,sha256=cY70b2bFx9Kfsnq5vNj0iy_GvNOIcBH8xeFjrsxv4bQ,4925
- datacontract/imports/odcs_importer.py,sha256=0SQUON2lkTBFopdiUg-s3HVt1ES1oqaVYRP42hZh_7I,7453
- datacontract/imports/spark_importer.py,sha256=5_zMcQlHzIBGznxxL6d5wevivq1XwPI8MbKbftUrtYA,4220
- datacontract/imports/sql_importer.py,sha256=DS42wTGx4TNQeNAGbRs9o32PvK5JSKpWN2ylETx18ls,2869
- datacontract/imports/unity_importer.py,sha256=f2VZYYY9DjxzttPHK6mU0bv5Nv5QKWmxo1r3Lb-1png,5583
+ datacontract/imports/odcs_importer.py,sha256=uDBCzWi8mKCslw6Og5t-037dVVVZVsbJjK3EHJH_zQI,7474
+ datacontract/imports/spark_importer.py,sha256=fEOtUPVBRkhvaxsdk6fgRd_j-ka1TDwdrh6KkTLvv30,4942
+ datacontract/imports/sql_importer.py,sha256=CiOzSthTwyjCHxYQNVO7j4udchEFER8gtXrr_IyX2dU,2890
+ datacontract/imports/unity_importer.py,sha256=5qoU-PRq8ABXxiqVll--L2_6V0iAVQgkooWcvRuchLU,5604
  datacontract/init/download_datacontract_file.py,sha256=pj_4mhWKlEtfueWohDgkb1nyuG5ERDipUDszxKwpZUs,413
- datacontract/integration/publish_datamesh_manager.py,sha256=TuLRVdZs21UBet-qCqIywd_7WH5m6m-G83tw515G3hI,1489
- datacontract/integration/publish_opentelemetry.py,sha256=PjU9XH2wgisg0dBP34bUvQazxVZjp37Zfck_xEu5sPQ,3812
+ datacontract/integration/datamesh_manager.py,sha256=iNjDrcZfH8vyNbyQPbxkv7XMfG5rtbcLZXIlfMJ0c8c,2752
+ datacontract/integration/opentelemetry.py,sha256=0XsqOsc06qNtXC5KqZCxyeC_IGlIi63lYWHh3KKlOO0,3828
  datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
  datacontract/lint/lint.py,sha256=GhgBwfDgihXfWIu1WUfYeJoxfYacVJrwq3L2KO4Z2Io,5076
  datacontract/lint/resolve.py,sha256=KOGKrf5wZYgsa5-Uc0x5-C5GX1aVt6TSftIwD8zxy_Y,8815
@@ -73,10 +75,9 @@ datacontract/lint/linters/notice_period_linter.py,sha256=oHSOLU2uXd7BDvfEd_VjeXS
  datacontract/lint/linters/quality_schema_linter.py,sha256=-zaI49okETJYo-qR-J6rQqMRI9QX17UK9ASf1GxgYnk,2181
  datacontract/lint/linters/valid_constraints_linter.py,sha256=GeeE2hojMt24EoHuSHE1lURA2v2p5Dmxc7Qq8IvZQH0,4915
  datacontract/model/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
- datacontract/model/data_contract_specification.py,sha256=1tbaMfV9PVuLbf9wymWvdnJoclLiDNwhbyS8peRtluc,6016
+ datacontract/model/data_contract_specification.py,sha256=T_BEZojXRYC_1nc4wJf2Zlp0xZ9WWCqcC8VIGM6doS0,6073
  datacontract/model/exceptions.py,sha256=zW9NoyzwsND-c9UqgyTVuezUVGEc6KK1Uc2zl12loyo,1178
  datacontract/model/run.py,sha256=v7QNsPwYS1otQrpjeOaFsLRnxO8OgbdK_d-Z_e3iy0s,2610
- datacontract/publish/publish.py,sha256=rWnQWWneCjokCpwYv88EAtAlFnA-3hZgl-1k6dak_8w,1191
  datacontract/templates/datacontract.html,sha256=w9xGnjYFtFmmsU90_I1NHVLNcuS22flPw51oIAn56WY,13442
  datacontract/templates/index.html,sha256=dggq8ng61xZEeu6-6tbTQqjNcRFT0JTkn7y75087U58,12503
  datacontract/templates/partials/datacontract_information.html,sha256=7ZBxgEgi2XndKBypeOpe03oCSRPOujC6NVlN7zexGNM,6221
@@ -87,9 +88,9 @@ datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4
  datacontract/templates/partials/model_field.html,sha256=kh_ZIqJuayyxN-zDNIUPIoXOZeehGxXQxiImYB6G5qY,6946
  datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
  datacontract/templates/style/output.css,sha256=F3oEhUpuv8kA_dWr4pJymBS_Ju6huIIZdLMkJzPzMmU,25647
- datacontract_cli-0.10.11.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
- datacontract_cli-0.10.11.dist-info/METADATA,sha256=ERyMxpIUXotZn0ah0RYOeMQ7_5UQYuT54z9QoJlvAmc,84667
- datacontract_cli-0.10.11.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
- datacontract_cli-0.10.11.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
- datacontract_cli-0.10.11.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
- datacontract_cli-0.10.11.dist-info/RECORD,,
+ datacontract_cli-0.10.12.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
+ datacontract_cli-0.10.12.dist-info/METADATA,sha256=Kg7oXLjfYrIU1uNuhJMh5VSlwOBYX9NukWQpznbcQow,88281
+ datacontract_cli-0.10.12.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
+ datacontract_cli-0.10.12.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+ datacontract_cli-0.10.12.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+ datacontract_cli-0.10.12.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (72.1.0)
+ Generator: setuptools (74.1.2)
  Root-Is-Purelib: true
  Tag: py3-none-any

@@ -1,32 +0,0 @@
- import os
-
- import requests
-
- from datacontract.data_contract import DataContract
-
-
- def publish_to_datamesh_manager(data_contract: DataContract):
-     try:
-         headers = {"Content-Type": "application/json", "x-api-key": _require_datamesh_manager_api_key()}
-         spec = data_contract.get_data_contract_specification()
-         id = spec.id
-         url = "https://api.datamesh-manager.com/api/datacontracts/{0}".format(id)
-         request_body = spec.model_dump_json().encode("utf-8")
-         response = requests.put(
-             url=url,
-             data=request_body,
-             headers=headers,
-         )
-         if response.status_code != 200:
-             print(f"Error publishing data contract to Data Mesh Manager: {response.text}")
-             exit(1)
-         print(f"Published data contract to {url}")
-     except Exception as e:
-         print(f"Failed publishing data contract. Error: {str(e)}")
-
-
- def _require_datamesh_manager_api_key():
-     datamesh_manager_api_key = os.getenv("DATAMESH_MANAGER_API_KEY")
-     if datamesh_manager_api_key is None:
-         raise Exception("Cannot publish data contract, as DATAMESH_MANAGER_API_KEY is not set")
-     return datamesh_manager_api_key
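Note that the publishing capability itself is not dropped: per the files-changed list above, the Data Mesh Manager integration moved to datacontract/integration/datamesh_manager.py (renamed from publish_datamesh_manager.py, with an expanded implementation, +33 -5), and this standalone publish module was removed in its favor. The removed helper shown here required the DATAMESH_MANAGER_API_KEY environment variable and PUT the contract specification to the Data Mesh Manager API.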