datacontract-cli 0.10.15__py3-none-any.whl → 0.10.18__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (36)
  1. datacontract/breaking/breaking.py +3 -3
  2. datacontract/breaking/breaking_rules.py +4 -0
  3. datacontract/cli.py +33 -9
  4. datacontract/data_contract.py +14 -10
  5. datacontract/engines/fastjsonschema/check_jsonschema.py +15 -4
  6. datacontract/engines/soda/check_soda_execute.py +13 -8
  7. datacontract/engines/soda/connections/databricks.py +12 -3
  8. datacontract/export/dbml_converter.py +2 -2
  9. datacontract/export/dbt_converter.py +75 -43
  10. datacontract/export/exporter.py +7 -2
  11. datacontract/export/exporter_factory.py +52 -14
  12. datacontract/export/iceberg_converter.py +188 -0
  13. datacontract/export/markdown_converter.py +208 -0
  14. datacontract/export/odcs_v3_exporter.py +49 -29
  15. datacontract/export/sodacl_converter.py +4 -3
  16. datacontract/export/sql_converter.py +1 -1
  17. datacontract/export/sql_type_converter.py +21 -0
  18. datacontract/export/sqlalchemy_converter.py +3 -1
  19. datacontract/imports/dbml_importer.py +1 -1
  20. datacontract/imports/dbt_importer.py +163 -17
  21. datacontract/imports/iceberg_importer.py +12 -1
  22. datacontract/imports/odcs_v2_importer.py +1 -1
  23. datacontract/imports/odcs_v3_importer.py +6 -1
  24. datacontract/imports/sql_importer.py +1 -1
  25. datacontract/integration/datamesh_manager.py +14 -3
  26. datacontract/lint/resolve.py +32 -15
  27. datacontract/model/data_contract_specification.py +14 -6
  28. datacontract/model/run.py +1 -0
  29. datacontract/templates/partials/model_field.html +1 -1
  30. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/METADATA +117 -75
  31. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/RECORD +35 -34
  32. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/WHEEL +1 -1
  33. datacontract/integration/opentelemetry.py +0 -103
  34. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/LICENSE +0 -0
  35. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/entry_points.txt +0 -0
  36. {datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/top_level.txt +0 -0
{datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/METADATA

@@ -1,6 +1,6 @@
- Metadata-Version: 2.1
+ Metadata-Version: 2.2
  Name: datacontract-cli
- Version: 0.10.15
+ Version: 0.10.18
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  Project-URL: Homepage, https://cli.datacontract.com
@@ -11,72 +11,72 @@ Classifier: Operating System :: OS Independent
  Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
- Requires-Dist: typer<0.14,>=0.12
- Requires-Dist: pydantic<2.10.0,>=2.8.2
+ Requires-Dist: typer<0.16,>=0.15.1
+ Requires-Dist: pydantic<2.11.0,>=2.8.2
  Requires-Dist: pyyaml~=6.0.1
  Requires-Dist: requests<2.33,>=2.31
- Requires-Dist: fastapi==0.115.5
- Requires-Dist: uvicorn==0.32.1
- Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
- Requires-Dist: fastparquet==2024.5.0
- Requires-Dist: python-multipart==0.0.12
+ Requires-Dist: fastapi==0.115.6
+ Requires-Dist: uvicorn==0.34.0
+ Requires-Dist: fastjsonschema<2.22.0,>=2.19.1
+ Requires-Dist: fastparquet==2024.11.0
+ Requires-Dist: numpy<2.0.0,>=1.26.4
+ Requires-Dist: python-multipart==0.0.20
  Requires-Dist: rich<13.10,>=13.7
  Requires-Dist: simple-ddl-parser==1.7.1
  Requires-Dist: duckdb==1.1.2
- Requires-Dist: soda-core-duckdb<3.5.0,>=3.3.1
+ Requires-Dist: soda-core-duckdb<3.4.0,>=3.3.20
  Requires-Dist: setuptools>=60
  Requires-Dist: python-dotenv~=1.0.0
  Requires-Dist: rdflib==7.0.0
- Requires-Dist: opentelemetry-exporter-otlp-proto-grpc~=1.16
- Requires-Dist: opentelemetry-exporter-otlp-proto-http~=1.16
- Requires-Dist: boto3<1.35.69,>=1.34.41
+ Requires-Dist: boto3<1.35.98,>=1.34.41
  Requires-Dist: jinja_partials>=0.2.1
  Provides-Extra: avro
  Requires-Dist: avro==1.12.0; extra == "avro"
  Provides-Extra: bigquery
- Requires-Dist: soda-core-bigquery<3.5.0,>=3.3.1; extra == "bigquery"
+ Requires-Dist: soda-core-bigquery<3.4.0,>=3.3.20; extra == "bigquery"
  Provides-Extra: databricks
- Requires-Dist: soda-core-spark-df<3.5.0,>=3.3.1; extra == "databricks"
- Requires-Dist: databricks-sql-connector<3.6.0,>=3.1.2; extra == "databricks"
- Requires-Dist: databricks-sdk<0.39.0,>=0.32.0; extra == "databricks"
- Requires-Dist: soda-core-spark[databricks]<3.5.0,>=3.3.1; extra == "databricks"
+ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "databricks"
+ Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.20; extra == "databricks"
+ Requires-Dist: databricks-sql-connector<3.8.0,>=3.7.0; extra == "databricks"
+ Requires-Dist: databricks-sdk<0.41.0; extra == "databricks"
  Provides-Extra: iceberg
- Requires-Dist: pyiceberg==0.7.1; extra == "iceberg"
+ Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
  Provides-Extra: kafka
  Requires-Dist: datacontract-cli[avro]; extra == "kafka"
- Requires-Dist: soda-core-spark-df<3.5.0,>=3.3.1; extra == "kafka"
+ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "kafka"
  Provides-Extra: postgres
- Requires-Dist: soda-core-postgres<3.5.0,>=3.3.1; extra == "postgres"
+ Requires-Dist: soda-core-postgres<3.4.0,>=3.3.20; extra == "postgres"
  Provides-Extra: s3
- Requires-Dist: s3fs==2024.10.0; extra == "s3"
+ Requires-Dist: s3fs==2024.12.0; extra == "s3"
+ Requires-Dist: aiobotocore<2.18.0,>=2.17.0; extra == "s3"
  Provides-Extra: snowflake
  Requires-Dist: snowflake-connector-python[pandas]<3.13,>=3.6; extra == "snowflake"
- Requires-Dist: soda-core-snowflake<3.5.0,>=3.3.1; extra == "snowflake"
+ Requires-Dist: soda-core-snowflake<3.4.0,>=3.3.20; extra == "snowflake"
  Provides-Extra: sqlserver
- Requires-Dist: soda-core-sqlserver<3.5.0,>=3.3.1; extra == "sqlserver"
+ Requires-Dist: soda-core-sqlserver<3.4.0,>=3.3.20; extra == "sqlserver"
  Provides-Extra: trino
- Requires-Dist: soda-core-trino<3.5.0,>=3.3.1; extra == "trino"
+ Requires-Dist: soda-core-trino<3.4.0,>=3.3.20; extra == "trino"
  Provides-Extra: dbt
  Requires-Dist: dbt-core>=1.8.0; extra == "dbt"
  Provides-Extra: dbml
  Requires-Dist: pydbml>=1.1.1; extra == "dbml"
  Provides-Extra: parquet
- Requires-Dist: pyarrow>=12.0.0; extra == "parquet"
+ Requires-Dist: pyarrow>=18.1.0; extra == "parquet"
  Provides-Extra: all
  Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,iceberg,kafka,parquet,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
  Provides-Extra: dev
  Requires-Dist: datacontract-cli[all]; extra == "dev"
- Requires-Dist: httpx==0.27.2; extra == "dev"
+ Requires-Dist: httpx==0.28.1; extra == "dev"
  Requires-Dist: kafka-python; extra == "dev"
- Requires-Dist: moto==5.0.18; extra == "dev"
+ Requires-Dist: moto==5.0.26; extra == "dev"
  Requires-Dist: pandas>=2.1.0; extra == "dev"
  Requires-Dist: pre-commit<4.1.0,>=3.7.1; extra == "dev"
  Requires-Dist: pytest; extra == "dev"
  Requires-Dist: pytest-xdist; extra == "dev"
- Requires-Dist: pymssql==2.3.1; extra == "dev"
+ Requires-Dist: pymssql==2.3.2; extra == "dev"
  Requires-Dist: ruff; extra == "dev"
- Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.8.2; extra == "dev"
- Requires-Dist: trino==0.330.0; extra == "dev"
+ Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.9.0; extra == "dev"
+ Requires-Dist: trino==0.331.0; extra == "dev"

  # Data Contract CLI

@@ -105,7 +105,7 @@ This data contract contains all information to connect to S3 and check that the

  Let's use [pip](https://pip.pypa.io/en/stable/getting-started/) to install the CLI (or use the [Docker image](#docker)),
  ```bash
- $ python3 -m pip install datacontract-cli[all]
+ $ python3 -m pip install 'datacontract-cli[all]'
  ```


@@ -230,13 +230,13 @@ Choose the most appropriate installation method for your needs:
  Python 3.10, 3.11, and 3.12 are supported. We recommend to use Python 3.11.

  ```bash
- python3 -m pip install datacontract-cli[all]
+ python3 -m pip install 'datacontract-cli[all]'
  ```

  ### pipx
  pipx installs into an isolated environment.
  ```bash
- pipx install datacontract-cli[all]
+ pipx install 'datacontract-cli[all]'
  ```

  ### Docker
@@ -301,6 +301,7 @@ Commands
  - [diff](#diff)
  - [catalog](#catalog)
  - [publish](#publish)
+ - [serve](#serve)

  ### init

@@ -365,9 +366,6 @@ Commands
  │ [default: no-examples] │
  │ --publish TEXT The url to publish the results after the test │
  │ [default: None] │
- │ --publish-to-opentelemetry --no-publish-to-opentelemetry Publish the results to opentelemetry. Use environment │
- │ variables to configure the OTLP endpoint, headers, etc. │
- │ [default: no-publish-to-opentelemetry] │
  │ --logs --no-logs Print logs [default: no-logs] │
  │ --help Show this message and exit. │
  ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
@@ -592,7 +590,6 @@ datacontract.yaml
  servers:
    production:
      type: databricks
-     host: dbc-abcdefgh-1234.cloud.databricks.com
      catalog: acme_catalog_prod
      schema: orders_latest
  models:
@@ -603,10 +600,11 @@ models:

  #### Environment Variables

- | Environment Variable | Example | Description |
- |----------------------------------------------|--------------------------------------|-------------------------------------------------------|
- | `DATACONTRACT_DATABRICKS_TOKEN` | `dapia00000000000000000000000000000` | The personal access token to authenticate |
- | `DATACONTRACT_DATABRICKS_HTTP_PATH` | `/sql/1.0/warehouses/b053a3ffffffff` | The HTTP path to the SQL warehouse or compute cluster |
+ | Environment Variable | Example | Description |
+ |-------------------------------------------|--------------------------------------|-----------------------------------------------------------|
+ | `DATACONTRACT_DATABRICKS_TOKEN` | `dapia00000000000000000000000000000` | The personal access token to authenticate |
+ | `DATACONTRACT_DATABRICKS_HTTP_PATH` | `/sql/1.0/warehouses/b053a3ffffffff` | The HTTP path to the SQL warehouse or compute cluster |
+ | `DATACONTRACT_DATABRICKS_SERVER_HOSTNAME` | `dbc-abcdefgh-1234.cloud.databricks.com` | The host name of the SQL warehouse or compute cluster |


  ### Databricks (programmatic)
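With the host now supplied via `DATACONTRACT_DATABRICKS_SERVER_HOSTNAME` instead of the `servers` block, a test run against the Databricks server could be configured roughly as in the following sketch. This is not taken from the package docs: it assumes the `DataContract` class accepts `data_contract_file` and `server` arguments as in the library usage shown in the README, and all values and the contract path are placeholders from the table above.

```python
# Sketch only: set the Databricks connection variables documented above,
# then run the contract tests through the Python API instead of the CLI.
import os

from datacontract.data_contract import DataContract

# Placeholder values taken from the example column of the table above.
os.environ["DATACONTRACT_DATABRICKS_TOKEN"] = "dapia00000000000000000000000000000"
os.environ["DATACONTRACT_DATABRICKS_HTTP_PATH"] = "/sql/1.0/warehouses/b053a3ffffffff"
os.environ["DATACONTRACT_DATABRICKS_SERVER_HOSTNAME"] = "dbc-abcdefgh-1234.cloud.databricks.com"

run = DataContract(data_contract_file="datacontract.yaml", server="production").test()
print(run.result)  # e.g. "passed", "warning", "failed", or "error"
```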
@@ -841,7 +839,7 @@ models:
  │ t-staging-sql|odcs|odcs_v2|odcs_v3|rdf|avro|protobuf │
  │ |great-expectations|terraform|avro-idl|sql|sql-query │
  │ |html|go|bigquery|dbml|spark|sqlalchemy|data-caterer │
- │ |dcs]
+ │ |dcs|markdown|iceberg]
  │ --output PATH Specify the file path where the exported data will be │
  │ saved. If no path is provided, the output will be │
  │ printed to stdout. │
@@ -902,6 +900,8 @@ Available export options:
  | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
  | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
  | `dcs` | Export to Data Contract Specification in YAML format | ✅ |
+ | `markdown` | Export to Markdown | ✅ |
+ | `iceberg` | Export to an Iceberg JSON Schema Definition | partial |
  | Missing something? | Please create an issue on GitHub | TBD |


@@ -1025,6 +1025,63 @@ models:
  - **avroLogicalType**: Specifies the logical type of the field in Avro. In this example, it is `local-timestamp-micros`.
  - **avroDefault**: Specifies the default value for the field in Avro. In this example, it is 1672534861000000 which corresponds to ` 2023-01-01 01:01:01 UTC`.

+ #### Iceberg
+
+ Exports to an [Iceberg Table Json Schema Definition](https://iceberg.apache.org/spec/#appendix-c-json-serialization).
+
+ This export only supports a single model export at a time because Iceberg's schema definition is for a single table and the exporter maps 1 model to 1 table, use the `--model` flag
+ to limit your contract export to a single model.
+
+ ```bash
+ $ datacontract export --format iceberg --model orders https://datacontract.com/examples/orders-latest/datacontract.yaml --output /tmp/orders_iceberg.json
+
+ $ cat /tmp/orders_iceberg.json | jq '.'
+ {
+   "type": "struct",
+   "fields": [
+     {
+       "id": 1,
+       "name": "order_id",
+       "type": "string",
+       "required": true
+     },
+     {
+       "id": 2,
+       "name": "order_timestamp",
+       "type": "timestamptz",
+       "required": true
+     },
+     {
+       "id": 3,
+       "name": "order_total",
+       "type": "long",
+       "required": true
+     },
+     {
+       "id": 4,
+       "name": "customer_id",
+       "type": "string",
+       "required": false
+     },
+     {
+       "id": 5,
+       "name": "customer_email_address",
+       "type": "string",
+       "required": true
+     },
+     {
+       "id": 6,
+       "name": "processed_timestamp",
+       "type": "timestamptz",
+       "required": true
+     }
+   ],
+   "schema-id": 0,
+   "identifier-field-ids": [
+     1
+   ]
+ }
+ ```

  ### import

@@ -1331,13 +1388,28 @@ datacontract catalog --files "*.odcs.yaml"
  ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
  ```

+ ### Serve
+
+ ```
+
+ Usage: datacontract serve [OPTIONS]
+
+ Start the datacontract web server.
+
+ ╭─ Options ──────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
+ │ --port INTEGER Bind socket to this port. [default: 4242] │
+ │ --host TEXT Bind socket to this host. [default: 127.0.0.1] │
+ │ --help Show this message and exit. │
+ ╰────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
+
+ ```
+
  ## Integrations

  | Integration | Option | Description |
  |-----------------------|------------------------------|---------------------------------------------------------------------------------------------------------------|
  | Data Mesh Manager | `--publish` | Push full results to the [Data Mesh Manager API](https://api.datamesh-manager.com/swagger/index.html) |
  | Data Contract Manager | `--publish` | Push full results to the [Data Contract Manager API](https://api.datacontract-manager.com/swagger/index.html) |
- | OpenTelemetry | `--publish-to-opentelemetry` | Push result as gauge metrics |

  ### Integration with Data Mesh Manager

@@ -1351,36 +1423,6 @@ $ datacontract test https://demo.datamesh-manager.com/demo279750347121/datacontr
  --publish https://api.datamesh-manager.com/api/test-results
  ```

- ### Integration with OpenTelemetry
-
- If you use OpenTelemetry, you can use the data contract URL and append the `--publish-to-opentelemetry` option to send the test results to your OLTP-compatible instance, e.g., Prometheus.
-
- The metric name is "datacontract.cli.test.result" and it uses the following encoding for the result:
-
- | datacontract.cli.test.result | Description |
- |------------------------------|---------------------------------------|
- | 0 | test run passed, no warnings |
- | 1 | test run has warnings |
- | 2 | test run failed |
- | 3 | test run not possible due to an error |
- | 4 | test status unknown |
-
-
- ```bash
- # Fetch current data contract, execute tests on production, and publish result to open telemetry
- $ EXPORT OTEL_SERVICE_NAME=datacontract-cli
- $ EXPORT OTEL_EXPORTER_OTLP_ENDPOINT=https://YOUR_ID.apm.westeurope.azure.elastic-cloud.com:443
- $ EXPORT OTEL_EXPORTER_OTLP_HEADERS=Authorization=Bearer%20secret # Optional, when using SaaS Products
- $ EXPORT OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf # Optional, default is http/protobuf - use value grpc to use the gRPC protocol instead
- # Send to OpenTelemetry
- $ datacontract test https://demo.datamesh-manager.com/demo279750347121/datacontracts/4df9d6ee-e55d-4088-9598-b635b2fdcbbc/datacontract.yaml --server production --publish-to-opentelemetry
- ```
-
- Current limitations:
- - currently, only ConsoleExporter and OTLP Exporter
- - Metrics only, no logs yet (but loosely planned)
-
-
  ## Best Practices

  We share best practices in using the Data Contract CLI.
@@ -1412,7 +1454,7 @@ Create a data contract based on the actual data. This is the fastest way to get
  $ datacontract lint
  ```

- 5. Set up a CI pipeline that executes daily and reports the results to the [Data Mesh Manager](https://datamesh-manager.com). Or to some place else. You can even publish to any opentelemetry compatible system.
+ 5. Set up a CI pipeline that executes daily for continuous quality checks. You can also report the test results to tools like [Data Mesh Manager](https://datamesh-manager.com)
  ```bash
  $ datacontract test --publish https://api.datamesh-manager.com/api/test-results
  ```
{datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/RECORD

@@ -1,21 +1,21 @@
  datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datacontract/cli.py,sha256=F3MKhqNMW0pFh0B54ik4DeMECWJANAQ1mEm5a7yAtSc,16506
- datacontract/data_contract.py,sha256=rw9klvbIoaOwtBflpspqDeOXs3YY4qlQXsuOBROAJT0,14669
+ datacontract/cli.py,sha256=lmWynN3ew6Tf-AM3l-KtRTccTku1xn8jH9jZJ2on7FE,17456
+ datacontract/data_contract.py,sha256=ct9z4UiuHcR8sYDqA4sEXNul0lOcdAC645E742BzLz0,14752
  datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datacontract/web.py,sha256=MePB-XJvTZObMzrk3tIW8-bAvk-QcEEt767RVos3Zoc,2105
- datacontract/breaking/breaking.py,sha256=jCYKEH410p9E9MUi1UQJhtIQWjPEHqYy9dC8Z1viYnY,20398
- datacontract/breaking/breaking_rules.py,sha256=OPCBtUHd5erAvsdC8KChTHIAItJ5GZZqwT2KXmuIA1A,3914
+ datacontract/breaking/breaking.py,sha256=vUjPZzGsOF_ufTjdKa2T_gjQgPtZrZKytmcatkUr7ck,20428
+ datacontract/breaking/breaking_rules.py,sha256=OKwYWCMkIob2uX8zxav7LbIhx_6RC4msioBjuNlox1k,4060
  datacontract/catalog/catalog.py,sha256=wmv_2BBxHhNBlilAmQHHhNe4tK14DowkyIOVaQW2DWU,2691
  datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=TXO47ON3NjwYI4Y2eBYklMOCo7vAtYzqLPAhZhii6dg,1565
  datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
- datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=PxDvH2T2bUbS-V6euENfwTIa_R-CMTZFxiRnyxvOTp4,9820
+ datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=-knTZ-NsHpBWCoR7r1JP5iYSWx697mugijmqUPx0pEY,10307
  datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=vuz_hLF2VD8LR_prjQpPLBU8Is-iHLAvqp4KwclOv9I,1157
  datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
- datacontract/engines/soda/check_soda_execute.py,sha256=nDmbI2luc8u68PyfBL7eZqI_TK4EwDnKnwrhpeSQQQo,7869
+ datacontract/engines/soda/check_soda_execute.py,sha256=YfLumuxX_zZklP2q7fjytMtwpxr-2OMBpb6rAmTL7AE,8204
  datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
  datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
- datacontract/engines/soda/connections/databricks.py,sha256=lpMju-o_TzLZeF0EEVwePPr8JahqvFnj5xRYjF15fc8,561
+ datacontract/engines/soda/connections/databricks.py,sha256=cMRasuO0MrSKVgHPB-9uFTGTZPFg6z9Kpk3tJ0SdR0s,943
  datacontract/engines/soda/connections/duckdb.py,sha256=c9Fq8uqkvuyc5YsnErlwFUALE4SvlO7cToMMpxzVadY,6272
  datacontract/engines/soda/connections/kafka.py,sha256=bSC-TmZPmQvOr1rMaDtankcfYni4hmfbQooFaSlC_uc,8452
  datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
@@ -28,49 +28,50 @@ datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF
  datacontract/export/bigquery_converter.py,sha256=VSBdVGWrlar18ETzgNArxDzk8Zt5JcAc_wKjfwLpG_A,4734
  datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
  datacontract/export/data_caterer_converter.py,sha256=MYUhoRjKSTpQFUZjc0CG6daPRpvm5j51wS0NUTSzQNU,5324
- datacontract/export/dbml_converter.py,sha256=Tqsjicvh-NyEnpfn2Lnrpbhn9SQBD8uGlZY6Sb0VBV8,4737
- datacontract/export/dbt_converter.py,sha256=0i6EPOdAnzQgV7MtQxHjxZlM4CdgJI3iygG9BvPuuy8,9389
+ datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nbw3Wq0JDo,4777
+ datacontract/export/dbt_converter.py,sha256=BPvcKyT-v1C_zcYh8ocn5qeNVUjTjYIcMVxnq8BY-Us,10342
  datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
- datacontract/export/exporter.py,sha256=62ZO4Sn0V_QCxoShvjqZOBx2q0JLrsvoSfx37JhS2IQ,2979
- datacontract/export/exporter_factory.py,sha256=cdygE42dQeXik1rk0BZ8RbsjJwEJcMDFjgkRT9hn434,5569
+ datacontract/export/exporter.py,sha256=b9z5vjY5dsc0Nw-gG03RkNYyOuidMk-RGkS9GAE-elk,3051
+ datacontract/export/exporter_factory.py,sha256=sW6-43FiOFTzqgho2jvSzhd7_PGqWApfKOnfFFzjs90,6021
  datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
  datacontract/export/great_expectations_converter.py,sha256=zMaHaj5DLj_Q_q-iFEa7EZHW-qHdFMxWL4MiMIFKV80,10505
  datacontract/export/html_export.py,sha256=ojazWrb0AwSc7Vr72M_otMo-3PA8mfi8tfIy9BCXk9o,2578
+ datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
  datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
+ datacontract/export/markdown_converter.py,sha256=chtaZX4vXTee7JCMYmWiDQ9m55gwJjHPw6SEM3UOwpQ,6467
  datacontract/export/odcs_v2_exporter.py,sha256=0nMI-zTENNs94bllm_Qv3V-8-QyS8jnBW1Be9fEJCmU,4679
- datacontract/export/odcs_v3_exporter.py,sha256=C1Ax4BYeIu_uet_gYYws-sZanE_voGLiuKNHHRjQaV4,12093
+ datacontract/export/odcs_v3_exporter.py,sha256=52WggPBpUi9AwGrVjlScojSJ2DhhgMuFIxRQokIvQ_o,12542
  datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
  datacontract/export/protobuf_converter.py,sha256=9K0fzBGbqlj9AhQumw2oq53hyn_QDCT3UlyH2uXJdC0,3192
  datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
  datacontract/export/rdf_converter.py,sha256=4gnKus37Geth4MJ3Ruc8AbnpD_Ll9OCx8oTIEKScvh8,6435
- datacontract/export/sodacl_converter.py,sha256=zLfQ2VVWYIcvpja3-amMeqUPtfaioX14-skhPteu3vc,10600
+ datacontract/export/sodacl_converter.py,sha256=JLTTFnmPmKPLC-_wEMLID5LSLH6YwUwz3YdgJBC_vec,10730
  datacontract/export/spark_converter.py,sha256=-6P2_VRFqGfSF7n_lJcD-fuY9Pv8qoH-ud6g8Zimpz4,7190
- datacontract/export/sql_converter.py,sha256=trQV5M76rZ4EpGj-0jkzr9PhGDmMC_RaxDFIAmx1BaQ,4801
- datacontract/export/sql_type_converter.py,sha256=MGTH1hXC90TLMEap1v_Fkahf4N6Ju8yf01pCT7wJrpg,11984
- datacontract/export/sqlalchemy_converter.py,sha256=Cx1_7IFaduDWcTYytboHuKli_ve8imf9LW0_5F471No,6429
+ datacontract/export/sql_converter.py,sha256=BGjmOAlzB5QfzJiXP61ajV0wj4M5oJrmNZZe_4Lo1Ik,4821
+ datacontract/export/sql_type_converter.py,sha256=6y64XAAIXkEpi-AkzikmdF1w0MIHQKPusZ_UWGdKYDk,12761
+ datacontract/export/sqlalchemy_converter.py,sha256=0DMncvA811lTtd5q4ZORREQ9YH1vQm1lJeqMWsFvloE,6463
  datacontract/export/terraform_converter.py,sha256=ExFoEvErVk-gBnWJiqC38SxDUmUEydpACWc917l5RyM,2163
  datacontract/imports/avro_importer.py,sha256=hpGvO6uv2zcupJC8-wC-c-vbjNb83IQ560a5F3MsEFA,9937
  datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056sxLfDDh3vjo8E,8419
- datacontract/imports/dbml_importer.py,sha256=PhEurAkqPerX1FR6zsks8RsMUpJJWWJojn2Msmcm60Y,3869
- datacontract/imports/dbt_importer.py,sha256=kzQsm_HrKybTr4SSyhZ5n9dHPbd9mg9ojdkRChywM0g,3237
+ datacontract/imports/dbml_importer.py,sha256=o0IOgvXN34lU1FICDHm_QUTv0DKsgwbHPHUDxQhIapE,3872
+ datacontract/imports/dbt_importer.py,sha256=hQwqD9vbvwLLc6Yj3tQbar5ldI0pV-ynSiz7CZZ0JCc,8290
  datacontract/imports/glue_importer.py,sha256=fiJPkvfwOCsaKKCGW19-JM5CCGXZ2mkNrVtUzp2iw6g,8370
- datacontract/imports/iceberg_importer.py,sha256=OFSUgjWn3LQ9QKSAJprIKXAh8CLdKAR6U7QC8KB6fcE,5396
+ datacontract/imports/iceberg_importer.py,sha256=vadGJVqQKgG-j8swUytZALFB8QjbGRqZPCcPcCy0vco,5923
  datacontract/imports/importer.py,sha256=oi_cybcOimEZ4Wc4EJW_2OQoAyErbadPo0foHdVeOmE,860
  datacontract/imports/importer_factory.py,sha256=QBW2tIuQ5Do56-Gtyh7fWLELcCeItYbK2FYq89K5oLw,3486
  datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
  datacontract/imports/odcs_importer.py,sha256=w1TumifVSDgSwsxWV2VmbdzHkb_3vNBKt6mEVE-P284,2042
- datacontract/imports/odcs_v2_importer.py,sha256=s6-WjEToeTvHxSCdkEsiPVGz7ZPahtVY90z7c56SI7A,7151
- datacontract/imports/odcs_v3_importer.py,sha256=TXutHthpl7DvMCS-HZ49Pvs1HKRLT_VWquXaXB8xIR8,12814
+ datacontract/imports/odcs_v2_importer.py,sha256=w_K9bcv0HHlhEbr7sT0NwgkuSNVkqWBr64UPqERA0Fs,7154
+ datacontract/imports/odcs_v3_importer.py,sha256=NB16EQjVuywFsI-Ti0AfMXWZj__fdHu0iKA1IPSBhmo,13019
  datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
  datacontract/imports/spark_importer.py,sha256=h2na1YtdJYu9Oz07tSvwx8L4RX6aLCCDVkAv-RTKyVA,5100
- datacontract/imports/sql_importer.py,sha256=0zZ7eHXg7xj843DZ14FBgUg2vzdJvP2he2ThSoaDtko,2890
+ datacontract/imports/sql_importer.py,sha256=a-zlpo7574fX4R6W6BzAe_9GBm2yjNk_B_GdW-vC6Yk,2893
  datacontract/imports/unity_importer.py,sha256=UcPYABhLZaWNl5IkCazwAuMoVDdujsu_QteuV_Q9hgI,6737
  datacontract/init/download_datacontract_file.py,sha256=pj_4mhWKlEtfueWohDgkb1nyuG5ERDipUDszxKwpZUs,413
- datacontract/integration/datamesh_manager.py,sha256=RWIrOKXtyeiDSTRthsLOXV1PxTEXFFasYp1F2ZCw77s,2749
- datacontract/integration/opentelemetry.py,sha256=oTIhP066qM2By9Evttq9LDUgFq3sr8In0ENdS1TIsz4,3827
+ datacontract/integration/datamesh_manager.py,sha256=40ZGF0Z1Dc83TMtHOvcJKIhZzxebyr-0vD1SH0zqQ14,3017
  datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
  datacontract/lint/lint.py,sha256=Ew0n3ooXxmCVnUxJ_cDoacsD82QdMZYnKrxnG9J0sWQ,5077
- datacontract/lint/resolve.py,sha256=fzHcz5UolXvleSWJfctjvl4wGNTIgOuw9nz8gCLTtGM,9607
+ datacontract/lint/resolve.py,sha256=WAKC1Rjf3XL3usHjFN7QnRQPxuWXIZ3ZTl7R179TDa8,9974
  datacontract/lint/resources.py,sha256=nfeZmORh1aP7EKpMKCmfbS04Te8pQ0nz64vJVkHOq3c,647
  datacontract/lint/schema.py,sha256=W7MJWWQk0qxtp5q4X3waDKUnGJVXVMBkbxXDG-A1-aw,1469
  datacontract/lint/urls.py,sha256=W7Edcd8Iw4NjokVtuSshEfzsXshQpuh_tpO73aM9dsM,2294
@@ -83,10 +84,10 @@ datacontract/lint/linters/notice_period_linter.py,sha256=6r413aEVOVHWJHb33-68ecV
  datacontract/lint/linters/quality_schema_linter.py,sha256=ZXFHlMLFV1GZejizbUdfW6-msffFECoDGNsdynaPnog,2182
  datacontract/lint/linters/valid_constraints_linter.py,sha256=qTFh1X3I9wOtAxuXlvbGesCQ3GQ6iWc-MT_ttIybRsw,4916
  datacontract/model/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
- datacontract/model/data_contract_specification.py,sha256=O5nraoORyUwEx-HZeBcVaBna3ZtfhFTV2Z_-U7vaE8g,7733
+ datacontract/model/data_contract_specification.py,sha256=G-XKCenheR4boPW884l97RXhNJ36NNkC0pU5BdOLJG4,8054
  datacontract/model/exceptions.py,sha256=zW9NoyzwsND-c9UqgyTVuezUVGEc6KK1Uc2zl12loyo,1178
  datacontract/model/odcs.py,sha256=9PXwm72FASjNwteF1Jn591iP3-St0aq16Cpsk0PkEW8,389
- datacontract/model/run.py,sha256=NMPCSwjnICOjEycgYDgBUUXoj8lWfFp2DVxotzOvWv8,2809
+ datacontract/model/run.py,sha256=zIosfMAKB3WVsWN3UcFQteHaQ04SlYZX45opaRXklNw,2827
  datacontract/templates/datacontract.html,sha256=w9xGnjYFtFmmsU90_I1NHVLNcuS22flPw51oIAn56WY,13442
  datacontract/templates/index.html,sha256=nyi9nrEsSPuOrXszKzqkqwZGveXsdy1PLJp2g9oDw0A,12517
  datacontract/templates/partials/datacontract_information.html,sha256=7ZBxgEgi2XndKBypeOpe03oCSRPOujC6NVlN7zexGNM,6221
@@ -94,12 +95,12 @@ datacontract/templates/partials/datacontract_servicelevels.html,sha256=ed3QgB11B
  datacontract/templates/partials/datacontract_terms.html,sha256=1cnJcOTpxwot2BCuZmkLF_SPfiVloLs3c8mj9WfE4sc,1865
  datacontract/templates/partials/definition.html,sha256=jEvjThHghHT2bo4oMAfe3H0lzc4ZuqDsJFOpN9w3Nho,7085
  datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4xX5j6suasXHNM88,1204
- datacontract/templates/partials/model_field.html,sha256=kh_ZIqJuayyxN-zDNIUPIoXOZeehGxXQxiImYB6G5qY,6946
+ datacontract/templates/partials/model_field.html,sha256=DMUnqBCD64MM_Z9Unv6VCokUZ7sj3csgyC2ucYJSHmw,6966
  datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
  datacontract/templates/style/output.css,sha256=F3oEhUpuv8kA_dWr4pJymBS_Ju6huIIZdLMkJzPzMmU,25647
- datacontract_cli-0.10.15.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
- datacontract_cli-0.10.15.dist-info/METADATA,sha256=Bj38D_3c0H1jVgDCweF0Poalj5iuFE5y3Xq7GJIEQ_I,95349
- datacontract_cli-0.10.15.dist-info/WHEEL,sha256=PZUExdf71Ui_so67QXpySuHtCi3-J3wvF4ORK6k_S8U,91
- datacontract_cli-0.10.15.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
- datacontract_cli-0.10.15.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
- datacontract_cli-0.10.15.dist-info/RECORD,,
+ datacontract_cli-0.10.18.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
+ datacontract_cli-0.10.18.dist-info/METADATA,sha256=d7J1Ysi7ycsu5UMFwh2wBaKfeG6CDhzbMI7795IW7ps,95923
+ datacontract_cli-0.10.18.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+ datacontract_cli-0.10.18.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+ datacontract_cli-0.10.18.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+ datacontract_cli-0.10.18.dist-info/RECORD,,
{datacontract_cli-0.10.15.dist-info → datacontract_cli-0.10.18.dist-info}/WHEEL

@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (75.6.0)
+ Generator: setuptools (75.8.0)
  Root-Is-Purelib: true
  Tag: py3-none-any

datacontract/integration/opentelemetry.py

@@ -1,103 +0,0 @@
- import logging
- import math
- import os
- from importlib import metadata
-
- from opentelemetry import metrics
- from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import OTLPMetricExporter as OTLPgRPCMetricExporter
- from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
- from opentelemetry.metrics import Observation
- from opentelemetry.sdk.metrics import MeterProvider
- from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, PeriodicExportingMetricReader
-
- from datacontract.model.run import Run
-
- # Publishes metrics of a test run.
- # Metric contains the values:
- # 0 == test run passed,
- # 1 == test run has warnings
- # 2 == test run failed
- # 3 == test run not possible due to an error
- # 4 == test status unknown
- #
- # Tested with these environment variables:
- #
- # OTEL_SERVICE_NAME=datacontract-cli
- # OTEL_EXPORTER_OTLP_ENDPOINT=https://YOUR_ID.apm.westeurope.azure.elastic-cloud.com:443
- # OTEL_EXPORTER_OTLP_HEADERS=Authorization=Bearer%20secret (Optional, when using SaaS Products)
- # OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf and OTEL_EXPORTER_OTLP_PROTOCOL=grpc
- #
- # Current limitations:
- # - no gRPC support
- # - currently, only ConsoleExporter and OTLP Exporter
- # - Metrics only, no logs yet (but loosely planned)
-
-
- def publish_test_results_to_opentelemetry(run: Run):
-     try:
-         if run.dataContractId is None:
-             raise Exception("Cannot publish run results, as data contract ID is unknown")
-
-         endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT")
-         logging.info(f"Publishing test results to opentelemetry at {endpoint}")
-
-         telemetry = Telemetry()
-         provider = metrics.get_meter_provider()
-         meter = provider.get_meter("com.datacontract.cli", metadata.version("datacontract-cli"))
-         meter.create_observable_gauge(
-             name="datacontract.cli.test",
-             callbacks=[lambda x: _to_observation_callback(run)],
-             unit="result",
-             description="The overall result of the data contract test run",
-         )
-
-         telemetry.publish()
-     except Exception as e:
-         logging.error(f"Failed publishing test results. Error: {str(e)}")
-
-
- def _to_observation_callback(run):
-     yield _to_observation(run)
-
-
- def _to_observation(run):
-     attributes = {
-         "datacontract.id": run.dataContractId,
-         "datacontract.version": run.dataContractVersion,
-     }
-
-     if run.result == "passed":
-         result_value = 0  # think of exit codes
-     elif run.result == "warning":
-         result_value = 1
-     elif run.result == "failed":
-         result_value = 2
-     elif run.result == "error":
-         result_value = 3
-     else:
-         result_value = 4
-     return Observation(value=result_value, attributes=attributes)
-
-
- class Telemetry:
-     def __init__(self):
-         protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL")
-
-         # lower to allow grpc, GRPC and alike values.
-         if protocol and protocol.lower() == "grpc":
-             self.remote_exporter = OTLPgRPCMetricExporter()
-         else:
-             # Fallback to default OTEL http/protobuf which is used when the variable is not set.
-             # This Exporter also works for http/json.
-             self.remote_exporter = OTLPMetricExporter()
-
-         self.console_exporter = ConsoleMetricExporter()
-         # using math.inf so it does not collect periodically. we do this in collect ourselves, one-time.
-         self.reader = PeriodicExportingMetricReader(self.console_exporter, export_interval_millis=math.inf)
-         self.remote_reader = PeriodicExportingMetricReader(self.remote_exporter, export_interval_millis=math.inf)
-         provider = MeterProvider(metric_readers=[self.reader, self.remote_reader])
-         metrics.set_meter_provider(provider)
-
-     def publish(self):
-         self.reader.collect()
-         self.remote_reader.collect()
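Teams that relied on `--publish-to-opentelemetry` can reproduce the removed gauge in their own code by running the tests through the Python API and wiring up the OpenTelemetry SDK directly. The following is a minimal sketch modelled on the deleted module above; it assumes `DataContract(...).test()` returns a `Run` with the `result`, `dataContractId`, and `dataContractVersion` fields used there, and the OpenTelemetry packages must now be installed separately, since they are no longer dependencies of datacontract-cli.

```python
# Sketch: re-create the removed "datacontract.cli.test" gauge in user code.
# Extra requirement: pip install opentelemetry-sdk opentelemetry-exporter-otlp-proto-http
import math

from opentelemetry import metrics
from opentelemetry.exporter.otlp.proto.http.metric_exporter import OTLPMetricExporter
from opentelemetry.metrics import Observation
from opentelemetry.sdk.metrics import MeterProvider
from opentelemetry.sdk.metrics.export import PeriodicExportingMetricReader

from datacontract.data_contract import DataContract

RESULT_CODES = {"passed": 0, "warning": 1, "failed": 2, "error": 3}  # 4 = unknown

# Run the tests through the library instead of the removed CLI flag.
run = DataContract(data_contract_file="datacontract.yaml", server="production").test()

# One-shot OTLP export, mirroring the deleted Telemetry class; the endpoint is
# still taken from OTEL_EXPORTER_OTLP_ENDPOINT and related variables.
reader = PeriodicExportingMetricReader(OTLPMetricExporter(), export_interval_millis=math.inf)
metrics.set_meter_provider(MeterProvider(metric_readers=[reader]))
meter = metrics.get_meter_provider().get_meter("com.datacontract.cli")


def _observe(_options):
    # Encode the run result exactly like the removed module did.
    yield Observation(
        value=RESULT_CODES.get(run.result, 4),
        attributes={
            "datacontract.id": run.dataContractId,
            "datacontract.version": run.dataContractVersion,
        },
    )


meter.create_observable_gauge(
    name="datacontract.cli.test",
    callbacks=[_observe],
    unit="result",
    description="The overall result of the data contract test run",
)
reader.collect()  # collect once and push to the configured OTLP endpoint
```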