datacontract-cli 0.10.33__py3-none-any.whl → 0.10.35__py3-none-any.whl

This diff shows the changes between publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.

This version of datacontract-cli might be problematic.
--- datacontract/imports/spark_importer.py
+++ datacontract/imports/spark_importer.py
@@ -1,4 +1,6 @@
+import atexit
 import logging
+import tempfile

 from databricks.sdk import WorkspaceClient
 from pyspark.sql import DataFrame, SparkSession, types
@@ -54,7 +56,16 @@ def import_spark(
     Returns:
         DataContractSpecification: The updated contract spec with imported models.
     """
-    spark = SparkSession.builder.getOrCreate()
+
+    tmp_dir = tempfile.TemporaryDirectory(prefix="datacontract-cli-spark")
+    atexit.register(tmp_dir.cleanup)
+
+    spark = (
+        SparkSession.builder.config("spark.sql.warehouse.dir", f"{tmp_dir}/spark-warehouse")
+        .config("spark.streaming.stopGracefullyOnShutdown", "true")
+        .config("spark.ui.enabled", "false")
+        .getOrCreate()
+    )
     data_contract_specification.servers["local"] = Server(type="dataframe")

     if dataframe is not None:
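
For orientation, here is a standalone sketch of the Spark session setup introduced above. It is illustrative only: it uses `tmp_dir.name` for the warehouse path, whereas the released code interpolates the `TemporaryDirectory` object directly, and it assumes `pyspark` is installed.

```python
import atexit
import tempfile

from pyspark.sql import SparkSession

# Place the Spark SQL warehouse in a temporary directory and register its
# cleanup to run at interpreter exit.
tmp_dir = tempfile.TemporaryDirectory(prefix="datacontract-cli-spark")
atexit.register(tmp_dir.cleanup)

spark = (
    SparkSession.builder.config("spark.sql.warehouse.dir", f"{tmp_dir.name}/spark-warehouse")
    .config("spark.streaming.stopGracefullyOnShutdown", "true")  # stop streaming queries cleanly on shutdown
    .config("spark.ui.enabled", "false")  # no Spark UI needed for a CLI run
    .getOrCreate()
)
```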
--- datacontract_cli-0.10.33.dist-info/METADATA
+++ datacontract_cli-0.10.35.dist-info/METADATA
@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: datacontract-cli
-Version: 0.10.33
+Version: 0.10.35
 Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
 Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
 License-Expression: MIT
@@ -20,7 +20,7 @@ Requires-Dist: fastparquet<2025.0.0,>=2024.5.0
 Requires-Dist: numpy<2.0.0,>=1.26.4
 Requires-Dist: python-multipart<1.0.0,>=0.0.20
 Requires-Dist: rich<15.0,>=13.7
-Requires-Dist: sqlglot<27.0.0,>=26.6.0
+Requires-Dist: sqlglot<28.0.0,>=26.6.0
 Requires-Dist: duckdb<2.0.0,>=1.0.0
 Requires-Dist: soda-core-duckdb<3.6.0,>=3.3.20
 Requires-Dist: setuptools>=60
@@ -42,7 +42,7 @@ Provides-Extra: databricks
 Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
 Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
-Requires-Dist: databricks-sdk<0.61.0; extra == "databricks"
+Requires-Dist: databricks-sdk<0.64.0; extra == "databricks"
 Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
 Provides-Extra: iceberg
 Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
@@ -54,12 +54,14 @@ Provides-Extra: postgres
 Requires-Dist: soda-core-postgres<3.6.0,>=3.3.20; extra == "postgres"
 Provides-Extra: s3
 Requires-Dist: s3fs<2026.0.0,>=2025.2.0; extra == "s3"
-Requires-Dist: aiobotocore<2.24.0,>=2.17.0; extra == "s3"
+Requires-Dist: aiobotocore<2.25.0,>=2.17.0; extra == "s3"
 Provides-Extra: snowflake
-Requires-Dist: snowflake-connector-python[pandas]<3.16,>=3.6; extra == "snowflake"
+Requires-Dist: snowflake-connector-python[pandas]<3.17,>=3.6; extra == "snowflake"
 Requires-Dist: soda-core-snowflake<3.6.0,>=3.3.20; extra == "snowflake"
 Provides-Extra: sqlserver
 Requires-Dist: soda-core-sqlserver<3.6.0,>=3.3.20; extra == "sqlserver"
+Provides-Extra: athena
+Requires-Dist: soda-core-athena<3.6.0,>=3.3.20; extra == "athena"
 Provides-Extra: trino
 Requires-Dist: soda-core-trino<3.6.0,>=3.3.20; extra == "trino"
 Provides-Extra: dbt
@@ -76,20 +78,20 @@ Requires-Dist: uvicorn==0.35.0; extra == "api"
 Provides-Extra: protobuf
 Requires-Dist: grpcio-tools>=1.53; extra == "protobuf"
 Provides-Extra: all
-Requires-Dist: datacontract-cli[api,bigquery,csv,databricks,dbml,dbt,excel,iceberg,kafka,parquet,postgres,protobuf,rdf,s3,snowflake,sqlserver,trino]; extra == "all"
+Requires-Dist: datacontract-cli[api,athena,bigquery,csv,databricks,dbml,dbt,excel,iceberg,kafka,parquet,postgres,protobuf,rdf,s3,snowflake,sqlserver,trino]; extra == "all"
 Provides-Extra: dev
 Requires-Dist: datacontract-cli[all]; extra == "dev"
 Requires-Dist: httpx==0.28.1; extra == "dev"
 Requires-Dist: kafka-python; extra == "dev"
-Requires-Dist: moto==5.1.6; extra == "dev"
+Requires-Dist: moto==5.1.10; extra == "dev"
 Requires-Dist: pandas>=2.1.0; extra == "dev"
-Requires-Dist: pre-commit<4.3.0,>=3.7.1; extra == "dev"
+Requires-Dist: pre-commit<4.4.0,>=3.7.1; extra == "dev"
 Requires-Dist: pytest; extra == "dev"
 Requires-Dist: pytest-xdist; extra == "dev"
 Requires-Dist: pymssql==2.3.7; extra == "dev"
 Requires-Dist: ruff; extra == "dev"
-Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.10.0; extra == "dev"
-Requires-Dist: trino==0.335.0; extra == "dev"
+Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.12.0; extra == "dev"
+Requires-Dist: trino==0.336.0; extra == "dev"
 Dynamic: license-file

 # Data Contract CLI
@@ -255,6 +257,14 @@ if not run.has_passed():

 Choose the most appropriate installation method for your needs:

+### uv
+
+If you have [uv](https://docs.astral.sh/uv/) installed, you can run datacontract-cli directly without installing:
+
+```
+uv run --with 'datacontract-cli[all]' datacontract --version
+```
+
 ### pip
 Python 3.10, 3.11, and 3.12 are supported. We recommend to use Python 3.11.

@@ -316,6 +326,7 @@ A list of available extras:

 | Dependency | Installation Command |
 |-------------------------|--------------------------------------------|
+| Amazon Athena | `pip install datacontract-cli[athena]` |
 | Avro Support | `pip install datacontract-cli[avro]` |
 | Google BigQuery | `pip install datacontract-cli[bigquery]` |
 | Databricks Integration | `pip install datacontract-cli[databricks]` |
@@ -460,6 +471,7 @@ Credentials are provided with environment variables.
 Supported server types:

 - [s3](#S3)
+- [athena](#athena)
 - [bigquery](#bigquery)
 - [azure](#azure)
 - [sqlserver](#sqlserver)
@@ -470,6 +482,7 @@ Supported server types:
 - [kafka](#kafka)
 - [postgres](#postgres)
 - [trino](#trino)
+- [api](#api)
 - [local](#local)

 Supported formats:
@@ -529,6 +542,41 @@ servers:
 | `DATACONTRACT_S3_SESSION_TOKEN` | `AQoDYXdzEJr...` | AWS temporary session token (optional) |


+#### Athena
+
+Data Contract CLI can test data in AWS Athena stored in S3.
+Supports different file formats, such as Iceberg, Parquet, JSON, CSV...
+
+##### Example
+
+datacontract.yaml
+```yaml
+servers:
+  athena:
+    type: athena
+    catalog: awsdatacatalog # awsdatacatalog is the default setting
+    schema: icebergdemodb # in Athena, this is called "database"
+    regionName: eu-central-1
+    stagingDir: s3://my-bucket/athena-results/
+models:
+  my_table: # corresponds to a table of view name
+    type: table
+    fields:
+      my_column_1: # corresponds to a column
+        type: string
+        config:
+          physicalType: varchar
+```
+
+##### Environment Variables
+
+| Environment Variable | Example | Description |
+|-------------------------------------|---------------------------------|----------------------------------------|
+| `DATACONTRACT_S3_REGION` | `eu-central-1` | Region of Athena service |
+| `DATACONTRACT_S3_ACCESS_KEY_ID` | `AKIAXV5Q5QABCDEFGH` | AWS Access Key ID |
+| `DATACONTRACT_S3_SECRET_ACCESS_KEY` | `93S7LRrJcqLaaaa/XXXXXXXXXXXXX` | AWS Secret Access Key |
+| `DATACONTRACT_S3_SESSION_TOKEN` | `AQoDYXdzEJr...` | AWS temporary session token (optional) |
+

 #### Google Cloud Storage (GCS)

@@ -896,6 +944,38 @@ models:
 | `DATACONTRACT_TRINO_PASSWORD` | `mysecretpassword` | Password |


+#### API
+
+Data Contract CLI can test APIs that return data in JSON format.
+Currently, only GET requests are supported.
+
+##### Example
+
+datacontract.yaml
+```yaml
+servers:
+  api:
+    type: "api"
+    location: "https://api.example.com/path"
+    delimiter: none # new_line, array, or none (default)
+
+models:
+  my_object: # corresponds to the root element of the JSON response
+    type: object
+    fields:
+      field1:
+        type: string
+      fields2:
+        type: number
+```
+
+##### Environment Variables
+
+| Environment Variable | Example | Description |
+|-----------------------------------------|------------------|---------------------------------------------------|
+| `DATACONTRACT_API_HEADER_AUTHORIZATION` | `Bearer <token>` | The value for the `authorization` header. Optional. |
+
+
 #### Local

 Data Contract CLI can test local files in parquet, json, csv, or delta format.
@@ -939,7 +1019,7 @@ models:
 │ terraform|avro-idl|sql|sql-query|mer │
 │ maid|html|go|bigquery|dbml|spark|sql │
 │ alchemy|data-caterer|dcs|markdown|ic │
-│ eberg|custom|excel] │
+│ eberg|custom|excel|dqx] │
 │ --output PATH Specify the file path where the │
 │ exported data will be saved. If no │
 │ path is provided, the output will be │
@@ -959,8 +1039,10 @@ models:
 │ --engine TEXT [engine] The engine used for great │
 │ expection run. │
 │ [default: None] │
-│ --template PATH [custom] The file path of Jinja │
-│ template. │
+│ --template PATH The file path or URL of a template. │
+│ For Excel format: path/URL to custom │
+│ Excel template. For custom format: │
+│ path to Jinja template. │
 │ [default: None] │
 │ --help Show this message and exit. │
 ╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
@@ -983,35 +1065,36 @@ datacontract export --format html --output datacontract.html

 Available export options:

-| Type | Description | Status |
-|----------------------|---------------------------------------------------------|--------|
-| `html` | Export to HTML | ✅ |
-| `jsonschema` | Export to JSON Schema | ✅ |
-| `odcs` | Export to Open Data Contract Standard (ODCS) V3 | ✅ |
-| `sodacl` | Export to SodaCL quality checks in YAML format | ✅ |
-| `dbt` | Export to dbt models in YAML format | ✅ |
-| `dbt-sources` | Export to dbt sources in YAML format | ✅ |
-| `dbt-staging-sql` | Export to dbt staging SQL models | ✅ |
-| `rdf` | Export data contract to RDF representation in N3 format | ✅ |
-| `avro` | Export to AVRO models | ✅ |
-| `protobuf` | Export to Protobuf | ✅ |
-| `terraform` | Export to terraform resources | ✅ |
-| `sql` | Export to SQL DDL | ✅ |
-| `sql-query` | Export to SQL Query | ✅ |
-| `great-expectations` | Export to Great Expectations Suites in JSON Format | ✅ |
-| `bigquery` | Export to BigQuery Schemas | ✅ |
-| `go` | Export to Go types | ✅ |
-| `pydantic-model` | Export to pydantic models | ✅ |
-| `DBML` | Export to a DBML Diagram description | ✅ |
-| `spark` | Export to a Spark StructType | ✅ |
-| `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
-| `data-caterer` | Export to Data Caterer in YAML format | ✅ |
-| `dcs` | Export to Data Contract Specification in YAML format | ✅ |
-| `markdown` | Export to Markdown | ✅ |
+| Type | Description | Status |
+|----------------------|---------------------------------------------------------|---------|
+| `html` | Export to HTML | ✅ |
+| `jsonschema` | Export to JSON Schema | ✅ |
+| `odcs` | Export to Open Data Contract Standard (ODCS) V3 | ✅ |
+| `sodacl` | Export to SodaCL quality checks in YAML format | ✅ |
+| `dbt` | Export to dbt models in YAML format | ✅ |
+| `dbt-sources` | Export to dbt sources in YAML format | ✅ |
+| `dbt-staging-sql` | Export to dbt staging SQL models | ✅ |
+| `rdf` | Export data contract to RDF representation in N3 format | ✅ |
+| `avro` | Export to AVRO models | ✅ |
+| `protobuf` | Export to Protobuf | ✅ |
+| `terraform` | Export to terraform resources | ✅ |
+| `sql` | Export to SQL DDL | ✅ |
+| `sql-query` | Export to SQL Query | ✅ |
+| `great-expectations` | Export to Great Expectations Suites in JSON Format | ✅ |
+| `bigquery` | Export to BigQuery Schemas | ✅ |
+| `go` | Export to Go types | ✅ |
+| `pydantic-model` | Export to pydantic models | ✅ |
+| `DBML` | Export to a DBML Diagram description | ✅ |
+| `spark` | Export to a Spark StructType | ✅ |
+| `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
+| `data-caterer` | Export to Data Caterer in YAML format | ✅ |
+| `dcs` | Export to Data Contract Specification in YAML format | ✅ |
+| `markdown` | Export to Markdown | ✅ |
 | `iceberg` | Export to an Iceberg JSON Schema Definition | partial |
-| `excel` | Export to ODCS Excel Template | ✅ |
-| `custom` | Export to Custom format with Jinja | ✅ |
-| Missing something? | Please create an issue on GitHub | TBD |
+| `excel` | Export to ODCS Excel Template | ✅ |
+| `custom` | Export to Custom format with Jinja | ✅ |
+| `dqx` | Export to DQX in YAML format | |
+| Missing something? | Please create an issue on GitHub | TBD |

 #### SQL

@@ -2102,6 +2185,7 @@ We are happy to receive your contributions. Propose your change in an issue or d

 ## Companies using this tool

+- [Entropy Data](https://www.entropy-data.com)
 - [INNOQ](https://innoq.com)
 - [Data Catering](https://data.catering/)
 - [Oliver Wyman](https://www.oliverwyman.com/)
@@ -2120,7 +2204,7 @@ We are happy to receive your contributions. Propose your change in an issue or d

 ## Credits

-Created by [Stefan Negele](https://www.linkedin.com/in/stefan-negele-573153112/) and [Jochen Christ](https://www.linkedin.com/in/jochenchrist/).
+Created by [Stefan Negele](https://www.linkedin.com/in/stefan-negele-573153112/), [Jochen Christ](https://www.linkedin.com/in/jochenchrist/), and [Simon Harrer]().


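For the newly documented Athena server, a minimal, hypothetical sketch of running the contract tests through the Python API quoted in this README might look like the following. The `server` keyword and the `athena` server name are assumptions based on the README excerpt above; credentials are read from the `DATACONTRACT_S3_*` environment variables.

```python
from datacontract.data_contract import DataContract

# Sketch: run schema and quality tests against the "athena" server
# defined in datacontract.yaml.
data_contract = DataContract(data_contract_file="datacontract.yaml", server="athena")
run = data_contract.test()

if not run.has_passed():
    print("Data contract validation failed")
```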
--- datacontract_cli-0.10.33.dist-info/RECORD
+++ datacontract_cli-0.10.35.dist-info/RECORD
@@ -1,6 +1,6 @@
 datacontract/__init__.py,sha256=ThDdxDJsd7qNErLoh628nK5M7RzhJNYCmN-C6BAJFoo,405
-datacontract/api.py,sha256=Ze6pVD3Ub0oyMJI3iYSNXH78K2nPKbXKKHA-0DerJ48,8175
-datacontract/cli.py,sha256=MxtTI15tnkPieSbHdqtU-wCiwj1oCiEnlMHFGzB4OUg,19364
+datacontract/api.py,sha256=kU-xnHgYUORK8W34IMPtSMNZ482MDnxBzY8AjujHEf4,8578
+datacontract/cli.py,sha256=jlFJionOL6Q78eyIKoO8RuZt2wx3D1I4-yOZEmA43iI,19401
 datacontract/data_contract.py,sha256=Jlgkbzj6UN8RtFDK5VFcqm7v8oitVs-q10msU8W3Uo8,15183
 datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
@@ -8,24 +8,25 @@ datacontract/breaking/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1
 datacontract/breaking/breaking_rules.py,sha256=M9IdzVJSA7oOr1fvLQl0y9MoBKeItPz42Db2U2cjH2Y,4063
 datacontract/catalog/catalog.py,sha256=HyFmUPsN3pWJ2WTdbs0DmOf0qmwAzc2-ijWse9_dhBc,2729
 datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/engines/data_contract_checks.py,sha256=2WSxqslt7hxr0CzCir-8I3RihCbFAAa8LmF3fxORMQk,26728
-datacontract/engines/data_contract_test.py,sha256=NAnO_744H30tkQh43PSUSsAs3TC9DaNIWZe9jFKzA8Q,3242
+datacontract/engines/data_contract_checks.py,sha256=NbYz7p9ljnia2XiF6PeWR4UNiZVpCAj2ufKgpir-Ve4,28234
+datacontract/engines/data_contract_test.py,sha256=8qg0SkwtTmayfzNL2U_0xgx5Hi_DUePaMt2q_JiCqX8,4543
 datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=zrDn-_EJJ5kv0kZWAA-toeEPuBd3YQ0-U7Jb8euNUS8,1558
 datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
-datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=-knTZ-NsHpBWCoR7r1JP5iYSWx697mugijmqUPx0pEY,10307
+datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=EKPkFM8iGyiWsHw8peErhQvbt9gu_zJ2S-NDQnupfeM,10921
 datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=0sTDWvuu0AzSgn7fKWJxGaTmPww00TFYyDK-X0s5T3c,1193
 datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/engines/soda/check_soda_execute.py,sha256=SYJdPpkozOA62yTM7s6cfwLfgfxgGJptO5U-cShrtPk,8600
+datacontract/engines/soda/check_soda_execute.py,sha256=qc56ZNKyHIoSFgoXzBRioOhnhgsFSJ6L-nyBU7d0fW8,8902
+datacontract/engines/soda/connections/athena.py,sha256=wjrJA9CHhl6FbSW0HulWcYlkT2_nY1s19Y2MFe4lbCU,3028
 datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
 datacontract/engines/soda/connections/databricks.py,sha256=cMRasuO0MrSKVgHPB-9uFTGTZPFg6z9Kpk3tJ0SdR0s,943
-datacontract/engines/soda/connections/duckdb_connection.py,sha256=UY6BElfLUs_LPZ4EA5SNZKhaq6EsPe9Mb79zhoSv4sg,9110
+datacontract/engines/soda/connections/duckdb_connection.py,sha256=wGiB6EKr-OZosEFvT2gkutFgAzAxFMKicfpjbIJUZwQ,9332
 datacontract/engines/soda/connections/kafka.py,sha256=lnj_-3-CnJ6stetGqm6HOzN1Qatlw7xoCQU2zKBIXxU,8725
 datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
 datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
 datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
 datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
 datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/export/avro_converter.py,sha256=0_MOAwJYOMxQpmgYKSQ5TJjOTeuqVWfm4kTxmaSVH3A,4882
+datacontract/export/avro_converter.py,sha256=MnfeW2x-Eko9dK6_fpdQYWtEzLkFWXfKABAUSJqiDpo,5381
 datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF6dUU9roVqnFo,9878
 datacontract/export/bigquery_converter.py,sha256=9mm-XP3klu1C5J87L9EL5ZyMCJhLBwsixo3aAw9QmRI,4738
 datacontract/export/custom_converter.py,sha256=xb8KbkRRgHmT4ewwC7XxtnKpe_ZMSJWBjYOaKjmO_KQ,1216
@@ -33,29 +34,30 @@ datacontract/export/data_caterer_converter.py,sha256=eSEuy3TbqUIG_lHYEBOydAgp_CJ
 datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nbw3Wq0JDo,4777
 datacontract/export/dbt_converter.py,sha256=U2x7rtEnq1s3pHhM0L2B6D6OQtKdCdm4PBSqNCHczHk,10577
 datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
+datacontract/export/dqx_converter.py,sha256=BWMK_U-_c0XHb_OI0p3q31RAAbOj9X4erD_oUtMz0Hg,4013
 datacontract/export/duckdb_type_converter.py,sha256=hUAAbImhJUMJOXEG-UoOKQqYGrJM6UILpn2YjUuAUOw,2216
 datacontract/export/excel_exporter.py,sha256=ySZL93oaENIjaLyctwoXOiT3yWf311YG3vYtLttjImI,38274
-datacontract/export/exporter.py,sha256=Xo4RyPq9W42hH3xfAX2v8FeQdMFoW0eVzgahY6JjlWI,3069
-datacontract/export/exporter_factory.py,sha256=JRrfcQ9CXiZCw56nFNu9uPSLjlDJLfUC7xPdVTyk6K8,6164
+datacontract/export/exporter.py,sha256=DfvMHDWmdqhJswLkQ5oMNojgYDblXDuRgFJRHuFSawM,3085
+datacontract/export/exporter_factory.py,sha256=UvP3_U7xj-GEjaifi__Jri6eYKx9SFXtmSrnkSbWuP0,6318
 datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
 datacontract/export/great_expectations_converter.py,sha256=Wx0mESRy4xAf8y7HjESsGsQaaei8k9xOVu3RbC6BlQM,12257
 datacontract/export/html_exporter.py,sha256=EyTMj25_Df3irZiYw1hxVZeLYWp6YSG6z3IuFUviP14,3066
 datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
 datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
-datacontract/export/markdown_converter.py,sha256=ioTFe3mHKLYwpo7wasOr-4TBlHIjowDHSonDgXV4uOQ,6468
-datacontract/export/mermaid_exporter.py,sha256=wBFqRKgy3Y5FZnzD0RCR1UAlT9ZqYW7dkXWSJa6eHiQ,3465
+datacontract/export/markdown_converter.py,sha256=sV74JBGEfvhteNYPwBR-78ucq0Avp9oaPdFpu3Ckd0w,9935
+datacontract/export/mermaid_exporter.py,sha256=Hg2yc5DYDTEZ7etoIhB1LU6rob_sGlouDtkPxUtf6kQ,4008
 datacontract/export/odcs_v3_exporter.py,sha256=b__AiPAnCUuFQE5DPHsvXBrMeEl1t_mJ1vzTx84TMlI,13931
 datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
 datacontract/export/protobuf_converter.py,sha256=DHLl8BW26xqltBsd7Qhz0RhTl9YZQKCbkmjNpECgubg,7928
 datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
 datacontract/export/rdf_converter.py,sha256=1aTe_fwBRBnYUrJZzhEQ8eVnl0mQ1hcF45aKVdgvSIc,6435
 datacontract/export/sodacl_converter.py,sha256=75vQ2TnoLfjiDtWT2x8opumvotXVRs1YaIu1NLYz05M,1473
-datacontract/export/spark_converter.py,sha256=c4C_QZFRQLMW6Cz35WYdHqYGKyHSF7bBVmJua2YF0Cs,7290
+datacontract/export/spark_converter.py,sha256=aol9ygEq29mjrZNiaK3Vdm8kEZhCgFFphuFiFDX-pOE,7953
 datacontract/export/sql_converter.py,sha256=vyLbDqzt_J3LRXpPv2W2HqUIyAtQx_S-jviBiSxh14A,5087
-datacontract/export/sql_type_converter.py,sha256=4r8S7nb4z2ODgXTwHxdtU158HWE8uxXHydiZ_y4-N_E,13592
+datacontract/export/sql_type_converter.py,sha256=6-FKC4GTTSftXntIesiptQ51WVtS-mYgJpKrzhVDi1M,13694
 datacontract/export/sqlalchemy_converter.py,sha256=0DMncvA811lTtd5q4ZORREQ9YH1vQm1lJeqMWsFvloE,6463
 datacontract/export/terraform_converter.py,sha256=ExFoEvErVk-gBnWJiqC38SxDUmUEydpACWc917l5RyM,2163
-datacontract/imports/avro_importer.py,sha256=ryu4iUCSPJEV1uaE3AKdxD7fUxmRJ-ta936xurbgtHc,10922
+datacontract/imports/avro_importer.py,sha256=isfAnMq9bk-Yo5zSyTnqMegu7JIujn_sTGSTOYAc8-0,11847
 datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056sxLfDDh3vjo8E,8419
 datacontract/imports/csv_importer.py,sha256=mBsmyTvfB8q64Z3NYqv4zTDUOvoXG896hZvp3oLt5YM,5330
 datacontract/imports/dbml_importer.py,sha256=o0IOgvXN34lU1FICDHm_QUTv0DKsgwbHPHUDxQhIapE,3872
@@ -68,10 +70,10 @@ datacontract/imports/importer_factory.py,sha256=RS7uwkkT7rIKGeMKgPmZhE3GVC9IfZxZ
 datacontract/imports/json_importer.py,sha256=JeGbqAC_wAO0u8HeMA5H-KJBfs6gpp1oGIpxt6nxSZI,12641
 datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
 datacontract/imports/odcs_importer.py,sha256=ZP2u3kJsgULANTbbqkP3joOlU9cUneZOPy6Ak3oTMgs,2140
-datacontract/imports/odcs_v3_importer.py,sha256=smtFSoywNojUCdB8sDu0e_3yHN4CmUw5YS3qF-AU0qY,16873
+datacontract/imports/odcs_v3_importer.py,sha256=kuPqTBsUid3n9x0oNexFhTPIqFxH3bzQBWFTc617Cfc,18115
 datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
 datacontract/imports/protobuf_importer.py,sha256=rlUIskv9PNi5rFQ4Hobt9zlnKpahGsb4dy5G5UJoVAw,10840
-datacontract/imports/spark_importer.py,sha256=vqGM3kisQGSiHS1AvuC6gp0unj2-6TT3R-ejG96j-Tw,8618
+datacontract/imports/spark_importer.py,sha256=OxX9hJhi8e1o1pZGOKh5zWsK96SX13r0WV04kKDD61M,8964
 datacontract/imports/sql_importer.py,sha256=CfHap1zpyy-NVol_i21bDai3l4PD8OLuJQwaVlihbqg,9543
 datacontract/imports/unity_importer.py,sha256=iW4CJNbHNoQR5fqVlBJfdHnbbDasPgkwHHmyV9FKGkI,8687
 datacontract/init/init_template.py,sha256=nc-B2ZkwDQ3GNFqxNmSDcStQMDbBTxysgTZDLw15izo,721
@@ -113,9 +115,9 @@ datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG
 datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
 datacontract/templates/partials/server.html,sha256=dHFJtonMjhiUHtT69RUgTpkoRwmNdTRzkCdH0LtGg_4,6279
 datacontract/templates/style/output.css,sha256=ioIo1f96VW7LHhDifj6QI8QbRChJl-LlQ59EwM8MEmA,28692
-datacontract_cli-0.10.33.dist-info/licenses/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
-datacontract_cli-0.10.33.dist-info/METADATA,sha256=gqEgdS3X0NnbsbAuATDP2YmwNVcqQMgXEJmVayqQbVA,111469
-datacontract_cli-0.10.33.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
-datacontract_cli-0.10.33.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
-datacontract_cli-0.10.33.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
-datacontract_cli-0.10.33.dist-info/RECORD,,
+datacontract_cli-0.10.35.dist-info/licenses/LICENSE,sha256=0hcS8X51AL0UvEsx1ZM6WQcxiy9d0j5iOfzdPYM6ONU,2205
+datacontract_cli-0.10.35.dist-info/METADATA,sha256=QkNoML7XM2Y2tlcv4in835nVni6yCoP9FbV-T0pJ0Pc,114486
+datacontract_cli-0.10.35.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+datacontract_cli-0.10.35.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+datacontract_cli-0.10.35.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+datacontract_cli-0.10.35.dist-info/RECORD,,
--- datacontract_cli-0.10.33.dist-info/licenses/LICENSE
+++ datacontract_cli-0.10.35.dist-info/licenses/LICENSE
@@ -1,6 +1,6 @@
 MIT License

-Copyright (c) 2023 innoQ Deutschland GmbH
+Copyright (c) 2025 Entropy Data GmbH

 Permission is hereby granted, free of charge, to any person obtaining a copy
 of this software and associated documentation files (the "Software"), to deal