datacontract-cli 0.10.22__py3-none-any.whl → 0.10.23__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datacontract/cli.py +20 -72
- datacontract/engines/data_contract_test.py +22 -6
- datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +2 -3
- datacontract/engines/soda/check_soda_execute.py +1 -1
- datacontract/engines/soda/connections/{duckdb.py → duckdb_connection.py} +6 -5
- datacontract/export/avro_converter.py +2 -2
- datacontract/export/dbt_converter.py +13 -10
- datacontract/export/odcs_v3_exporter.py +22 -3
- datacontract/imports/odcs_v3_importer.py +1 -1
- datacontract/lint/resolve.py +17 -4
- datacontract/output/junit_test_results.py +135 -0
- datacontract/output/output_format.py +10 -0
- datacontract/output/test_results_writer.py +79 -0
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/METADATA +54 -50
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/RECORD +19 -16
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/WHEEL +1 -1
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/LICENSE +0 -0
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.22.dist-info → datacontract_cli-0.10.23.dist-info}/top_level.txt +0 -0
datacontract/cli.py
CHANGED
|
@@ -5,9 +5,7 @@ from typing import Iterable, List, Optional
|
|
|
5
5
|
|
|
6
6
|
import typer
|
|
7
7
|
from click import Context
|
|
8
|
-
from rich import box
|
|
9
8
|
from rich.console import Console
|
|
10
|
-
from rich.table import Table
|
|
11
9
|
from typer.core import TyperGroup
|
|
12
10
|
from typing_extensions import Annotated
|
|
13
11
|
|
|
@@ -19,6 +17,8 @@ from datacontract.integration.datamesh_manager import (
|
|
|
19
17
|
publish_data_contract_to_datamesh_manager,
|
|
20
18
|
)
|
|
21
19
|
from datacontract.lint.resolve import resolve_data_contract_dict
|
|
20
|
+
from datacontract.output.output_format import OutputFormat
|
|
21
|
+
from datacontract.output.test_results_writer import write_test_result
|
|
22
22
|
|
|
23
23
|
console = Console()
|
|
24
24
|
|
|
@@ -92,12 +92,19 @@ def lint(
|
|
|
92
92
|
str,
|
|
93
93
|
typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema"),
|
|
94
94
|
] = None,
|
|
95
|
+
output: Annotated[
|
|
96
|
+
Path,
|
|
97
|
+
typer.Option(
|
|
98
|
+
help="Specify the file path where the test results should be written to (e.g., './test-results/TEST-datacontract.xml'). If no path is provided, the output will be printed to stdout."
|
|
99
|
+
),
|
|
100
|
+
] = None,
|
|
101
|
+
output_format: Annotated[OutputFormat, typer.Option(help="The target format for the test results.")] = None,
|
|
95
102
|
):
|
|
96
103
|
"""
|
|
97
104
|
Validate that the datacontract.yaml is correctly formatted.
|
|
98
105
|
"""
|
|
99
106
|
run = DataContract(data_contract_file=location, schema_location=schema).lint()
|
|
100
|
-
|
|
107
|
+
write_test_result(run, console, output_format, output)
|
|
101
108
|
|
|
102
109
|
|
|
103
110
|
@app.command()
|
|
@@ -120,6 +127,13 @@ def test(
|
|
|
120
127
|
),
|
|
121
128
|
] = "all",
|
|
122
129
|
publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
|
|
130
|
+
output: Annotated[
|
|
131
|
+
Path,
|
|
132
|
+
typer.Option(
|
|
133
|
+
help="Specify the file path where the test results should be written to (e.g., './test-results/TEST-datacontract.xml')."
|
|
134
|
+
),
|
|
135
|
+
] = None,
|
|
136
|
+
output_format: Annotated[OutputFormat, typer.Option(help="The target format for the test results.")] = None,
|
|
123
137
|
logs: Annotated[bool, typer.Option(help="Print logs")] = False,
|
|
124
138
|
ssl_verification: Annotated[
|
|
125
139
|
bool,
|
|
@@ -141,7 +155,7 @@ def test(
|
|
|
141
155
|
).test()
|
|
142
156
|
if logs:
|
|
143
157
|
_print_logs(run)
|
|
144
|
-
|
|
158
|
+
write_test_result(run, console, output_format, output)
|
|
145
159
|
|
|
146
160
|
|
|
147
161
|
@app.command()
|
|
@@ -214,7 +228,7 @@ def export(
|
|
|
214
228
|
if output is None:
|
|
215
229
|
console.print(result, markup=False, soft_wrap=True)
|
|
216
230
|
else:
|
|
217
|
-
with output.open("w") as f:
|
|
231
|
+
with output.open(mode="w", encoding="utf-8") as f:
|
|
218
232
|
f.write(result)
|
|
219
233
|
console.print(f"Written result to {output}")
|
|
220
234
|
|
|
@@ -306,7 +320,7 @@ def import_(
|
|
|
306
320
|
if output is None:
|
|
307
321
|
console.print(result.to_yaml(), markup=False, soft_wrap=True)
|
|
308
322
|
else:
|
|
309
|
-
with output.open("w") as f:
|
|
323
|
+
with output.open(mode="w", encoding="utf-8") as f:
|
|
310
324
|
f.write(result.to_yaml())
|
|
311
325
|
console.print(f"Written result to {output}")
|
|
312
326
|
|
|
@@ -467,77 +481,11 @@ def api(
|
|
|
467
481
|
uvicorn.run(app="datacontract.api:app", port=port, host=host, reload=True, log_config=LOGGING_CONFIG)
|
|
468
482
|
|
|
469
483
|
|
|
470
|
-
def _handle_result(run):
|
|
471
|
-
_print_table(run)
|
|
472
|
-
if run.result == "passed":
|
|
473
|
-
console.print(
|
|
474
|
-
f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
|
|
475
|
-
)
|
|
476
|
-
elif run.result == "warning":
|
|
477
|
-
console.print("🟠 data contract has warnings. Found the following warnings:")
|
|
478
|
-
i = 1
|
|
479
|
-
for check in run.checks:
|
|
480
|
-
if check.result != "passed":
|
|
481
|
-
field = to_field(run, check)
|
|
482
|
-
if field:
|
|
483
|
-
field = field + " "
|
|
484
|
-
else:
|
|
485
|
-
field = ""
|
|
486
|
-
console.print(f"{i}) {field}{check.name}: {check.reason}")
|
|
487
|
-
i += 1
|
|
488
|
-
else:
|
|
489
|
-
console.print("🔴 data contract is invalid, found the following errors:")
|
|
490
|
-
i = 1
|
|
491
|
-
for check in run.checks:
|
|
492
|
-
if check.result != "passed":
|
|
493
|
-
field = to_field(run, check)
|
|
494
|
-
if field:
|
|
495
|
-
field = field + " "
|
|
496
|
-
else:
|
|
497
|
-
field = ""
|
|
498
|
-
console.print(f"{i}) {field}{check.name}: {check.reason}")
|
|
499
|
-
i += 1
|
|
500
|
-
raise typer.Exit(code=1)
|
|
501
|
-
|
|
502
|
-
|
|
503
|
-
def _print_table(run):
|
|
504
|
-
table = Table(box=box.ROUNDED)
|
|
505
|
-
table.add_column("Result", no_wrap=True)
|
|
506
|
-
table.add_column("Check", max_width=100)
|
|
507
|
-
table.add_column("Field", max_width=32)
|
|
508
|
-
table.add_column("Details", max_width=50)
|
|
509
|
-
for check in sorted(run.checks, key=lambda c: (c.result or "", c.model or "", c.field or "")):
|
|
510
|
-
table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
|
|
511
|
-
console.print(table)
|
|
512
|
-
|
|
513
|
-
|
|
514
|
-
def to_field(run, check):
|
|
515
|
-
models = [c.model for c in run.checks]
|
|
516
|
-
if len(set(models)) > 1:
|
|
517
|
-
if check.field is None:
|
|
518
|
-
return check.model
|
|
519
|
-
return check.model + "." + check.field
|
|
520
|
-
else:
|
|
521
|
-
return check.field
|
|
522
|
-
|
|
523
|
-
|
|
524
484
|
def _print_logs(run):
|
|
525
485
|
console.print("\nLogs:")
|
|
526
486
|
for log in run.logs:
|
|
527
487
|
console.print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
|
|
528
488
|
|
|
529
489
|
|
|
530
|
-
def with_markup(result):
|
|
531
|
-
if result == "passed":
|
|
532
|
-
return "[green]passed[/green]"
|
|
533
|
-
if result == "warning":
|
|
534
|
-
return "[yellow]warning[/yellow]"
|
|
535
|
-
if result == "failed":
|
|
536
|
-
return "[red]failed[/red]"
|
|
537
|
-
if result == "error":
|
|
538
|
-
return "[red]error[/red]"
|
|
539
|
-
return result
|
|
540
|
-
|
|
541
|
-
|
|
542
490
|
if __name__ == "__main__":
|
|
543
491
|
app()
|
|
@@ -29,12 +29,7 @@ def execute_data_contract_test(
|
|
|
29
29
|
reason="Models block is missing. Skip executing tests.",
|
|
30
30
|
engine="datacontract",
|
|
31
31
|
)
|
|
32
|
-
|
|
33
|
-
if server_name:
|
|
34
|
-
server = data_contract_specification.servers.get(server_name)
|
|
35
|
-
else:
|
|
36
|
-
server_name = list(data_contract_specification.servers.keys())[0]
|
|
37
|
-
server = data_contract_specification.servers.get(server_name)
|
|
32
|
+
server = get_server(data_contract_specification, server_name)
|
|
38
33
|
run.log_info(f"Running tests for data contract {data_contract_specification.id} with server {server_name}")
|
|
39
34
|
run.dataContractId = data_contract_specification.id
|
|
40
35
|
run.dataContractVersion = data_contract_specification.info.version
|
|
@@ -49,3 +44,24 @@ def execute_data_contract_test(
|
|
|
49
44
|
if server.format == "json" and server.type != "kafka":
|
|
50
45
|
check_jsonschema(run, data_contract_specification, server)
|
|
51
46
|
check_soda_execute(run, data_contract_specification, server, spark)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def get_server(data_contract_specification: DataContractSpecification, server_name: str = None):
|
|
50
|
+
"""Get the server configuration from the data contract specification.
|
|
51
|
+
|
|
52
|
+
Args:
|
|
53
|
+
data_contract_specification: The data contract specification
|
|
54
|
+
server_name: Optional name of the server to use. If not provided, uses the first server.
|
|
55
|
+
|
|
56
|
+
Returns:
|
|
57
|
+
The selected server configuration
|
|
58
|
+
"""
|
|
59
|
+
|
|
60
|
+
check_that_datacontract_contains_valid_server_configuration(data_contract_specification, server_name)
|
|
61
|
+
|
|
62
|
+
if server_name:
|
|
63
|
+
server = data_contract_specification.servers.get(server_name)
|
|
64
|
+
else:
|
|
65
|
+
server_name = list(data_contract_specification.servers.keys())[0]
|
|
66
|
+
server = data_contract_specification.servers.get(server_name)
|
|
67
|
+
return server
|
datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py
CHANGED
|
@@ -1,12 +1,11 @@
|
|
|
1
1
|
from datacontract.model.data_contract_specification import DataContractSpecification
|
|
2
2
|
from datacontract.model.exceptions import DataContractException
|
|
3
|
-
from datacontract.model.run import Run
|
|
4
3
|
|
|
5
4
|
|
|
6
5
|
def check_that_datacontract_contains_valid_server_configuration(
|
|
7
|
-
|
|
6
|
+
data_contract: DataContractSpecification, server_name: str | None
|
|
8
7
|
):
|
|
9
|
-
if data_contract.servers is None:
|
|
8
|
+
if data_contract.servers is None or len(data_contract.servers) == 0:
|
|
10
9
|
raise DataContractException(
|
|
11
10
|
type="lint",
|
|
12
11
|
name="Check that data contract contains valid server configuration",
|
|
@@ -3,7 +3,7 @@ import uuid
|
|
|
3
3
|
|
|
4
4
|
from datacontract.engines.soda.connections.bigquery import to_bigquery_soda_configuration
|
|
5
5
|
from datacontract.engines.soda.connections.databricks import to_databricks_soda_configuration
|
|
6
|
-
from datacontract.engines.soda.connections.
|
|
6
|
+
from datacontract.engines.soda.connections.duckdb_connection import get_duckdb_connection
|
|
7
7
|
from datacontract.engines.soda.connections.kafka import create_spark_session, read_kafka_topic
|
|
8
8
|
from datacontract.engines.soda.connections.postgres import to_postgres_soda_configuration
|
|
9
9
|
from datacontract.engines.soda.connections.snowflake import to_snowflake_soda_configuration
|
|
@@ -1,4 +1,5 @@
|
|
|
1
1
|
import os
|
|
2
|
+
from typing import Any
|
|
2
3
|
|
|
3
4
|
import duckdb
|
|
4
5
|
|
|
@@ -27,13 +28,13 @@ def get_duckdb_connection(data_contract, server, run: Run):
|
|
|
27
28
|
run.log_info(f"Creating table {model_name} for {model_path}")
|
|
28
29
|
|
|
29
30
|
if server.format == "json":
|
|
30
|
-
|
|
31
|
+
json_format = "auto"
|
|
31
32
|
if server.delimiter == "new_line":
|
|
32
|
-
|
|
33
|
+
json_format = "newline_delimited"
|
|
33
34
|
elif server.delimiter == "array":
|
|
34
|
-
|
|
35
|
+
json_format = "array"
|
|
35
36
|
con.sql(f"""
|
|
36
|
-
CREATE VIEW "{model_name}" AS SELECT * FROM read_json_auto('{model_path}', format='{
|
|
37
|
+
CREATE VIEW "{model_name}" AS SELECT * FROM read_json_auto('{model_path}', format='{json_format}', hive_partitioning=1);
|
|
37
38
|
""")
|
|
38
39
|
elif server.format == "parquet":
|
|
39
40
|
con.sql(f"""
|
|
@@ -56,7 +57,7 @@ def get_duckdb_connection(data_contract, server, run: Run):
|
|
|
56
57
|
return con
|
|
57
58
|
|
|
58
59
|
|
|
59
|
-
def to_csv_types(model) -> dict:
|
|
60
|
+
def to_csv_types(model) -> dict[Any, str | None] | None:
|
|
60
61
|
if model is None:
|
|
61
62
|
return None
|
|
62
63
|
columns = {}
|
|
@@ -108,8 +108,8 @@ def to_avro_type(field: Field, field_name: str) -> str | dict:
|
|
|
108
108
|
elif field.type in ["time"]:
|
|
109
109
|
return "long"
|
|
110
110
|
elif field.type in ["object", "record", "struct"]:
|
|
111
|
-
if field.config is not None and
|
|
112
|
-
return to_avro_record(field_name
|
|
111
|
+
if field.config is not None and "namespace" in field.config:
|
|
112
|
+
return to_avro_record(field_name, field.fields, field.description, field.config["namespace"])
|
|
113
113
|
return to_avro_record(field_name, field.fields, field.description, None)
|
|
114
114
|
elif field.type in ["binary"]:
|
|
115
115
|
return "bytes"
|
|
@@ -9,7 +9,7 @@ from datacontract.model.data_contract_specification import DataContractSpecifica
|
|
|
9
9
|
|
|
10
10
|
class DbtExporter(Exporter):
|
|
11
11
|
def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
|
|
12
|
-
return to_dbt_models_yaml(data_contract)
|
|
12
|
+
return to_dbt_models_yaml(data_contract, server)
|
|
13
13
|
|
|
14
14
|
|
|
15
15
|
class DbtSourceExporter(Exporter):
|
|
@@ -27,15 +27,16 @@ class DbtStageExporter(Exporter):
|
|
|
27
27
|
)
|
|
28
28
|
|
|
29
29
|
|
|
30
|
-
def to_dbt_models_yaml(data_contract_spec: DataContractSpecification):
|
|
30
|
+
def to_dbt_models_yaml(data_contract_spec: DataContractSpecification, server: str = None):
|
|
31
31
|
dbt = {
|
|
32
32
|
"version": 2,
|
|
33
33
|
"models": [],
|
|
34
34
|
}
|
|
35
|
+
|
|
35
36
|
for model_key, model_value in data_contract_spec.models.items():
|
|
36
|
-
dbt_model = _to_dbt_model(model_key, model_value, data_contract_spec)
|
|
37
|
+
dbt_model = _to_dbt_model(model_key, model_value, data_contract_spec, adapter_type=server)
|
|
37
38
|
dbt["models"].append(dbt_model)
|
|
38
|
-
return yaml.
|
|
39
|
+
return yaml.safe_dump(dbt, indent=2, sort_keys=False, allow_unicode=True)
|
|
39
40
|
|
|
40
41
|
|
|
41
42
|
def to_dbt_staging_sql(data_contract_spec: DataContractSpecification, model_name: str, model_value: Model) -> str:
|
|
@@ -60,7 +61,7 @@ def to_dbt_sources_yaml(data_contract_spec: DataContractSpecification, server: s
|
|
|
60
61
|
if data_contract_spec.info.owner is not None:
|
|
61
62
|
source["meta"] = {"owner": data_contract_spec.info.owner}
|
|
62
63
|
if data_contract_spec.info.description is not None:
|
|
63
|
-
source["description"] = data_contract_spec.info.description
|
|
64
|
+
source["description"] = data_contract_spec.info.description.strip().replace("\n", " ")
|
|
64
65
|
found_server = data_contract_spec.servers.get(server)
|
|
65
66
|
adapter_type = None
|
|
66
67
|
if found_server is not None:
|
|
@@ -87,14 +88,16 @@ def _to_dbt_source_table(
|
|
|
87
88
|
}
|
|
88
89
|
|
|
89
90
|
if model_value.description is not None:
|
|
90
|
-
dbt_model["description"] = model_value.description
|
|
91
|
+
dbt_model["description"] = model_value.description.strip().replace("\n", " ")
|
|
91
92
|
columns = _to_columns(data_contract_spec, model_value.fields, False, adapter_type)
|
|
92
93
|
if columns:
|
|
93
94
|
dbt_model["columns"] = columns
|
|
94
95
|
return dbt_model
|
|
95
96
|
|
|
96
97
|
|
|
97
|
-
def _to_dbt_model(
|
|
98
|
+
def _to_dbt_model(
|
|
99
|
+
model_key, model_value: Model, data_contract_spec: DataContractSpecification, adapter_type: Optional[str]
|
|
100
|
+
) -> dict:
|
|
98
101
|
dbt_model = {
|
|
99
102
|
"name": model_key,
|
|
100
103
|
}
|
|
@@ -108,8 +111,8 @@ def _to_dbt_model(model_key, model_value: Model, data_contract_spec: DataContrac
|
|
|
108
111
|
if _supports_constraints(model_type):
|
|
109
112
|
dbt_model["config"]["contract"] = {"enforced": True}
|
|
110
113
|
if model_value.description is not None:
|
|
111
|
-
dbt_model["description"] = model_value.description
|
|
112
|
-
columns = _to_columns(data_contract_spec, model_value.fields, _supports_constraints(model_type),
|
|
114
|
+
dbt_model["description"] = model_value.description.strip().replace("\n", " ")
|
|
115
|
+
columns = _to_columns(data_contract_spec, model_value.fields, _supports_constraints(model_type), adapter_type)
|
|
113
116
|
if columns:
|
|
114
117
|
dbt_model["columns"] = columns
|
|
115
118
|
return dbt_model
|
|
@@ -171,7 +174,7 @@ def _to_column(
|
|
|
171
174
|
{"dbt_expectations.dbt_expectations.expect_column_values_to_be_of_type": {"column_type": dbt_type}}
|
|
172
175
|
)
|
|
173
176
|
if field.description is not None:
|
|
174
|
-
column["description"] = field.description
|
|
177
|
+
column["description"] = field.description.strip().replace("\n", " ")
|
|
175
178
|
if field.required:
|
|
176
179
|
if supports_constraints:
|
|
177
180
|
column.setdefault("constraints", []).append({"type": "not_null"})
|
|
@@ -19,7 +19,7 @@ def to_odcs_v3_yaml(data_contract_spec: DataContractSpecification) -> str:
|
|
|
19
19
|
"name": data_contract_spec.info.title,
|
|
20
20
|
"version": data_contract_spec.info.version,
|
|
21
21
|
"domain": data_contract_spec.info.owner,
|
|
22
|
-
"status": data_contract_spec.info.status,
|
|
22
|
+
"status": to_status(data_contract_spec.info.status),
|
|
23
23
|
}
|
|
24
24
|
|
|
25
25
|
if data_contract_spec.terms is not None:
|
|
@@ -217,9 +217,9 @@ def to_property(field_name: str, field: Field) -> dict:
|
|
|
217
217
|
if field.description is not None:
|
|
218
218
|
property["description"] = field.description
|
|
219
219
|
if field.required is not None:
|
|
220
|
-
property["
|
|
220
|
+
property["nullable"] = not field.required
|
|
221
221
|
if field.unique is not None:
|
|
222
|
-
property["
|
|
222
|
+
property["unique"] = field.unique
|
|
223
223
|
if field.classification is not None:
|
|
224
224
|
property["classification"] = field.classification
|
|
225
225
|
if field.examples is not None:
|
|
@@ -312,3 +312,22 @@ def to_odcs_quality(quality):
|
|
|
312
312
|
if quality.implementation is not None:
|
|
313
313
|
quality_dict["implementation"] = quality.implementation
|
|
314
314
|
return quality_dict
|
|
315
|
+
|
|
316
|
+
|
|
317
|
+
def to_status(status):
|
|
318
|
+
"""Convert the data contract status to ODCS v3 format."""
|
|
319
|
+
if status is None:
|
|
320
|
+
return "draft" # Default to draft if no status is provided
|
|
321
|
+
|
|
322
|
+
# Valid status values according to ODCS v3.0.1 spec
|
|
323
|
+
valid_statuses = ["proposed", "draft", "active", "deprecated", "retired"]
|
|
324
|
+
|
|
325
|
+
# Convert to lowercase for comparison
|
|
326
|
+
status_lower = status.lower()
|
|
327
|
+
|
|
328
|
+
# If status is already valid, return it as is
|
|
329
|
+
if status_lower in valid_statuses:
|
|
330
|
+
return status_lower
|
|
331
|
+
|
|
332
|
+
# Default to "draft" for any non-standard status
|
|
333
|
+
return "draft"
|
|
@@ -233,7 +233,7 @@ def import_field_config(odcs_property: Dict[str, Any], server_type=None) -> Dict
|
|
|
233
233
|
config["redshiftType"] = physical_type
|
|
234
234
|
elif server_type == "sqlserver":
|
|
235
235
|
config["sqlserverType"] = physical_type
|
|
236
|
-
elif server_type == "
|
|
236
|
+
elif server_type == "databricks":
|
|
237
237
|
config["databricksType"] = physical_type
|
|
238
238
|
else:
|
|
239
239
|
config["physicalType"] = physical_type
|
datacontract/lint/resolve.py
CHANGED
|
@@ -125,11 +125,24 @@ def _resolve_definition_ref(ref, spec) -> Definition:
|
|
|
125
125
|
path = path.replace("file://", "")
|
|
126
126
|
definition_str = _fetch_file(path)
|
|
127
127
|
definition_dict = _to_yaml(definition_str)
|
|
128
|
+
if definition_path:
|
|
129
|
+
path_parts = [part for part in definition_path.split("/") if part != ""]
|
|
130
|
+
for path_part in path_parts:
|
|
131
|
+
definition_dict = definition_dict.get(path_part, None)
|
|
132
|
+
if not definition_dict:
|
|
133
|
+
raise DataContractException(
|
|
134
|
+
type="lint",
|
|
135
|
+
result="failed",
|
|
136
|
+
name="Check that data contract YAML is valid",
|
|
137
|
+
reason=f"Cannot resolve definition {definition_path}, {path_part} not found",
|
|
138
|
+
engine="datacontract",
|
|
139
|
+
)
|
|
140
|
+
# this assumes that definitions_dict is a definitions dict, however,
|
|
141
|
+
# all we know is that it is a file!
|
|
128
142
|
definition = Definition(**definition_dict)
|
|
129
|
-
if definition_path is not None:
|
|
130
|
-
|
|
131
|
-
|
|
132
|
-
return definition
|
|
143
|
+
# if definition_path is not None:
|
|
144
|
+
# definition = _find_by_path_in_definition(definition_path, definition)
|
|
145
|
+
return definition
|
|
133
146
|
elif ref.startswith("#"):
|
|
134
147
|
logging.info(f"Resolving definition local path {path}")
|
|
135
148
|
|
|
@@ -0,0 +1,135 @@
|
|
|
1
|
+
import xml.etree.ElementTree as ET
|
|
2
|
+
from pathlib import Path
|
|
3
|
+
from xml.dom import minidom
|
|
4
|
+
|
|
5
|
+
import yaml
|
|
6
|
+
|
|
7
|
+
from datacontract.model.run import ResultEnum, Run
|
|
8
|
+
|
|
9
|
+
|
|
10
|
+
def write_junit_test_results(run: Run, console, output_path: Path):
|
|
11
|
+
if not output_path:
|
|
12
|
+
console.print("No output path specified for JUnit test results. Skip writing JUnit test results.")
|
|
13
|
+
return
|
|
14
|
+
|
|
15
|
+
testsuite = ET.Element(
|
|
16
|
+
"testsuite",
|
|
17
|
+
id=str(run.runId),
|
|
18
|
+
name=run.dataContractId if run.dataContractId else "Data Contract",
|
|
19
|
+
tests=str(len(run.checks)),
|
|
20
|
+
errors=str(count_errors(run)),
|
|
21
|
+
failures=str(count_failed(run)),
|
|
22
|
+
skipped=str(count_skipped(run)),
|
|
23
|
+
timestamp=run.timestampStart.replace(tzinfo=None).isoformat(),
|
|
24
|
+
time=str((run.timestampEnd - run.timestampStart).total_seconds()),
|
|
25
|
+
)
|
|
26
|
+
|
|
27
|
+
testsuiteProperties = ET.SubElement(testsuite, "properties")
|
|
28
|
+
if run.dataContractId is not None:
|
|
29
|
+
ET.SubElement(testsuiteProperties, "property", name="dataContractId", value=run.dataContractId)
|
|
30
|
+
if run.dataContractVersion is not None:
|
|
31
|
+
ET.SubElement(testsuiteProperties, "property", name="dataContractVersion", value=run.dataContractVersion)
|
|
32
|
+
if run.dataProductId is not None:
|
|
33
|
+
ET.SubElement(testsuiteProperties, "property", name="dataProductId", value=run.dataProductId)
|
|
34
|
+
if run.outputPortId is not None:
|
|
35
|
+
ET.SubElement(testsuiteProperties, "property", name="outputPortId", value=run.outputPortId)
|
|
36
|
+
if run.server is not None:
|
|
37
|
+
ET.SubElement(testsuiteProperties, "property", name="server", value=run.server)
|
|
38
|
+
|
|
39
|
+
for check in run.checks:
|
|
40
|
+
testcase = ET.SubElement(testsuite, "testcase", classname=to_class_name(check), name=to_testcase_name(check))
|
|
41
|
+
if check.result == ResultEnum.passed:
|
|
42
|
+
pass
|
|
43
|
+
elif check.result == ResultEnum.failed:
|
|
44
|
+
failure = ET.SubElement(
|
|
45
|
+
testcase,
|
|
46
|
+
"failure",
|
|
47
|
+
message=check.reason if check.reason else "Failed",
|
|
48
|
+
type=check.category if check.category else "General",
|
|
49
|
+
)
|
|
50
|
+
failure.text = to_failure_text(check)
|
|
51
|
+
elif check.result == ResultEnum.error:
|
|
52
|
+
error = ET.SubElement(
|
|
53
|
+
testcase,
|
|
54
|
+
"error",
|
|
55
|
+
message=check.reason if check.reason else "Error",
|
|
56
|
+
type=check.category if check.category else "General",
|
|
57
|
+
)
|
|
58
|
+
error.text = to_failure_text(check)
|
|
59
|
+
elif check.result is ResultEnum.warning:
|
|
60
|
+
skipped = ET.SubElement(
|
|
61
|
+
testcase,
|
|
62
|
+
"skipped",
|
|
63
|
+
message=check.reason if check.reason else "Warning",
|
|
64
|
+
type=check.category if check.category else "General",
|
|
65
|
+
)
|
|
66
|
+
skipped.skipped = to_failure_text(check)
|
|
67
|
+
else:
|
|
68
|
+
ET.SubElement(
|
|
69
|
+
testcase,
|
|
70
|
+
"skipped",
|
|
71
|
+
message=check.reason if check.reason else "None",
|
|
72
|
+
type=check.category if check.category else "General",
|
|
73
|
+
)
|
|
74
|
+
|
|
75
|
+
if run.logs:
|
|
76
|
+
system_out = ET.SubElement(testsuite, "system-out")
|
|
77
|
+
system_out.text = logs_to_system_out(run)
|
|
78
|
+
|
|
79
|
+
xml_str: str = ET.tostring(testsuite, xml_declaration=True, encoding="utf-8")
|
|
80
|
+
xml_str_pretty = minidom.parseString(xml_str).toprettyxml(indent=" ")
|
|
81
|
+
output_path.parent.mkdir(parents=True, exist_ok=True)
|
|
82
|
+
with open(output_path, "w", encoding="utf-8") as f:
|
|
83
|
+
f.write(xml_str_pretty)
|
|
84
|
+
console.print(f"JUnit test results written to {output_path}")
|
|
85
|
+
|
|
86
|
+
|
|
87
|
+
def to_testcase_name(check):
|
|
88
|
+
if check.key:
|
|
89
|
+
return check.key
|
|
90
|
+
if check.name:
|
|
91
|
+
return check.name
|
|
92
|
+
else:
|
|
93
|
+
return "unknown"
|
|
94
|
+
|
|
95
|
+
|
|
96
|
+
def logs_to_system_out(run):
|
|
97
|
+
result = ""
|
|
98
|
+
for log in run.logs:
|
|
99
|
+
result += f"{log.timestamp} {log.level}: {log.message}\n"
|
|
100
|
+
return result
|
|
101
|
+
|
|
102
|
+
|
|
103
|
+
def to_class_name(check):
|
|
104
|
+
if check.model and check.field:
|
|
105
|
+
return f"{check.model}.{check.field}"
|
|
106
|
+
elif check.model:
|
|
107
|
+
return check.model
|
|
108
|
+
elif check.field:
|
|
109
|
+
return check.field
|
|
110
|
+
else:
|
|
111
|
+
return "general"
|
|
112
|
+
|
|
113
|
+
|
|
114
|
+
def to_failure_text(check):
|
|
115
|
+
return (
|
|
116
|
+
f"Name: {check.name}\n"
|
|
117
|
+
f"Engine: {check.engine}\n"
|
|
118
|
+
f"Implementation:\n{check.implementation}\n\n"
|
|
119
|
+
f"Result: {check.result.value if check.result is not None else ''}\n"
|
|
120
|
+
f"Reason: {check.reason}\n"
|
|
121
|
+
f"Details: {check.details}\n"
|
|
122
|
+
f"Diagnostics:\n{yaml.dump(check.diagnostics, default_flow_style=False)}"
|
|
123
|
+
)
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def count_errors(run):
|
|
127
|
+
return sum(1 for check in run.checks if check.result == ResultEnum.error)
|
|
128
|
+
|
|
129
|
+
|
|
130
|
+
def count_failed(run):
|
|
131
|
+
return sum(1 for check in run.checks if check.result == ResultEnum.failed)
|
|
132
|
+
|
|
133
|
+
|
|
134
|
+
def count_skipped(run):
|
|
135
|
+
return sum(1 for check in run.checks if check.result is None)
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
from pathlib import Path
|
|
2
|
+
|
|
3
|
+
import typer
|
|
4
|
+
from rich import box
|
|
5
|
+
from rich.console import Console
|
|
6
|
+
from rich.table import Table
|
|
7
|
+
|
|
8
|
+
from datacontract.model.run import Run
|
|
9
|
+
from datacontract.output.junit_test_results import write_junit_test_results
|
|
10
|
+
from datacontract.output.output_format import OutputFormat
|
|
11
|
+
|
|
12
|
+
|
|
13
|
+
def write_test_result(run: Run, console: Console, output_format: OutputFormat, output_path: Path):
|
|
14
|
+
if output_format == OutputFormat.junit:
|
|
15
|
+
write_junit_test_results(run, console, output_path)
|
|
16
|
+
|
|
17
|
+
_print_table(run, console)
|
|
18
|
+
if run.result == "passed":
|
|
19
|
+
console.print(
|
|
20
|
+
f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
|
|
21
|
+
)
|
|
22
|
+
elif run.result == "warning":
|
|
23
|
+
console.print("🟠 data contract has warnings. Found the following warnings:")
|
|
24
|
+
i = 1
|
|
25
|
+
for check in run.checks:
|
|
26
|
+
if check.result != "passed":
|
|
27
|
+
field = to_field(run, check)
|
|
28
|
+
if field:
|
|
29
|
+
field = field + " "
|
|
30
|
+
else:
|
|
31
|
+
field = ""
|
|
32
|
+
console.print(f"{i}) {field}{check.name}: {check.reason}")
|
|
33
|
+
i += 1
|
|
34
|
+
else:
|
|
35
|
+
console.print("🔴 data contract is invalid, found the following errors:")
|
|
36
|
+
i = 1
|
|
37
|
+
for check in run.checks:
|
|
38
|
+
if check.result != "passed":
|
|
39
|
+
field = to_field(run, check)
|
|
40
|
+
if field:
|
|
41
|
+
field = field + " "
|
|
42
|
+
else:
|
|
43
|
+
field = ""
|
|
44
|
+
console.print(f"{i}) {field}{check.name}: {check.reason}")
|
|
45
|
+
i += 1
|
|
46
|
+
raise typer.Exit(code=1)
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
def _print_table(run, console):
|
|
50
|
+
table = Table(box=box.ROUNDED)
|
|
51
|
+
table.add_column("Result", no_wrap=True)
|
|
52
|
+
table.add_column("Check", max_width=100)
|
|
53
|
+
table.add_column("Field", max_width=32)
|
|
54
|
+
table.add_column("Details", max_width=50)
|
|
55
|
+
for check in sorted(run.checks, key=lambda c: (c.result or "", c.model or "", c.field or "")):
|
|
56
|
+
table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
|
|
57
|
+
console.print(table)
|
|
58
|
+
|
|
59
|
+
|
|
60
|
+
def to_field(run, check):
|
|
61
|
+
models = [c.model for c in run.checks]
|
|
62
|
+
if len(set(models)) > 1:
|
|
63
|
+
if check.field is None:
|
|
64
|
+
return check.model
|
|
65
|
+
return check.model + "." + check.field
|
|
66
|
+
else:
|
|
67
|
+
return check.field
|
|
68
|
+
|
|
69
|
+
|
|
70
|
+
def with_markup(result):
|
|
71
|
+
if result == "passed":
|
|
72
|
+
return "[green]passed[/green]"
|
|
73
|
+
if result == "warning":
|
|
74
|
+
return "[yellow]warning[/yellow]"
|
|
75
|
+
if result == "failed":
|
|
76
|
+
return "[red]failed[/red]"
|
|
77
|
+
if result == "error":
|
|
78
|
+
return "[red]error[/red]"
|
|
79
|
+
return result
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
Metadata-Version: 2.2
|
|
2
2
|
Name: datacontract-cli
|
|
3
|
-
Version: 0.10.
|
|
3
|
+
Version: 0.10.23
|
|
4
4
|
Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
|
|
5
5
|
Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
|
|
6
6
|
Project-URL: Homepage, https://cli.datacontract.com
|
|
@@ -16,12 +16,12 @@ Requires-Dist: pydantic<2.11.0,>=2.8.2
|
|
|
16
16
|
Requires-Dist: pyyaml~=6.0.1
|
|
17
17
|
Requires-Dist: requests<2.33,>=2.31
|
|
18
18
|
Requires-Dist: fastjsonschema<2.22.0,>=2.19.1
|
|
19
|
-
Requires-Dist: fastparquet
|
|
19
|
+
Requires-Dist: fastparquet<2025.0.0,>=2024.5.0
|
|
20
20
|
Requires-Dist: numpy<2.0.0,>=1.26.4
|
|
21
21
|
Requires-Dist: python-multipart==0.0.20
|
|
22
22
|
Requires-Dist: rich<13.10,>=13.7
|
|
23
23
|
Requires-Dist: sqlglot<27.0.0,>=26.6.0
|
|
24
|
-
Requires-Dist: duckdb
|
|
24
|
+
Requires-Dist: duckdb<2.0.0,>=1.0.0
|
|
25
25
|
Requires-Dist: soda-core-duckdb<3.5.0,>=3.3.20
|
|
26
26
|
Requires-Dist: setuptools>=60
|
|
27
27
|
Requires-Dist: python-dotenv~=1.0.0
|
|
@@ -39,7 +39,7 @@ Provides-Extra: databricks
|
|
|
39
39
|
Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "databricks"
|
|
40
40
|
Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.20; extra == "databricks"
|
|
41
41
|
Requires-Dist: databricks-sql-connector<3.8.0,>=3.7.0; extra == "databricks"
|
|
42
|
-
Requires-Dist: databricks-sdk<0.
|
|
42
|
+
Requires-Dist: databricks-sdk<0.45.0; extra == "databricks"
|
|
43
43
|
Provides-Extra: iceberg
|
|
44
44
|
Requires-Dist: pyiceberg==0.8.1; extra == "iceberg"
|
|
45
45
|
Provides-Extra: kafka
|
|
@@ -48,7 +48,7 @@ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.20; extra == "kafka"
|
|
|
48
48
|
Provides-Extra: postgres
|
|
49
49
|
Requires-Dist: soda-core-postgres<3.4.0,>=3.3.20; extra == "postgres"
|
|
50
50
|
Provides-Extra: s3
|
|
51
|
-
Requires-Dist: s3fs==
|
|
51
|
+
Requires-Dist: s3fs==2025.2.0; extra == "s3"
|
|
52
52
|
Requires-Dist: aiobotocore<2.20.0,>=2.17.0; extra == "s3"
|
|
53
53
|
Provides-Extra: snowflake
|
|
54
54
|
Requires-Dist: snowflake-connector-python[pandas]<3.14,>=3.6; extra == "snowflake"
|
|
@@ -66,7 +66,7 @@ Requires-Dist: pyarrow>=18.1.0; extra == "parquet"
|
|
|
66
66
|
Provides-Extra: rdf
|
|
67
67
|
Requires-Dist: rdflib==7.0.0; extra == "rdf"
|
|
68
68
|
Provides-Extra: api
|
|
69
|
-
Requires-Dist: fastapi==0.115.
|
|
69
|
+
Requires-Dist: fastapi==0.115.8; extra == "api"
|
|
70
70
|
Requires-Dist: uvicorn==0.34.0; extra == "api"
|
|
71
71
|
Provides-Extra: all
|
|
72
72
|
Requires-Dist: datacontract-cli[api,bigquery,csv,databricks,dbml,dbt,iceberg,kafka,parquet,postgres,rdf,s3,snowflake,sqlserver,trino]; extra == "all"
|
|
@@ -230,6 +230,12 @@ if not run.has_passed():
|
|
|
230
230
|
# Abort pipeline, alert, or take corrective actions...
|
|
231
231
|
```
|
|
232
232
|
|
|
233
|
+
## How to
|
|
234
|
+
|
|
235
|
+
- [How to integrate Data Contract CLI in your CI/CD pipeline as a GitHub Action](https://github.com/datacontract/datacontract-action/)
|
|
236
|
+
- [How to run the Data Contract CLI API to test data contracts with POST requests](https://cli.datacontract.com/API)
|
|
237
|
+
- [How to run Data Contract CLI in a Databricks pipeline](https://www.datamesh-architecture.com/howto/build-a-dataproduct-with-databricks#test-the-data-product)
|
|
238
|
+
|
|
233
239
|
|
|
234
240
|
## Installation
|
|
235
241
|
|
|
@@ -378,49 +384,41 @@ Commands
|
|
|
378
384
|
|
|
379
385
|
### test
|
|
380
386
|
```
|
|
381
|
-
|
|
382
|
-
Usage: datacontract test [OPTIONS] [LOCATION]
|
|
383
|
-
|
|
384
|
-
Run schema and quality tests on configured servers.
|
|
385
|
-
|
|
386
|
-
╭─ Arguments
|
|
387
|
-
│ location [LOCATION] The location (url or path) of the data contract
|
|
388
|
-
│ yaml
|
|
389
|
-
|
|
390
|
-
|
|
391
|
-
|
|
392
|
-
│
|
|
393
|
-
│
|
|
394
|
-
│
|
|
395
|
-
│
|
|
396
|
-
│
|
|
397
|
-
│
|
|
398
|
-
│
|
|
399
|
-
│
|
|
400
|
-
│
|
|
401
|
-
│
|
|
402
|
-
│
|
|
403
|
-
│
|
|
404
|
-
│
|
|
405
|
-
│
|
|
406
|
-
│
|
|
407
|
-
│
|
|
408
|
-
│
|
|
409
|
-
│
|
|
410
|
-
│ --
|
|
411
|
-
│
|
|
412
|
-
│
|
|
413
|
-
│
|
|
414
|
-
│
|
|
415
|
-
|
|
416
|
-
│ publishing the data │
|
|
417
|
-
│ contract. │
|
|
418
|
-
│ [default: │
|
|
419
|
-
│ ssl-verification] │
|
|
420
|
-
│ --help Show this message and │
|
|
421
|
-
│ exit. │
|
|
422
|
-
╰──────────────────────────────────────────────────────────────────────────────╯
|
|
423
|
-
|
|
387
|
+
|
|
388
|
+
Usage: datacontract test [OPTIONS] [LOCATION]
|
|
389
|
+
|
|
390
|
+
Run schema and quality tests on configured servers.
|
|
391
|
+
|
|
392
|
+
╭─ Arguments ──────────────────────────────────────────────────────────────────────────────────────╮
|
|
393
|
+
│ location [LOCATION] The location (url or path) of the data contract yaml. │
|
|
394
|
+
│ [default: datacontract.yaml] │
|
|
395
|
+
╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
|
|
396
|
+
╭─ Options ────────────────────────────────────────────────────────────────────────────────────────╮
|
|
397
|
+
│ --schema TEXT The location (url or path) of the Data │
|
|
398
|
+
│ Contract Specification JSON Schema │
|
|
399
|
+
│ [default: None] │
|
|
400
|
+
│ --server TEXT The server configuration to run the │
|
|
401
|
+
│ schema and quality tests. Use the key of │
|
|
402
|
+
│ the server object in the data contract │
|
|
403
|
+
│ yaml file to refer to a server, e.g., │
|
|
404
|
+
│ `production`, or `all` for all servers │
|
|
405
|
+
│ (default). │
|
|
406
|
+
│ [default: all] │
|
|
407
|
+
│ --publish TEXT The url to publish the results after the │
|
|
408
|
+
│ test │
|
|
409
|
+
│ [default: None] │
|
|
410
|
+
│ --output PATH Specify the file path where the test │
|
|
411
|
+
│ results should be written to (e.g., │
|
|
412
|
+
│ './test-results/TEST-datacontract.xml'). │
|
|
413
|
+
│ [default: None] │
|
|
414
|
+
│ --output-format [junit] The target format for the test results. │
|
|
415
|
+
│ [default: None] │
|
|
416
|
+
│ --logs --no-logs Print logs [default: no-logs] │
|
|
417
|
+
│ --ssl-verification --no-ssl-verification SSL verification when publishing the │
|
|
418
|
+
│ data contract. │
|
|
419
|
+
│ [default: ssl-verification] │
|
|
420
|
+
│ --help Show this message and exit. │
|
|
421
|
+
╰──────────────────────────────────────────────────────────────────────────────────────────────────╯
|
|
424
422
|
```
|
|
425
423
|
|
|
426
424
|
Data Contract CLI connects to a data source and runs schema and quality tests to verify that the data contract is valid.
|
|
@@ -1037,6 +1035,12 @@ The export function converts the logical data types of the datacontract into the
|
|
|
1037
1035
|
if a server is selected via the `--server` option (based on the `type` of that server). If no server is selected, the
|
|
1038
1036
|
logical data types are exported.
|
|
1039
1037
|
|
|
1038
|
+
#### DBT & DBT-SOURCES
|
|
1039
|
+
|
|
1040
|
+
The export function converts the datacontract to dbt models in YAML format, with support for SQL dialects.
|
|
1041
|
+
If a server is selected via the `--server` option (based on the `type` of that server) then the DBT column `data_types` match the expected data types of the server.
|
|
1042
|
+
If no server is selected, then it defaults to `snowflake`.
|
|
1043
|
+
|
|
1040
1044
|
#### Spark
|
|
1041
1045
|
|
|
1042
1046
|
The export function converts the data contract specification into a StructType Spark schema. The returned value is a Python code representation of the model schemas.
|
|
@@ -1902,7 +1906,7 @@ Python base interpreter should be 3.11.x (unless working on 3.12 release candida
|
|
|
1902
1906
|
|
|
1903
1907
|
```bash
|
|
1904
1908
|
# create venv
|
|
1905
|
-
python3 -m venv venv
|
|
1909
|
+
python3.11 -m venv venv
|
|
1906
1910
|
source venv/bin/activate
|
|
1907
1911
|
|
|
1908
1912
|
# Install Requirements
|
|
@@ -1,6 +1,6 @@
|
|
|
1
1
|
datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
2
2
|
datacontract/api.py,sha256=qZJr8I5MI4wZlvjUEAvqna9Xj5Ic2GCBxSyogBlKEbE,8166
|
|
3
|
-
datacontract/cli.py,sha256=
|
|
3
|
+
datacontract/cli.py,sha256=7jvwNDLe4TqaegpPiNOjMeHIg2qFxK6AGBWUkzqH7AM,16880
|
|
4
4
|
datacontract/data_contract.py,sha256=H9ogUTj41javVW8gcLWYDKZDJMJQHyc_Q4LxLqFrRoo,10686
|
|
5
5
|
datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
6
6
|
datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
|
|
@@ -9,30 +9,30 @@ datacontract/breaking/breaking_rules.py,sha256=M9IdzVJSA7oOr1fvLQl0y9MoBKeItPz42
|
|
|
9
9
|
datacontract/catalog/catalog.py,sha256=wmv_2BBxHhNBlilAmQHHhNe4tK14DowkyIOVaQW2DWU,2691
|
|
10
10
|
datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
11
11
|
datacontract/engines/data_contract_checks.py,sha256=wjnKh1FxG9qXB1zOPjrGmjW7uCpRsxXICMIvyFwpVt4,26622
|
|
12
|
-
datacontract/engines/data_contract_test.py,sha256=
|
|
13
|
-
datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=
|
|
12
|
+
datacontract/engines/data_contract_test.py,sha256=8TUpKKausMcOoM_7PX_cqjHK_WKsvSvy09SwvJmJoOM,2855
|
|
13
|
+
datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=zrDn-_EJJ5kv0kZWAA-toeEPuBd3YQ0-U7Jb8euNUS8,1558
|
|
14
14
|
datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
|
|
15
15
|
datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=-knTZ-NsHpBWCoR7r1JP5iYSWx697mugijmqUPx0pEY,10307
|
|
16
16
|
datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=vuz_hLF2VD8LR_prjQpPLBU8Is-iHLAvqp4KwclOv9I,1157
|
|
17
17
|
datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
18
|
-
datacontract/engines/soda/check_soda_execute.py,sha256=
|
|
18
|
+
datacontract/engines/soda/check_soda_execute.py,sha256=ve7Td5uy3ZvVyM5H3xz3ndsP-vYK8RP6j8PiSRi26Hs,8295
|
|
19
19
|
datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
|
|
20
20
|
datacontract/engines/soda/connections/databricks.py,sha256=cMRasuO0MrSKVgHPB-9uFTGTZPFg6z9Kpk3tJ0SdR0s,943
|
|
21
|
-
datacontract/engines/soda/connections/
|
|
21
|
+
datacontract/engines/soda/connections/duckdb_connection.py,sha256=_usG-Ci7FaAdEsU3_70zUVGVQlqvVpupP44RYZOa6AE,6772
|
|
22
22
|
datacontract/engines/soda/connections/kafka.py,sha256=j6I9loXixzaV-SU56dIodCykPDAnWnsNRfKG4wOtzvs,8570
|
|
23
23
|
datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
|
|
24
24
|
datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
|
|
25
25
|
datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
|
|
26
26
|
datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
|
|
27
27
|
datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
|
|
28
|
-
datacontract/export/avro_converter.py,sha256=
|
|
28
|
+
datacontract/export/avro_converter.py,sha256=gPPSPSoQaHMMg9iOfiywt1IMYEsuTu8kCYWrSa7PJNs,4626
|
|
29
29
|
datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF6dUU9roVqnFo,9878
|
|
30
30
|
datacontract/export/bigquery_converter.py,sha256=VSBdVGWrlar18ETzgNArxDzk8Zt5JcAc_wKjfwLpG_A,4734
|
|
31
31
|
datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
|
|
32
32
|
datacontract/export/custom_converter.py,sha256=xb8KbkRRgHmT4ewwC7XxtnKpe_ZMSJWBjYOaKjmO_KQ,1216
|
|
33
33
|
datacontract/export/data_caterer_converter.py,sha256=eSEuy3TbqUIG_lHYEBOydAgp_CJNoGArXrcJvh81wcw,5984
|
|
34
34
|
datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nbw3Wq0JDo,4777
|
|
35
|
-
datacontract/export/dbt_converter.py,sha256=
|
|
35
|
+
datacontract/export/dbt_converter.py,sha256=_CpRg3BD6UZvJJUMI4lzdiQj136rOjvXYVBMHvxEYqs,10548
|
|
36
36
|
datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
|
|
37
37
|
datacontract/export/exporter.py,sha256=XrNmoIH_5Myb8jx-vaS1ZCF11RTw5zf5JATkqXWunXE,3025
|
|
38
38
|
datacontract/export/exporter_factory.py,sha256=0XmU51fQNZVQdC78gDy_82CatazhioMmcd6qdCl8muU,5847
|
|
@@ -42,7 +42,7 @@ datacontract/export/html_export.py,sha256=ojazWrb0AwSc7Vr72M_otMo-3PA8mfi8tfIy9B
|
|
|
42
42
|
datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
|
|
43
43
|
datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
|
|
44
44
|
datacontract/export/markdown_converter.py,sha256=chtaZX4vXTee7JCMYmWiDQ9m55gwJjHPw6SEM3UOwpQ,6467
|
|
45
|
-
datacontract/export/odcs_v3_exporter.py,sha256=
|
|
45
|
+
datacontract/export/odcs_v3_exporter.py,sha256=w8PGyTczrZrRMKye_onYZCpB47OM0pEJCt5WIum2O5U,13130
|
|
46
46
|
datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
|
|
47
47
|
datacontract/export/protobuf_converter.py,sha256=9K0fzBGbqlj9AhQumw2oq53hyn_QDCT3UlyH2uXJdC0,3192
|
|
48
48
|
datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
|
|
@@ -64,7 +64,7 @@ datacontract/imports/importer.py,sha256=X4M0SZiRQsz8TGm-FSRuh6bs6qny7V8odyQ7l_1D
|
|
|
64
64
|
datacontract/imports/importer_factory.py,sha256=8ed9-ceJlFMf-AIAyuI6Uzs05CqEs1nPlLA-tvfI0bU,3639
|
|
65
65
|
datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
|
|
66
66
|
datacontract/imports/odcs_importer.py,sha256=vv2dHLGL0Cdivv1CdKn5euJwGNKmiZmXCoxUYAXsHX8,2126
|
|
67
|
-
datacontract/imports/odcs_v3_importer.py,sha256=
|
|
67
|
+
datacontract/imports/odcs_v3_importer.py,sha256=ev-qvtbRsG52I--We15P3T_OB7mvS7Zj9-6MOI-8vUQ,13061
|
|
68
68
|
datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
|
|
69
69
|
datacontract/imports/spark_importer.py,sha256=h2na1YtdJYu9Oz07tSvwx8L4RX6aLCCDVkAv-RTKyVA,5100
|
|
70
70
|
datacontract/imports/sql_importer.py,sha256=ElFS2LILDOvWzW-X4emSIKltFV42i78TEoyg0bvn3II,9322
|
|
@@ -73,7 +73,7 @@ datacontract/init/init_template.py,sha256=BMawR-AF_vXyn_-Co-XoT8Dxj9b55V8xKk2KkJ
|
|
|
73
73
|
datacontract/integration/datamesh_manager.py,sha256=6_mHRQAqw-KCfVtejNxULN4ihBZTZikf_5p0pYHer7g,2849
|
|
74
74
|
datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
|
|
75
75
|
datacontract/lint/lint.py,sha256=Ew0n3ooXxmCVnUxJ_cDoacsD82QdMZYnKrxnG9J0sWQ,5077
|
|
76
|
-
datacontract/lint/resolve.py,sha256=
|
|
76
|
+
datacontract/lint/resolve.py,sha256=58Z48Bkkjl4QWLNA3sPvqjh46sh1KQGgUzo3cG1BQvI,11431
|
|
77
77
|
datacontract/lint/resources.py,sha256=nfeZmORh1aP7EKpMKCmfbS04Te8pQ0nz64vJVkHOq3c,647
|
|
78
78
|
datacontract/lint/schema.py,sha256=4pYX6JX6SkASftyqaWTodKFRVPi2qV0_Z60tvaCOk80,1813
|
|
79
79
|
datacontract/lint/urls.py,sha256=giac0eAYa6hha8exleL3KsiPtiFlOq8l53axtAmCilw,2529
|
|
@@ -88,6 +88,9 @@ datacontract/model/data_contract_specification.py,sha256=1bEE7hQJEjzoL4XtCQBmPe6
|
|
|
88
88
|
datacontract/model/exceptions.py,sha256=5BMuEH2qWuckNP4FTfpUEeEu6rjgGcLOD0GQugKRQ1U,1242
|
|
89
89
|
datacontract/model/odcs.py,sha256=9PXwm72FASjNwteF1Jn591iP3-St0aq16Cpsk0PkEW8,389
|
|
90
90
|
datacontract/model/run.py,sha256=2HTISHW6gvu7NTke76hLw4wkH_kQAig9Z176OuttYSA,3004
|
|
91
|
+
datacontract/output/junit_test_results.py,sha256=hAT7UgxwbfNzA0f5uuGzM4OIwLfXbmA4dgsfxsclWh0,4822
|
|
92
|
+
datacontract/output/output_format.py,sha256=_ZokDBo7-HXBs6czUv7kLLf9cYft_q5QaKzthsVnc58,212
|
|
93
|
+
datacontract/output/test_results_writer.py,sha256=PWNLs3R_LQMH4xp5WDxLkQgY3xvj8Eyzw1jnfgkQxlc,2713
|
|
91
94
|
datacontract/schemas/datacontract-1.1.0.init.yaml,sha256=_WQX6NRwimXlPVKcKeHLd4mFL0TJ2vYH0WnyMXZeQ8Y,1828
|
|
92
95
|
datacontract/schemas/datacontract-1.1.0.schema.json,sha256=3Bu2rxEjkF6dNLcqi1GF4KoXBnEIopaJ87Qb8S4zUvg,62872
|
|
93
96
|
datacontract/schemas/odcs-3.0.1.schema.json,sha256=bRZsSXA0fV0EmV_8f1K68PlXu1m4K7JcuHpLnY3ESwQ,72933
|
|
@@ -102,9 +105,9 @@ datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG
|
|
|
102
105
|
datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
|
|
103
106
|
datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
|
|
104
107
|
datacontract/templates/style/output.css,sha256=V1k6smSvlz07W2UNOkhcDFUb0HLmoas7DnNg_o8XUcA,25759
|
|
105
|
-
datacontract_cli-0.10.
|
|
106
|
-
datacontract_cli-0.10.
|
|
107
|
-
datacontract_cli-0.10.
|
|
108
|
-
datacontract_cli-0.10.
|
|
109
|
-
datacontract_cli-0.10.
|
|
110
|
-
datacontract_cli-0.10.
|
|
108
|
+
datacontract_cli-0.10.23.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
|
|
109
|
+
datacontract_cli-0.10.23.dist-info/METADATA,sha256=ls0fDH5tDEJLo2F1qkl2I0_jZJJHr1U6quP_nEE2QVw,102825
|
|
110
|
+
datacontract_cli-0.10.23.dist-info/WHEEL,sha256=jB7zZ3N9hIM9adW7qlTAyycLYW9npaWKLRzaoVcLKcM,91
|
|
111
|
+
datacontract_cli-0.10.23.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
|
|
112
|
+
datacontract_cli-0.10.23.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
|
|
113
|
+
datacontract_cli-0.10.23.dist-info/RECORD,,
|
|
File without changes
|
|
File without changes
|
|
File without changes
|