datacontract-cli 0.9.7__py3-none-any.whl → 0.9.9__py3-none-any.whl
This diff shows the changes between two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of datacontract-cli might be problematic.
- datacontract/breaking/breaking.py +48 -57
- datacontract/cli.py +100 -80
- datacontract/data_contract.py +178 -128
- datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +5 -1
- datacontract/engines/datacontract/check_that_datacontract_file_exists.py +9 -8
- datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +26 -22
- datacontract/engines/fastjsonschema/check_jsonschema.py +31 -25
- datacontract/engines/fastjsonschema/s3/s3_read_files.py +8 -6
- datacontract/engines/soda/check_soda_execute.py +58 -36
- datacontract/engines/soda/connections/bigquery.py +5 -3
- datacontract/engines/soda/connections/dask.py +0 -1
- datacontract/engines/soda/connections/databricks.py +2 -2
- datacontract/engines/soda/connections/duckdb.py +25 -8
- datacontract/engines/soda/connections/kafka.py +36 -17
- datacontract/engines/soda/connections/postgres.py +3 -3
- datacontract/engines/soda/connections/snowflake.py +4 -4
- datacontract/export/avro_converter.py +9 -11
- datacontract/export/avro_idl_converter.py +65 -42
- datacontract/export/csv_type_converter.py +36 -0
- datacontract/export/dbt_converter.py +43 -32
- datacontract/export/great_expectations_converter.py +141 -0
- datacontract/export/html_export.py +46 -0
- datacontract/export/jsonschema_converter.py +3 -1
- datacontract/export/odcs_converter.py +5 -7
- datacontract/export/protobuf_converter.py +12 -10
- datacontract/export/pydantic_converter.py +131 -0
- datacontract/export/rdf_converter.py +34 -11
- datacontract/export/sodacl_converter.py +118 -21
- datacontract/export/sql_converter.py +30 -8
- datacontract/export/sql_type_converter.py +44 -4
- datacontract/export/terraform_converter.py +4 -3
- datacontract/imports/avro_importer.py +65 -18
- datacontract/imports/sql_importer.py +0 -2
- datacontract/init/download_datacontract_file.py +2 -2
- datacontract/integration/publish_datamesh_manager.py +6 -12
- datacontract/integration/publish_opentelemetry.py +30 -16
- datacontract/lint/files.py +2 -2
- datacontract/lint/lint.py +26 -31
- datacontract/lint/linters/description_linter.py +12 -21
- datacontract/lint/linters/example_model_linter.py +28 -29
- datacontract/lint/linters/field_pattern_linter.py +8 -8
- datacontract/lint/linters/field_reference_linter.py +11 -10
- datacontract/lint/linters/notice_period_linter.py +18 -22
- datacontract/lint/linters/quality_schema_linter.py +16 -20
- datacontract/lint/linters/valid_constraints_linter.py +42 -37
- datacontract/lint/resolve.py +50 -14
- datacontract/lint/schema.py +2 -3
- datacontract/lint/urls.py +4 -5
- datacontract/model/breaking_change.py +2 -1
- datacontract/model/data_contract_specification.py +8 -7
- datacontract/model/exceptions.py +13 -2
- datacontract/model/run.py +3 -2
- datacontract/web.py +3 -7
- datacontract_cli-0.9.9.dist-info/METADATA +951 -0
- datacontract_cli-0.9.9.dist-info/RECORD +64 -0
- datacontract/lint/linters/primary_field_linter.py +0 -30
- datacontract_cli-0.9.7.dist-info/METADATA +0 -603
- datacontract_cli-0.9.7.dist-info/RECORD +0 -61
- {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/LICENSE +0 -0
- {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/WHEEL +0 -0
- {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/top_level.txt +0 -0
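Taken together with the new export modules in the list above (pydantic_converter.py, great_expectations_converter.py, html_export.py, csv_type_converter.py), the cli.py diff below shows the user-facing surface of this release: new export formats and new test/publish options. A minimal sketch of the programmatic entry point, based only on the constructor keywords visible in the cli.py diff; the file path is illustrative, and the export_format keyword is an assumption not confirmed by this diff:

from datacontract.data_contract import DataContract

# Run schema and quality tests; both keyword arguments below appear in the
# 0.9.9 cli.py diff (publish_to_opentelemetry is new in this release).
run = DataContract(
    data_contract_file="datacontract.yaml",  # illustrative path
    publish_to_opentelemetry=True,
).test()

# Export to one of the formats added in 0.9.9; the format names come from the
# ExportFormat enum in the diff below, but the export_format keyword is an assumption.
print(DataContract(data_contract_file="datacontract.yaml").export(export_format="pydantic-model"))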
datacontract/breaking/breaking.py
CHANGED
@@ -1,5 +1,5 @@
 from datacontract.breaking.breaking_rules import BreakingRules
-from datacontract.model.breaking_change import
+from datacontract.model.breaking_change import BreakingChange, Location, Severity
 from datacontract.model.data_contract_specification import Field, Model, Quality
 
 
@@ -22,10 +22,9 @@ def quality_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=["quality"]),
+)
+)
 elif old_quality and not new_quality:
 rule_name = "quality_removed"
 severity = _get_rule(rule_name)
@@ -37,10 +36,9 @@ def quality_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=["quality"]),
+)
+)
 
 elif old_quality and new_quality:
 if old_quality.type != new_quality.type:
@@ -54,10 +52,9 @@ def quality_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=["quality", "type"]),
+)
+)
 
 if old_quality.specification != new_quality.specification:
 rule_name = "quality_specification_updated"
@@ -69,10 +66,9 @@ def quality_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=["quality", "specification"]),
+)
+)
 
 return results
 
@@ -96,10 +92,9 @@ def models_breaking_changes(
 description="added the model",
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [model_name]),
+)
+)
 
 for model_name, old_model in old_models.items():
 if model_name not in new_models.keys():
@@ -111,10 +106,9 @@ def models_breaking_changes(
 description="removed the model",
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [model_name]),
+)
+)
 continue
 
 results.extend(
@@ -124,17 +118,14 @@ def models_breaking_changes(
 new_path=new_path,
 composition=composition + [model_name],
 include_severities=include_severities,
-)
+)
+)
 
 return results
 
 
 def model_breaking_changes(
-old_model: Model,
-new_model: Model,
-new_path: str,
-composition: list[str],
-include_severities: [Severity]
+old_model: Model, new_model: Model, new_path: str, composition: list[str], include_severities: [Severity]
 ) -> list[BreakingChange]:
 results = list[BreakingChange]()
 
@@ -170,10 +161,9 @@ def model_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [model_definition_field]),
+)
+)
 
 results.extend(
 fields_breaking_changes(
@@ -182,7 +172,8 @@ def model_breaking_changes(
 new_path=new_path,
 composition=composition + ["fields"],
 include_severities=include_severities,
-)
+)
+)
 
 return results
 
@@ -192,7 +183,7 @@ def fields_breaking_changes(
 new_fields: dict[str, Field],
 new_path: str,
 composition: list[str],
-include_severities: [Severity]
+include_severities: [Severity],
 ) -> list[BreakingChange]:
 results = list[BreakingChange]()
 
@@ -206,10 +197,9 @@ def fields_breaking_changes(
 description="added the field",
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [field_name]),
+)
+)
 
 for field_name, old_field in old_fields.items():
 if field_name not in new_fields.keys():
@@ -218,13 +208,12 @@ def fields_breaking_changes(
 if severity in include_severities:
 results.append(
 BreakingChange(
-description=
+description="removed the field",
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [field_name]),
+)
+)
 continue
 
 results.extend(
@@ -234,7 +223,8 @@ def fields_breaking_changes(
 composition=composition + [field_name],
 new_path=new_path,
 include_severities=include_severities,
-)
+)
+)
 return results
 
 
@@ -271,7 +261,7 @@ def field_breaking_changes(
 description = None
 
 # logic for enum, tags and other arrays
-if
+if isinstance(old_value, list) and isinstance(new_value, list):
 if not old_value and new_value:
 rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
 description = f"added with value: `{new_value}`"
@@ -285,7 +275,7 @@ def field_breaking_changes(
 # logic for normal fields
 elif old_value is None and new_value is not None:
 rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
-description = f"added with value: `{str(new_value).lower() if
+description = f"added with value: `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"
 
 elif old_value is not None and new_value is None:
 rule_name = f"field_{_camel_to_snake(field_definition_field)}_removed"
@@ -293,8 +283,10 @@ def field_breaking_changes(
 
 elif old_value != new_value:
 rule_name = f"field_{_camel_to_snake(field_definition_field)}_updated"
-description = (
-
+description = (
+f"changed from `{str(old_value).lower() if isinstance(old_value, bool) else old_value}` "
+f"to `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"
+)
 
 if rule_name is not None:
 severity = _get_rule(rule_name)
@@ -305,10 +297,9 @@ def field_breaking_changes(
 description=description,
 check_name=rule_name,
 severity=severity,
-location=Location(
-
-
-)))
+location=Location(path=new_path, composition=composition + [field_schema_name]),
+)
+)
 
 return results
 
@@ -317,9 +308,9 @@ def _get_rule(rule_name) -> Severity:
 try:
 return getattr(BreakingRules, rule_name)
 except AttributeError:
-print(f
+print(f"WARNING: Breaking Rule not found for {rule_name}!")
 return Severity.ERROR
 
 
 def _camel_to_snake(s):
-return
+return "".join(["_" + c.lower() if c.isupper() else c for c in s]).lstrip("_")
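The helper spelled out at the end of this file, _camel_to_snake, drives the generated rule names used throughout the hunks above. A small illustration of that pattern; the field name maxLength is chosen for illustration only:

def _camel_to_snake(s):
    return "".join(["_" + c.lower() if c.isupper() else c for c in s]).lstrip("_")

# A change to a field's maxLength would yield the rule name below, following the
# f"field_{_camel_to_snake(field_definition_field)}_updated" pattern in the diff above.
print(f"field_{_camel_to_snake('maxLength')}_updated")  # field_max_length_updated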
datacontract/cli.py
CHANGED
@@ -5,7 +5,7 @@ from typing import Iterable, Optional
 import typer
 from click import Context
 from rich import box
-from rich import
+from rich.console import Console
 from rich.table import Table
 from typer.core import TyperGroup
 from typing_extensions import Annotated
@@ -14,6 +14,8 @@ from datacontract.data_contract import DataContract
 from datacontract.init.download_datacontract_file import \
 download_datacontract_file, FileExistsException
 
+console = Console()
+
 
 class OrderedCommands(TyperGroup):
 def list_commands(self, ctx: Context) -> Iterable[str]:
@@ -29,15 +31,16 @@ app = typer.Typer(
 
 def version_callback(value: bool):
 if value:
-print(metadata.version("datacontract-cli"))
+console.print(metadata.version("datacontract-cli"))
 raise typer.Exit()
 
 
 @app.callback()
 def common(
 ctx: typer.Context,
-version: bool = typer.Option(
-
+version: bool = typer.Option(
+None, "--version", help="Prints the current version.", callback=version_callback, is_eager=True
+),
 ):
 """
 The datacontract CLI is an open source command-line tool for working with Data Contracts (https://datacontract.com).
@@ -51,10 +54,12 @@ def common(
 
 @app.command()
 def init(
-location: Annotated[
-help="The location (url or path) of the data contract yaml to create.")
-
-
+location: Annotated[
+str, typer.Argument(help="The location (url or path) of the data contract yaml to create.")
+] = "datacontract.yaml",
+template: Annotated[
+str, typer.Option(help="URL of a template or data contract")
+] = "https://datacontract.com/datacontract.init.yaml",
 overwrite: Annotated[bool, typer.Option(help="Replace the existing datacontract.yaml")] = False,
 ):
 """
@@ -63,19 +68,20 @@ def init(
 try:
 download_datacontract_file(location, template, overwrite)
 except FileExistsException:
-print("File already exists, use --overwrite to overwrite")
+console.print("File already exists, use --overwrite to overwrite")
 raise typer.Exit(code=1)
 else:
-print("📄 data contract written to " + location)
+console.print("📄 data contract written to " + location)
 
 
 @app.command()
 def lint(
 location: Annotated[
-str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+] = "datacontract.yaml",
 schema: Annotated[
-str, typer.Option(
-
+str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
+] = "https://datacontract.com/datacontract.schema.json",
 ):
 """
 Validate that the datacontract.yaml is correctly formatted.
@@ -87,37 +93,46 @@ def lint(
 @app.command()
 def test(
 location: Annotated[
-str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+] = "datacontract.yaml",
 schema: Annotated[
-str, typer.Option(
-
-server: Annotated[
-
-
-
-
-
-
-
-
-
-help="
-
-
+str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
+] = "https://datacontract.com/datacontract.schema.json",
+server: Annotated[
+str,
+typer.Option(
+help="The server configuration to run the schema and quality tests. "
+"Use the key of the server object in the data contract yaml file "
+"to refer to a server, e.g., `production`, or `all` for all "
+"servers (default)."
+),
+] = "all",
+examples: Annotated[
+bool, typer.Option(help="Run the schema and quality tests on the example data within the data contract.")
+] = None,
+publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
+publish_to_opentelemetry: Annotated[
+bool,
+typer.Option(
+help="Publish the results to opentelemetry. Use environment variables to configure the OTLP endpoint, headers, etc."
+),
+] = False,
+logs: Annotated[bool, typer.Option(help="Print logs")] = False,
 ):
 """
 Run schema and quality tests on configured servers.
 """
-print(f"Testing {location}")
+console.print(f"Testing {location}")
 if server == "all":
 server = None
-run = DataContract(
-
-
-
-
-
-
+run = DataContract(
+data_contract_file=location,
+schema_location=schema,
+publish_url=publish,
+publish_to_opentelemetry=publish_to_opentelemetry,
+server=server,
+examples=examples,
+).test()
 if logs:
 _print_logs(run)
 _handle_result(run)
@@ -125,6 +140,7 @@ def test(
 
 class ExportFormat(str, Enum):
 jsonschema = "jsonschema"
+pydantic_model = "pydantic-model"
 sodacl = "sodacl"
 dbt = "dbt"
 dbt_sources = "dbt-sources"
@@ -133,26 +149,43 @@ class ExportFormat(str, Enum):
 rdf = "rdf"
 avro = "avro"
 protobuf = "protobuf"
+great_expectations = "great-expectations"
 terraform = "terraform"
 avro_idl = "avro-idl"
 sql = "sql"
 sql_query = "sql-query"
+html = "html"
 
 
 @app.command()
 def export(
 format: Annotated[ExportFormat, typer.Option(help="The export format.")],
 server: Annotated[str, typer.Option(help="The server name to export.")] = None,
-model: Annotated[
-
-
-
-
+model: Annotated[
+str,
+typer.Option(
+help="Use the key of the model in the data contract yaml file "
+"to refer to a model, e.g., `orders`, or `all` for all "
+"models (default)."
+),
+] = "all",
+rdf_base: Annotated[
+Optional[str],
+typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options"),
+] = None,
+sql_server_type: Annotated[
+Optional[str],
+typer.Option(
+help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.",
+rich_help_panel="SQL Options",
+),
+] = "auto",
 location: Annotated[
-str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+] = "datacontract.yaml",
 ):
 """
-Convert data contract to a specific format.
+Convert data contract to a specific format. console.prints to stdout.
 """
 # TODO exception handling
 result = DataContract(data_contract_file=location, server=server).export(
@@ -161,7 +194,8 @@ def export(
 rdf_base=rdf_base,
 sql_server_type=sql_server_type,
 )
-
+# Don't interpret console markup in output.
+console.print(result, markup=False)
 
 
 class ImportFormat(str, Enum):
@@ -178,7 +212,7 @@ def import_(
 Create a data contract from the given source file. Prints to stdout.
 """
 result = DataContract().import_from_source(format, source)
-print(result.to_yaml())
+console.print(result.to_yaml())
 
 
 @app.command()
@@ -191,16 +225,11 @@ def breaking(
 """
 
 # TODO exception handling
-result = DataContract(
-data_contract_file=
-
-).breaking(
-DataContract(
-data_contract_file=location_new,
-inline_definitions=True
-))
+result = DataContract(data_contract_file=location_old, inline_definitions=True).breaking(
+DataContract(data_contract_file=location_new, inline_definitions=True)
+)
 
-print(result.breaking_str())
+console.print(result.breaking_str())
 
 if not result.passed_checks():
 raise typer.Exit(code=1)
@@ -216,16 +245,11 @@ def changelog(
 """
 
 # TODO exception handling
-result = DataContract(
-data_contract_file=
-
-).changelog(
-DataContract(
-data_contract_file=location_new,
-inline_definitions=True
-))
+result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
+DataContract(data_contract_file=location_new, inline_definitions=True)
+)
 
-print(result.changelog_str())
+console.print(result.changelog_str())
 
 
 @app.command()
@@ -238,29 +262,25 @@ def diff(
 """
 
 # TODO change to diff output, not the changelog entries
-result = DataContract(
-data_contract_file=
-
-).changelog(
-DataContract(
-data_contract_file=location_new,
-inline_definitions=True
-))
+result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
+DataContract(data_contract_file=location_new, inline_definitions=True)
+)
 
-print(result.changelog_str())
+console.print(result.changelog_str())
 
 
 def _handle_result(run):
 _print_table(run)
 if run.result == "passed":
-print(
-f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
+console.print(
+f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
+)
 else:
-print("🔴 data contract is invalid, found the following errors:")
+console.print("🔴 data contract is invalid, found the following errors:")
 i = 1
 for check in run.checks:
 if check.result != "passed":
-print(str(++i) + ") " + check.reason)
+console.print(str(++i) + ") " + check.reason)
 raise typer.Exit(code=1)
 
 
@@ -272,7 +292,7 @@ def _print_table(run):
 table.add_column("Details", max_width=50)
 for check in run.checks:
 table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
-print(table)
+console.print(table)
 
 
 def to_field(run, check):
@@ -286,9 +306,9 @@ def to_field(run, check):
 
 
 def _print_logs(run):
-print("\nLogs:")
+console.print("\nLogs:")
 for log in run.logs:
-print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
+console.print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
 
 
 def with_markup(result):