datacontract-cli 0.10.21__py3-none-any.whl → 0.10.23__py3-none-any.whl
This diff compares the contents of two publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
- datacontract/breaking/breaking.py +1 -1
- datacontract/breaking/breaking_rules.py +1 -1
- datacontract/cli.py +25 -77
- datacontract/data_contract.py +14 -100
- datacontract/engines/data_contract_checks.py +735 -0
- datacontract/engines/data_contract_test.py +67 -0
- datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +2 -3
- datacontract/engines/soda/check_soda_execute.py +37 -31
- datacontract/engines/soda/connections/{duckdb.py → duckdb_connection.py} +6 -5
- datacontract/engines/soda/connections/kafka.py +8 -3
- datacontract/export/avro_converter.py +2 -0
- datacontract/export/dbt_converter.py +13 -10
- datacontract/export/exporter.py +0 -2
- datacontract/export/exporter_factory.py +0 -12
- datacontract/export/odcs_v3_exporter.py +22 -3
- datacontract/export/sodacl_converter.py +22 -294
- datacontract/export/sql_type_converter.py +7 -2
- datacontract/imports/odcs_importer.py +6 -3
- datacontract/imports/odcs_v3_importer.py +3 -1
- datacontract/imports/sql_importer.py +229 -29
- datacontract/lint/resolve.py +17 -4
- datacontract/model/exceptions.py +4 -1
- datacontract/model/run.py +11 -4
- datacontract/output/junit_test_results.py +135 -0
- datacontract/output/output_format.py +10 -0
- datacontract/output/test_results_writer.py +79 -0
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/METADATA +192 -215
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/RECORD +33 -32
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/WHEEL +1 -1
- datacontract/engines/soda/connections/dask.py +0 -28
- datacontract/export/odcs_v2_exporter.py +0 -124
- datacontract/imports/odcs_v2_importer.py +0 -177
- datacontract/lint/linters/example_model_linter.py +0 -91
- /datacontract/{model → breaking}/breaking_change.py +0 -0
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/LICENSE +0 -0
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.21.dist-info → datacontract_cli-0.10.23.dist-info}/top_level.txt +0 -0
```diff
--- a/datacontract/export/sodacl_converter.py
+++ b/datacontract/export/sodacl_converter.py
@@ -1,302 +1,30 @@
-from typing import List
-from venv import logger
-
 import yaml
 
+from datacontract.engines.data_contract_checks import create_checks
 from datacontract.export.exporter import Exporter
-from datacontract.
-from datacontract.model.data_contract_specification import DataContractSpecification, Quality
+from datacontract.model.run import Run
 
 
 class SodaExporter(Exporter):
     def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
-
-
-
-
-
-) -> str:
-
-
-
-
-
-
-
-
-
-
-
-
-def to_checks(model_key, model_value, server_type: str, check_types: bool):
-    checks = []
-    model_name = to_model_name(model_key, model_value, server_type)
-    fields = model_value.fields
-
-    quote_field_name = server_type in ["postgres", "sqlserver"]
-
-    for field_name, field in fields.items():
-        checks.append(check_field_is_present(field_name))
-        if check_types and field.type is not None:
-            sql_type = convert_to_sql_type(field, server_type)
-            checks.append(check_field_type(field_name, sql_type))
-        if field.required:
-            checks.append(check_field_required(field_name, quote_field_name))
-        if field.unique:
-            checks.append(check_field_unique(field_name, quote_field_name))
-        if field.minLength is not None:
-            checks.append(check_field_min_length(field_name, field.minLength, quote_field_name))
-        if field.maxLength is not None:
-            checks.append(check_field_max_length(field_name, field.maxLength, quote_field_name))
-        if field.minimum is not None:
-            checks.append(check_field_minimum(field_name, field.minimum, quote_field_name))
-        if field.maximum is not None:
-            checks.append(check_field_maximum(field_name, field.maximum, quote_field_name))
-        if field.exclusiveMinimum is not None:
-            checks.append(check_field_minimum(field_name, field.exclusiveMinimum, quote_field_name))
-            checks.append(check_field_not_equal(field_name, field.exclusiveMinimum, quote_field_name))
-        if field.exclusiveMaximum is not None:
-            checks.append(check_field_maximum(field_name, field.exclusiveMaximum, quote_field_name))
-            checks.append(check_field_not_equal(field_name, field.exclusiveMaximum, quote_field_name))
-        if field.pattern is not None:
-            checks.append(check_field_regex(field_name, field.pattern, quote_field_name))
-        if field.enum is not None and len(field.enum) > 0:
-            checks.append(check_field_enum(field_name, field.enum, quote_field_name))
-        if field.quality is not None and len(field.quality) > 0:
-            quality_list = check_quality_list(model_name, field_name, field.quality)
-            if (quality_list is not None) and len(quality_list) > 0:
-                checks.append(quality_list)
-        # TODO references: str = None
-        # TODO format
-
-    if model_value.quality is not None and len(model_value.quality) > 0:
-        quality_list = check_quality_list(model_name, None, model_value.quality)
-        if (quality_list is not None) and len(quality_list) > 0:
-            checks.append(quality_list)
-
-    checks_for_model_key = f"checks for {model_name}"
-
-    if quote_field_name:
-        checks_for_model_key = f'checks for "{model_name}"'
-
-    return checks_for_model_key, checks
-
-
-def to_model_name(model_key, model_value, server_type):
-    if server_type == "databricks":
-        if model_value.config is not None and "databricksTable" in model_value.config:
-            return model_value.config["databricksTable"]
-    if server_type == "snowflake":
-        if model_value.config is not None and "snowflakeTable" in model_value.config:
-            return model_value.config["snowflakeTable"]
-    if server_type == "sqlserver":
-        if model_value.config is not None and "sqlserverTable" in model_value.config:
-            return model_value.config["sqlserverTable"]
-    if server_type == "postgres" or server_type == "postgresql":
-        if model_value.config is not None and "postgresTable" in model_value.config:
-            return model_value.config["postgresTable"]
-    return model_key
-
-
-def check_field_is_present(field_name):
-    return {
-        "schema": {
-            "name": f"Check that field {field_name} is present",
-            "fail": {
-                "when required column missing": [field_name],
-            },
-        }
-    }
-
-
-def check_field_type(field_name: str, type: str):
-    return {
-        "schema": {
-            "name": f"Check that field {field_name} has type {type}",
-            "fail": {"when wrong column type": {field_name: type}},
-        }
-    }
-
-
-def check_field_required(field_name: str, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-
-    return {f"missing_count({field_name}) = 0": {"name": f"Check that required field {field_name} has no null values"}}
-
-
-def check_field_unique(field_name, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"duplicate_count({field_name}) = 0": {"name": f"Check that unique field {field_name} has no duplicate values"}
-    }
-
-
-def check_field_min_length(field_name, min_length, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} has a min length of {min_length}",
-            "valid min length": min_length,
-        }
-    }
-
-
-def check_field_max_length(field_name, max_length, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} has a max length of {max_length}",
-            "valid max length": max_length,
-        }
-    }
-
-
-def check_field_minimum(field_name, minimum, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} has a minimum of {minimum}",
-            "valid min": minimum,
-        }
-    }
-
-
-def check_field_maximum(field_name, maximum, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} has a maximum of {maximum}",
-            "valid max": maximum,
-        }
-    }
-
-
-def check_field_not_equal(field_name, value, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} is not equal to {value}",
-            "invalid values": [value],
-        }
-    }
-
-
-def check_field_enum(field_name, enum, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} only contains enum values {enum}",
-            "valid values": enum,
-        }
-    }
-
-
-def check_field_regex(field_name, pattern, quote_field_name: bool = False):
-    if quote_field_name:
-        field_name = f'"{field_name}"'
-    return {
-        f"invalid_count({field_name}) = 0": {
-            "name": f"Check that field {field_name} matches regex pattern {pattern}",
-            "valid regex": pattern,
-        }
-    }
-
-
-def check_quality_list(model_name, field_name, quality_list: List[Quality]):
-    checks = {}
-
-    count = 0
-    for quality in quality_list:
-        if quality.type == "sql":
-            if field_name is None:
-                metric_name = f"{model_name}_quality_sql_{count}"
+        run = Run.create_run()
+        run.checks.extend(create_checks(data_contract, server))
+        return to_sodacl_yaml(run)
+
+
+def to_sodacl_yaml(run: Run) -> str:
+    sodacl_dict = {}
+    for run_check in run.checks:
+        if run_check.engine != "soda" or run_check.language != "sodacl":
+            continue
+        check_yaml_str = run_check.implementation
+        check_yaml_dict = yaml.safe_load(check_yaml_str)
+        for key, value in check_yaml_dict.items():
+            if key in sodacl_dict:
+                if isinstance(sodacl_dict[key], list) and isinstance(value, list):
+                    sodacl_dict[key].extend(value)
+                else:
+                    sodacl_dict[key].update(value)
             else:
-
-
-            query = prepare_query(quality, model_name, field_name)
-            if query is None:
-                logger.warning(f"Quality check {metric_name} has no query")
-                continue
-            if threshold is None:
-                logger.warning(f"Quality check {metric_name} has no valid threshold")
-                continue
-            checks[f"{metric_name} {threshold}"] = {f"{metric_name} query": query}
-            count += 1
-
-    return checks
-
-
-def prepare_query(quality: Quality, model_name: str, field_name: str = None) -> str | None:
-    if quality.query is None:
-        return None
-    if quality.query == "":
-        return None
-
-    query = quality.query
-
-    query = query.replace("{model}", model_name)
-    query = query.replace("{table}", model_name)
-
-    if field_name is not None:
-        query = query.replace("{field}", field_name)
-        query = query.replace("{column}", field_name)
-
-    return query
-
-
-def to_sodacl_threshold(quality: Quality) -> str | None:
-    if quality.mustBe is not None:
-        return f"= {quality.mustBe}"
-    if quality.mustNotBe is not None:
-        return f"!= {quality.mustNotBe}"
-    if quality.mustBeGreaterThan is not None:
-        return f"> {quality.mustBeGreaterThan}"
-    if quality.mustBeGreaterThanOrEqualTo is not None:
-        return f">= {quality.mustBeGreaterThanOrEqualTo}"
-    if quality.mustBeLessThan is not None:
-        return f"< {quality.mustBeLessThan}"
-    if quality.mustBeLessThanOrEqualTo is not None:
-        return f"<= {quality.mustBeLessThanOrEqualTo}"
-    if quality.mustBeBetween is not None:
-        if len(quality.mustBeBetween) != 2:
-            logger.warning(
-                f"Quality check has invalid mustBeBetween, must have exactly 2 integers in an array: {quality.mustBeBetween}"
-            )
-            return None
-        return f"between {quality.mustBeBetween[0]} and {quality.mustBeBetween[1]}"
-    if quality.mustNotBeBetween is not None:
-        if len(quality.mustNotBeBetween) != 2:
-            logger.warning(
-                f"Quality check has invalid mustNotBeBetween, must have exactly 2 integers in an array: {quality.mustNotBeBetween}"
-            )
-            return None
-        return f"not between {quality.mustNotBeBetween[0]} and {quality.mustNotBeBetween[1]}"
-    return None
-
-
-# These are deprecated root-level quality specifications, use the model-level and field-level quality fields instead
-def add_quality_checks(sodacl, data_contract_spec):
-    if data_contract_spec.quality is None:
-        return
-    if data_contract_spec.quality.type is None:
-        return
-    if data_contract_spec.quality.type.lower() != "sodacl":
-        return
-    if isinstance(data_contract_spec.quality.specification, str):
-        quality_specification = yaml.safe_load(data_contract_spec.quality.specification)
-    else:
-        quality_specification = data_contract_spec.quality.specification
-    for key, checks in quality_specification.items():
-        if key in sodacl:
-            for check in checks:
-                sodacl[key].append(check)
-        else:
-            sodacl[key] = checks
+                sodacl_dict[key] = value
+    return yaml.dump(sodacl_dict)
```
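The slimmed-down exporter no longer builds SodaCL itself: it takes the SodaCL fragments that `create_checks` produced and merges any fragments that share a top-level section key. A minimal sketch of that merge behavior (both YAML fragments below are made up for illustration, not actual `create_checks` output):

```python
import yaml  # PyYAML, already used by the converter

# Hypothetical SodaCL fragments, as they might appear in run_check.implementation
fragment_a = "checks for orders:\n- missing_count(id) = 0\n"
fragment_b = "checks for orders:\n- duplicate_count(id) = 0\n"

sodacl_dict = {}
for fragment in (fragment_a, fragment_b):
    for key, value in yaml.safe_load(fragment).items():
        if key in sodacl_dict:
            # Both fragments target the same "checks for ..." section:
            # merge the check lists instead of overwriting the first one.
            if isinstance(sodacl_dict[key], list) and isinstance(value, list):
                sodacl_dict[key].extend(value)
            else:
                sodacl_dict[key].update(value)
        else:
            sodacl_dict[key] = value

print(yaml.dump(sodacl_dict))
# checks for orders:
# - missing_count(id) = 0
# - duplicate_count(id) = 0
```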
```diff
--- a/datacontract/export/sql_type_converter.py
+++ b/datacontract/export/sql_type_converter.py
@@ -142,11 +142,16 @@ def convert_to_dataframe(field: Field) -> None | str:
     if type.lower() in ["boolean"]:
         return "BOOLEAN"
     if type.lower() in ["object", "record", "struct"]:
-
+        nested_fields = []
+        for nested_field_name, nested_field in field.fields.items():
+            nested_field_type = convert_to_dataframe(nested_field)
+            nested_fields.append(f"{nested_field_name}:{nested_field_type}")
+        return f"STRUCT<{','.join(nested_fields)}>"
     if type.lower() in ["bytes"]:
         return "BINARY"
     if type.lower() in ["array"]:
-
+        item_type = convert_to_dataframe(field.items)
+        return f"ARRAY<{item_type}>"
     return None
 
 
```
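The struct and array branches now recurse into nested fields and emit Spark-style DDL type strings. A self-contained sketch of the same idea, using a simplified stand-in for the real `Field` model (the real converter handles many more scalar types):

```python
from dataclasses import dataclass, field
from typing import Dict, Optional

@dataclass
class Field:  # simplified stand-in for datacontract's Field model
    type: str
    fields: Dict[str, "Field"] = field(default_factory=dict)
    items: Optional["Field"] = None

def convert_to_dataframe(f: Field) -> Optional[str]:
    t = f.type.lower()
    if t in ("string", "text", "varchar"):
        return "STRING"
    if t in ("object", "record", "struct"):
        # Recurse into each nested field and join as name:TYPE pairs
        nested = [f"{name}:{convert_to_dataframe(nf)}" for name, nf in f.fields.items()]
        return f"STRUCT<{','.join(nested)}>"
    if t == "array":
        return f"ARRAY<{convert_to_dataframe(f.items)}>"
    return None

line_items = Field("array", items=Field("struct", fields={
    "sku": Field("string"),
    "tags": Field("array", items=Field("string")),
}))
print(convert_to_dataframe(line_items))
# ARRAY<STRUCT<sku:STRING,tags:ARRAY<STRING>>>
```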
```diff
--- a/datacontract/imports/odcs_importer.py
+++ b/datacontract/imports/odcs_importer.py
@@ -41,9 +41,12 @@ def import_odcs(data_contract_specification: DataContractSpecification, source:
     )
 
     if odcs_api_version.startswith("v2."):
-
-
-
+        raise DataContractException(
+            type="schema",
+            name="Importing ODCS contract",
+            reason=f"Unsupported ODCS API version: {odcs_api_version}",
+            engine="datacontract",
+        )
     elif odcs_api_version.startswith("v3."):
         from datacontract.imports.odcs_v3_importer import import_odcs_v3
 
```
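ODCS v2 documents are now rejected explicitly (the v2 importer itself, `odcs_v2_importer.py`, is removed in this release). Roughly how a caller might observe the rejection; a sketch that assumes `DataContractException` exposes its constructor arguments as attributes, with a made-up file name:

```python
from datacontract.imports.odcs_importer import import_odcs
from datacontract.model.data_contract_specification import DataContractSpecification
from datacontract.model.exceptions import DataContractException

spec = DataContractSpecification()
try:
    import_odcs(spec, "legacy-odcs-v2.yaml")  # hypothetical v2 document
except DataContractException as e:
    print(e.reason)  # e.g. "Unsupported ODCS API version: v2.x.y"
```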
```diff
--- a/datacontract/imports/odcs_v3_importer.py
+++ b/datacontract/imports/odcs_v3_importer.py
@@ -233,7 +233,7 @@ def import_field_config(odcs_property: Dict[str, Any], server_type=None) -> Dict
         config["redshiftType"] = physical_type
     elif server_type == "sqlserver":
         config["sqlserverType"] = physical_type
-    elif server_type == "
+    elif server_type == "databricks":
         config["databricksType"] = physical_type
     else:
         config["physicalType"] = physical_type
@@ -287,6 +287,8 @@ def import_fields(
 
 
 def map_type(odcs_type: str, custom_mappings: Dict[str, str]) -> str | None:
+    if odcs_type is None:
+        return None
     t = odcs_type.lower()
     if t in DATACONTRACT_TYPES:
         return t
```