datacontract-cli 0.10.0__py3-none-any.whl → 0.10.37__py3-none-any.whl
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- datacontract/__init__.py +13 -0
- datacontract/api.py +260 -0
- datacontract/breaking/breaking.py +242 -12
- datacontract/breaking/breaking_rules.py +37 -1
- datacontract/catalog/catalog.py +80 -0
- datacontract/cli.py +387 -117
- datacontract/data_contract.py +216 -353
- datacontract/engines/data_contract_checks.py +1041 -0
- datacontract/engines/data_contract_test.py +113 -0
- datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +2 -3
- datacontract/engines/datacontract/check_that_datacontract_file_exists.py +1 -1
- datacontract/engines/fastjsonschema/check_jsonschema.py +176 -42
- datacontract/engines/fastjsonschema/s3/s3_read_files.py +16 -1
- datacontract/engines/soda/check_soda_execute.py +100 -56
- datacontract/engines/soda/connections/athena.py +79 -0
- datacontract/engines/soda/connections/bigquery.py +8 -1
- datacontract/engines/soda/connections/databricks.py +12 -3
- datacontract/engines/soda/connections/duckdb_connection.py +241 -0
- datacontract/engines/soda/connections/kafka.py +206 -113
- datacontract/engines/soda/connections/snowflake.py +8 -5
- datacontract/engines/soda/connections/sqlserver.py +43 -0
- datacontract/engines/soda/connections/trino.py +26 -0
- datacontract/export/avro_converter.py +72 -8
- datacontract/export/avro_idl_converter.py +31 -25
- datacontract/export/bigquery_converter.py +130 -0
- datacontract/export/custom_converter.py +40 -0
- datacontract/export/data_caterer_converter.py +161 -0
- datacontract/export/dbml_converter.py +148 -0
- datacontract/export/dbt_converter.py +141 -54
- datacontract/export/dcs_exporter.py +6 -0
- datacontract/export/dqx_converter.py +126 -0
- datacontract/export/duckdb_type_converter.py +57 -0
- datacontract/export/excel_exporter.py +923 -0
- datacontract/export/exporter.py +100 -0
- datacontract/export/exporter_factory.py +216 -0
- datacontract/export/go_converter.py +105 -0
- datacontract/export/great_expectations_converter.py +257 -36
- datacontract/export/html_exporter.py +86 -0
- datacontract/export/iceberg_converter.py +188 -0
- datacontract/export/jsonschema_converter.py +71 -16
- datacontract/export/markdown_converter.py +337 -0
- datacontract/export/mermaid_exporter.py +110 -0
- datacontract/export/odcs_v3_exporter.py +375 -0
- datacontract/export/pandas_type_converter.py +40 -0
- datacontract/export/protobuf_converter.py +168 -68
- datacontract/export/pydantic_converter.py +6 -0
- datacontract/export/rdf_converter.py +13 -6
- datacontract/export/sodacl_converter.py +36 -188
- datacontract/export/spark_converter.py +245 -0
- datacontract/export/sql_converter.py +37 -3
- datacontract/export/sql_type_converter.py +269 -8
- datacontract/export/sqlalchemy_converter.py +170 -0
- datacontract/export/terraform_converter.py +7 -2
- datacontract/imports/avro_importer.py +246 -26
- datacontract/imports/bigquery_importer.py +221 -0
- datacontract/imports/csv_importer.py +143 -0
- datacontract/imports/dbml_importer.py +112 -0
- datacontract/imports/dbt_importer.py +240 -0
- datacontract/imports/excel_importer.py +1111 -0
- datacontract/imports/glue_importer.py +288 -0
- datacontract/imports/iceberg_importer.py +172 -0
- datacontract/imports/importer.py +51 -0
- datacontract/imports/importer_factory.py +128 -0
- datacontract/imports/json_importer.py +325 -0
- datacontract/imports/jsonschema_importer.py +146 -0
- datacontract/imports/odcs_importer.py +60 -0
- datacontract/imports/odcs_v3_importer.py +516 -0
- datacontract/imports/parquet_importer.py +81 -0
- datacontract/imports/protobuf_importer.py +264 -0
- datacontract/imports/spark_importer.py +262 -0
- datacontract/imports/sql_importer.py +274 -35
- datacontract/imports/unity_importer.py +219 -0
- datacontract/init/init_template.py +20 -0
- datacontract/integration/datamesh_manager.py +86 -0
- datacontract/lint/resolve.py +271 -49
- datacontract/lint/resources.py +21 -0
- datacontract/lint/schema.py +53 -17
- datacontract/lint/urls.py +32 -12
- datacontract/model/data_contract_specification/__init__.py +1 -0
- datacontract/model/exceptions.py +4 -1
- datacontract/model/odcs.py +24 -0
- datacontract/model/run.py +49 -29
- datacontract/output/__init__.py +0 -0
- datacontract/output/junit_test_results.py +135 -0
- datacontract/output/output_format.py +10 -0
- datacontract/output/test_results_writer.py +79 -0
- datacontract/py.typed +0 -0
- datacontract/schemas/datacontract-1.1.0.init.yaml +91 -0
- datacontract/schemas/datacontract-1.1.0.schema.json +1975 -0
- datacontract/schemas/datacontract-1.2.0.init.yaml +91 -0
- datacontract/schemas/datacontract-1.2.0.schema.json +2029 -0
- datacontract/schemas/datacontract-1.2.1.init.yaml +91 -0
- datacontract/schemas/datacontract-1.2.1.schema.json +2058 -0
- datacontract/schemas/odcs-3.0.1.schema.json +2634 -0
- datacontract/schemas/odcs-3.0.2.schema.json +2382 -0
- datacontract/templates/datacontract.html +139 -294
- datacontract/templates/datacontract_odcs.html +685 -0
- datacontract/templates/index.html +236 -0
- datacontract/templates/partials/datacontract_information.html +86 -0
- datacontract/templates/partials/datacontract_servicelevels.html +253 -0
- datacontract/templates/partials/datacontract_terms.html +51 -0
- datacontract/templates/partials/definition.html +25 -0
- datacontract/templates/partials/example.html +27 -0
- datacontract/templates/partials/model_field.html +144 -0
- datacontract/templates/partials/quality.html +49 -0
- datacontract/templates/partials/server.html +211 -0
- datacontract/templates/style/output.css +491 -72
- datacontract_cli-0.10.37.dist-info/METADATA +2235 -0
- datacontract_cli-0.10.37.dist-info/RECORD +119 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/WHEEL +1 -1
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info/licenses}/LICENSE +1 -1
- datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +0 -48
- datacontract/engines/soda/connections/dask.py +0 -28
- datacontract/engines/soda/connections/duckdb.py +0 -76
- datacontract/export/csv_type_converter.py +0 -36
- datacontract/export/html_export.py +0 -66
- datacontract/export/odcs_converter.py +0 -102
- datacontract/init/download_datacontract_file.py +0 -17
- datacontract/integration/publish_datamesh_manager.py +0 -33
- datacontract/integration/publish_opentelemetry.py +0 -107
- datacontract/lint/lint.py +0 -141
- datacontract/lint/linters/description_linter.py +0 -34
- datacontract/lint/linters/example_model_linter.py +0 -91
- datacontract/lint/linters/field_pattern_linter.py +0 -34
- datacontract/lint/linters/field_reference_linter.py +0 -38
- datacontract/lint/linters/notice_period_linter.py +0 -55
- datacontract/lint/linters/quality_schema_linter.py +0 -52
- datacontract/lint/linters/valid_constraints_linter.py +0 -99
- datacontract/model/data_contract_specification.py +0 -141
- datacontract/web.py +0 -14
- datacontract_cli-0.10.0.dist-info/METADATA +0 -951
- datacontract_cli-0.10.0.dist-info/RECORD +0 -66
- /datacontract/{model → breaking}/breaking_change.py +0 -0
- /datacontract/{lint/linters → export}/__init__.py +0 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/top_level.txt +0 -0
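Most of the growth in this release sits in the import/export layers (new importer and exporter factories, Excel and ODCS v3 support) and in the test engines (data_contract_checks.py, data_contract_test.py). For orientation, here is a minimal sketch of the Python entry point that drives those engines, following the usage documented in the project README; treat the exact behavior across 0.10.0 → 0.10.37 as an assumption.

from datacontract.data_contract import DataContract

# Sketch per the project README; exact semantics may differ between versions.
data_contract = DataContract(data_contract_file="datacontract.yaml")
run = data_contract.test()  # runs schema and quality checks via the engines
if not run.has_passed():
    print("Data quality validation failed")

The selected new importer modules are shown in full below.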
datacontract/imports/json_importer.py
@@ -0,0 +1,325 @@
+import json
+import os
+import re
+from typing import Any, Dict, List, Optional, Tuple
+
+from datacontract.imports.importer import Importer
+from datacontract.model.data_contract_specification import DataContractSpecification, Model, Server
+
+
+class JsonImporter(Importer):
+    def import_source(
+        self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
+    ) -> DataContractSpecification:
+        return import_json(data_contract_specification, source)
+
+
+def is_ndjson(file_path: str) -> bool:
+    """Check if a file contains newline-delimited JSON."""
+    with open(file_path, "r", encoding="utf-8") as file:
+        for _ in range(5):
+            line = file.readline().strip()
+            if not line:
+                continue
+            try:
+                json.loads(line)
+                return True
+            except json.JSONDecodeError:
+                break
+    return False
+
+
+def import_json(
+    data_contract_specification: DataContractSpecification, source: str, include_examples: bool = False
+) -> DataContractSpecification:
+    # use the file name as base model name
+    base_model_name = os.path.splitext(os.path.basename(source))[0]
+
+    # check if file is newline-delimited JSON
+    if is_ndjson(source):
+        # load NDJSON data
+        json_data = []
+        with open(source, "r", encoding="utf-8") as file:
+            for line in file:
+                line = line.strip()
+                if line:
+                    try:
+                        json_data.append(json.loads(line))
+                    except json.JSONDecodeError:
+                        continue
+    else:
+        # load regular JSON data
+        with open(source, "r", encoding="utf-8") as file:
+            json_data = json.load(file)
+
+    if data_contract_specification.servers is None:
+        data_contract_specification.servers = {}
+
+    data_contract_specification.servers["production"] = Server(type="local", path=source, format="json")
+
+    # initialisation
+    models = {}
+
+    if isinstance(json_data, list) and json_data:
+        # Array of items
+        if all(isinstance(item, dict) for item in json_data[:5]):
+            # Array of objects, as table
+            fields = {}
+            for item in json_data[:20]:
+                for key, value in item.items():
+                    field_def = generate_field_definition(value, key, base_model_name, models)
+                    if key in fields:
+                        fields[key] = merge_field_definitions(fields[key], field_def)
+                    else:
+                        fields[key] = field_def
+
+            models[base_model_name] = {
+                "type": "table",
+                "description": f"Generated from JSON array in {source}",
+                "fields": fields,
+                "examples": json_data[:3] if include_examples else None,
+            }
+        else:
+            # Simple array
+            item_type, item_format = infer_array_type(json_data[:20])
+            models[base_model_name] = {
+                "type": "array",
+                "description": f"Generated from JSON array in {source}",
+                "items": {"type": item_type, "format": item_format} if item_format else {"type": item_type},
+                "examples": [json_data[:5]] if include_examples else None,
+            }
+    elif isinstance(json_data, dict):
+        # Single object
+        fields = {}
+        for key, value in json_data.items():
+            fields[key] = generate_field_definition(value, key, base_model_name, models)
+
+        models[base_model_name] = {
+            "type": "object",
+            "description": f"Generated from JSON object in {source}",
+            "fields": fields,
+            "examples": [json_data] if include_examples else None,
+        }
+    else:
+        # Primitive value
+        field_type, field_format = determine_type_and_format(json_data)
+        models[base_model_name] = {
+            "type": field_type,
+            "description": f"Generated from JSON primitive in {source}",
+            "format": field_format,
+            "examples": [json_data] if include_examples and field_type != "boolean" else None,
+        }
+
+    for model_name, model_def in models.items():
+        model_type = model_def.pop("type")
+        data_contract_specification.models[model_name] = Model(type=model_type, **model_def)
+
+    return data_contract_specification
+
+
+def generate_field_definition(
+    value: Any, field_name: str, parent_model: str, models: Dict[str, Dict[str, Any]]
+) -> Dict[str, Any]:
+    """Generate a field definition for a JSON value, creating nested models."""
+
+    if isinstance(value, dict):
+        # Handle object fields
+        fields = {}
+        for key, nested_value in value.items():
+            fields[key] = generate_field_definition(nested_value, key, parent_model, models)
+
+        return {"type": "object", "fields": fields}
+
+    elif isinstance(value, list):
+        # Handle array fields
+        if not value:
+            return {"type": "array", "items": {"type": "string"}}
+
+        if all(isinstance(item, dict) for item in value):
+            # Array of objects
+            fields = {}
+            for item in value:
+                for key, nested_value in item.items():
+                    field_def = generate_field_definition(nested_value, key, parent_model, models)
+                    if key in fields:
+                        fields[key] = merge_field_definitions(fields[key], field_def)
+                    else:
+                        fields[key] = field_def
+
+            return {"type": "array", "items": {"type": "object", "fields": fields}}
+
+        elif all(isinstance(item, list) for item in value):
+            # Array of arrays
+            inner_type, inner_format = infer_array_type(value[0])
+            return {
+                "type": "array",
+                "items": {
+                    "type": "array",
+                    "items": {"type": inner_type, "format": inner_format} if inner_format else {"type": inner_type},
+                },
+                "examples": value[:5],  # Include examples for nested arrays
+            }
+
+        else:
+            # Array of simple or mixed types
+            item_type, item_format = infer_array_type(value)
+            items_def = {"type": item_type}
+            if item_format:
+                items_def["format"] = item_format
+
+            field_def = {"type": "array", "items": items_def}
+
+            # Add examples if appropriate
+            sample_values = [item for item in value[:5] if item is not None]
+            if sample_values:
+                field_def["examples"] = sample_values
+
+            return field_def
+
+    else:
+        # Handle primitive types
+        field_type, field_format = determine_type_and_format(value)
+        field_def = {"type": field_type}
+        if field_format:
+            field_def["format"] = field_format
+
+        # Add examples
+        if value is not None and field_type != "boolean":
+            field_def["examples"] = [value]
+
+        return field_def
+
+
+def infer_array_type(array: List) -> Tuple[str, Optional[str]]:
+    """Infer the common type of items in an array."""
+    if not array:
+        return "string", None
+
+    # if all items are dictionaries with the same structure
+    if all(isinstance(item, dict) for item in array):
+        return "object", None
+
+    # if all items are of the same primitive type
+    non_null_items = [item for item in array if item is not None]
+    if not non_null_items:
+        return "null", None
+
+    types_and_formats = [determine_type_and_format(item) for item in non_null_items]
+    types = {t for t, _ in types_and_formats}
+    formats = {f for _, f in types_and_formats if f is not None}
+
+    # simplify type combinations
+    if types == {"integer", "number"}:
+        return "number", None
+    if len(types) == 1:
+        type_name = next(iter(types))
+        format_name = next(iter(formats)) if len(formats) == 1 else None
+        return type_name, format_name
+    if all(t in {"string", "integer", "number", "boolean", "null"} for t in types):
+        # If all string values have the same format, keep it
+        if len(formats) == 1 and "string" in types:
+            return "string", next(iter(formats))
+        return "string", None
+
+    # Mixed types
+    return "string", None
+
+
+def determine_type_and_format(value: Any) -> Tuple[str, Optional[str]]:
+    """determine the datacontract type and format for a JSON value."""
+    if value is None:
+        return "null", None
+    elif isinstance(value, bool):
+        return "boolean", None
+    elif isinstance(value, int):
+        return "integer", None
+    elif isinstance(value, float):
+        return "number", None
+    elif isinstance(value, str):
+        try:
+            if re.match(r"^\d{4}-\d{2}-\d{2}$", value):
+                return "string", "date"
+            elif re.match(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})?$", value):
+                return "string", "date-time"
+            elif re.match(r"^[\w\.-]+@([\w-]+\.)+[\w-]{2,4}$", value):
+                return "string", "email"
+            elif re.match(r"^[a-f0-9]{8}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{12}$", value.lower()):
+                return "string", "uuid"
+            else:
+                return "string", None
+        except re.error:
+            return "string", None
+    elif isinstance(value, dict):
+        return "object", None
+    elif isinstance(value, list):
+        return "array", None
+    else:
+        return "string", None
+
+
+def merge_field_definitions(field1: Dict[str, Any], field2: Dict[str, Any]) -> Dict[str, Any]:
+    """Merge two field definitions."""
+    result = field1.copy()
+    if field1.get("type") == "object" and field2.get("type") != "object":
+        return field1
+    if field2.get("type") == "object" and field1.get("type") != "object":
+        return field2
+    # Handle type differences
+    if field1.get("type") != field2.get("type"):
+        type1, _ = field1.get("type", "string"), field1.get("format")
+        type2, _ = field2.get("type", "string"), field2.get("format")
+
+        if type1 == "integer" and type2 == "number" or type1 == "number" and type2 == "integer":
+            common_type = "number"
+            common_format = None
+        elif "string" in [type1, type2]:
+            common_type = "string"
+            common_format = None
+        elif all(t in ["string", "integer", "number", "boolean", "null"] for t in [type1, type2]):
+            common_type = "string"
+            common_format = None
+        elif type1 == "array" and type2 == "array":
+            # Handle mixed array types
+            items1 = field1.get("items", {})
+            items2 = field2.get("items", {})
+            if items1.get("type") == "object" or items2.get("type") == "object":
+                if items1.get("type") == "object" and items2.get("type") == "object":
+                    merged_items = merge_field_definitions(items1, items2)
+                else:
+                    merged_items = items1 if items1.get("type") == "object" else items2
+                return {"type": "array", "items": merged_items}
+            else:
+                merged_items = merge_field_definitions(items1, items2)
+                return {"type": "array", "items": merged_items}
+        else:
+            common_type = "array" if "array" in [type1, type2] else "object"
+            common_format = None
+
+        result["type"] = common_type
+        if common_format:
+            result["format"] = common_format
+        elif "format" in result:
+            del result["format"]
+
+    # Merge examples
+    if "examples" in field2:
+        if "examples" in result:
+            combined = result["examples"] + [ex for ex in field2["examples"] if ex not in result["examples"]]
+            result["examples"] = combined[:5]  # Limit to 5 examples
+        else:
+            result["examples"] = field2["examples"]
+
+    # Handle nested structures
+    if result.get("type") == "array" and "items" in field1 and "items" in field2:
+        result["items"] = merge_field_definitions(field1["items"], field2["items"])
+    elif result.get("type") == "object" and "fields" in field1 and "fields" in field2:
+        # Merge fields from both objects
+        merged_fields = field1["fields"].copy()
+        for key, field_def in field2["fields"].items():
+            if key in merged_fields:
+                merged_fields[key] = merge_field_definitions(merged_fields[key], field_def)
+            else:
+                merged_fields[key] = field_def
+        result["fields"] = merged_fields
+
+    return result
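To illustrate the importer above, here is a minimal sketch of calling import_json directly; the orders.json file and its field names are hypothetical. For an array of objects it infers a single table model, merges field definitions across records (integer plus float collapses to number), and registers a local "production" server pointing at the source file.

import json

from datacontract.imports.json_importer import import_json
from datacontract.model.data_contract_specification import DataContractSpecification

# Hypothetical sample data written to disk, since the importer reads a path.
with open("orders.json", "w", encoding="utf-8") as f:
    json.dump([{"id": "a1", "amount": 9.99}, {"id": "b2", "amount": 12}], f)

spec = import_json(DataContractSpecification(), "orders.json")
print(list(spec.models["orders"].fields))  # ['id', 'amount']
print(spec.servers["production"].format)   # json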
datacontract/imports/jsonschema_importer.py
@@ -0,0 +1,146 @@
+import json
+
+import fastjsonschema
+
+from datacontract.imports.importer import Importer
+from datacontract.model.data_contract_specification import DataContractSpecification, Definition, Field, Model
+from datacontract.model.exceptions import DataContractException
+
+
+class JsonSchemaImporter(Importer):
+    def import_source(
+        self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
+    ) -> DataContractSpecification:
+        return import_jsonschema(data_contract_specification, source)
+
+
+def import_jsonschema(data_contract_specification: DataContractSpecification, source: str) -> DataContractSpecification:
+    if data_contract_specification.models is None:
+        data_contract_specification.models = {}
+
+    json_schema = load_and_validate_json_schema(source)
+
+    title = json_schema.get("title", "default_model")
+    description = json_schema.get("description")
+    type_ = json_schema.get("type")
+    properties = json_schema.get("properties", {})
+    required_properties = json_schema.get("required", [])
+
+    fields_kwargs = jsonschema_to_args(properties, required_properties)
+    fields = {name: Field(**kwargs) for name, kwargs in fields_kwargs.items()}
+
+    model = Model(description=description, type=type_, title=title, fields=fields)
+    data_contract_specification.models[title] = model
+
+    definitions = json_schema.get("definitions", {})
+    for name, schema in definitions.items():
+        kwargs = schema_to_args(schema)
+        data_contract_specification.definitions[name] = Definition(name=name, **kwargs)
+
+    return data_contract_specification
+
+
+def load_and_validate_json_schema(source):
+    try:
+        with open(source, "r") as file:
+            json_schema = json.loads(file.read())
+
+        validator = fastjsonschema.compile({})
+        validator(json_schema)
+
+    except fastjsonschema.JsonSchemaException as e:
+        raise DataContractException(
+            type="schema",
+            name="Parse json schema",
+            reason=f"Failed to validate json schema from {source}: {e}",
+            engine="datacontract",
+        )
+
+    except Exception as e:
+        raise DataContractException(
+            type="schema",
+            name="Parse json schema",
+            reason=f"Failed to parse json schema from {source}",
+            engine="datacontract",
+            original_exception=e,
+        )
+    return json_schema
+
+
+def jsonschema_to_args(properties, required_properties):
+    args = {}
+    for property, property_schema in properties.items():
+        is_required = property in required_properties
+        args[property] = schema_to_args(property_schema, is_required)
+
+    return args
+
+
+def schema_to_args(property_schema, is_required: bool = None) -> dict:
+    direct_mappings = {
+        "title",
+        "description",
+        "format",
+        "pattern",
+        "enum",
+        "tags",
+        "pii",
+        "minLength",
+        "maxLength",
+        "minimum",
+        "exclusiveMinimum",
+        "maximum",
+        "exclusiveMaximum",
+    }
+
+    field_kwargs = {key: value for key, value in property_schema.items() if key in direct_mappings}
+
+    if is_required is not None:
+        field_kwargs["required"] = is_required
+
+    property_type = determine_type(property_schema)
+    if property_type is not None:
+        field_kwargs["type"] = property_type
+
+    if property_type == "array":
+        nested_item_type, nested_items = determine_nested_item_type(property_schema)
+
+        if nested_items is not None:
+            field_kwargs["items"] = schema_to_args(nested_item_type)
+
+    nested_properties = property_schema.get("properties")
+    if nested_properties is not None:
+        # recursive call for complex nested properties
+        required = property_schema.get("required", [])
+        field_kwargs["fields"] = jsonschema_to_args(nested_properties, required)
+
+    return field_kwargs
+
+
+def determine_nested_item_type(property_schema):
+    nested_items = property_schema.get("items")
+    nested_items_is_list = isinstance(nested_items, list)
+    if nested_items_is_list and len(nested_items) != 1:
+        raise DataContractException(
+            type="schema",
+            name="Parse json schema",
+            reason=f"Union types for arrays are currently not supported ({nested_items})",
+            engine="datacontract",
+        )
+    if nested_items_is_list and len(nested_items) == 1:
+        nested_item_type = nested_items[0]
+    elif not nested_items_is_list and nested_items is not None:
+        nested_item_type = nested_items
+    return nested_item_type, nested_items
+
+
+def determine_type(property_schema):
+    property_type = property_schema.get("type")
+    type_is_list = isinstance(property_type, list)
+    if type_is_list:
+        non_null_types = [t for t in property_type if t != "null"]
+        if non_null_types:
+            property_type = non_null_types[0]
+        else:
+            property_type = None
+    return property_type
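A corresponding sketch for the JSON Schema importer above; the schema literal is hypothetical, and the attribute access on Field (required, items) is an assumption about the specification model. Nullable union types collapse to their first non-null member, required is derived from the schema's required list, and array items recurse through schema_to_args.

import json

from datacontract.imports.jsonschema_importer import import_jsonschema
from datacontract.model.data_contract_specification import DataContractSpecification

# Hypothetical schema written to disk, since the importer reads from a path.
schema = {
    "title": "Order",
    "type": "object",
    "required": ["id"],
    "properties": {
        "id": {"type": ["string", "null"], "format": "uuid"},
        "lines": {"type": "array", "items": {"type": "integer"}},
    },
}
with open("order.schema.json", "w") as f:
    json.dump(schema, f)

spec = import_jsonschema(DataContractSpecification(), "order.schema.json")
order = spec.models["Order"]
print(order.fields["id"].type, order.fields["id"].required)  # string True
print(order.fields["lines"].items.type)                      # integer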
datacontract/imports/odcs_importer.py
@@ -0,0 +1,60 @@
+import yaml
+
+from datacontract.imports.importer import Importer
+from datacontract.lint.resources import read_resource
+from datacontract.model.data_contract_specification import (
+    DataContractSpecification,
+)
+from datacontract.model.exceptions import DataContractException
+
+
+class OdcsImporter(Importer):
+    def import_source(
+        self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
+    ) -> DataContractSpecification:
+        return import_odcs(data_contract_specification, source)
+
+
+def import_odcs(data_contract_specification: DataContractSpecification, source: str) -> DataContractSpecification:
+    try:
+        odcs_contract = yaml.safe_load(read_resource(source))
+
+    except Exception as e:
+        raise DataContractException(
+            type="schema",
+            name="Parse ODCS contract",
+            reason=f"Failed to parse odcs contract from {source}",
+            engine="datacontract",
+            original_exception=e,
+        )
+
+    odcs_kind = odcs_contract.get("kind")
+    odcs_api_version = odcs_contract.get("apiVersion")
+
+    # if odcs_kind is not DataContract throw exception
+    if odcs_kind != "DataContract":
+        raise DataContractException(
+            type="schema",
+            name="Importing ODCS contract",
+            reason=f"Unsupported ODCS kind: {odcs_kind}. Is this a valid ODCS data contract?",
+            engine="datacontract",
+        )
+
+    if odcs_api_version.startswith("v2."):
+        raise DataContractException(
+            type="schema",
+            name="Importing ODCS contract",
+            reason=f"Unsupported ODCS API version: {odcs_api_version}",
+            engine="datacontract",
+        )
+    elif odcs_api_version.startswith("v3."):
+        from datacontract.imports.odcs_v3_importer import import_odcs_v3_as_dcs
+
+        return import_odcs_v3_as_dcs(data_contract_specification, source)
+    else:
+        raise DataContractException(
+            type="schema",
+            name="Importing ODCS contract",
+            reason=f"Unsupported ODCS API version: {odcs_api_version}",
+            engine="datacontract",
+        )
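Finally, a sketch of the ODCS importer's guard logic above; the contract stub is hypothetical, and reading e.reason assumes DataContractException stores its constructor arguments as attributes. Anything other than kind: DataContract is rejected, v2.x documents raise, and v3.x documents are delegated to odcs_v3_importer.

from datacontract.imports.odcs_importer import import_odcs
from datacontract.model.data_contract_specification import DataContractSpecification
from datacontract.model.exceptions import DataContractException

# Hypothetical ODCS v2 stub; the importer only accepts apiVersion v3.x.
with open("contract.odcs.yaml", "w") as f:
    f.write("kind: DataContract\napiVersion: v2.3.0\n")

try:
    import_odcs(DataContractSpecification(), "contract.odcs.yaml")
except DataContractException as e:
    print(e.reason)  # Unsupported ODCS API version: v2.3.0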