datacontract-cli 0.10.6__py3-none-any.whl → 0.10.8__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datacontract-cli might be problematic.
- datacontract/cli.py +26 -24
- datacontract/data_contract.py +69 -152
- datacontract/engines/fastjsonschema/s3/s3_read_files.py +13 -1
- datacontract/engines/soda/check_soda_execute.py +11 -0
- datacontract/engines/soda/connections/bigquery.py +8 -1
- datacontract/engines/soda/connections/kafka.py +3 -0
- datacontract/export/__init__.py +0 -0
- datacontract/export/avro_converter.py +28 -21
- datacontract/export/avro_idl_converter.py +29 -22
- datacontract/export/bigquery_converter.py +15 -0
- datacontract/export/dbml_converter.py +9 -0
- datacontract/export/dbt_converter.py +26 -1
- datacontract/export/exporter.py +87 -0
- datacontract/export/exporter_factory.py +52 -0
- datacontract/export/go_converter.py +6 -0
- datacontract/export/great_expectations_converter.py +10 -0
- datacontract/export/html_export.py +6 -0
- datacontract/export/jsonschema_converter.py +24 -16
- datacontract/export/odcs_converter.py +24 -1
- datacontract/export/protobuf_converter.py +6 -0
- datacontract/export/pydantic_converter.py +6 -0
- datacontract/export/rdf_converter.py +9 -0
- datacontract/export/sodacl_converter.py +7 -1
- datacontract/export/sql_converter.py +32 -2
- datacontract/export/sql_type_converter.py +4 -5
- datacontract/export/terraform_converter.py +6 -0
- datacontract/imports/bigquery_importer.py +30 -4
- datacontract/imports/glue_importer.py +13 -3
- datacontract/imports/odcs_importer.py +192 -0
- datacontract/imports/unity_importer.py +138 -0
- datacontract/model/data_contract_specification.py +2 -0
- datacontract/templates/partials/server.html +64 -32
- datacontract/templates/style/output.css +9 -0
- datacontract/web.py +56 -2
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/METADATA +232 -96
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/RECORD +40 -35
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/LICENSE +0 -0
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/WHEEL +0 -0
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.6.dist-info → datacontract_cli-0.10.8.dist-info}/top_level.txt +0 -0
datacontract/imports/odcs_importer.py
ADDED
@@ -0,0 +1,192 @@
+import datetime
+import logging
+from typing import Any, Dict, List
+import yaml
+from datacontract.model.data_contract_specification import (
+    Availability,
+    Contact,
+    DataContractSpecification,
+    Info,
+    Model,
+    Field,
+    Retention,
+    ServiceLevel,
+    Terms,
+)
+from datacontract.model.exceptions import DataContractException
+
+DATACONTRACT_TYPES = [
+    "string",
+    "text",
+    "varchar",
+    "number",
+    "decimal",
+    "numeric",
+    "int",
+    "integer",
+    "long",
+    "bigint",
+    "float",
+    "double",
+    "boolean",
+    "timestamp",
+    "timestamp_tz",
+    "timestamp_ntz",
+    "date",
+    "array",
+    "bytes",
+    "object",
+    "record",
+    "struct",
+    "null",
+]
+
+
+def import_odcs(data_contract_specification: DataContractSpecification, source: str) -> DataContractSpecification:
+    try:
+        with open(source, "r") as file:
+            odcs_contract = yaml.safe_load(file.read())
+
+    except Exception as e:
+        raise DataContractException(
+            type="schema",
+            name="Parse ODCS contract",
+            reason=f"Failed to parse odcs contract from {source}",
+            engine="datacontract",
+            original_exception=e,
+        )
+
+    data_contract_specification.id = odcs_contract["uuid"]
+    data_contract_specification.info = import_info(odcs_contract)
+    data_contract_specification.terms = import_terms(odcs_contract)
+    data_contract_specification.servicelevels = import_servicelevels(odcs_contract)
+    data_contract_specification.models = import_models(odcs_contract)
+
+    return data_contract_specification
+
+
+def import_info(odcs_contract: Dict[str, Any]) -> Info:
+    info = Info(title=odcs_contract.get("quantumName"), version=odcs_contract.get("version"))
+
+    if odcs_contract.get("description").get("purpose") is not None:
+        info.description = odcs_contract.get("description").get("purpose")
+
+    if odcs_contract.get("datasetDomain") is not None:
+        info.owner = odcs_contract.get("datasetDomain")
+
+    if odcs_contract.get("productDl") is not None or odcs_contract.get("productFeedbackUrl") is not None:
+        contact = Contact()
+        if odcs_contract.get("productDl") is not None:
+            contact.name = odcs_contract.get("productDl")
+        if odcs_contract.get("productFeedbackUrl") is not None:
+            contact.url = odcs_contract.get("productFeedbackUrl")
+
+        info.contact = contact
+
+    return info
+
+
+def import_terms(odcs_contract: Dict[str, Any]) -> Terms | None:
+    if (
+        odcs_contract.get("description").get("usage") is not None
+        or odcs_contract.get("description").get("limitations") is not None
+        or odcs_contract.get("price") is not None
+    ):
+        terms = Terms()
+        if odcs_contract.get("description").get("usage") is not None:
+            terms.usage = odcs_contract.get("description").get("usage")
+        if odcs_contract.get("description").get("limitations") is not None:
+            terms.limitations = odcs_contract.get("description").get("limitations")
+        if odcs_contract.get("price") is not None:
+            terms.billing = f"{odcs_contract.get('price').get('priceAmount')} {odcs_contract.get('price').get('priceCurrency')} / {odcs_contract.get('price').get('priceUnit')}"
+
+        return terms
+    else:
+        return None
+
+
+def import_servicelevels(odcs_contract: Dict[str, Any]) -> ServiceLevel:
+    # find the two properties we can map (based on the examples)
+    sla_properties = odcs_contract.get("slaProperties") if odcs_contract.get("slaProperties") is not None else []
+    availability = next((p for p in sla_properties if p["property"] == "generalAvailability"), None)
+    retention = next((p for p in sla_properties if p["property"] == "retention"), None)
+
+    if availability is not None or retention is not None:
+        servicelevel = ServiceLevel()
+
+        if availability is not None:
+            value = availability.get("value")
+            if isinstance(value, datetime.datetime):
+                value = value.isoformat()
+            servicelevel.availability = Availability(description=value)
+
+        if retention is not None:
+            servicelevel.retention = Retention(period=f"{retention.get('value')}{retention.get('unit')}")
+
+        return servicelevel
+    else:
+        return None
+
+
+def import_models(odcs_contract: Dict[str, Any]) -> Dict[str, Model]:
+    custom_type_mappings = get_custom_type_mappings(odcs_contract.get("customProperties"))
+
+    odcs_tables = odcs_contract.get("dataset") if odcs_contract.get("dataset") is not None else []
+    result = {}
+
+    for table in odcs_tables:
+        description = table.get("description") if table.get("description") is not None else ""
+        model = Model(description=" ".join(description.splitlines()), type="table")
+        model.fields = import_fields(table.get("columns"), custom_type_mappings)
+        result[table.get("table")] = model
+
+    return result
+
+
+def import_fields(odcs_columns: Dict[str, Any], custom_type_mappings: Dict[str, str]) -> Dict[str, Field]:
+    logger = logging.getLogger(__name__)
+    result = {}
+
+    for column in odcs_columns:
+        mapped_type = map_type(column.get("logicalType"), custom_type_mappings)
+        if mapped_type is not None:
+            description = column.get("description") if column.get("description") is not None else ""
+            field = Field(
+                description=" ".join(description.splitlines()),
+                type=mapped_type,
+                title=column.get("businessName") if column.get("businessName") is not None else "",
+                required=not column.get("isNullable") if column.get("isNullable") is not None else False,
+                primary=column.get("isPrimary") if column.get("isPrimary") is not None else False,
+                unique=column.get("isUnique") if column.get("isUnique") is not None else False,
+                classification=column.get("classification") if column.get("classification") is not None else "",
+                tags=column.get("tags") if column.get("tags") is not None else [],
+            )
+            result[column["column"]] = field
+        else:
+            logger.info(
+                f"Can't properly map {column.get('column')} to the Datacontract Mapping types, as there is no equivalent or special mapping. Consider introducing a customProperty 'dc_mapping_{column.get('logicalName')}' that defines your expected type as the 'value'"
+            )
+
+    return result
+
+
+def map_type(odcs_type: str, custom_mappings: Dict[str, str]) -> str | None:
+    t = odcs_type.lower()
+    if t in DATACONTRACT_TYPES:
+        return t
+    elif custom_mappings.get(t) is not None:
+        return custom_mappings.get(t)
+    else:
+        return None
+
+
+def get_custom_type_mappings(odcs_custom_properties: List[Any]) -> Dict[str, str]:
+    result = {}
+    if odcs_custom_properties is not None:
+        for prop in odcs_custom_properties:
+            if prop["property"].startswith("dc_mapping_"):
+                odcs_type_name = prop["property"].substring(11)
+                datacontract_type = prop["value"]
+                result[odcs_type_name] = datacontract_type
+
+    return result
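For orientation, a minimal usage sketch (not taken from the package docs) of calling the new importer directly; the file name and the YAML keys mentioned in the comments are assumptions based on what import_odcs reads above:

from datacontract.imports.odcs_importer import import_odcs
from datacontract.model.data_contract_specification import DataContractSpecification

# "example-odcs.yaml" is a hypothetical ODCS document. import_odcs reads "uuid" and
# "description" unconditionally, so the file must provide at least those, plus
# quantumName/version and a "dataset" list for info and models to be populated.
spec = import_odcs(DataContractSpecification(), "example-odcs.yaml")

print(spec.info.title)           # taken from quantumName
print(list(spec.models.keys()))  # one model per entry in "dataset"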
datacontract/imports/unity_importer.py
ADDED
@@ -0,0 +1,138 @@
+import json
+import requests
+import os
+import typing
+
+from datacontract.model.data_contract_specification import DataContractSpecification, Model, Field
+from datacontract.model.exceptions import DataContractException
+
+def import_unity_from_json(
+    data_contract_specification: DataContractSpecification, source: str
+) -> DataContractSpecification:
+    try:
+        with open(source, "r") as file:
+            unity_schema = json.loads(file.read())
+    except json.JSONDecodeError as e:
+        raise DataContractException(
+            type="schema",
+            name="Parse unity schema",
+            reason=f"Failed to parse unity schema from {source}",
+            engine="datacontract",
+            original_exception=e,
+        )
+    return convert_unity_schema(data_contract_specification, unity_schema)
+
+def import_unity_from_api(
+    data_contract_specification: DataContractSpecification,
+    unity_table_full_name: typing.Optional[str] = None
+) -> DataContractSpecification:
+    databricks_instance = os.getenv('DATABRICKS_IMPORT_INSTANCE')
+    access_token = os.getenv('DATABRICKS_IMPORT_ACCESS_TOKEN')
+
+    if not databricks_instance or not access_token:
+        print("Missing environment variables for Databricks instance or access token.")
+        print("Both, $DATABRICKS_IMPORT_INSTANCE and $DATABRICKS_IMPORT_ACCESS_TOKEN must be set.")
+        exit(1)  # Exit if variables are not set
+
+    api_url = f'{databricks_instance}/api/2.1/unity-catalog/tables/{unity_table_full_name}'
+
+    headers = {
+        'Authorization': f'Bearer {access_token}'
+    }
+    response = requests.get(api_url, headers=headers)
+
+    if response.status_code != 200:
+        raise DataContractException(
+            type="schema",
+            name="Retrieve unity catalog schema",
+            reason=f"Failed to retrieve unity catalog schema from databricks instance: {response.status_code} {response.text}",
+            engine="datacontract"
+        )
+
+    convert_unity_schema(data_contract_specification, response.json())
+
+    return data_contract_specification
+
+def convert_unity_schema(
+    data_contract_specification: DataContractSpecification, unity_schema: dict
+) -> DataContractSpecification:
+    if data_contract_specification.models is None:
+        data_contract_specification.models = {}
+
+    fields = import_table_fields(unity_schema.get("columns"))
+
+    table_id = unity_schema.get("table_id")
+
+    data_contract_specification.models[table_id] = Model(fields=fields, type="table")
+
+    if unity_schema.get("name") is not None:
+        data_contract_specification.models[table_id].title = unity_schema.get("name")
+
+    return data_contract_specification
+
+
+def import_table_fields(table_fields):
+    imported_fields = {}
+    for field in table_fields:
+        field_name = field.get("name")
+        imported_fields[field_name] = Field()
+        imported_fields[field_name].required = field.get("nullable") == "false"
+        imported_fields[field_name].description = field.get("comment")
+
+        # databricks api 2.1 specifies that type_name can be any of:
+        # BOOLEAN | BYTE | SHORT | INT | LONG | FLOAT | DOUBLE | DATE | TIMESTAMP | TIMESTAMP_NTZ | STRING
+        # | BINARY | DECIMAL | INTERVAL | ARRAY | STRUCT | MAP | CHAR | NULL | USER_DEFINED_TYPE | TABLE_TYPE
+        if field.get("type_name") in ["INTERVAL", "ARRAY", "STRUCT", "MAP", "USER_DEFINED_TYPE", "TABLE_TYPE"]:
+            # complex types are not supported, yet
+            raise DataContractException(
+                type="schema",
+                result="failed",
+                name="Map unity type to data contract type",
+                reason=f"type ${field.get('type_name')} is not supported yet for unity import",
+                engine="datacontract",
+            )
+
+        imported_fields[field_name].type = map_type_from_unity(field.get("type_name"))
+
+    return imported_fields
+
+
+def map_type_from_unity(type_str: str):
+    if type_str == "BOOLEAN":
+        return "boolean"
+    elif type_str == "BYTE":
+        return "bytes"
+    elif type_str == "SHORT":
+        return "int"
+    elif type_str == "INT":
+        return "int"
+    elif type_str == "LONG":
+        return "long"
+    elif type_str == "FLOAT":
+        return "float"
+    elif type_str == "DOUBLE":
+        return "double"
+    elif type_str == "DATE":
+        return "date"
+    elif type_str == "TIMESTAMP":
+        return "timestamp"
+    elif type_str == "TIMESTAMP_NTZ":
+        return "timestamp_ntz"
+    elif type_str == "STRING":
+        return "string"
+    elif type_str == "BINARY":
+        return "bytes"
+    elif type_str == "DECIMAL":
+        return "decimal"
+    elif type_str == "CHAR":
+        return "varchar"
+    elif type_str == "NULL":
+        return "null"
+    else:
+        raise DataContractException(
+            type="schema",
+            result="failed",
+            name="Map unity type to data contract type",
+            reason=f"Unsupported type {type_str} in unity json definition.",
+            engine="datacontract",
+        )
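Likewise, a small sketch of exercising the JSON-based Unity importer; the table payload below is made up, but its keys (table_id, name, columns with name/type_name/nullable/comment) mirror what convert_unity_schema and import_table_fields read:

import json
import tempfile

from datacontract.imports.unity_importer import import_unity_from_json
from datacontract.model.data_contract_specification import DataContractSpecification

# Made-up Unity Catalog table description containing only the keys the importer uses.
unity_table = {
    "table_id": "orders_table_id",
    "name": "orders",
    "columns": [
        {"name": "order_id", "type_name": "STRING", "nullable": "false", "comment": "Order key"},
        {"name": "amount", "type_name": "DECIMAL", "nullable": "true", "comment": "Order total"},
    ],
}

# Write the payload to a temporary file so it can be read like a normal source file.
with tempfile.NamedTemporaryFile("w", suffix=".json", delete=False) as f:
    json.dump(unity_table, f)

spec = import_unity_from_json(DataContractSpecification(), f.name)
print(spec.models["orders_table_id"].fields["order_id"].type)  # "string"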
datacontract/model/data_contract_specification.py
CHANGED
@@ -39,6 +39,7 @@ class Terms(pyd.BaseModel):
     limitations: str = None
     billing: str = None
     noticePeriod: str = None
+    description: str = None
 
 
 class Definition(pyd.BaseModel):
@@ -98,6 +99,7 @@ class Model(pyd.BaseModel):
     namespace: str = None
     title: str = None
     fields: Dict[str, Field] = {}
+    config: Dict[str, Any] = None
 
 
 class Info(pyd.BaseModel):
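Both additions are plain optional attributes on the pydantic models, so existing contracts keep validating. A short sketch with purely illustrative values:

from datacontract.model.data_contract_specification import Model, Terms

# Illustrative values only; "config" accepts any dict, "description" any string.
terms = Terms(usage="Internal analytics", description="Free-text description of the terms")
model = Model(type="table", title="Orders", config={"some_tooling_hint": "value"})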
datacontract/templates/partials/server.html
CHANGED
@@ -1,16 +1,20 @@
 <li class="relative flex gap-x-6 px-4 py-5 sm:px-6">
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Server</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server_name}}</dd>
+            </div>
         </div>
     </div>
 
 {% if server.type %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Type</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.type}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -18,8 +22,10 @@
 {% if server.project %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Project</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.project}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -27,8 +33,10 @@
 {% if server.dataset %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Dataset</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.dataset}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -36,8 +44,10 @@
 {% if server.location %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Location</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.location}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -45,8 +55,10 @@
 {% if server.endpointUrl %}
     <div class="flex items-center gap-x-4">
        <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Endpoint URL</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.endpointUrl}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -54,8 +66,10 @@
 {% if server.account %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Account</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.account}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -63,8 +77,10 @@
 {% if server.host %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Host</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.host}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -72,8 +88,10 @@
 {% if server.port %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Port</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.port}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -81,8 +99,10 @@
 {% if server.catalog %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Catalog</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.catalog}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -90,8 +110,10 @@
 {% if server.database %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Database</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.database}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -99,8 +121,10 @@
 {% if server.schema_ %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Schema</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.schema_}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -108,8 +132,10 @@
 {% if server.topic %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Topic</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.topic}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -117,8 +143,10 @@
 {% if server.path %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Path</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.path}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -126,8 +154,10 @@
 {% if server.format %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Format</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.format}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
@@ -135,8 +165,10 @@
 {% if server.delimiter %}
     <div class="flex items-center gap-x-4">
         <div class="hidden sm:flex sm:flex-col">
-            <
-
+            <div class="flex flex-col">
+                <dt class="text-sm font-medium text-gray-500">Delimiter</dt>
+                <dd class="mt-1 text-sm text-gray-900">{{server.delimiter}}</dd>
+            </div>
         </div>
     </div>
 {% endif %}
datacontract/templates/style/output.css
CHANGED
@@ -866,6 +866,11 @@ video {
   column-gap: 1rem;
 }
 
+.gap-x-6 {
+  -moz-column-gap: 1.5rem;
+  column-gap: 1.5rem;
+}
+
 .gap-y-6 {
   row-gap: 1.5rem;
 }
@@ -1399,6 +1404,10 @@ video {
   flex-direction: row;
 }
 
+.sm\:flex-col {
+  flex-direction: column;
+}
+
 .sm\:flex-wrap {
   flex-wrap: wrap;
 }
datacontract/web.py
CHANGED
@@ -1,14 +1,68 @@
-from typing import Annotated, Union
+from typing import Annotated, Union, Optional
 
+import typer
 from fastapi import FastAPI, File
+from fastapi.responses import HTMLResponse
 
-from datacontract.data_contract import DataContract
+from datacontract.data_contract import DataContract, ExportFormat
+from fastapi.responses import PlainTextResponse
 
 app = FastAPI()
 
 
+@app.get("/", response_class=HTMLResponse)
+def index():
+    # TODO OpenAPI spec
+    return """
+    <html>
+    <body>
+    <h1>datacontract web server</h1>
+    <ul>
+    <li>POST /lint</li>
+    <li>POST /export</li>
+    </ul>
+    </body>
+    </html>
+    """
+
+
 @app.post("/lint")
 def lint(file: Annotated[bytes, File()], linters: Union[str, set[str]] = "all"):
     data_contract = DataContract(data_contract_str=str(file, encoding="utf-8"))
     lint_result = data_contract.lint(enabled_linters=linters)
     return {"result": lint_result.result, "checks": lint_result.checks}
+
+
+@app.post("/export", response_class=PlainTextResponse)
+def export(
+    file: Annotated[bytes, File()],
+    export_format: Annotated[ExportFormat, typer.Option(help="The export format.")],
+    server: Annotated[str, typer.Option(help="The server name to export.")] = None,
+    model: Annotated[
+        str,
+        typer.Option(
+            help="Use the key of the model in the data contract yaml file "
+            "to refer to a model, e.g., `orders`, or `all` for all "
+            "models (default)."
+        ),
+    ] = "all",
+    rdf_base: Annotated[
+        Optional[str],
+        typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options"),
+    ] = None,
+    sql_server_type: Annotated[
+        Optional[str],
+        typer.Option(
+            help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.",
+            rich_help_panel="SQL Options",
+        ),
+    ] = "auto",
+):
+    result = DataContract(data_contract_str=str(file, encoding="utf-8"), server=server).export(
+        export_format=export_format,
+        model=model,
+        rdf_base=rdf_base,
+        sql_server_type=sql_server_type,
+    )
+
+    return result
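A hedged client-side sketch of the new endpoint, assuming the app is served locally (for example with uvicorn datacontract.web:app) and that the non-file options are passed as query parameters, which is FastAPI's default handling for scalar parameters; the local file name is an example:

import requests

# POST a local data contract and request a JSON Schema export.
with open("datacontract.yaml", "rb") as f:
    response = requests.post(
        "http://localhost:8000/export",
        params={"export_format": "jsonschema", "model": "all"},
        files={"file": f},
    )

print(response.status_code)
print(response.text)  # plain text, since the endpoint uses PlainTextResponse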