datacontract-cli 0.10.28__py3-none-any.whl → 0.10.30__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- datacontract/api.py +1 -1
- datacontract/cli.py +27 -2
- datacontract/data_contract.py +2 -2
- datacontract/engines/soda/connections/duckdb_connection.py +1 -1
- datacontract/export/great_expectations_converter.py +6 -3
- datacontract/export/markdown_converter.py +1 -1
- datacontract/export/rdf_converter.py +2 -2
- datacontract/export/sql_type_converter.py +2 -2
- datacontract/imports/excel_importer.py +7 -5
- datacontract/imports/importer.py +1 -0
- datacontract/imports/importer_factory.py +7 -0
- datacontract/imports/json_importer.py +325 -0
- datacontract/init/init_template.py +1 -1
- datacontract/lint/resolve.py +1 -1
- datacontract/lint/schema.py +1 -1
- datacontract/schemas/datacontract-1.1.0.init.yaml +1 -1
- datacontract/schemas/datacontract-1.2.0.init.yaml +91 -0
- datacontract/schemas/datacontract-1.2.0.schema.json +2029 -0
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/METADATA +24 -34
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/RECORD +24 -21
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/WHEEL +0 -0
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/licenses/LICENSE +0 -0
- {datacontract_cli-0.10.28.dist-info → datacontract_cli-0.10.30.dist-info}/top_level.txt +0 -0
datacontract/api.py
CHANGED
@@ -10,7 +10,7 @@ from fastapi.security.api_key import APIKeyHeader
 from datacontract.data_contract import DataContract, ExportFormat
 from datacontract.model.run import Run

-DATA_CONTRACT_EXAMPLE_PAYLOAD = """dataContractSpecification: 1.1.0
+DATA_CONTRACT_EXAMPLE_PAYLOAD = """dataContractSpecification: 1.2.0
 id: urn:datacontract:checkout:orders-latest
 info:
   title: Orders Latest
datacontract/cli.py
CHANGED
@@ -469,8 +469,26 @@ def diff(
     console.print(result.changelog_str())


-@app.command()
+def _get_uvicorn_arguments(port: int, host: str, context: typer.Context) -> dict:
+    """
+    Take the default datacontract uvicorn arguments and merge them with the
+    extra arguments passed to the command to start the API.
+    """
+    default_args = {
+        "app": "datacontract.api:app",
+        "port": port,
+        "host": host,
+        "reload": True,
+    }
+
+    # Create a list of the extra arguments, remove the leading -- from the cli arguments
+    trimmed_keys = list(map(lambda x : str(x).replace("--", ""),context.args[::2]))
+    # Merge the two dicts and return them as one dict
+    return default_args | dict(zip(trimmed_keys, context.args[1::2]))
+
+@app.command(context_settings={"allow_extra_args": True, "ignore_unknown_options": True})
 def api(
+    ctx: Annotated[typer.Context, typer.Option(help="Extra arguments to pass to uvicorn.run().")],
     port: Annotated[int, typer.Option(help="Bind socket to this port.")] = 4242,
     host: Annotated[
         str, typer.Option(help="Bind socket to this host. Hint: For running in docker, set it to 0.0.0.0")
@@ -488,6 +506,9 @@ def api(

     To connect to servers (such as a Snowflake data source), set the credentials as environment variables as documented in
     https://cli.datacontract.com/#test
+
+    It is possible to run the API with extra arguments for `uvicorn.run()` as keyword arguments, e.g.:
+    `datacontract api --port 1234 --root_path /datacontract`.
     """
     import uvicorn
     from uvicorn.config import LOGGING_CONFIG
@@ -495,7 +516,11 @@ def api(
     log_config = LOGGING_CONFIG
     log_config["root"] = {"level": "INFO"}

-    uvicorn.run(…)
+    uvicorn_args = _get_uvicorn_arguments(port, host, ctx)
+    # Add the log config
+    uvicorn_args["log_config"] = log_config
+    # Run uvicorn
+    uvicorn.run(**uvicorn_args)


 def _print_logs(run):
datacontract/data_contract.py
CHANGED
@@ -4,7 +4,7 @@ import typing
 from open_data_contract_standard.model import CustomProperty, OpenDataContractStandard

 from datacontract.export.odcs_v3_exporter import to_odcs_v3
-from datacontract.imports.importer import Spec
+from datacontract.imports.importer import ImportFormat, Spec
 from datacontract.imports.odcs_v3_importer import import_from_odcs

 if typing.TYPE_CHECKING:
@@ -300,7 +300,7 @@ class DataContract:
         id = kwargs.get("id")
         owner = kwargs.get("owner")

-        if spec == Spec.odcs:
+        if spec == Spec.odcs or format == ImportFormat.excel:
            data_contract_specification_initial = DataContract.init(template=template, schema=schema)

            odcs_imported = importer_factory.create(format).import_source(
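The second hunk routes Excel imports through the same template-initialization path as ODCS imports, so an imported Excel contract is seeded from the init template before the imported content is merged in. A usage sketch, assuming the `import_from_source` entry point keeps its current shape ("contract.xlsx" is a placeholder):

    from datacontract.data_contract import DataContract

    # Excel sources are now initialized from the template like ODCS sources
    data_contract = DataContract.import_from_source(format="excel", source="contract.xlsx")
    print(data_contract.to_yaml())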
datacontract/engines/soda/connections/duckdb_connection.py
CHANGED
@@ -132,10 +132,10 @@ def setup_s3_connection(con, server):
     use_ssl = "true"
     url_style = "vhost"
     if server.endpointUrl is not None:
+        url_style = "path"
         s3_endpoint = server.endpointUrl.removeprefix("http://").removeprefix("https://")
         if server.endpointUrl.startswith("http://"):
             use_ssl = "false"
-            url_style = "path"

     if s3_access_key_id is not None:
         if s3_session_token is not None:
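Previously `url_style` switched to "path" only for `http://` endpoints; the fix applies path-style addressing whenever a custom `endpointUrl` is configured (e.g. MinIO behind HTTPS), while SSL is still disabled only for plain-http endpoints. A sketch of how such values typically land in DuckDB's httpfs settings (the endpoint is a placeholder):

    import duckdb

    con = duckdb.connect()
    con.execute("INSTALL httpfs")
    con.execute("LOAD httpfs")
    con.execute("SET s3_endpoint = 'minio.example.com:9000'")  # endpointUrl with its scheme stripped
    con.execute("SET s3_url_style = 'path'")                   # now applied for any custom endpoint
    con.execute("SET s3_use_ssl = true")                       # set to false only for http:// endpoints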
datacontract/export/great_expectations_converter.py
CHANGED
@@ -14,9 +14,6 @@ from datacontract.export.exporter import (
     Exporter,
     _check_models_for_export,
 )
-from datacontract.export.pandas_type_converter import convert_to_pandas_type
-from datacontract.export.spark_converter import to_spark_data_type
-from datacontract.export.sql_type_converter import convert_to_sql_type
 from datacontract.model.data_contract_specification import (
     DataContractSpecification,
     DeprecatedQuality,
@@ -167,10 +164,16 @@ def add_field_expectations(
     """
     if field.type is not None:
         if engine == GreatExpectationsEngine.spark.value:
+            from datacontract.export.spark_converter import to_spark_data_type
+
             field_type = to_spark_data_type(field).__class__.__name__
         elif engine == GreatExpectationsEngine.pandas.value:
+            from datacontract.export.pandas_type_converter import convert_to_pandas_type
+
             field_type = convert_to_pandas_type(field)
         elif engine == GreatExpectationsEngine.sql.value:
+            from datacontract.export.sql_type_converter import convert_to_sql_type
+
             field_type = convert_to_sql_type(field, sql_server_type)
         else:
             field_type = field.type
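Moving the three converter imports from module level into their engine branches means each type converter (and its transitive dependencies, such as pyspark) is imported only when that engine is actually requested, so importing this module no longer requires every optional dependency to be installed. The general pattern, as a minimal sketch:

    def convert_type(field_type: str, engine: str) -> str:
        if engine == "spark":
            # Deferred import: pyspark is only needed when the spark engine is chosen
            from pyspark.sql.types import StringType

            return StringType().simpleString()  # "string"
        return field_type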
datacontract/export/markdown_converter.py
CHANGED
@@ -153,7 +153,7 @@ def field_to_markdown(field_name: str, field: Field, level: int = 0) -> str:
     Returns:
         str: A Markdown table rows for the field.
     """
-    tabs = "
+    tabs = " " * level
     arrow = "↳" if level > 0 else ""
     column_name = f"{tabs}{arrow} {field_name}"

datacontract/export/rdf_converter.py
CHANGED
@@ -57,8 +57,8 @@ def to_rdf(data_contract_spec: DataContractSpecification, base) -> Graph:
     else:
         g = Graph(base=Namespace(""))

-    dc = Namespace("https://datacontract.com/DataContractSpecification/1.1.0/")
-    dcx = Namespace("https://datacontract.com/DataContractSpecification/1.1.0/Extension/")
+    dc = Namespace("https://datacontract.com/DataContractSpecification/1.2.0/")
+    dcx = Namespace("https://datacontract.com/DataContractSpecification/1.2.0/Extension/")

     g.bind("dc", dc)
     g.bind("dcx", dcx)
datacontract/export/sql_type_converter.py
CHANGED
@@ -194,8 +194,8 @@ def convert_to_databricks(field: Field) -> None | str:
        nested_fields = []
        for nested_field_name, nested_field in field.fields.items():
            nested_field_type = convert_to_databricks(nested_field)
-           nested_fields.append(f"{nested_field_name}: {nested_field_type}")
-       return f"STRUCT<{', '.join(nested_fields)}>"
+           nested_fields.append(f"{nested_field_name}:{nested_field_type}")
+       return f"STRUCT<{','.join(nested_fields)}>"
    if type.lower() in ["bytes"]:
        return "BINARY"
    if type.lower() in ["array"]:
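The removed lines are reconstructed here from their truncated remains; the replacements indicate the fix strips whitespace from the rendered STRUCT type, so nested fields now serialize without spaces around ":" and ",". For a hypothetical record with two nested fields:

    nested_fields = ["street:STRING", "zip:INT"]
    print(f"STRUCT<{','.join(nested_fields)}>")
    # STRUCT<street:STRING,zip:INT>   (previously rendered with spaces around ":" and ",")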
datacontract/imports/excel_importer.py
CHANGED
@@ -568,6 +568,8 @@ def import_roles(workbook: Workbook) -> Optional[List[Role]]:

     roles_list = []
     for row_idx in range(roles_range[0], roles_range[1]):
+        if len(list(roles_sheet.rows)) < row_idx + 1:
+            break
         row = list(roles_sheet.rows)[row_idx]

         role_name = get_cell_value(row, headers.get("role"))
@@ -678,7 +680,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
         elif server_type == "databricks":
             server.catalog = get_server_cell_value(workbook, sheet, "servers.databricks.catalog", index)
             server.host = get_server_cell_value(workbook, sheet, "servers.databricks.host", index)
-            server.schema = get_server_cell_value(workbook, sheet, "servers.databricks.schema", index)
+            server.schema_ = get_server_cell_value(workbook, sheet, "servers.databricks.schema", index)
         elif server_type == "glue":
             server.account = get_server_cell_value(workbook, sheet, "servers.glue.account", index)
             server.database = get_server_cell_value(workbook, sheet, "servers.glue.database", index)
@@ -692,7 +694,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
             server.database = get_server_cell_value(workbook, sheet, "servers.postgres.database", index)
             server.host = get_server_cell_value(workbook, sheet, "servers.postgres.host", index)
             server.port = get_server_cell_value(workbook, sheet, "servers.postgres.port", index)
-            server.schema = get_server_cell_value(workbook, sheet, "servers.postgres.schema", index)
+            server.schema_ = get_server_cell_value(workbook, sheet, "servers.postgres.schema", index)
         elif server_type == "s3":
             server.delimiter = get_server_cell_value(workbook, sheet, "servers.s3.delimiter", index)
             server.endpointUrl = get_server_cell_value(workbook, sheet, "servers.s3.endpointUrl", index)
@@ -703,13 +705,13 @@ def import_servers(workbook) -> Optional[List[Server]]:
             server.database = get_server_cell_value(workbook, sheet, "servers.snowflake.database", index)
             server.host = get_server_cell_value(workbook, sheet, "servers.snowflake.host", index)
             server.port = get_server_cell_value(workbook, sheet, "servers.snowflake.port", index)
-            server.schema = get_server_cell_value(workbook, sheet, "servers.snowflake.schema", index)
+            server.schema_ = get_server_cell_value(workbook, sheet, "servers.snowflake.schema", index)
             server.warehouse = get_server_cell_value(workbook, sheet, "servers.snowflake.warehouse", index)
         elif server_type == "sqlserver":
             server.database = get_server_cell_value(workbook, sheet, "servers.sqlserver.database", index)
             server.host = get_server_cell_value(workbook, sheet, "servers.sqlserver.host", index)
             server.port = get_server_cell_value(workbook, sheet, "servers.sqlserver.port", index)
-            server.schema = get_server_cell_value(workbook, sheet, "servers.sqlserver.schema", index)
+            server.schema_ = get_server_cell_value(workbook, sheet, "servers.sqlserver.schema", index)
         else:
             # Custom server type - grab all possible fields
             server.account = get_server_cell_value(workbook, sheet, "servers.custom.account", index)
@@ -724,7 +726,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
             server.path = get_server_cell_value(workbook, sheet, "servers.custom.path", index)
             server.port = get_server_cell_value(workbook, sheet, "servers.custom.port", index)
             server.project = get_server_cell_value(workbook, sheet, "servers.custom.project", index)
-            server.schema = get_server_cell_value(workbook, sheet, "servers.custom.schema", index)
+            server.schema_ = get_server_cell_value(workbook, sheet, "servers.custom.schema", index)
             server.stagingDir = get_server_cell_value(workbook, sheet, "servers.custom.stagingDir", index)
             server.table = get_server_cell_value(workbook, sheet, "servers.custom.table", index)
             server.view = get_server_cell_value(workbook, sheet, "servers.custom.view", index)
datacontract/imports/importer_factory.py
CHANGED
@@ -119,3 +119,10 @@ importer_factory.register_lazy_importer(
     module_path="datacontract.imports.excel_importer",
     class_name="ExcelImporter",
 )
+
+
+importer_factory.register_lazy_importer(
+    name=ImportFormat.json,
+    module_path="datacontract.imports.json_importer",
+    class_name="JsonImporter",
+)
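The one-line change to datacontract/imports/importer.py (+1 -0) is not expanded in this diff; for the registration above to resolve, `ImportFormat` presumably gains a `json` member, roughly:

    from enum import Enum


    class ImportFormat(str, Enum):
        # ... existing formats ...
        json = "json"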
datacontract/imports/json_importer.py
ADDED
@@ -0,0 +1,325 @@
import json
import os
import re
from typing import Any, Dict, List, Optional, Tuple

from datacontract.imports.importer import Importer
from datacontract.model.data_contract_specification import DataContractSpecification, Model, Server


class JsonImporter(Importer):
    def import_source(
        self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
    ) -> DataContractSpecification:
        return import_json(data_contract_specification, source)


def is_ndjson(file_path: str) -> bool:
    """Check if a file contains newline-delimited JSON."""
    with open(file_path, "r", encoding="utf-8") as file:
        for _ in range(5):
            line = file.readline().strip()
            if not line:
                continue
            try:
                json.loads(line)
                return True
            except json.JSONDecodeError:
                break
    return False


def import_json(
    data_contract_specification: DataContractSpecification, source: str, include_examples: bool = False
) -> DataContractSpecification:
    # use the file name as base model name
    base_model_name = os.path.splitext(os.path.basename(source))[0]

    # check if file is newline-delimited JSON
    if is_ndjson(source):
        # load NDJSON data
        json_data = []
        with open(source, "r", encoding="utf-8") as file:
            for line in file:
                line = line.strip()
                if line:
                    try:
                        json_data.append(json.loads(line))
                    except json.JSONDecodeError:
                        continue
    else:
        # load regular JSON data
        with open(source, "r", encoding="utf-8") as file:
            json_data = json.load(file)

    if data_contract_specification.servers is None:
        data_contract_specification.servers = {}

    data_contract_specification.servers["production"] = Server(type="local", path=source, format="json")

    # initialisation
    models = {}

    if isinstance(json_data, list) and json_data:
        # Array of items
        if all(isinstance(item, dict) for item in json_data[:5]):
            # Array of objects, as table
            fields = {}
            for item in json_data[:20]:
                for key, value in item.items():
                    field_def = generate_field_definition(value, key, base_model_name, models)
                    if key in fields:
                        fields[key] = merge_field_definitions(fields[key], field_def)
                    else:
                        fields[key] = field_def

            models[base_model_name] = {
                "type": "table",
                "description": f"Generated from JSON array in {source}",
                "fields": fields,
                "examples": json_data[:3] if include_examples else None,
            }
        else:
            # Simple array
            item_type, item_format = infer_array_type(json_data[:20])
            models[base_model_name] = {
                "type": "array",
                "description": f"Generated from JSON array in {source}",
                "items": {"type": item_type, "format": item_format} if item_format else {"type": item_type},
                "examples": [json_data[:5]] if include_examples else None,
            }
    elif isinstance(json_data, dict):
        # Single object
        fields = {}
        for key, value in json_data.items():
            fields[key] = generate_field_definition(value, key, base_model_name, models)

        models[base_model_name] = {
            "type": "object",
            "description": f"Generated from JSON object in {source}",
            "fields": fields,
            "examples": [json_data] if include_examples else None,
        }
    else:
        # Primitive value
        field_type, field_format = determine_type_and_format(json_data)
        models[base_model_name] = {
            "type": field_type,
            "description": f"Generated from JSON primitive in {source}",
            "format": field_format,
            "examples": [json_data] if include_examples and field_type != "boolean" else None,
        }

    for model_name, model_def in models.items():
        model_type = model_def.pop("type")
        data_contract_specification.models[model_name] = Model(type=model_type, **model_def)

    return data_contract_specification


def generate_field_definition(
    value: Any, field_name: str, parent_model: str, models: Dict[str, Dict[str, Any]]
) -> Dict[str, Any]:
    """Generate a field definition for a JSON value, creating nested models."""

    if isinstance(value, dict):
        # Handle object fields
        fields = {}
        for key, nested_value in value.items():
            fields[key] = generate_field_definition(nested_value, key, parent_model, models)

        return {"type": "object", "fields": fields}

    elif isinstance(value, list):
        # Handle array fields
        if not value:
            return {"type": "array", "items": {"type": "string"}}

        if all(isinstance(item, dict) for item in value):
            # Array of objects
            fields = {}
            for item in value:
                for key, nested_value in item.items():
                    field_def = generate_field_definition(nested_value, key, parent_model, models)
                    if key in fields:
                        fields[key] = merge_field_definitions(fields[key], field_def)
                    else:
                        fields[key] = field_def

            return {"type": "array", "items": {"type": "object", "fields": fields}}

        elif all(isinstance(item, list) for item in value):
            # Array of arrays
            inner_type, inner_format = infer_array_type(value[0])
            return {
                "type": "array",
                "items": {
                    "type": "array",
                    "items": {"type": inner_type, "format": inner_format} if inner_format else {"type": inner_type},
                },
                "examples": value[:5],  # Include examples for nested arrays
            }

        else:
            # Array of simple or mixed types
            item_type, item_format = infer_array_type(value)
            items_def = {"type": item_type}
            if item_format:
                items_def["format"] = item_format

            field_def = {"type": "array", "items": items_def}

            # Add examples if appropriate
            sample_values = [item for item in value[:5] if item is not None]
            if sample_values:
                field_def["examples"] = sample_values

            return field_def

    else:
        # Handle primitive types
        field_type, field_format = determine_type_and_format(value)
        field_def = {"type": field_type}
        if field_format:
            field_def["format"] = field_format

        # Add examples
        if value is not None and field_type != "boolean":
            field_def["examples"] = [value]

        return field_def


def infer_array_type(array: List) -> Tuple[str, Optional[str]]:
    """Infer the common type of items in an array."""
    if not array:
        return "string", None

    # if all items are dictionaries with the same structure
    if all(isinstance(item, dict) for item in array):
        return "object", None

    # if all items are of the same primitive type
    non_null_items = [item for item in array if item is not None]
    if not non_null_items:
        return "null", None

    types_and_formats = [determine_type_and_format(item) for item in non_null_items]
    types = {t for t, _ in types_and_formats}
    formats = {f for _, f in types_and_formats if f is not None}

    # simplify type combinations
    if types == {"integer", "number"}:
        return "number", None
    if len(types) == 1:
        type_name = next(iter(types))
        format_name = next(iter(formats)) if len(formats) == 1 else None
        return type_name, format_name
    if all(t in {"string", "integer", "number", "boolean", "null"} for t in types):
        # If all string values have the same format, keep it
        if len(formats) == 1 and "string" in types:
            return "string", next(iter(formats))
        return "string", None

    # Mixed types
    return "string", None


def determine_type_and_format(value: Any) -> Tuple[str, Optional[str]]:
    """determine the datacontract type and format for a JSON value."""
    if value is None:
        return "null", None
    elif isinstance(value, bool):
        return "boolean", None
    elif isinstance(value, int):
        return "integer", None
    elif isinstance(value, float):
        return "number", None
    elif isinstance(value, str):
        try:
            if re.match(r"^\d{4}-\d{2}-\d{2}$", value):
                return "string", "date"
            elif re.match(r"^\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}(\.\d+)?(Z|[+-]\d{2}:\d{2})?$", value):
                return "string", "date-time"
            elif re.match(r"^[\w\.-]+@([\w-]+\.)+[\w-]{2,4}$", value):
                return "string", "email"
            elif re.match(r"^[a-f0-9]{8}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{4}-?[a-f0-9]{12}$", value.lower()):
                return "string", "uuid"
            else:
                return "string", None
        except re.error:
            return "string", None
    elif isinstance(value, dict):
        return "object", None
    elif isinstance(value, list):
        return "array", None
    else:
        return "string", None


def merge_field_definitions(field1: Dict[str, Any], field2: Dict[str, Any]) -> Dict[str, Any]:
    """Merge two field definitions."""
    result = field1.copy()
    if field1.get("type") == "object" and field2.get("type") != "object":
        return field1
    if field2.get("type") == "object" and field1.get("type") != "object":
        return field2
    # Handle type differences
    if field1.get("type") != field2.get("type"):
        type1, _ = field1.get("type", "string"), field1.get("format")
        type2, _ = field2.get("type", "string"), field2.get("format")

        if type1 == "integer" and type2 == "number" or type1 == "number" and type2 == "integer":
            common_type = "number"
            common_format = None
        elif "string" in [type1, type2]:
            common_type = "string"
            common_format = None
        elif all(t in ["string", "integer", "number", "boolean", "null"] for t in [type1, type2]):
            common_type = "string"
            common_format = None
        elif type1 == "array" and type2 == "array":
            # Handle mixed array types
            items1 = field1.get("items", {})
            items2 = field2.get("items", {})
            if items1.get("type") == "object" or items2.get("type") == "object":
                if items1.get("type") == "object" and items2.get("type") == "object":
                    merged_items = merge_field_definitions(items1, items2)
                else:
                    merged_items = items1 if items1.get("type") == "object" else items2
                return {"type": "array", "items": merged_items}
            else:
                merged_items = merge_field_definitions(items1, items2)
                return {"type": "array", "items": merged_items}
        else:
            common_type = "array" if "array" in [type1, type2] else "object"
            common_format = None

        result["type"] = common_type
        if common_format:
            result["format"] = common_format
        elif "format" in result:
            del result["format"]

    # Merge examples
    if "examples" in field2:
        if "examples" in result:
            combined = result["examples"] + [ex for ex in field2["examples"] if ex not in result["examples"]]
            result["examples"] = combined[:5]  # Limit to 5 examples
        else:
            result["examples"] = field2["examples"]

    # Handle nested structures
    if result.get("type") == "array" and "items" in field1 and "items" in field2:
        result["items"] = merge_field_definitions(field1["items"], field2["items"])
    elif result.get("type") == "object" and "fields" in field1 and "fields" in field2:
        # Merge fields from both objects
        merged_fields = field1["fields"].copy()
        for key, field_def in field2["fields"].items():
            if key in merged_fields:
                merged_fields[key] = merge_field_definitions(merged_fields[key], field_def)
            else:
                merged_fields[key] = field_def
        result["fields"] = merged_fields

    return result
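With the importer registered, a JSON (or NDJSON) file can be imported via `datacontract import --format json --source orders.json`, or programmatically. A sketch, assuming a local orders.json containing an array of objects such as [{"id": "f47ac10b-58cc-4372-a567-0e02b2c3d479", "amount": 12.5}, {"id": "9c858901-8a57-4791-81fe-4c455b099bc9", "amount": 3}]:

    from datacontract.imports.json_importer import import_json
    from datacontract.model.data_contract_specification import DataContractSpecification

    spec = import_json(DataContractSpecification(dataContractSpecification="1.2.0"), "orders.json")
    # The array of objects becomes one table model named after the file; "id" matches the
    # UUID pattern (format "uuid"), and the integer/float samples for "amount" merge to "number".
    print(spec.models["orders"].fields)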
datacontract/lint/resolve.py
CHANGED
@@ -303,7 +303,7 @@ def _resolve_data_contract_from_str(
     # if ODCS, then validate the ODCS schema and import to DataContractSpecification directly
     odcs = parse_odcs_v3_from_str(data_contract_str)

-    data_contract_specification = DataContractSpecification(dataContractSpecification="1.1.0")
+    data_contract_specification = DataContractSpecification(dataContractSpecification="1.2.0")
     return import_from_odcs(data_contract_specification, odcs)

     logging.info("Importing DCS")
datacontract/lint/schema.py
CHANGED
@@ -8,7 +8,7 @@ import requests

 from datacontract.model.exceptions import DataContractException

-DEFAULT_DATA_CONTRACT_SCHEMA = "datacontract-1.1.0.schema.json"
+DEFAULT_DATA_CONTRACT_SCHEMA = "datacontract-1.2.0.schema.json"


 def fetch_schema(location: str = None) -> Dict[str, Any]:
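The default schema now points at the bundled 1.2.0 JSON Schema (added in this release as datacontract/schemas/datacontract-1.2.0.schema.json). A usage sketch, assuming `fetch_schema` falls back to that default when no location is given:

    from datacontract.lint.schema import fetch_schema

    schema = fetch_schema()  # presumably resolves datacontract-1.2.0.schema.json
    print(sorted(schema.keys()))  # top-level JSON Schema keys, e.g. "properties", "required"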
|