datacontract-cli 0.10.10__py3-none-any.whl → 0.10.11__py3-none-any.whl

This diff shows the changes between two publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.

@@ -46,8 +46,8 @@ class DataContract:
46
46
  publish_url: str = None,
47
47
  publish_to_opentelemetry: bool = False,
48
48
  spark: "SparkSession" = None,
49
- inline_definitions: bool = False,
50
- inline_quality: bool = False,
49
+ inline_definitions: bool = True,
50
+ inline_quality: bool = True,
51
51
  ):
52
52
  self._data_contract_file = data_contract_file
53
53
  self._data_contract_str = data_contract_str
@@ -87,8 +87,8 @@ class DataContract:
87
87
  self._data_contract_str,
88
88
  self._data_contract,
89
89
  self._schema_location,
90
- inline_definitions=True,
91
- inline_quality=True,
90
+ inline_definitions=self._inline_definitions,
91
+ inline_quality=self._inline_quality,
92
92
  )
93
93
  run.checks.append(
94
94
  Check(type="lint", result="passed", name="Data contract is syntactically valid", engine="datacontract")
@@ -140,7 +140,12 @@ class DataContract:
140
140
  try:
141
141
  run.log_info("Testing data contract")
142
142
  data_contract = resolve.resolve_data_contract(
143
- self._data_contract_file, self._data_contract_str, self._data_contract, self._schema_location
143
+ self._data_contract_file,
144
+ self._data_contract_str,
145
+ self._data_contract,
146
+ self._schema_location,
147
+ inline_definitions=self._inline_definitions,
148
+ inline_quality=self._inline_quality,
144
149
  )
145
150
 
146
151
  if data_contract.models is None or len(data_contract.models) == 0:
@@ -304,8 +309,8 @@ class DataContract:
304
309
  self._data_contract_str,
305
310
  self._data_contract,
306
311
  schema_location=self._schema_location,
307
- inline_definitions=True,
308
- inline_quality=True,
312
+ inline_definitions=self._inline_definitions,
313
+ inline_quality=self._inline_quality,
309
314
  )
310
315
 
311
316
  return exporter_factory.create(export_format).export(
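The three hunks above change `DataContract` so that `inline_definitions` and `inline_quality` default to `True` and are threaded through `lint`, `test`, and `export` instead of being hard-coded. A minimal usage sketch (the file path is a placeholder; the keyword names are taken from the hunks):

```python
from datacontract.data_contract import DataContract

# Defaults now resolve definitions and quality attributes inline; pass False
# explicitly to keep the contract unresolved (path below is illustrative).
data_contract = DataContract(
    data_contract_file="datacontract.yaml",
    inline_definitions=False,
    inline_quality=False,
)
run = data_contract.test()  # lint(), test(), and export() now reuse these flags
```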
@@ -148,7 +148,10 @@ def check_jsonschema(run: Run, data_contract: DataContractSpecification, server:
148
148
  schema = to_jsonschema(model_name, model)
149
149
  run.log_info(f"jsonschema: {schema}")
150
150
 
151
- validate = fastjsonschema.compile(schema)
151
+ validate = fastjsonschema.compile(
152
+ schema,
153
+ formats={"uuid": r"^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$"},
154
+ )
152
155
 
153
156
  # Process files based on server type
154
157
  if server.type == "local":
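With the `formats` argument, `fastjsonschema` now checks `"format": "uuid"` values against the regex above instead of ignoring them. A small self-contained sketch of the same mechanism (schema and sample value are illustrative):

```python
import fastjsonschema

# Map the custom "uuid" format name to a regex, as in the hunk above.
uuid_regex = r"^[0-9a-fA-F]{8}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{4}\b-[0-9a-fA-F]{12}$"
validate = fastjsonschema.compile(
    {"type": "string", "format": "uuid"},
    formats={"uuid": uuid_regex},
)

validate("123e4567-e89b-12d3-a456-426614174000")  # passes
# validate("not-a-uuid") raises fastjsonschema.JsonSchemaException
```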
@@ -49,24 +49,28 @@ def get_duckdb_connection(data_contract, server, run: Run):
49
49
  f"""CREATE VIEW "{model_name}" AS SELECT * FROM read_csv('{model_path}', hive_partitioning=1, columns={columns});"""
50
50
  )
51
51
  elif server.format == "delta":
52
+ if server.type == "local":
53
+ delta_table_arrow = DeltaTable(model_path).to_pyarrow_dataset()
54
+ con.register(model_name, delta_table_arrow)
55
+
52
56
  if server.type == "azure":
53
57
  # After switching to native delta table support
54
58
  # in https://github.com/datacontract/datacontract-cli/issues/258,
55
59
  # azure storage should also work
56
60
  # https://github.com/duckdb/duckdb_delta/issues/21
57
61
  raise NotImplementedError("Support for Delta Tables on Azure Storage is not implemented yet")
58
-
59
- storage_options = {
60
- "AWS_ENDPOINT_URL": server.endpointUrl,
61
- "AWS_ACCESS_KEY_ID": os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID"),
62
- "AWS_SECRET_ACCESS_KEY": os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY"),
63
- "AWS_REGION": os.getenv("DATACONTRACT_S3_REGION", "us-east-1"),
64
- "AWS_ALLOW_HTTP": "True" if server.endpointUrl.startswith("http://") else "False",
65
- }
66
-
67
- delta_table_arrow = DeltaTable(model_path, storage_options=storage_options).to_pyarrow_dataset()
68
-
69
- con.register(model_name, delta_table_arrow)
62
+ if server.type == "s3":
63
+ storage_options = {
64
+ "AWS_ENDPOINT_URL": server.endpointUrl,
65
+ "AWS_ACCESS_KEY_ID": os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID"),
66
+ "AWS_SECRET_ACCESS_KEY": os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY"),
67
+ "AWS_REGION": os.getenv("DATACONTRACT_S3_REGION", "us-east-1"),
68
+ "AWS_ALLOW_HTTP": "True" if server.endpointUrl.startswith("http://") else "False",
69
+ }
70
+
71
+ delta_table_arrow = DeltaTable(model_path, storage_options=storage_options).to_pyarrow_dataset()
72
+
73
+ con.register(model_name, delta_table_arrow)
70
74
  return con
71
75
 
72
76
 
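The reworked branch registers local Delta tables in DuckDB through a PyArrow dataset, while the S3 branch keeps the storage-options path. A hedged, standalone sketch of the local case (table path and view name are placeholders):

```python
import duckdb
from deltalake import DeltaTable

con = duckdb.connect(database=":memory:")

# Local Delta table exposed to DuckDB as a PyArrow dataset (path is illustrative).
delta_table_arrow = DeltaTable("./data/orders_delta").to_pyarrow_dataset()
con.register("orders", delta_table_arrow)

print(con.sql('SELECT COUNT(*) FROM "orders"').fetchone())
```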
@@ -4,17 +4,20 @@ import yaml
4
4
 
5
5
 
6
6
  def to_snowflake_soda_configuration(server):
7
+ prefix = "DATACONTRACT_SNOWFLAKE_"
8
+ snowflake_soda_params = {k.replace(prefix, "").lower(): v for k, v in os.environ.items() if k.startswith(prefix)}
9
+
10
+ # backward compatibility
11
+ if "connection_timeout" not in snowflake_soda_params:
12
+ snowflake_soda_params["connection_timeout"] = "5" # minutes
13
+
7
14
  soda_configuration = {
8
15
  f"data_source {server.type}": {
9
16
  "type": "snowflake",
10
- "username": os.getenv("DATACONTRACT_SNOWFLAKE_USERNAME"),
11
- "password": os.getenv("DATACONTRACT_SNOWFLAKE_PASSWORD"),
12
- "role": os.getenv("DATACONTRACT_SNOWFLAKE_ROLE"),
13
17
  "account": server.account,
14
18
  "database": server.database,
15
19
  "schema": server.schema_,
16
- "warehouse": os.getenv("DATACONTRACT_SNOWFLAKE_WAREHOUSE"),
17
- "connection_timeout": 5, # minutes
20
+ **snowflake_soda_params,
18
21
  }
19
22
  }
20
23
  soda_configuration_str = yaml.dump(soda_configuration)
@@ -3,6 +3,7 @@ from importlib.metadata import version
3
3
  from typing import Tuple
4
4
 
5
5
  import pytz
6
+ from datacontract.model.exceptions import DataContractException
6
7
 
7
8
  import datacontract.model.data_contract_specification as spec
8
9
  from datacontract.export.sql_type_converter import convert_to_sql_type
@@ -48,17 +49,7 @@ Using {5} Types for the field types
48
49
  {0}
49
50
  */
50
51
  """.format(generated_info)
51
-
52
- note = """Note project_info {{
53
- '''
54
- {0}
55
- '''
56
- }}
57
- """.format(generated_info)
58
-
59
- return """{0}
60
- {1}
61
- """.format(comment, note)
52
+ return comment
62
53
 
63
54
 
64
55
  def get_version() -> str:
@@ -70,19 +61,18 @@ def get_version() -> str:
70
61
 
71
62
  def generate_project_info(contract: spec.DataContractSpecification) -> str:
72
63
  return """Project "{0}" {{
73
- Note: "{1}"
64
+ Note: '''{1}'''
74
65
  }}\n
75
- """.format(contract.info.title, " ".join(contract.info.description.splitlines()))
66
+ """.format(contract.info.title, contract.info.description)
76
67
 
77
68
 
78
69
  def generate_table(model_name: str, model: spec.Model, server: spec.Server) -> str:
79
70
  result = """Table "{0}" {{
80
- Note: "{1}"
81
- """.format(model_name, " ".join(model.description.splitlines()))
71
+ Note: {1}
72
+ """.format(model_name, formatDescription(model.description))
82
73
 
83
74
  references = []
84
75
 
85
- # Add all the fields
86
76
  for field_name, field in model.fields.items():
87
77
  ref, field_string = generate_field(field_name, field, model_name, server)
88
78
  if ref is not None:
@@ -102,6 +92,30 @@ Note: "{1}"
102
92
 
103
93
 
104
94
  def generate_field(field_name: str, field: spec.Field, model_name: str, server: spec.Server) -> Tuple[str, str]:
95
+ if field.primary:
96
+ if field.required is not None:
97
+ if not field.required:
98
+ raise DataContractException(
99
+ type="lint",
100
+ name="Primary key fields cannot have required == False.",
101
+ result="error",
102
+ reason="Primary key fields cannot have required == False.",
103
+ engine="datacontract",
104
+ )
105
+ else:
106
+ field.required = True
107
+ if field.unique is not None:
108
+ if not field.unique:
109
+ raise DataContractException(
110
+ type="lint",
111
+ name="Primary key fields cannot have unique == False",
112
+ result="error",
113
+ reason="Primary key fields cannot have unique == False.",
114
+ engine="datacontract",
115
+ )
116
+ else:
117
+ field.unique = True
118
+
105
119
  field_attrs = []
106
120
  if field.primary:
107
121
  field_attrs.append("pk")
@@ -115,13 +129,21 @@ def generate_field(field_name: str, field: spec.Field, model_name: str, server:
115
129
  field_attrs.append("null")
116
130
 
117
131
  if field.description:
118
- field_attrs.append('Note: "{0}"'.format(" ".join(field.description.splitlines())))
132
+ field_attrs.append("""Note: {0}""".format(formatDescription(field.description)))
119
133
 
120
134
  field_type = field.type if server is None else convert_to_sql_type(field, server.type)
121
135
 
122
136
  field_str = '"{0}" "{1}" [{2}]'.format(field_name, field_type, ",".join(field_attrs))
123
137
  ref_str = None
124
138
  if (field.references) is not None:
125
- # we always assume many to one, as datacontract doesn't really give us more info
126
- ref_str = "{0}.{1} > {2}".format(model_name, field_name, field.references)
139
+ if field.unique:
140
+ ref_str = "{0}.{1} - {2}".format(model_name, field_name, field.references)
141
+ else:
142
+ ref_str = "{0}.{1} > {2}".format(model_name, field_name, field.references)
127
143
  return (ref_str, field_str)
144
+
145
+ def formatDescription(input: str) -> str:
146
+ if '\n' in input or '\r' in input or '"' in input:
147
+ return "'''{0}'''".format(input)
148
+ else:
149
+ return '"{0}"'.format(input)
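`formatDescription` picks DBML's triple-quoted note syntax whenever a description contains line breaks or double quotes, and plain quotes otherwise. A quick standalone illustration of that rule (the helper is copied here in simplified form):

```python
def format_description(text: str) -> str:
    # Multi-line or quoted descriptions need DBML's ''' ... ''' note syntax.
    if "\n" in text or "\r" in text or '"' in text:
        return "'''{0}'''".format(text)
    return '"{0}"'.format(text)

print(format_description("A single-line note"))  # -> "A single-line note"
print(format_description('He said "hi"'))        # -> '''He said "hi"'''
```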
@@ -37,7 +37,7 @@ class ExportFormat(str, Enum):
37
37
  spark = "spark"
38
38
 
39
39
  @classmethod
40
- def get_suported_formats(cls):
40
+ def get_supported_formats(cls):
41
41
  return list(map(lambda c: c.value, cls))
42
42
 
43
43
 
@@ -36,10 +36,7 @@ def to_property(field: Field) -> dict:
36
36
  property = {}
37
37
  json_type, json_format = convert_type_format(field.type, field.format)
38
38
  if json_type is not None:
39
- if field.required:
40
- property["type"] = json_type
41
- else:
42
- property["type"] = [json_type, "null"]
39
+ property["type"] = json_type
43
40
  if json_format is not None:
44
41
  property["format"] = json_format
45
42
  if field.unique:
@@ -186,7 +186,7 @@ def create_typed_field(dtype: str) -> Field:
186
186
  """
187
187
  field = Field()
188
188
  dtype = dtype.strip().lower().replace(" ", "")
189
- if dtype.startswith(("array", "struct")):
189
+ if dtype.startswith(("array", "struct", "map")):
190
190
  orig_dtype: str = dtype
191
191
  if dtype.startswith("array"):
192
192
  field.type = "array"
@@ -195,6 +195,12 @@ def create_typed_field(dtype: str) -> Field:
195
195
  field.type = "struct"
196
196
  for f in split_struct(orig_dtype[7:-1]):
197
197
  field.fields[f.split(":", 1)[0].strip()] = create_typed_field(f.split(":", 1)[1])
198
+ elif dtype.startswith("map"):
199
+ field.type = "map"
200
+ key_type = orig_dtype[4:-1].split(",", 1)[0]
201
+ value_type = orig_dtype[4:-1].split(",", 1)[1]
202
+ field.keys = create_typed_field(key_type)
203
+ field.values = create_typed_field(value_type)
198
204
  else:
199
205
  field.type = map_type_from_sql(dtype)
200
206
  return field
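`create_typed_field` now also decomposes `map<...>` types into `keys` and `values`, next to the existing `array` and `struct` handling. A simplified standalone sketch of the new parsing step (the type string is illustrative):

```python
# Mirrors the map<...> branch above in simplified form.
dtype = "map<string,int>".strip().lower().replace(" ", "")

if dtype.startswith("map"):
    inner = dtype[4:-1]                       # "string,int"
    key_type, value_type = inner.split(",", 1)
    print(key_type, value_type)               # string int
    # The importer turns these into nested Fields via recursive create_typed_field calls.
```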
@@ -30,5 +30,5 @@ class ImportFormat(str, Enum):
30
30
  spark = "spark"
31
31
 
32
32
  @classmethod
33
- def get_suported_formats(cls):
33
+ def get_supported_formats(cls):
34
34
  return list(map(lambda c: c.value, cls))
@@ -10,140 +10,49 @@ from datacontract.model.exceptions import DataContractException
10
10
  class JsonSchemaImporter(Importer):
11
11
  def import_source(
12
12
  self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
13
- ) -> dict:
13
+ ) -> DataContractSpecification:
14
14
  return import_jsonschema(data_contract_specification, source)
15
15
 
16
16
 
17
- def convert_json_schema_properties(properties, is_definition=False):
18
- fields = {}
19
- for field_name, field_schema in properties.items():
20
- field_kwargs = {}
21
- field_type = field_schema.get("type")
22
-
23
- # Determine if the field is required and set the type to the non-null option if applicable
24
- if isinstance(field_type, list) and "null" in field_type:
25
- field_kwargs["required"] = False
26
- non_null_types = [t for t in field_type if t != "null"]
27
- if non_null_types:
28
- field_type = non_null_types[0]
29
- else:
30
- field_type = None
31
- else:
32
- field_kwargs["required"] = True
33
-
34
- # Set the non-null type
35
- if field_type:
36
- field_kwargs["type"] = field_type
37
-
38
- for key, value in field_schema.items():
39
- match key:
40
- case "title":
41
- field_kwargs["title"] = value
42
- case "type":
43
- pass # type is already handled above
44
- case "format":
45
- field_kwargs["format"] = value
46
- case "description":
47
- field_kwargs["description"] = value
48
- case "pattern":
49
- field_kwargs["pattern"] = value
50
- case "minLength":
51
- field_kwargs["minLength"] = value
52
- case "maxLength":
53
- field_kwargs["maxLength"] = value
54
- case "minimum":
55
- field_kwargs["minimum"] = value
56
- case "exclusiveMinimum":
57
- field_kwargs["exclusiveMinimum"] = value
58
- case "maximum":
59
- field_kwargs["maximum"] = value
60
- case "exclusiveMaximum":
61
- field_kwargs["exclusiveMaximum"] = value
62
- case "enum":
63
- field_kwargs["enum"] = value
64
- case "tags":
65
- field_kwargs["tags"] = value
66
- case "properties":
67
- field_kwargs["fields"] = convert_json_schema_properties(value, is_definition=is_definition)
68
- case "items":
69
- field_kwargs["items"] = convert_json_schema_properties(value, is_definition=is_definition)
70
-
71
- if is_definition:
72
- field = Definition(**field_kwargs)
73
- else:
74
- field = Field(**field_kwargs)
75
- fields[field_name] = field
76
-
77
- return fields
78
-
79
-
80
17
  def import_jsonschema(data_contract_specification: DataContractSpecification, source: str) -> DataContractSpecification:
81
18
  if data_contract_specification.models is None:
82
19
  data_contract_specification.models = {}
83
20
 
21
+ json_schema = load_and_validate_json_schema(source)
22
+
23
+ title = json_schema.get("title", "default_model")
24
+ description = json_schema.get("description")
25
+ type_ = json_schema.get("type")
26
+ properties = json_schema.get("properties", {})
27
+ required_properties = json_schema.get("required", [])
28
+
29
+ fields_kwargs = jsonschema_to_args(properties, required_properties)
30
+ fields = {name: Field(**kwargs) for name, kwargs in fields_kwargs.items()}
31
+
32
+ model = Model(description=description, type=type_, title=title, fields=fields)
33
+ data_contract_specification.models[title] = model
34
+
35
+ definitions = json_schema.get("definitions", {})
36
+ for name, schema in definitions.items():
37
+ kwargs = schema_to_args(schema)
38
+ data_contract_specification.definitions[name] = Definition(name=name, **kwargs)
39
+
40
+ return data_contract_specification
41
+
42
+
43
+ def load_and_validate_json_schema(source):
84
44
  try:
85
45
  with open(source, "r") as file:
86
46
  json_schema = json.loads(file.read())
87
- validator = fastjsonschema.compile({})
88
- validator(json_schema)
89
-
90
- model = Model(
91
- description=json_schema.get("description"),
92
- type=json_schema.get("type"),
93
- title=json_schema.get("title"),
94
- fields=convert_json_schema_properties(json_schema.get("properties", {})),
95
- )
96
- data_contract_specification.models[json_schema.get("title", "default_model")] = model
97
-
98
- if "definitions" in json_schema:
99
- for def_name, def_schema in json_schema["definitions"].items():
100
- definition_kwargs = {}
101
-
102
- for key, value in def_schema.items():
103
- match key:
104
- case "domain":
105
- definition_kwargs["domain"] = value
106
- case "title":
107
- definition_kwargs["title"] = value
108
- case "description":
109
- definition_kwargs["description"] = value
110
- case "type":
111
- definition_kwargs["type"] = value
112
- case "enum":
113
- definition_kwargs["enum"] = value
114
- case "format":
115
- definition_kwargs["format"] = value
116
- case "minLength":
117
- definition_kwargs["minLength"] = value
118
- case "maxLength":
119
- definition_kwargs["maxLength"] = value
120
- case "pattern":
121
- definition_kwargs["pattern"] = value
122
- case "minimum":
123
- definition_kwargs["minimum"] = value
124
- case "exclusiveMinimum":
125
- definition_kwargs["exclusiveMinimum"] = value
126
- case "maximum":
127
- definition_kwargs["maximum"] = value
128
- case "exclusiveMaximum":
129
- definition_kwargs["exclusiveMaximum"] = value
130
- case "pii":
131
- definition_kwargs["pii"] = value
132
- case "classification":
133
- definition_kwargs["classification"] = value
134
- case "tags":
135
- definition_kwargs["tags"] = value
136
- case "properties":
137
- definition_kwargs["fields"] = convert_json_schema_properties(value, is_definition=True)
138
-
139
- definition = Definition(name=def_name, **definition_kwargs)
140
- data_contract_specification.definitions[def_name] = definition
47
+
48
+ validator = fastjsonschema.compile({})
49
+ validator(json_schema)
141
50
 
142
51
  except fastjsonschema.JsonSchemaException as e:
143
52
  raise DataContractException(
144
53
  type="schema",
145
54
  name="Parse json schema",
146
- reason=f"Failed to parse json schema from {source}: {e}",
55
+ reason=f"Failed to validate json schema from {source}: {e}",
147
56
  engine="datacontract",
148
57
  )
149
58
 
@@ -155,5 +64,82 @@ def import_jsonschema(data_contract_specification: DataContractSpecification, so
155
64
  engine="datacontract",
156
65
  original_exception=e,
157
66
  )
67
+ return json_schema
158
68
 
159
- return data_contract_specification
69
+
70
+ def jsonschema_to_args(properties, required_properties):
71
+ args = {}
72
+ for property, property_schema in properties.items():
73
+ is_required = property in required_properties
74
+ args[property] = schema_to_args(property_schema, is_required)
75
+
76
+ return args
77
+
78
+
79
+ def schema_to_args(property_schema, is_required: bool = None) -> dict:
80
+ direct_mappings = {
81
+ "title",
82
+ "description",
83
+ "format",
84
+ "pattern",
85
+ "enum",
86
+ "tags",
87
+ "pii",
88
+ "minLength",
89
+ "maxLength",
90
+ "minimum",
91
+ "exclusiveMinimum",
92
+ "maximum",
93
+ "exclusiveMaximum",
94
+ }
95
+
96
+ field_kwargs = {key: value for key, value in property_schema.items() if key in direct_mappings}
97
+
98
+ if is_required is not None:
99
+ field_kwargs["required"] = is_required
100
+
101
+ property_type = determine_type(property_schema)
102
+ if property_type is not None:
103
+ field_kwargs["type"] = property_type
104
+
105
+ if property_type == "array":
106
+ nested_item_type, nested_items = determine_nested_item_type(property_schema)
107
+
108
+ if nested_items is not None:
109
+ field_kwargs["items"] = schema_to_args(nested_item_type)
110
+
111
+ nested_properties = property_schema.get("properties")
112
+ if nested_properties is not None:
113
+ # recursive call for complex nested properties
114
+ field_kwargs["fields"] = jsonschema_to_args(nested_properties, property_schema["required"])
115
+
116
+ return field_kwargs
117
+
118
+
119
+ def determine_nested_item_type(property_schema):
120
+ nested_items = property_schema.get("items")
121
+ nested_items_is_list = isinstance(nested_items, list)
122
+ if nested_items_is_list and len(nested_items) != 1:
123
+ raise DataContractException(
124
+ type="schema",
125
+ name="Parse json schema",
126
+ reason=f"Union types for arrays are currently not supported ({nested_items})",
127
+ engine="datacontract",
128
+ )
129
+ if nested_items_is_list and len(nested_items) == 1:
130
+ nested_item_type = nested_items[0]
131
+ elif not nested_items_is_list and nested_items is not None:
132
+ nested_item_type = nested_items
133
+ return nested_item_type, nested_items
134
+
135
+
136
+ def determine_type(property_schema):
137
+ property_type = property_schema.get("type")
138
+ type_is_list = isinstance(property_type, list)
139
+ if type_is_list:
140
+ non_null_types = [t for t in property_type if t != "null"]
141
+ if non_null_types:
142
+ property_type = non_null_types[0]
143
+ else:
144
+ property_type = None
145
+ return property_type
@@ -64,6 +64,10 @@ def map_type_from_sql(sql_type: str):
64
64
  return "integer"
65
65
  elif sql_type_normed.startswith("float"):
66
66
  return "float"
67
+ elif sql_type_normed.startswith("decimal"):
68
+ return "decimal"
69
+ elif sql_type_normed.startswith("numeric"):
70
+ return "numeric"
67
71
  elif sql_type_normed.startswith("bool"):
68
72
  return "boolean"
69
73
  elif sql_type_normed.startswith("timestamp"):
@@ -110,5 +110,13 @@
110
110
  {% endif %}
111
111
 
112
112
  {% if field.items %}
113
- {{ render_nested_partial("item", field.items, level) }}
114
- {% endif %}
113
+ {{ render_nested_partial("items", field.items, level) }}
114
+ {% endif %}
115
+
116
+ {% if field.keys %}
117
+ {{ render_nested_partial("keys", field.keys, level) }}
118
+ {% endif %}
119
+
120
+ {% if field.values %}
121
+ {{ render_nested_partial("values", field.values, level) }}
122
+ {% endif %}
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: datacontract-cli
3
- Version: 0.10.10
3
+ Version: 0.10.11
4
4
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
5
5
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
6
6
  Project-URL: Homepage, https://cli.datacontract.com
@@ -11,69 +11,69 @@ Classifier: Operating System :: OS Independent
11
11
  Requires-Python: >=3.10
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
- Requires-Dist: typer[all] <0.13,>=0.9
15
- Requires-Dist: pydantic <2.9.0,>=2.8.2
16
- Requires-Dist: pyyaml ~=6.0.1
17
- Requires-Dist: requests <2.33,>=2.31
18
- Requires-Dist: fastapi ==0.111.1
19
- Requires-Dist: fastparquet ==2024.5.0
20
- Requires-Dist: python-multipart ==0.0.9
21
- Requires-Dist: rich ~=13.7.0
22
- Requires-Dist: simple-ddl-parser ==1.5.1
23
- Requires-Dist: soda-core-duckdb <3.4.0,>=3.3.1
24
- Requires-Dist: setuptools >=60
25
- Requires-Dist: duckdb ==1.0.0
26
- Requires-Dist: fastjsonschema <2.21.0,>=2.19.1
27
- Requires-Dist: python-dotenv ~=1.0.0
28
- Requires-Dist: rdflib ==7.0.0
29
- Requires-Dist: opentelemetry-exporter-otlp-proto-grpc ~=1.16
30
- Requires-Dist: opentelemetry-exporter-otlp-proto-http ~=1.16
31
- Requires-Dist: boto3 <1.34.137,>=1.34.41
32
- Requires-Dist: botocore <1.34.137,>=1.34.41
33
- Requires-Dist: jinja-partials >=0.2.1
14
+ Requires-Dist: typer[all]<0.13,>=0.9
15
+ Requires-Dist: pydantic<2.9.0,>=2.8.2
16
+ Requires-Dist: pyyaml~=6.0.1
17
+ Requires-Dist: requests<2.33,>=2.31
18
+ Requires-Dist: fastapi==0.111.1
19
+ Requires-Dist: fastparquet==2024.5.0
20
+ Requires-Dist: python-multipart==0.0.9
21
+ Requires-Dist: rich~=13.7.0
22
+ Requires-Dist: simple-ddl-parser==1.5.2
23
+ Requires-Dist: soda-core-duckdb<3.4.0,>=3.3.1
24
+ Requires-Dist: setuptools>=60
25
+ Requires-Dist: duckdb==1.0.0
26
+ Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
27
+ Requires-Dist: python-dotenv~=1.0.0
28
+ Requires-Dist: rdflib==7.0.0
29
+ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc~=1.16
30
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http~=1.16
31
+ Requires-Dist: boto3<1.34.137,>=1.34.41
32
+ Requires-Dist: botocore<1.34.137,>=1.34.41
33
+ Requires-Dist: jinja-partials>=0.2.1
34
34
  Provides-Extra: all
35
- Requires-Dist: datacontract-cli[bigquery,databricks,deltalake,kafka,postgres,s3,snowflake,sqlserver,trino] ; extra == 'all'
35
+ Requires-Dist: datacontract-cli[bigquery,databricks,deltalake,kafka,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
36
36
  Provides-Extra: avro
37
- Requires-Dist: avro ==1.11.3 ; extra == 'avro'
37
+ Requires-Dist: avro==1.11.3; extra == "avro"
38
38
  Provides-Extra: bigquery
39
- Requires-Dist: soda-core-bigquery <3.4.0,>=3.3.1 ; extra == 'bigquery'
39
+ Requires-Dist: soda-core-bigquery<3.4.0,>=3.3.1; extra == "bigquery"
40
40
  Provides-Extra: databricks
41
- Requires-Dist: soda-core-spark-df <3.4.0,>=3.3.1 ; extra == 'databricks'
42
- Requires-Dist: databricks-sql-connector <3.3.0,>=3.1.2 ; extra == 'databricks'
43
- Requires-Dist: soda-core-spark[databricks] <3.4.0,>=3.3.1 ; extra == 'databricks'
41
+ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "databricks"
42
+ Requires-Dist: databricks-sql-connector<3.4.0,>=3.1.2; extra == "databricks"
43
+ Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.1; extra == "databricks"
44
44
  Provides-Extra: deltalake
45
- Requires-Dist: deltalake <0.19,>=0.17 ; extra == 'deltalake'
45
+ Requires-Dist: deltalake<0.19,>=0.17; extra == "deltalake"
46
46
  Provides-Extra: dev
47
- Requires-Dist: datacontract-cli[all] ; extra == 'dev'
48
- Requires-Dist: httpx ==0.27.0 ; extra == 'dev'
49
- Requires-Dist: ruff ; extra == 'dev'
50
- Requires-Dist: pre-commit ~=3.7.1 ; extra == 'dev'
51
- Requires-Dist: pytest ; extra == 'dev'
52
- Requires-Dist: pytest-xdist ; extra == 'dev'
53
- Requires-Dist: moto ==5.0.11 ; extra == 'dev'
54
- Requires-Dist: pymssql ==2.3.0 ; extra == 'dev'
55
- Requires-Dist: kafka-python ; extra == 'dev'
56
- Requires-Dist: trino ==0.329.0 ; extra == 'dev'
57
- Requires-Dist: testcontainers <4.8,>=4.5 ; extra == 'dev'
58
- Requires-Dist: testcontainers[core] ; extra == 'dev'
59
- Requires-Dist: testcontainers[minio] ; extra == 'dev'
60
- Requires-Dist: testcontainers[postgres] ; extra == 'dev'
61
- Requires-Dist: testcontainers[kafka] ; extra == 'dev'
62
- Requires-Dist: testcontainers[mssql] ; extra == 'dev'
47
+ Requires-Dist: datacontract-cli[all]; extra == "dev"
48
+ Requires-Dist: httpx==0.27.0; extra == "dev"
49
+ Requires-Dist: ruff; extra == "dev"
50
+ Requires-Dist: pre-commit<3.9.0,>=3.7.1; extra == "dev"
51
+ Requires-Dist: pytest; extra == "dev"
52
+ Requires-Dist: pytest-xdist; extra == "dev"
53
+ Requires-Dist: moto==5.0.11; extra == "dev"
54
+ Requires-Dist: pymssql==2.3.0; extra == "dev"
55
+ Requires-Dist: kafka-python; extra == "dev"
56
+ Requires-Dist: trino==0.329.0; extra == "dev"
57
+ Requires-Dist: testcontainers==4.7.2; extra == "dev"
58
+ Requires-Dist: testcontainers[core]==4.7.2; extra == "dev"
59
+ Requires-Dist: testcontainers[minio]==4.7.2; extra == "dev"
60
+ Requires-Dist: testcontainers[postgres]==4.7.2; extra == "dev"
61
+ Requires-Dist: testcontainers[kafka]==4.7.2; extra == "dev"
62
+ Requires-Dist: testcontainers[mssql]==4.7.2; extra == "dev"
63
63
  Provides-Extra: kafka
64
- Requires-Dist: datacontract-cli[avro] ; extra == 'kafka'
65
- Requires-Dist: soda-core-spark-df <3.4.0,>=3.3.1 ; extra == 'kafka'
64
+ Requires-Dist: datacontract-cli[avro]; extra == "kafka"
65
+ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "kafka"
66
66
  Provides-Extra: postgres
67
- Requires-Dist: soda-core-postgres <3.4.0,>=3.3.1 ; extra == 'postgres'
67
+ Requires-Dist: soda-core-postgres<3.4.0,>=3.3.1; extra == "postgres"
68
68
  Provides-Extra: s3
69
- Requires-Dist: s3fs ==2024.6.1 ; extra == 's3'
69
+ Requires-Dist: s3fs==2024.6.1; extra == "s3"
70
70
  Provides-Extra: snowflake
71
- Requires-Dist: snowflake-connector-python[pandas] <3.12,>=3.6 ; extra == 'snowflake'
72
- Requires-Dist: soda-core-snowflake <3.4.0,>=3.3.1 ; extra == 'snowflake'
71
+ Requires-Dist: snowflake-connector-python[pandas]<3.12,>=3.6; extra == "snowflake"
72
+ Requires-Dist: soda-core-snowflake<3.4.0,>=3.3.1; extra == "snowflake"
73
73
  Provides-Extra: sqlserver
74
- Requires-Dist: soda-core-sqlserver <3.4.0,>=3.3.1 ; extra == 'sqlserver'
74
+ Requires-Dist: soda-core-sqlserver<3.4.0,>=3.3.1; extra == "sqlserver"
75
75
  Provides-Extra: trino
76
- Requires-Dist: soda-core-trino <3.4.0,>=3.3.1 ; extra == 'trino'
76
+ Requires-Dist: soda-core-trino<3.4.0,>=3.3.1; extra == "trino"
77
77
 
78
78
  # Data Contract CLI
79
79
 
@@ -197,10 +197,10 @@ $ datacontract export --format html datacontract.yaml > datacontract.html
197
197
  # import avro (other formats: sql, glue, bigquery...)
198
198
  $ datacontract import --format avro --source avro_schema.avsc
199
199
 
200
- # find differences between to data contracts
200
+ # find differences between two data contracts
201
201
  $ datacontract diff datacontract-v1.yaml datacontract-v2.yaml
202
202
 
203
- # find differences between to data contracts categorized into error, warning, and info.
203
+ # find differences between two data contracts categorized into error, warning, and info.
204
204
  $ datacontract changelog datacontract-v1.yaml datacontract-v2.yaml
205
205
 
206
206
  # fail pipeline on breaking changes. Uses changelog internally and showing only error and warning.
@@ -385,7 +385,7 @@ Supported server types:
385
385
  - [sqlserver](#sqlserver)
386
386
  - [databricks](#databricks)
387
387
  - [databricks (programmatic)](#databricks-programmatic)
388
- - [dataframr (programmatic)](#dataframe-programmatic)
388
+ - [dataframe (programmatic)](#dataframe-programmatic)
389
389
  - [snowflake](#snowflake)
390
390
  - [kafka](#kafka)
391
391
  - [postgres](#postgres)
@@ -665,14 +665,31 @@ models:
665
665
  ```
666
666
 
667
667
  #### Environment Variables
668
-
669
- | Environment Variable | Example | Description |
670
- |------------------------------------|--------------------|-----------------------------------------------------|
671
- | `DATACONTRACT_SNOWFLAKE_USERNAME` | `datacontract` | Username |
672
- | `DATACONTRACT_SNOWFLAKE_PASSWORD` | `mysecretpassword` | Password |
673
- | `DATACONTRACT_SNOWFLAKE_ROLE` | `DATAVALIDATION` | The snowflake role to use. |
674
- | `DATACONTRACT_SNOWFLAKE_WAREHOUSE` | `COMPUTE_WH` | The Snowflake Warehouse to use executing the tests. |
675
-
668
+ All [parameters supported by Soda](https://docs.soda.io/soda/connect-snowflake.html), uppercased and prepended by `DATACONTRACT_SNOWFLAKE_` prefix.
669
+ For example:
670
+
671
+ | Soda parameter | Environment Variable |
672
+ |----------------------|---------------------------------------------|
673
+ | `username` | `DATACONTRACT_SNOWFLAKE_USERNAME` |
674
+ | `password` | `DATACONTRACT_SNOWFLAKE_PASSWORD` |
675
+ | `warehouse` | `DATACONTRACT_SNOWFLAKE_WAREHOUSE` |
676
+ | `role` | `DATACONTRACT_SNOWFLAKE_ROLE` |
677
+ | `connection_timeout` | `DATACONTRACT_SNOWFLAKE_CONNECTION_TIMEOUT` |
678
+
679
+ Beware, that parameters:
680
+ * `account`
681
+ * `database`
682
+ * `schema`
683
+
684
+ are obtained from the `servers` section of the YAML-file.
685
+ E.g. from the example above:
686
+ ```yaml
687
+ servers:
688
+ snowflake:
689
+ account: abcdefg-xn12345
690
+ database: ORDER_DB
691
+ schema: ORDERS_PII_V2
692
+ ```
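Put differently, every environment variable starting with `DATACONTRACT_SNOWFLAKE_` is handed to Soda with the prefix stripped and the name lowercased, while `account`, `database`, and `schema` come from the contract's `servers` section. A small sketch of that mapping (values are placeholders):

```python
import os

# Illustrative values only
os.environ["DATACONTRACT_SNOWFLAKE_USERNAME"] = "datacontract"
os.environ["DATACONTRACT_SNOWFLAKE_CONNECTION_TIMEOUT"] = "10"

prefix = "DATACONTRACT_SNOWFLAKE_"
soda_params = {
    k.removeprefix(prefix).lower(): v
    for k, v in os.environ.items()
    if k.startswith(prefix)
}
print(soda_params)  # e.g. {'username': 'datacontract', 'connection_timeout': '10'}
```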
676
693
 
677
694
 
678
695
  ### Kafka
@@ -925,23 +942,34 @@ models:
925
942
  ```
926
943
  Usage: datacontract import [OPTIONS]
927
944
 
928
- Create a data contract from the given source location. Prints to stdout.
929
-
930
- ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
931
- │ * --format [sql|avro|glue|bigquery|jsonschema| The format of the source file. [default: None] [required] |
932
- unity|spark] |
933
- --source TEXT The path to the file or Glue Database that should be imported.
934
- [default: None]
935
- --glue-table TEXT List of table ids to import from the Glue Database (repeat for
936
- multiple table ids, leave empty for all tables in the dataset).
937
- [default: None]
938
- --bigquery-project TEXT The bigquery project id. [default: None]
939
- --bigquery-dataset TEXT The bigquery dataset id. [default: None]
940
- --bigquery-table TEXT List of table ids to import from the bigquery API (repeat for
941
- multiple table ids, leave empty for all tables in the dataset).
942
- [default: None]
943
- │ --help Show this message and exit.
944
- ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
945
+ Create a data contract from the given source location. Prints to stdout.
946
+
947
+ ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
948
+ │ * --format [sql|avro|dbt|glue|jsonschema|bigquery|odcs The format of the source file.
949
+ |unity|spark] [default: None] │
950
+ [required]
951
+ --source TEXT The path to the file or Glue Database that
952
+ should be imported.
953
+ [default: None]
954
+ --glue-table TEXT List of table ids to import from the Glue
955
+ Database (repeat for multiple table ids,
956
+ leave empty for all tables in the dataset).
957
+ [default: None]
958
+ --bigquery-project TEXT The bigquery project id. [default: None]
959
+ --bigquery-dataset TEXT The bigquery dataset id. [default: None]
960
+ │ --bigquery-table TEXT List of table ids to import from the
961
+ │ bigquery API (repeat for multiple table ids, │
962
+ │ leave empty for all tables in the dataset). │
963
+ │ [default: None] │
964
+ │ --unity-table-full-name TEXT Full name of a table in the unity catalog │
965
+ │ [default: None] │
966
+ │ --dbt-model TEXT List of models names to import from the dbt │
967
+ │ manifest file (repeat for multiple models │
968
+ │ names, leave empty for all models in the │
969
+ │ dataset). │
970
+ │ [default: None] │
971
+ │ --help Show this message and exit. │
972
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
945
973
  ```
946
974
 
947
975
  Example:
@@ -952,18 +980,19 @@ datacontract import --format sql --source my_ddl.sql
952
980
 
953
981
  Available import options:
954
982
 
955
- | Type | Description | Status |
956
- |--------------------|------------------------------------------------|---------|
983
+ | Type | Description | Status |
984
+ |--------------------|------------------------------------------------|--------|
957
985
  | `sql` | Import from SQL DDL | ✅ |
958
986
  | `avro` | Import from AVRO schemas | ✅ |
959
987
  | `glue` | Import from AWS Glue DataCatalog | ✅ |
960
- | `protobuf` | Import from Protobuf schemas | TBD |
961
988
  | `jsonschema` | Import from JSON Schemas | ✅ |
962
989
  | `bigquery` | Import from BigQuery Schemas | ✅ |
963
990
  | `unity` | Import from Databricks Unity Catalog | partial |
964
- | `dbt` | Import from dbt models | TBD |
991
+ | `dbt` | Import from dbt models | |
965
992
  | `odcs` | Import from Open Data Contract Standard (ODCS) | ✅ |
966
- | Missing something? | Please create an issue on GitHub | TBD |
993
+ | `spark` | Import from Spark StructTypes | |
994
+ | `protobuf` | Import from Protobuf schemas | TBD |
995
+ | Missing something? | Please create an issue on GitHub | TBD |
967
996
 
968
997
 
969
998
  #### BigQuery
@@ -1005,6 +1034,23 @@ export DATABRICKS_IMPORT_ACCESS_TOKEN=<token>
1005
1034
  datacontract import --format unity --unity-table-full-name <table_full_name>
1006
1035
  ```
1007
1036
 
1037
+ #### dbt
1038
+
1039
+ Importing from dbt manifest file.
1040
+ You may give the `dbt-model` parameter to enumerate the tables that should be imported. If no tables are given, _all_ available tables of the database will be imported.
1041
+
1042
+ Examples:
1043
+
1044
+ ```bash
1045
+ # Example import from dbt manifest with specifying the tables to import
1046
+ datacontract import --format dbt --source <manifest_path> --dbt-model <model_name_1> --dbt-model <model_name_2> --dbt-model <model_name_3>
1047
+ ```
1048
+
1049
+ ```bash
1050
+ # Example import from dbt manifest importing all tables in the database
1051
+ datacontract import --format dbt --source <manifest_path>
1052
+ ```
1053
+
1008
1054
  #### Glue
1009
1055
 
1010
1056
  Importing from Glue reads the necessary Data directly off of the AWS API.
@@ -1304,7 +1350,7 @@ if __name__ == "__main__":
1304
1350
  data_contract = DataContract(
1305
1351
  data_contract_file="/path/datacontract.yaml"
1306
1352
  )
1307
- # call export
1353
+ # Call export
1308
1354
  result = data_contract.export(
1309
1355
  export_format="custom", model="orders", server="production", custom_arg="my_custom_arg"
1310
1356
  )
@@ -1330,10 +1376,11 @@ Output
1330
1376
  Using the importer factory to add a new custom importer
1331
1377
  ```python
1332
1378
 
1333
- from datacontract.model.data_contract_specification import DataContractSpecification
1379
+ from datacontract.model.data_contract_specification import DataContractSpecification, Field, Model
1334
1380
  from datacontract.data_contract import DataContract
1335
1381
  from datacontract.imports.importer import Importer
1336
1382
  from datacontract.imports.importer_factory import importer_factory
1383
+
1337
1384
  import json
1338
1385
 
1339
1386
  # Create a custom class that implements import_source method
@@ -1344,43 +1391,89 @@ class CustomImporter(Importer):
1344
1391
  source_dict = json.loads(source)
1345
1392
  data_contract_specification.id = source_dict.get("id_custom")
1346
1393
  data_contract_specification.info.title = source_dict.get("title")
1394
+ data_contract_specification.info.version = source_dict.get("version")
1347
1395
  data_contract_specification.info.description = source_dict.get("description_from_app")
1348
-
1396
+
1397
+ for model in source_dict.get("models", []):
1398
+ fields = {}
1399
+ for column in model.get('columns'):
1400
+ field = Field(
1401
+ description=column.get('column_description'),
1402
+ type=column.get('type')
1403
+ )
1404
+ fields[column.get('name')] = field
1405
+
1406
+ dc_model = Model(
1407
+ description=model.get('description'),
1408
+ fields= fields
1409
+ )
1410
+
1411
+ data_contract_specification.models[model.get('name')] = dc_model
1349
1412
  return data_contract_specification
1350
-
1413
+
1351
1414
 
1352
1415
  # Register the new custom class into factory
1353
1416
  importer_factory.register_importer("custom_company_importer", CustomImporter)
1354
1417
 
1355
1418
 
1356
1419
  if __name__ == "__main__":
1357
- # get a custom da
1358
- json_from_custom_app = '{"id_custom":"uuid-custom","version":"0.0.2", "title":"my_custom_imported_data", "description_from_app": "Custom contract description"}'
1420
+ # Get a custom data from other app
1421
+ json_from_custom_app = '''
1422
+ {
1423
+ "id_custom": "uuid-custom",
1424
+ "version": "0.0.2",
1425
+ "title": "my_custom_imported_data",
1426
+ "description_from_app": "Custom contract description",
1427
+ "models": [
1428
+ {
1429
+ "name": "model1",
1430
+ "desctiption": "model description from app",
1431
+ "columns": [
1432
+ {
1433
+ "name": "columnA",
1434
+ "type": "varchar",
1435
+ "column_description": "my_column description"
1436
+ },
1437
+ {
1438
+ "name": "columnB",
1439
+ "type": "varchar",
1440
+ "column_description": "my_columnB description"
1441
+ }
1442
+ ]
1443
+ }
1444
+ ]
1445
+ }
1446
+ '''
1359
1447
  # Create a DataContract instance
1360
1448
  data_contract = DataContract()
1361
1449
 
1362
- # call import_from
1450
+ # Call import_from_source
1363
1451
  result = data_contract.import_from_source(
1364
- format="custom_company_importer", data_contract_specification=DataContract.init(), source=json_from_custom_app
1365
- )
1366
- print(dict(result))
1367
-
1452
+ format="custom_company_importer",
1453
+ data_contract_specification=DataContract.init(),
1454
+ source=json_from_custom_app
1455
+ )
1456
+ print(result.to_yaml() )
1368
1457
  ```
1369
1458
  Output
1459
+
1460
+ ```yaml
1461
+ dataContractSpecification: 0.9.3
1462
+ id: uuid-custom
1463
+ info:
1464
+ title: my_custom_imported_data
1465
+ version: 0.0.2
1466
+ description: Custom contract description
1467
+ models:
1468
+ model1:
1469
+ fields:
1470
+ columnA:
1471
+ type: varchar
1472
+ description: my_column description
1473
+ columnB:
1474
+ type: varchar
1475
+ description: my_columnB description
1370
1476
 
1371
- ```python
1372
- {
1373
- 'dataContractSpecification': '0.9.3',
1374
- 'id': 'uuid-custom',
1375
- 'info': Info(title='my_custom_imported_data', version='0.0.1', status=None, description='Custom contract description', owner=None, contact=None),
1376
- 'servers': {},
1377
- 'terms': None,
1378
- 'models': {},
1379
- 'definitions': {},
1380
- 'examples': [],
1381
- 'quality': None,
1382
- 'servicelevels': None
1383
- }
1384
1477
  ```
1385
1478
  ## Development Setup
1386
1479
 
@@ -1469,6 +1562,7 @@ We are happy to receive your contributions. Propose your change in an issue or d
1469
1562
  ## Companies using this tool
1470
1563
 
1471
1564
  - [INNOQ](https://innoq.com)
1565
+ - [Data Catering](https://data.catering/)
1472
1566
  - And many more. To add your company, please create a pull request.
1473
1567
 
1474
1568
  ## Related Tools
@@ -1476,6 +1570,7 @@ We are happy to receive your contributions. Propose your change in an issue or d
1476
1570
  - [Data Contract Manager](https://www.datacontract-manager.com/) is a commercial tool to manage data contracts. It contains a web UI, access management, and data governance for a full enterprise data marketplace.
1477
1571
  - [Data Contract GPT](https://gpt.datacontract.com) is a custom GPT that can help you write data contracts.
1478
1572
  - [Data Contract Editor](https://editor.datacontract.com) is an editor for Data Contracts, including a live html preview.
1573
+ - [Data Contract Playground](https://data-catering.github.io/data-contract-playground/) allows you to validate and export your data contract to different formats within your browser.
1479
1574
 
1480
1575
  ## License
1481
1576
 
@@ -1,6 +1,6 @@
1
1
  datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
2
2
  datacontract/cli.py,sha256=LZYYECXjCk-AZmPaihuRJkknEAuM8OxvGoK40WpAeAs,14261
3
- datacontract/data_contract.py,sha256=o7lY-n97JXtumt1OX-9cDbXa_nvWlC9f3hivWPFd28s,13842
3
+ datacontract/data_contract.py,sha256=yLisbPzA_GJdQjh2rUeIVG0R_EjFBqqHxxXb2PAq_ec,14075
4
4
  datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
5
5
  datacontract/web.py,sha256=Ov50NJVbQiaxjHXnVOvRw9moMkKHHcxFhdzwbA13QOw,2134
6
6
  datacontract/breaking/breaking.py,sha256=l0ZwUX8G4QK_0nLRLJHArpwgTWTD7WQYMuAp3l_y1bY,12184
@@ -10,17 +10,17 @@ datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hS
10
10
  datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=TXO47ON3NjwYI4Y2eBYklMOCo7vAtYzqLPAhZhii6dg,1565
11
11
  datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=XHvxX6BrTdZk2wN55zd6ER4k7ILDyGzjbCbZxQAJ2iE,665
12
12
  datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py,sha256=CsxFGyInjpsylmoofByV1b-wpFhG1jtRWxSUpp-BXk8,1525
13
- datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=rXGfGDu9-RJomj9WcVe8vEfluR25vn2rOS7BeOVQ0XA,5748
13
+ datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=LU-RF6uXgUDvXCB3CQuZJiOenQk4kG5pA6hf6A61pzA,5891
14
14
  datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=HS1dgwkUp3CwuI6VtZnRpOUnTd04E3yMgLbkK5jcW7o,1057
15
15
  datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
16
16
  datacontract/engines/soda/check_soda_execute.py,sha256=mqCqVdBFTuJwOiQBf15KSj48ccQgbewFPSMzrAWF_Hk,7820
17
17
  datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
18
18
  datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
19
19
  datacontract/engines/soda/connections/databricks.py,sha256=lpMju-o_TzLZeF0EEVwePPr8JahqvFnj5xRYjF15fc8,561
20
- datacontract/engines/soda/connections/duckdb.py,sha256=0IqIGKsWarz8MgfC3Uzj6_Vj9nKgoGtW5IX6yOq6Tz8,6537
20
+ datacontract/engines/soda/connections/duckdb.py,sha256=ZZf_ApeMO4FK2SyPI1otjgH92OfxwsZayBF_jjPLs9s,6788
21
21
  datacontract/engines/soda/connections/kafka.py,sha256=vh7z-4ZsmpXiYcogf3oTkagrAPcq6HG2SccnxNwFeVQ,5635
22
22
  datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
23
- datacontract/engines/soda/connections/snowflake.py,sha256=y1t2a1DWY4_tr5k-X5_nhLE6v1rfCwTahzhtHR91x9A,719
23
+ datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
24
24
  datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
25
25
  datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
26
26
  datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -28,14 +28,14 @@ datacontract/export/avro_converter.py,sha256=CbrGsGQCi-RNrQ7FfxnuGv3oeT9tHrkSl8d
28
28
  datacontract/export/avro_idl_converter.py,sha256=-zZxV0OAC76tMkAA9hXbEtp2wtpildKJb0OIbPhgoJE,9794
29
29
  datacontract/export/bigquery_converter.py,sha256=chAxgbYybyLG5v6SwqIweK9PePvE-36FStR7fyA_F_4,4577
30
30
  datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
31
- datacontract/export/dbml_converter.py,sha256=dIx7iMxLrJkDPFt_pCc_r5VkremGjsoUpNWNYNSzet4,3693
31
+ datacontract/export/dbml_converter.py,sha256=4gNNx78bQOrMrPhe-xw-7Hvv0xOt3Y7OX0zhlG1TxFY,4737
32
32
  datacontract/export/dbt_converter.py,sha256=QfapVeunwab7bWn_B9-J_1U8fm1wdsPBW_Ws2b99o1E,9390
33
- datacontract/export/exporter.py,sha256=Z1t9ctQSwTLNpM1wYBFeLDgBwwCJVqdk8bBAW72qz8I,2844
33
+ datacontract/export/exporter.py,sha256=28fpxKxLCFAI0nuIBc0knccjj60Who3XIiyBmMO4Ggk,2845
34
34
  datacontract/export/exporter_factory.py,sha256=CgYTx5m11Ec0vQV_-5tHxilTIH3qpRvUDvifsOybiZk,4756
35
35
  datacontract/export/go_converter.py,sha256=oWkEjOXBIGN8emTaxeJD8xpB3VYP4sVaNzKX-Oeqgw0,3330
36
36
  datacontract/export/great_expectations_converter.py,sha256=LIuWHn3h7xRrDDs1YoibHOI8NVF2Izh5IZOQFLeWN7k,5302
37
37
  datacontract/export/html_export.py,sha256=i7jckL80osA8incF0YoJdq3NaYmV1_3OU0cZBfPdNDs,2578
38
- datacontract/export/jsonschema_converter.py,sha256=xxJMB4b78m66X30RUb_-qWVwvffLdj0zeiojBt9r7ks,5044
38
+ datacontract/export/jsonschema_converter.py,sha256=Zqx4K7RZWfE9n_F4CMext82jd2Gr6EoqOnBXChRJgfM,4948
39
39
  datacontract/export/odcs_converter.py,sha256=WngHoTF7R9uMDLCedBso7m2s0yDJQS5HTACtH3AhG-E,4673
40
40
  datacontract/export/protobuf_converter.py,sha256=Baqq2uvRg0AHFo6gR-hEYdLyiYRWziC1Q4AujlHgCzo,3192
41
41
  datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
@@ -48,13 +48,13 @@ datacontract/export/terraform_converter.py,sha256=IFwn-6BWPVkfwDQbj_Gl0GwbJdMKiw
48
48
  datacontract/imports/avro_importer.py,sha256=ZlqBuWl1CIOtccm_EHyycRdIUJCS9r9lV6IHfpx9tsI,9886
49
49
  datacontract/imports/bigquery_importer.py,sha256=frBs5a9GuNVtNWF3DtEvsOiIemGI3dfDOEH475l1_84,8399
50
50
  datacontract/imports/dbt_importer.py,sha256=IPfMIsV1qzKEyfodgGmZRcF0nqN-oW1VixOoPJe-d6k,3390
51
- datacontract/imports/glue_importer.py,sha256=YO-ATDMeUx7WCsuaVzk_VhvKGqks9925qmDkpz5oLbs,8031
52
- datacontract/imports/importer.py,sha256=-hN92fruu__bJAbY3bILWcX1zmikVHt8q3KL83ICpu8,772
51
+ datacontract/imports/glue_importer.py,sha256=4WluEbPOhseBDkyR8C95vJmIZW0Ka4aUZvljl149gQU,8335
52
+ datacontract/imports/importer.py,sha256=kr2bkHiciJ35pPpLARoYLA5J7XLGDg2gp_unbZ2V1bg,773
53
53
  datacontract/imports/importer_factory.py,sha256=xfVyEKY2HXIqKVS9GtWnc5xYrmcJZ-WEpVtExLudyYQ,2999
54
- datacontract/imports/jsonschema_importer.py,sha256=dvugek0rkUVbfqy1xb6y3msQjziCgooF0HKc2vWBTIw,6776
54
+ datacontract/imports/jsonschema_importer.py,sha256=cY70b2bFx9Kfsnq5vNj0iy_GvNOIcBH8xeFjrsxv4bQ,4925
55
55
  datacontract/imports/odcs_importer.py,sha256=0SQUON2lkTBFopdiUg-s3HVt1ES1oqaVYRP42hZh_7I,7453
56
56
  datacontract/imports/spark_importer.py,sha256=5_zMcQlHzIBGznxxL6d5wevivq1XwPI8MbKbftUrtYA,4220
57
- datacontract/imports/sql_importer.py,sha256=nNrFJonNgEv8xwaN_SFq9M26LKdmLP0yHvbx31ErmQo,2723
57
+ datacontract/imports/sql_importer.py,sha256=DS42wTGx4TNQeNAGbRs9o32PvK5JSKpWN2ylETx18ls,2869
58
58
  datacontract/imports/unity_importer.py,sha256=f2VZYYY9DjxzttPHK6mU0bv5Nv5QKWmxo1r3Lb-1png,5583
59
59
  datacontract/init/download_datacontract_file.py,sha256=pj_4mhWKlEtfueWohDgkb1nyuG5ERDipUDszxKwpZUs,413
60
60
  datacontract/integration/publish_datamesh_manager.py,sha256=TuLRVdZs21UBet-qCqIywd_7WH5m6m-G83tw515G3hI,1489
@@ -84,12 +84,12 @@ datacontract/templates/partials/datacontract_servicelevels.html,sha256=ed3QgB11B
84
84
  datacontract/templates/partials/datacontract_terms.html,sha256=1cnJcOTpxwot2BCuZmkLF_SPfiVloLs3c8mj9WfE4sc,1865
85
85
  datacontract/templates/partials/definition.html,sha256=jEvjThHghHT2bo4oMAfe3H0lzc4ZuqDsJFOpN9w3Nho,7085
86
86
  datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4xX5j6suasXHNM88,1204
87
- datacontract/templates/partials/model_field.html,sha256=w5BNqIxr03-T3r7lynvzi_i4LUGgOJMqe-5zF1_XRck,6762
87
+ datacontract/templates/partials/model_field.html,sha256=kh_ZIqJuayyxN-zDNIUPIoXOZeehGxXQxiImYB6G5qY,6946
88
88
  datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
89
89
  datacontract/templates/style/output.css,sha256=F3oEhUpuv8kA_dWr4pJymBS_Ju6huIIZdLMkJzPzMmU,25647
90
- datacontract_cli-0.10.10.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
91
- datacontract_cli-0.10.10.dist-info/METADATA,sha256=b3U9R8eE7T7eO9VMADNMx55Oqj15VZkMOHnhCjGTaes,80778
92
- datacontract_cli-0.10.10.dist-info/WHEEL,sha256=YiKiUUeZQGmGJoR_0N1Y933DOBowq4AIvDe2-UIy8E4,91
93
- datacontract_cli-0.10.10.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
94
- datacontract_cli-0.10.10.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
95
- datacontract_cli-0.10.10.dist-info/RECORD,,
90
+ datacontract_cli-0.10.11.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
91
+ datacontract_cli-0.10.11.dist-info/METADATA,sha256=ERyMxpIUXotZn0ah0RYOeMQ7_5UQYuT54z9QoJlvAmc,84667
92
+ datacontract_cli-0.10.11.dist-info/WHEEL,sha256=R0nc6qTxuoLk7ShA2_Y-UWkN8ZdfDBG2B6Eqpz2WXbs,91
93
+ datacontract_cli-0.10.11.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
94
+ datacontract_cli-0.10.11.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
95
+ datacontract_cli-0.10.11.dist-info/RECORD,,
@@ -1,5 +1,5 @@
1
1
  Wheel-Version: 1.0
2
- Generator: setuptools (71.0.2)
2
+ Generator: setuptools (72.1.0)
3
3
  Root-Is-Purelib: true
4
4
  Tag: py3-none-any
5
5