datacontract-cli 0.10.12__py3-none-any.whl → 0.10.13__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


@@ -25,9 +25,10 @@ def create_spark_session(tmp_dir: str):
  SparkSession.builder.appName("datacontract")
  .config("spark.sql.warehouse.dir", f"{tmp_dir}/spark-warehouse")
  .config("spark.streaming.stopGracefullyOnShutdown", "true")
+ .config("spark.ui.enabled", "false")
  .config(
  "spark.jars.packages",
- "org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.0,org.apache.spark:spark-avro_2.12:3.5.0",
+ "org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.2,org.apache.spark:spark-avro_2.12:3.5.2",
  )
  .getOrCreate()
  )
@@ -112,7 +113,7 @@ def get_auth_options():
  kafka_sasl_username = os.getenv("DATACONTRACT_KAFKA_SASL_USERNAME")
  kafka_sasl_password = os.getenv("DATACONTRACT_KAFKA_SASL_PASSWORD")

- if kafka_sasl_username is None:
+ if kafka_sasl_username is None or kafka_sasl_username == "":
  return {}

  return {
@@ -44,7 +44,7 @@ def to_fields_array(fields: Dict[str, Field]) -> List[Dict[str, Field]]:


  def to_field(field_name: str, field: Field) -> dict:
- bq_type = map_type_to_bigquery(field.type, field_name)
+ bq_type = map_type_to_bigquery(field)
  bq_field = {
  "name": field_name,
  "type": bq_type,
@@ -59,10 +59,8 @@ def to_field(field_name: str, field: Field) -> dict:
  # in case the array type is a complex object, we want to copy all its fields
  bq_field["fields"] = to_fields_array(field.items.fields)
  else:
- # otherwise we make up a structure that gets us a single field of the specified type
- bq_field["fields"] = to_fields_array(
- {f"{field_name}_1": Field(type=field.items.type, required=False, description="")}
- )
+ bq_field["type"] = map_type_to_bigquery(field.items)
+
  # all of these can carry other fields
  elif bq_type.lower() in ["record", "struct"]:
  bq_field["fields"] = to_fields_array(field.fields)
@@ -79,37 +77,46 @@ def to_field(field_name: str, field: Field) -> dict:
  return bq_field


- def map_type_to_bigquery(type_str: str, field_name: str) -> str:
+ def map_type_to_bigquery(field: Field) -> str:
  logger = logging.getLogger(__name__)
- if type_str.lower() in ["string", "varchar", "text"]:
+
+ field_type = field.type
+ if not field_type:
+ return None
+
+ if field.config and "bigqueryType" in field.config:
+ return field.config["bigqueryType"]
+
+ if field_type.lower() in ["string", "varchar", "text"]:
  return "STRING"
- elif type_str == "bytes":
+ elif field_type.lower() == "bytes":
  return "BYTES"
- elif type_str.lower() in ["int", "integer"]:
+ elif field_type.lower() in ["int", "integer"]:
  return "INTEGER"
- elif type_str.lower() in ["long", "bigint"]:
+ elif field_type.lower() in ["long", "bigint"]:
  return "INT64"
- elif type_str == "float":
- return "FLOAT"
- elif type_str == "boolean":
+ elif field_type.lower() == "float":
+ return "FLOAT64"
+ elif field_type.lower() == "boolean":
  return "BOOL"
- elif type_str.lower() in ["timestamp", "timestamp_tz"]:
+ elif field_type.lower() in ["timestamp", "timestamp_tz"]:
  return "TIMESTAMP"
- elif type_str == "date":
+ elif field_type.lower() == "date":
  return "DATE"
- elif type_str == "timestamp_ntz":
+ elif field_type.lower() == "timestamp_ntz":
  return "TIME"
- elif type_str.lower() in ["number", "decimal", "numeric"]:
+ elif field_type.lower() in ["number", "decimal", "numeric"]:
  return "NUMERIC"
- elif type_str == "double":
+ elif field_type.lower() == "double":
  return "BIGNUMERIC"
- elif type_str.lower() in ["object", "record", "array"]:
+ elif field_type.lower() in ["object", "record", "array"]:
  return "RECORD"
- elif type_str == "struct":
+ elif field_type.lower() == "struct":
  return "STRUCT"
- elif type_str == "null":
+ elif field_type.lower() == "null":
  logger.info(
- f"Can't properly map {field_name} to bigquery Schema, as 'null' is not supported as a type. Mapping it to STRING."
+ f"Can't properly map {field.title} to bigquery Schema, as 'null' \
+ is not supported as a type. Mapping it to STRING."
  )
  return "STRING"
  else:
@@ -117,6 +124,6 @@ def map_type_to_bigquery(type_str: str, field_name: str) -> str:
  type="schema",
  result="failed",
  name="Map datacontract type to bigquery data type",
- reason=f"Unsupported type {type_str} in data contract definition.",
+ reason=f"Unsupported type {field_type} in data contract definition.",
  engine="datacontract",
  )
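The BigQuery hunks above change `map_type_to_bigquery` to take the whole `Field` instead of a type string, so a `bigqueryType` entry in the field's `config` now overrides the default mapping, and a data contract `float` maps to `FLOAT64` instead of `FLOAT`. A minimal sketch of the expected behaviour, assuming the `Field` pydantic model accepts these keyword arguments (not taken from the package's test suite):

```python
# Hedged sketch of the refactored mapping behaviour.
from datacontract.export.bigquery_converter import map_type_to_bigquery
from datacontract.model.data_contract_specification import Field

# Default mapping: "float" is now expected to become FLOAT64 (previously FLOAT).
print(map_type_to_bigquery(Field(type="float")))  # -> "FLOAT64"

# A bigqueryType entry in the field config takes precedence over the default mapping.
field = Field(type="number", config={"bigqueryType": "BIGNUMERIC(20, 2)"})
print(map_type_to_bigquery(field))  # -> "BIGNUMERIC(20, 2)"
```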
@@ -0,0 +1,148 @@
+ from typing import Dict
+
+ import yaml
+
+ from datacontract.export.exporter import Exporter
+ from datacontract.model.data_contract_specification import DataContractSpecification, Model, Field, Server
+
+
+ class DataCatererExporter(Exporter):
+ """
+ Exporter class for Data Caterer.
+ Creates a YAML file, based on the data contract, for Data Caterer to generate synthetic data.
+ """
+
+ def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
+ return to_data_caterer_generate_yaml(data_contract, server)
+
+
+ def to_data_caterer_generate_yaml(data_contract_spec: DataContractSpecification, server):
+ generation_task = {"name": data_contract_spec.info.title, "steps": []}
+ server_info = _get_server_info(data_contract_spec, server)
+
+ for model_key, model_value in data_contract_spec.models.items():
+ odcs_table = _to_data_caterer_generate_step(model_key, model_value, server_info)
+ generation_task["steps"].append(odcs_table)
+ return yaml.dump(generation_task, indent=2, sort_keys=False, allow_unicode=True)
+
+
+ def _get_server_info(data_contract_spec: DataContractSpecification, server):
+ if server is not None and server in data_contract_spec.servers:
+ return data_contract_spec.servers.get(server)
+ elif server is not None:
+ raise Exception(f"Server name not found in servers list in data contract, server-name={server}")
+ elif len(data_contract_spec.servers.keys()) > 0:
+ return next(iter(data_contract_spec.servers.values()))
+ else:
+ return None
+
+
+ def _to_data_caterer_generate_step(model_key, model_value: Model, server: Server) -> dict:
+ step = {
+ "name": model_key,
+ "type": _to_step_type(server),
+ "options": _to_data_source_options(model_key, server),
+ "schema": [],
+ }
+ fields = _to_fields(model_value.fields)
+ if fields:
+ step["schema"] = fields
+ return step
+
+
+ def _to_step_type(server: Server):
+ if server is not None and server.type is not None:
+ if server.type in ["s3", "gcs", "azure", "local"]:
+ return server.format
+ else:
+ return server.type
+ else:
+ return "csv"
+
+
+ def _to_data_source_options(model_key, server: Server):
+ options = {}
+ if server is not None and server.type is not None:
+ if server.type in ["s3", "gcs", "azure", "local"]:
+ if server.path is not None:
+ options["path"] = server.path
+ elif server.location is not None:
+ options["path"] = server.location
+ else:
+ options["path"] = "/tmp/data_caterer_data"
+ elif server.type == "postgres":
+ options["schema"] = server.schema_
+ options["table"] = model_key
+ elif server.type == "kafka":
+ options["topic"] = server.topic
+
+ return options
+
+
+ def _to_fields(fields: Dict[str, Field]) -> list:
+ dc_fields = []
+ for field_name, field in fields.items():
+ column = _to_field(field_name, field)
+ dc_fields.append(column)
+ return dc_fields
+
+
+ def _to_field(field_name: str, field: Field) -> dict:
+ dc_field = {"name": field_name}
+ dc_generator_opts = {}
+
+ if field.type is not None:
+ new_type = _to_data_type(field.type)
+ dc_field["type"] = _to_data_type(field.type)
+ if new_type == "object" or new_type == "record" or new_type == "struct":
+ # need to get nested field definitions
+ nested_fields = _to_fields(field.fields)
+ dc_field["schema"] = {"fields": nested_fields}
+
+ if field.enum is not None and len(field.enum) > 0:
+ dc_generator_opts["oneOf"] = field.enum
+ if field.unique is not None and field.unique:
+ dc_generator_opts["isUnique"] = field.unique
+ if field.minLength is not None:
+ dc_generator_opts["minLength"] = field.minLength
+ if field.maxLength is not None:
+ dc_generator_opts["maxLength"] = field.maxLength
+ if field.pattern is not None:
+ dc_generator_opts["regex"] = field.pattern
+ if field.minimum is not None:
+ dc_generator_opts["min"] = field.minimum
+ if field.maximum is not None:
+ dc_generator_opts["max"] = field.maximum
+
+ if len(dc_generator_opts.keys()) > 0:
+ dc_field["generator"] = {"options": dc_generator_opts}
+ return dc_field
+
+
+ def _to_data_type(data_type):
+ if data_type == "number" or data_type == "numeric" or data_type == "double":
+ return "double"
+ elif data_type == "decimal" or data_type == "bigint":
+ return "decimal"
+ elif data_type == "int":
+ return "integer"
+ elif data_type == "long":
+ return "long"
+ elif data_type == "float":
+ return "float"
+ elif data_type == "string" or data_type == "text" or data_type == "varchar":
+ return "string"
+ if data_type == "boolean":
+ return "boolean"
+ if data_type == "timestamp" or data_type == "timestamp_tz" or data_type == "timestamp_ntz":
+ return "timestamp"
+ elif data_type == "date":
+ return "date"
+ elif data_type == "array":
+ return "array"
+ elif data_type == "map" or data_type == "object" or data_type == "record" or data_type == "struct":
+ return "struct"
+ elif data_type == "bytes":
+ return "binary"
+ else:
+ return "string"
@@ -36,6 +36,7 @@ class ExportFormat(str, Enum):
  dbml = "dbml"
  spark = "spark"
  sqlalchemy = "sqlalchemy"
+ data_caterer = "data-caterer"

  @classmethod
  def get_supported_formats(cls):
@@ -62,6 +62,12 @@ exporter_factory.register_lazy_exporter(
  class_name="BigQueryExporter",
  )

+ exporter_factory.register_lazy_exporter(
+ name=ExportFormat.data_caterer,
+ module_path="datacontract.export.data_caterer_converter",
+ class_name="DataCatererExporter",
+ )
+
  exporter_factory.register_lazy_exporter(
  name=ExportFormat.dbml, module_path="datacontract.export.dbml_converter", class_name="DbmlExporter"
  )
@@ -36,7 +36,19 @@ def to_property(field: Field) -> dict:
  property = {}
  json_type, json_format = convert_type_format(field.type, field.format)
  if json_type is not None:
- property["type"] = json_type
+ if not field.required:
+ """
+ From: https://json-schema.org/understanding-json-schema/reference/type
+ The type keyword may either be a string or an array:
+
+ If it's a string, it is the name of one of the basic types above.
+ If it is an array, it must be an array of strings, where each string
+ is the name of one of the basic types, and each element is unique.
+ In this case, the JSON snippet is valid if it matches any of the given types.
+ """
+ property["type"] = [json_type, "null"]
+ else:
+ property["type"] = json_type
  if json_format is not None:
  property["format"] = json_format
  if field.unique:
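The hunk above makes the JSON Schema exporter emit a two-element type array for optional fields, so `null` values validate against the generated schema. A minimal sketch of the expected output, assuming `to_property` and the `Field` model can be called directly like this (not taken from the package's test suite):

```python
# Hedged sketch of the optional-field behaviour.
from datacontract.export.jsonschema_converter import to_property
from datacontract.model.data_contract_specification import Field

print(to_property(Field(type="string", required=True)))
# roughly: {"type": "string"}
print(to_property(Field(type="string", required=False)))
# roughly: {"type": ["string", "null"]}
```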
@@ -50,7 +62,6 @@ def to_property(field: Field) -> dict:
  property["required"] = to_required(field.fields)
  if json_type == "array":
  property["items"] = to_property(field.items)
-
  if field.pattern:
  property["pattern"] = field.pattern
  if field.enum:
@@ -128,7 +128,7 @@ def to_data_type(field: Field) -> types.DataType:
  if field_type in ["string", "varchar", "text"]:
  return types.StringType()
  if field_type in ["number", "decimal", "numeric"]:
- return types.DecimalType()
+ return types.DecimalType(precision=field.precision, scale=field.scale)
  if field_type in ["integer", "int"]:
  return types.IntegerType()
  if field_type == "long":
@@ -269,16 +269,7 @@ def convert_type_to_sqlserver(field: Field) -> None | str:

  def convert_type_to_bigquery(field: Field) -> None | str:
  """Convert from supported datacontract types to equivalent bigquery types"""
- field_type = field.type
- if not field_type:
- return None
-
- # If provided sql-server config type, prefer it over default mapping
- if bigquery_type := get_type_config(field, "bigqueryType"):
- return bigquery_type
-
- field_type = field_type.lower()
- return map_type_to_bigquery(field_type, field.title)
+ return map_type_to_bigquery(field)


  def get_type_config(field: Field, config_attr: str) -> dict[str, str] | None:
@@ -80,6 +80,8 @@ def _field_from_struct_type(spark_field: types.StructField) -> Field:
  """
  field = Field()
  field.required = not spark_field.nullable
+ field.description = spark_field.metadata.get("comment")
+
  return _type_from_data_type(field, spark_field.dataType)


@@ -121,7 +123,7 @@ def _data_type_from_spark(spark_type: types.DataType) -> str:
  """
  if isinstance(spark_type, types.StringType):
  return "string"
- elif isinstance(spark_type, types.IntegerType):
+ elif isinstance(spark_type, (types.IntegerType, types.ShortType)):
  return "integer"
  elif isinstance(spark_type, types.LongType):
  return "long"
@@ -149,5 +151,7 @@ def _data_type_from_spark(spark_type: types.DataType) -> str:
  return "decimal"
  elif isinstance(spark_type, types.NullType):
  return "null"
+ elif isinstance(spark_type, types.VarcharType):
+ return "varchar"
  else:
  raise ValueError(f"Unsupported Spark type: {spark_type}")
@@ -1,17 +1,37 @@
  import json
- import requests
  import os
- import typing
+ from typing import List, Optional
+
+ from pyspark.sql import types
+ from databricks.sdk import WorkspaceClient
+ from databricks.sdk.service.catalog import TableInfo, ColumnInfo

  from datacontract.imports.importer import Importer
+ from datacontract.imports.spark_importer import _field_from_struct_type
  from datacontract.model.data_contract_specification import DataContractSpecification, Model, Field
  from datacontract.model.exceptions import DataContractException


  class UnityImporter(Importer):
+ """
+ UnityImporter class for importing data contract specifications from Unity Catalog.
+ """
+
  def import_source(
  self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
  ) -> DataContractSpecification:
+ """
+ Import data contract specification from a source.
+
+ :param data_contract_specification: The data contract specification to be imported.
+ :type data_contract_specification: DataContractSpecification
+ :param source: The source from which to import the data contract specification.
+ :type source: str
+ :param import_args: Additional arguments for the import process.
+ :type import_args: dict
+ :return: The imported data contract specification.
+ :rtype: DataContractSpecification
+ """
  if source is not None:
  data_contract_specification = import_unity_from_json(data_contract_specification, source)
  else:
@@ -24,9 +44,21 @@ class UnityImporter:
  def import_unity_from_json(
  data_contract_specification: DataContractSpecification, source: str
  ) -> DataContractSpecification:
+ """
+ Import data contract specification from a JSON file.
+
+ :param data_contract_specification: The data contract specification to be imported.
+ :type data_contract_specification: DataContractSpecification
+ :param source: The path to the JSON file.
+ :type source: str
+ :return: The imported data contract specification.
+ :rtype: DataContractSpecification
+ :raises DataContractException: If there is an error parsing the JSON file.
+ """
  try:
  with open(source, "r") as file:
- unity_schema = json.loads(file.read())
+ json_contents = json.loads(file.read())
+ unity_schema = TableInfo.from_dict(json_contents)
  except json.JSONDecodeError as e:
  raise DataContractException(
  type="schema",
@@ -39,114 +71,103 @@ def import_unity_from_json(


  def import_unity_from_api(
- data_contract_specification: DataContractSpecification, unity_table_full_name: typing.Optional[str] = None
+ data_contract_specification: DataContractSpecification, unity_table_full_name: Optional[str] = None
  ) -> DataContractSpecification:
- databricks_instance = os.getenv("DATABRICKS_IMPORT_INSTANCE")
- access_token = os.getenv("DATABRICKS_IMPORT_ACCESS_TOKEN")
-
- if not databricks_instance or not access_token:
- print("Missing environment variables for Databricks instance or access token.")
- print("Both, $DATABRICKS_IMPORT_INSTANCE and $DATABRICKS_IMPORT_ACCESS_TOKEN must be set.")
- exit(1) # Exit if variables are not set
-
- api_url = f"{databricks_instance}/api/2.1/unity-catalog/tables/{unity_table_full_name}"
-
- headers = {"Authorization": f"Bearer {access_token}"}
- response = requests.get(api_url, headers=headers)
-
- if response.status_code != 200:
+ """
+ Import data contract specification from Unity Catalog API.
+
+ :param data_contract_specification: The data contract specification to be imported.
+ :type data_contract_specification: DataContractSpecification
+ :param unity_table_full_name: The full name of the Unity table.
+ :type unity_table_full_name: Optional[str]
+ :return: The imported data contract specification.
+ :rtype: DataContractSpecification
+ :raises DataContractException: If there is an error retrieving the schema from the API.
+ """
+ try:
+ workspace_client = WorkspaceClient()
+ unity_schema: TableInfo = workspace_client.tables.get(unity_table_full_name)
+ except Exception as e:
  raise DataContractException(
  type="schema",
  name="Retrieve unity catalog schema",
- reason=f"Failed to retrieve unity catalog schema from databricks instance: {response.status_code} {response.text}",
+ reason=f"Failed to retrieve unity catalog schema from databricks profile: {os.getenv('DATABRICKS_CONFIG_PROFILE')}",
  engine="datacontract",
+ original_exception=e,
  )

- convert_unity_schema(data_contract_specification, response.json())
+ convert_unity_schema(data_contract_specification, unity_schema)

  return data_contract_specification


  def convert_unity_schema(
- data_contract_specification: DataContractSpecification, unity_schema: dict
+ data_contract_specification: DataContractSpecification, unity_schema: TableInfo
  ) -> DataContractSpecification:
+ """
+ Convert Unity schema to data contract specification.
+
+ :param data_contract_specification: The data contract specification to be converted.
+ :type data_contract_specification: DataContractSpecification
+ :param unity_schema: The Unity schema to be converted.
+ :type unity_schema: TableInfo
+ :return: The converted data contract specification.
+ :rtype: DataContractSpecification
+ """
  if data_contract_specification.models is None:
  data_contract_specification.models = {}

- fields = import_table_fields(unity_schema.get("columns"))
+ fields = import_table_fields(unity_schema.columns)

- table_id = unity_schema.get("table_id")
+ table_id = unity_schema.name or unity_schema.table_id

  data_contract_specification.models[table_id] = Model(fields=fields, type="table")

- if unity_schema.get("name") is not None:
- data_contract_specification.models[table_id].title = unity_schema.get("name")
+ if unity_schema.name:
+ data_contract_specification.models[table_id].title = unity_schema.name
+
+ if unity_schema.comment:
+ data_contract_specification.models[table_id].description = unity_schema.comment

  return data_contract_specification


- def import_table_fields(table_fields):
+ def import_table_fields(columns: List[ColumnInfo]) -> dict[str, Field]:
+ """
+ Import table fields from Unity schema columns.
+
+ Here we are first converting the `ColumnInfo.type_json` to a Spark StructField object
+ so we can leave the complexity of the Spark field types to the Spark JSON schema parser,
+ then re-use the logic in `datacontract.imports.spark_importer` to convert the StructField
+ into a Field object.
+
+ :param columns: The list of Unity schema columns.
+ :type columns: List[ColumnInfo]
+ :return: A dictionary of imported fields.
+ :rtype: dict[str, Field]
+ """
  imported_fields = {}
- for field in table_fields:
- field_name = field.get("name")
- imported_fields[field_name] = Field()
- imported_fields[field_name].required = field.get("nullable") == "false"
- imported_fields[field_name].description = field.get("comment")
-
- # databricks api 2.1 specifies that type_name can be any of:
- # BOOLEAN | BYTE | SHORT | INT | LONG | FLOAT | DOUBLE | DATE | TIMESTAMP | TIMESTAMP_NTZ | STRING
- # | BINARY | DECIMAL | INTERVAL | ARRAY | STRUCT | MAP | CHAR | NULL | USER_DEFINED_TYPE | TABLE_TYPE
- if field.get("type_name") in ["INTERVAL", "ARRAY", "STRUCT", "MAP", "USER_DEFINED_TYPE", "TABLE_TYPE"]:
- # complex types are not supported, yet
- raise DataContractException(
- type="schema",
- result="failed",
- name="Map unity type to data contract type",
- reason=f"type ${field.get('type_name')} is not supported yet for unity import",
- engine="datacontract",
- )

- imported_fields[field_name].type = map_type_from_unity(field.get("type_name"))
+ for column in columns:
+ struct_field: types.StructField = _type_json_to_spark_field(column.type_json)
+ imported_fields[column.name] = _field_from_struct_type(struct_field)

  return imported_fields


- def map_type_from_unity(type_str: str):
- if type_str == "BOOLEAN":
- return "boolean"
- elif type_str == "BYTE":
- return "bytes"
- elif type_str == "SHORT":
- return "int"
- elif type_str == "INT":
- return "int"
- elif type_str == "LONG":
- return "long"
- elif type_str == "FLOAT":
- return "float"
- elif type_str == "DOUBLE":
- return "double"
- elif type_str == "DATE":
- return "date"
- elif type_str == "TIMESTAMP":
- return "timestamp"
- elif type_str == "TIMESTAMP_NTZ":
- return "timestamp_ntz"
- elif type_str == "STRING":
- return "string"
- elif type_str == "BINARY":
- return "bytes"
- elif type_str == "DECIMAL":
- return "decimal"
- elif type_str == "CHAR":
- return "varchar"
- elif type_str == "NULL":
- return "null"
- else:
- raise DataContractException(
- type="schema",
- result="failed",
- name="Map unity type to data contract type",
- reason=f"Unsupported type {type_str} in unity json definition.",
- engine="datacontract",
- )
+ def _type_json_to_spark_field(type_json: str) -> types.StructField:
+ """
+ Parses a JSON string representing a Spark field and returns a StructField object.
+
+ The reason we do this is to leverage the Spark JSON schema parser to handle the
+ complexity of the Spark field types. The field `type_json` in the Unity API is
+ the output of a `StructField.jsonValue()` call.
+
+ :param type_json: The JSON string representing the Spark field.
+ :type type_json: str
+
+ :return: The StructField object.
+ :rtype: types.StructField
+ """
+ type_dict = json.loads(type_json)
+ return types.StructField.fromJson(type_dict)
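The rewritten Unity importer above no longer calls the REST API by hand: it authenticates through the Databricks SDK, parses each column's `type_json` with pyspark's own schema parser, and then reuses the Spark importer's field conversion. A small sketch of that parsing path, with an illustrative `type_json` value in the `StructField.jsonValue()` format:

```python
# Hedged sketch; the sample type_json is made up for illustration.
import json
from pyspark.sql import types

type_json = '{"name": "order_total", "type": "decimal(10,2)", "nullable": false, "metadata": {"comment": "Total in EUR"}}'
struct_field = types.StructField.fromJson(json.loads(type_json))
print(struct_field.name, struct_field.dataType, struct_field.nullable)
# order_total DecimalType(10,2) False
```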
@@ -114,7 +114,16 @@ def _resolve_definition_ref(ref, spec) -> Definition:

  def _find_by_path_in_spec(definition_path: str, spec: DataContractSpecification):
  path_elements = definition_path.split("/")
- definition = spec.definitions[path_elements[2]]
+ definition_key = path_elements[2]
+ if definition_key not in spec.definitions:
+ raise DataContractException(
+ type="lint",
+ result="failed",
+ name="Check that data contract YAML is valid",
+ reason=f"Cannot resolve definition {definition_key}",
+ engine="datacontract",
+ )
+ definition = spec.definitions[definition_key]
  definition = _find_subfield_in_definition(definition, path_elements[3:])
  return definition

datacontract/lint/urls.py CHANGED
@@ -25,16 +25,30 @@ def fetch_resource(url: str):


  def _set_api_key(headers, url):
- if ".datamesh-manager.com/" not in url:
- return
- datamesh_manager_api_key = os.getenv("DATAMESH_MANAGER_API_KEY")
- if datamesh_manager_api_key is None or datamesh_manager_api_key == "":
- print("Error: Data Mesh Manager API Key is not set. Set env variable DATAMESH_MANAGER_API_KEY.")
- raise DataContractException(
- type="lint",
- name=f"Reading data contract from {url}",
- reason="Error: Data Mesh Manager API Key is not set. Set env variable DATAMESH_MANAGER_API_KEY.",
- engine="datacontract",
- result="error",
- )
- headers["x-api-key"] = datamesh_manager_api_key
+ if ".datamesh-manager.com/" in url:
+ datamesh_manager_api_key = os.getenv("DATAMESH_MANAGER_API_KEY")
+ if datamesh_manager_api_key is None or datamesh_manager_api_key == "":
+ print("Error: Data Mesh Manager API Key is not set. Set env variable DATAMESH_MANAGER_API_KEY.")
+ raise DataContractException(
+ type="lint",
+ name=f"Reading data contract from {url}",
+ reason="Error: Data Mesh Manager API Key is not set. Set env variable DATAMESH_MANAGER_API_KEY.",
+ engine="datacontract",
+ result="error",
+ )
+ headers["x-api-key"] = datamesh_manager_api_key
+ elif ".datacontract-manager.com/" in url:
+ datacontract_manager_api_key = os.getenv("DATACONTRACT_MANAGER_API_KEY")
+ if datacontract_manager_api_key is None or datacontract_manager_api_key == "":
+ print("Error: Data Contract Manager API Key is not set. Set env variable DATACONTRACT_MANAGER_API_KEY.")
+ raise DataContractException(
+ type="lint",
+ name=f"Reading data contract from {url}",
+ reason="Error: Data Contract Manager API Key is not set. Set env variable DATACONTRACT_MANAGER_API_KEY.",
+ engine="datacontract",
+ result="error",
+ )
+ headers["x-api-key"] = datacontract_manager_api_key
+ else:
+ # do nothing
+ pass
@@ -1,6 +1,6 @@
  Metadata-Version: 2.1
  Name: datacontract-cli
- Version: 0.10.12
+ Version: 0.10.13
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
  Project-URL: Homepage, https://cli.datacontract.com
@@ -12,16 +12,16 @@ Requires-Python: >=3.10
  Description-Content-Type: text/markdown
  License-File: LICENSE
  Requires-Dist: typer<0.13,>=0.12
- Requires-Dist: pydantic<2.9.0,>=2.8.2
+ Requires-Dist: pydantic<2.10.0,>=2.8.2
  Requires-Dist: pyyaml~=6.0.1
  Requires-Dist: requests<2.33,>=2.31
- Requires-Dist: fastapi==0.112.0
- Requires-Dist: uvicorn==0.30.5
+ Requires-Dist: fastapi==0.114.2
+ Requires-Dist: uvicorn==0.30.6
  Requires-Dist: fastjsonschema<2.21.0,>=2.19.1
  Requires-Dist: fastparquet==2024.5.0
  Requires-Dist: python-multipart==0.0.9
- Requires-Dist: rich~=13.7.0
- Requires-Dist: simple-ddl-parser==1.6.0
+ Requires-Dist: rich<13.9,>=13.7
+ Requires-Dist: simple-ddl-parser==1.6.1
  Requires-Dist: duckdb==1.0.0
  Requires-Dist: soda-core-duckdb<3.4.0,>=3.3.1
  Requires-Dist: setuptools>=60
@@ -29,7 +29,7 @@ Requires-Dist: python-dotenv~=1.0.0
  Requires-Dist: rdflib==7.0.0
  Requires-Dist: opentelemetry-exporter-otlp-proto-grpc~=1.16
  Requires-Dist: opentelemetry-exporter-otlp-proto-http~=1.16
- Requires-Dist: boto3<1.35.6,>=1.34.41
+ Requires-Dist: boto3<1.35.20,>=1.34.41
  Requires-Dist: jinja-partials>=0.2.1
  Provides-Extra: all
  Requires-Dist: datacontract-cli[bigquery,databricks,dbml,dbt,kafka,postgres,s3,snowflake,sqlserver,trino]; extra == "all"
@@ -39,7 +39,8 @@ Provides-Extra: bigquery
  Requires-Dist: soda-core-bigquery<3.4.0,>=3.3.1; extra == "bigquery"
  Provides-Extra: databricks
  Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "databricks"
- Requires-Dist: databricks-sql-connector<3.4.0,>=3.1.2; extra == "databricks"
+ Requires-Dist: databricks-sql-connector<3.5.0,>=3.1.2; extra == "databricks"
+ Requires-Dist: databricks-sdk<0.33.0,>=0.32.0; extra == "databricks"
  Requires-Dist: soda-core-spark[databricks]<3.4.0,>=3.3.1; extra == "databricks"
  Provides-Extra: dbml
  Requires-Dist: pydbml>=1.1.1; extra == "dbml"
@@ -49,7 +50,7 @@ Provides-Extra: dev
  Requires-Dist: datacontract-cli[all]; extra == "dev"
  Requires-Dist: httpx==0.27.2; extra == "dev"
  Requires-Dist: kafka-python; extra == "dev"
- Requires-Dist: moto==5.0.13; extra == "dev"
+ Requires-Dist: moto==5.0.14; extra == "dev"
  Requires-Dist: pandas>=2.1.0; extra == "dev"
  Requires-Dist: pre-commit<3.9.0,>=3.7.1; extra == "dev"
  Requires-Dist: pyarrow>=12.0.0; extra == "dev"
@@ -65,7 +66,7 @@ Requires-Dist: soda-core-spark-df<3.4.0,>=3.3.1; extra == "kafka"
  Provides-Extra: postgres
  Requires-Dist: soda-core-postgres<3.4.0,>=3.3.1; extra == "postgres"
  Provides-Extra: s3
- Requires-Dist: s3fs==2024.6.1; extra == "s3"
+ Requires-Dist: s3fs==2024.9.0; extra == "s3"
  Provides-Extra: snowflake
  Requires-Dist: snowflake-connector-python[pandas]<3.13,>=3.6; extra == "snowflake"
  Requires-Dist: soda-core-snowflake<3.4.0,>=3.3.1; extra == "snowflake"
@@ -825,7 +826,7 @@ models:
  │ * --format [jsonschema|pydantic-model|sodacl|dbt|dbt-sources|db The export format. [default: None] [required] │
  │ t-staging-sql|odcs|rdf|avro|protobuf|great-expectati │
  │ ons|terraform|avro-idl|sql|sql-query|html|go|bigquer │
- │ y|dbml|spark|sqlalchemy]
+ │ y|dbml|spark|sqlalchemy|data-caterer]
  │ --output PATH Specify the file path where the exported data will be │
  │ saved. If no path is provided, the output will be │
  │ printed to stdout. │
@@ -877,6 +878,7 @@ Available export options:
  | `DBML` | Export to a DBML Diagram description | ✅ |
  | `spark` | Export to a Spark StructType | ✅ |
  | `sqlalchemy` | Export to SQLAlchemy Models | ✅ |
+ | `data-caterer` | Export to Data Caterer in YAML format | ✅ |
  | Missing something? | Please create an issue on GitHub | TBD |

  #### Great Expectations
@@ -940,6 +942,20 @@ To specify custom Avro properties in your data contract, you can define them wit

  >NOTE: At this moment, we just support [logicalType](https://avro.apache.org/docs/1.11.0/spec.html#Logical+Types) and [default](https://avro.apache.org/docs/1.11.0/spec.htm)

+ #### Data Caterer
+
+ The export function converts the data contract to a data generation task in YAML format that can be
+ ingested by [Data Caterer](https://github.com/data-catering/data-caterer). This gives you the
+ ability to generate production-like data in any environment based off your data contract.
+
+ ```shell
+ datacontract export datacontract.yaml --format data-caterer --model orders
+ ```
+
+ You can further customise the way data is generated via adding
+ [additional metadata in the YAML](https://data.catering/setup/generator/data-generator/)
+ to suit your needs.
+
  #### Example Configuration

  ```yaml
@@ -18,7 +18,7 @@ datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLM
  datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
  datacontract/engines/soda/connections/databricks.py,sha256=lpMju-o_TzLZeF0EEVwePPr8JahqvFnj5xRYjF15fc8,561
  datacontract/engines/soda/connections/duckdb.py,sha256=OVaxDO9HwfD4M6FYDIDS4zwnUJ20uAM1jz_Fy3EkClo,6271
- datacontract/engines/soda/connections/kafka.py,sha256=VPbH0GfJO_YuBGTyQ3xehjm-sVzSGn4y0bt-fZqPcok,7481
+ datacontract/engines/soda/connections/kafka.py,sha256=KBkE9ECxAe_sN3bmIAzZm9hDVrovk6P-op35bpWYoDQ,7555
  datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
  datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
  datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
@@ -26,24 +26,25 @@ datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbET
  datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datacontract/export/avro_converter.py,sha256=n14GtP88c5lpcxyxF5W86k7xYGiwc6j0PIedElbeWjM,4171
  datacontract/export/avro_idl_converter.py,sha256=-zZxV0OAC76tMkAA9hXbEtp2wtpildKJb0OIbPhgoJE,9794
- datacontract/export/bigquery_converter.py,sha256=chAxgbYybyLG5v6SwqIweK9PePvE-36FStR7fyA_F_4,4577
+ datacontract/export/bigquery_converter.py,sha256=gVo6PMoUalc3UuuMtw4CGgvm43ineHNtdu-F1OHKOwA,4639
  datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
+ datacontract/export/data_caterer_converter.py,sha256=FaY6GkuR1SPjLbz6FgraEItOVVrYpv0Q5uyHI5fF44Y,5324
  datacontract/export/dbml_converter.py,sha256=Lmg0so_YP652UvDiNYyhAzzxtYvaHQBWFMUHzWadSDA,4739
  datacontract/export/dbt_converter.py,sha256=QfapVeunwab7bWn_B9-J_1U8fm1wdsPBW_Ws2b99o1E,9390
- datacontract/export/exporter.py,sha256=BKUAUT6vifrBn0ktIZ7V0HHAMWhmQMohM2v-TBoHOlA,2875
- datacontract/export/exporter_factory.py,sha256=cKyoHolVw3HrUtaqMKyh5NppvlNd_v54xJFC5bOD0gI,4931
+ datacontract/export/exporter.py,sha256=yicZSFRttjFp7kVrGxijD_2QYEbArEhQUsWrlxqP7k8,2909
+ datacontract/export/exporter_factory.py,sha256=og8c6ljKtkaZvHitZq66tdUvz5mpYJ9b4kFWZccXOgY,5111
  datacontract/export/go_converter.py,sha256=oWkEjOXBIGN8emTaxeJD8xpB3VYP4sVaNzKX-Oeqgw0,3330
  datacontract/export/great_expectations_converter.py,sha256=LIuWHn3h7xRrDDs1YoibHOI8NVF2Izh5IZOQFLeWN7k,5302
  datacontract/export/html_export.py,sha256=i7jckL80osA8incF0YoJdq3NaYmV1_3OU0cZBfPdNDs,2578
- datacontract/export/jsonschema_converter.py,sha256=Zqx4K7RZWfE9n_F4CMext82jd2Gr6EoqOnBXChRJgfM,4948
+ datacontract/export/jsonschema_converter.py,sha256=O3pCtqiQ41m0M-ohmmq3eMgIioD3DRhnWGAsEsIKbKQ,5557
  datacontract/export/odcs_converter.py,sha256=WngHoTF7R9uMDLCedBso7m2s0yDJQS5HTACtH3AhG-E,4673
  datacontract/export/protobuf_converter.py,sha256=Baqq2uvRg0AHFo6gR-hEYdLyiYRWziC1Q4AujlHgCzo,3192
  datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
  datacontract/export/rdf_converter.py,sha256=nNYVKJ5bQEq-rRz3yHwpvbsNbvjoOMwkCfkev8Y11H0,6436
  datacontract/export/sodacl_converter.py,sha256=Uv3BOsq20YKCHmSU6mN-6L2hPPF4V9fPxDoJ42okfHs,7280
- datacontract/export/spark_converter.py,sha256=wyJLxsyE6lQx_JyMsPtkN67ogggNUef0TE4kcIicZaA,6990
+ datacontract/export/spark_converter.py,sha256=lynG9dA2wUOpiqqRI-E3a56qxybG4PPZ1kETjL8jywo,7034
  datacontract/export/sql_converter.py,sha256=grDTj3MGhVc5BbJbZ__LFawJtrsnv4ApmOMX4ohPnCQ,4802
- datacontract/export/sql_type_converter.py,sha256=hxIpkCZ3Isi_8Hj06D6PP0r92091J3lSy1QzSQdfeLw,10854
+ datacontract/export/sql_type_converter.py,sha256=jw0ljv5nsGaMBGxWrGa5xCAZsm0bbIy94pRF_tbnDwM,10561
  datacontract/export/sqlalchemy_converter.py,sha256=x2_MVvDMfqgf_c5yfWimSC0um718EYYUpFiCe1-HN4g,6469
  datacontract/export/terraform_converter.py,sha256=IFwn-6BWPVkfwDQbj_Gl0GwbJdMKiwKgXCXz5A64cEs,2163
  datacontract/imports/avro_importer.py,sha256=HkzaMCa9A20D3Z0O_7I67TRcqyNV9XGYX_0JAF51qF8,9937
@@ -55,17 +56,17 @@ datacontract/imports/importer.py,sha256=FE9YsrcNunLiEpNGXd6PAHb_Zs8_dVUQPPBqOVAM
  datacontract/imports/importer_factory.py,sha256=0D8imN7WvQyvASkYHN8JTvNO9VLBllwHlyTeZNUD9Js,3155
  datacontract/imports/jsonschema_importer.py,sha256=cY70b2bFx9Kfsnq5vNj0iy_GvNOIcBH8xeFjrsxv4bQ,4925
  datacontract/imports/odcs_importer.py,sha256=uDBCzWi8mKCslw6Og5t-037dVVVZVsbJjK3EHJH_zQI,7474
- datacontract/imports/spark_importer.py,sha256=fEOtUPVBRkhvaxsdk6fgRd_j-ka1TDwdrh6KkTLvv30,4942
+ datacontract/imports/spark_importer.py,sha256=ZKjfZ-lqTRO_bU-QSvAeaXf8PWuR7FJZocUOWt8g0Qg,5099
  datacontract/imports/sql_importer.py,sha256=CiOzSthTwyjCHxYQNVO7j4udchEFER8gtXrr_IyX2dU,2890
- datacontract/imports/unity_importer.py,sha256=5qoU-PRq8ABXxiqVll--L2_6V0iAVQgkooWcvRuchLU,5604
+ datacontract/imports/unity_importer.py,sha256=a13eBZBYwIzaaeKE4wDWVEPGq5aCM4iZxmBxxpZe3MU,6737
  datacontract/init/download_datacontract_file.py,sha256=pj_4mhWKlEtfueWohDgkb1nyuG5ERDipUDszxKwpZUs,413
  datacontract/integration/datamesh_manager.py,sha256=iNjDrcZfH8vyNbyQPbxkv7XMfG5rtbcLZXIlfMJ0c8c,2752
  datacontract/integration/opentelemetry.py,sha256=0XsqOsc06qNtXC5KqZCxyeC_IGlIi63lYWHh3KKlOO0,3828
  datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
  datacontract/lint/lint.py,sha256=GhgBwfDgihXfWIu1WUfYeJoxfYacVJrwq3L2KO4Z2Io,5076
- datacontract/lint/resolve.py,sha256=KOGKrf5wZYgsa5-Uc0x5-C5GX1aVt6TSftIwD8zxy_Y,8815
+ datacontract/lint/resolve.py,sha256=6t5eqrtn3nwIA0Hupfa2NaDP92WFxiR5Qp9VGmp54R8,9159
  datacontract/lint/schema.py,sha256=php99jF3puwCWrE02JD1ownC5UFEKCZsLH89UGOxMww,1469
- datacontract/lint/urls.py,sha256=NHTbwadnm1VCxsiF7clfDvsxJ8-D3Mi5AzDj029E6wQ,1325
+ datacontract/lint/urls.py,sha256=f8ZbAyPkqPLvxkf7bC9kFoFN9qbn687NI0FhKsdhkX4,2120
  datacontract/lint/linters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
  datacontract/lint/linters/description_linter.py,sha256=Cm99NTWo75kDRIW63qz5uEa8xsug_8dFURPRC1twodw,1554
  datacontract/lint/linters/example_model_linter.py,sha256=EljSvzI4RSswdZrGa840bX6Zjat2GI16o5yDmKJfDOg,3972
@@ -88,9 +89,9 @@ datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4
  datacontract/templates/partials/model_field.html,sha256=kh_ZIqJuayyxN-zDNIUPIoXOZeehGxXQxiImYB6G5qY,6946
  datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
  datacontract/templates/style/output.css,sha256=F3oEhUpuv8kA_dWr4pJymBS_Ju6huIIZdLMkJzPzMmU,25647
- datacontract_cli-0.10.12.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
- datacontract_cli-0.10.12.dist-info/METADATA,sha256=Kg7oXLjfYrIU1uNuhJMh5VSlwOBYX9NukWQpznbcQow,88281
- datacontract_cli-0.10.12.dist-info/WHEEL,sha256=cVxcB9AmuTcXqmwrtPhNK88dr7IR_b6qagTj0UvIEbY,91
- datacontract_cli-0.10.12.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
- datacontract_cli-0.10.12.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
- datacontract_cli-0.10.12.dist-info/RECORD,,
+ datacontract_cli-0.10.13.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
+ datacontract_cli-0.10.13.dist-info/METADATA,sha256=Y1xIanEO-C--EMNUf7sQuZjKtFXS3Up2cxJpz0EB6HE,89019
+ datacontract_cli-0.10.13.dist-info/WHEEL,sha256=GV9aMThwP_4oNCtvEC2ec3qUYutgWeAzklro_0m4WJQ,91
+ datacontract_cli-0.10.13.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+ datacontract_cli-0.10.13.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+ datacontract_cli-0.10.13.dist-info/RECORD,,
@@ -1,5 +1,5 @@
  Wheel-Version: 1.0
- Generator: setuptools (74.1.2)
+ Generator: setuptools (75.1.0)
  Root-Is-Purelib: true
  Tag: py3-none-any