datacontract-cli 0.10.13__py3-none-any.whl → 0.10.15__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.

Files changed (77)
  1. datacontract/breaking/breaking.py +227 -9
  2. datacontract/breaking/breaking_rules.py +24 -0
  3. datacontract/catalog/catalog.py +1 -1
  4. datacontract/cli.py +104 -32
  5. datacontract/data_contract.py +35 -5
  6. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +1 -1
  7. datacontract/engines/fastjsonschema/check_jsonschema.py +114 -22
  8. datacontract/engines/soda/check_soda_execute.py +5 -3
  9. datacontract/engines/soda/connections/duckdb.py +1 -0
  10. datacontract/engines/soda/connections/kafka.py +38 -17
  11. datacontract/export/avro_converter.py +8 -1
  12. datacontract/export/avro_idl_converter.py +2 -2
  13. datacontract/export/bigquery_converter.py +4 -3
  14. datacontract/export/data_caterer_converter.py +1 -1
  15. datacontract/export/dbml_converter.py +2 -4
  16. datacontract/export/dbt_converter.py +2 -3
  17. datacontract/export/dcs_exporter.py +6 -0
  18. datacontract/export/exporter.py +5 -2
  19. datacontract/export/exporter_factory.py +16 -3
  20. datacontract/export/go_converter.py +3 -2
  21. datacontract/export/great_expectations_converter.py +202 -40
  22. datacontract/export/html_export.py +1 -1
  23. datacontract/export/jsonschema_converter.py +3 -2
  24. datacontract/export/{odcs_converter.py → odcs_v2_exporter.py} +5 -5
  25. datacontract/export/odcs_v3_exporter.py +294 -0
  26. datacontract/export/pandas_type_converter.py +40 -0
  27. datacontract/export/protobuf_converter.py +1 -1
  28. datacontract/export/rdf_converter.py +4 -5
  29. datacontract/export/sodacl_converter.py +86 -2
  30. datacontract/export/spark_converter.py +10 -7
  31. datacontract/export/sql_converter.py +1 -2
  32. datacontract/export/sql_type_converter.py +55 -11
  33. datacontract/export/sqlalchemy_converter.py +1 -2
  34. datacontract/export/terraform_converter.py +1 -1
  35. datacontract/imports/avro_importer.py +1 -1
  36. datacontract/imports/bigquery_importer.py +1 -1
  37. datacontract/imports/dbml_importer.py +2 -2
  38. datacontract/imports/dbt_importer.py +3 -2
  39. datacontract/imports/glue_importer.py +5 -3
  40. datacontract/imports/iceberg_importer.py +161 -0
  41. datacontract/imports/importer.py +2 -0
  42. datacontract/imports/importer_factory.py +12 -1
  43. datacontract/imports/jsonschema_importer.py +3 -2
  44. datacontract/imports/odcs_importer.py +25 -168
  45. datacontract/imports/odcs_v2_importer.py +177 -0
  46. datacontract/imports/odcs_v3_importer.py +309 -0
  47. datacontract/imports/parquet_importer.py +81 -0
  48. datacontract/imports/spark_importer.py +2 -1
  49. datacontract/imports/sql_importer.py +1 -1
  50. datacontract/imports/unity_importer.py +3 -3
  51. datacontract/integration/datamesh_manager.py +1 -1
  52. datacontract/integration/opentelemetry.py +0 -1
  53. datacontract/lint/lint.py +2 -1
  54. datacontract/lint/linters/description_linter.py +1 -0
  55. datacontract/lint/linters/example_model_linter.py +1 -0
  56. datacontract/lint/linters/field_pattern_linter.py +1 -0
  57. datacontract/lint/linters/field_reference_linter.py +1 -0
  58. datacontract/lint/linters/notice_period_linter.py +1 -0
  59. datacontract/lint/linters/quality_schema_linter.py +1 -0
  60. datacontract/lint/linters/valid_constraints_linter.py +1 -0
  61. datacontract/lint/resolve.py +14 -9
  62. datacontract/lint/resources.py +21 -0
  63. datacontract/lint/schema.py +1 -1
  64. datacontract/lint/urls.py +4 -2
  65. datacontract/model/data_contract_specification.py +83 -13
  66. datacontract/model/odcs.py +11 -0
  67. datacontract/model/run.py +21 -12
  68. datacontract/templates/index.html +6 -6
  69. datacontract/web.py +2 -3
  70. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/METADATA +176 -93
  71. datacontract_cli-0.10.15.dist-info/RECORD +105 -0
  72. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/WHEEL +1 -1
  73. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +0 -48
  74. datacontract_cli-0.10.13.dist-info/RECORD +0 -97
  75. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/LICENSE +0 -0
  76. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/entry_points.txt +0 -0
  77. {datacontract_cli-0.10.13.dist-info → datacontract_cli-0.10.15.dist-info}/top_level.txt +0 -0

datacontract/data_contract.py

@@ -8,7 +8,12 @@ import yaml
 if typing.TYPE_CHECKING:
     from pyspark.sql import SparkSession

-from datacontract.breaking.breaking import models_breaking_changes, quality_breaking_changes
+from datacontract.breaking.breaking import (
+    info_breaking_changes,
+    models_breaking_changes,
+    quality_breaking_changes,
+    terms_breaking_changes,
+)
 from datacontract.engines.datacontract.check_that_datacontract_contains_valid_servers_configuration import (
     check_that_datacontract_contains_valid_server_configuration,
 )
@@ -17,7 +22,6 @@ from datacontract.engines.soda.check_soda_execute import check_soda_execute
 from datacontract.export.exporter import ExportFormat
 from datacontract.export.exporter_factory import exporter_factory
 from datacontract.imports.importer_factory import importer_factory
-
 from datacontract.integration.datamesh_manager import publish_test_results_to_datamesh_manager
 from datacontract.integration.opentelemetry import publish_test_results_to_opentelemetry
 from datacontract.lint import resolve
@@ -28,10 +32,10 @@ from datacontract.lint.linters.field_reference_linter import FieldReferenceLinte
 from datacontract.lint.linters.notice_period_linter import NoticePeriodLinter
 from datacontract.lint.linters.quality_schema_linter import QualityUsesSchemaLinter
 from datacontract.lint.linters.valid_constraints_linter import ValidFieldConstraintsLinter
-from datacontract.model.breaking_change import BreakingChanges, BreakingChange, Severity
+from datacontract.model.breaking_change import BreakingChange, BreakingChanges, Severity
 from datacontract.model.data_contract_specification import DataContractSpecification, Server
 from datacontract.model.exceptions import DataContractException
-from datacontract.model.run import Run, Check
+from datacontract.model.run import Check, Run


 class DataContract:
@@ -199,7 +203,15 @@ class DataContract:

         except DataContractException as e:
             run.checks.append(
-                Check(type=e.type, result=e.result, name=e.name, reason=e.reason, engine=e.engine, details="")
+                Check(
+                    type=e.type,
+                    name=e.name,
+                    result=e.result,
+                    reason=e.reason,
+                    model=e.model,
+                    engine=e.engine,
+                    details="",
+                )
             )
             run.log_error(str(e))
         except Exception as e:
@@ -268,6 +280,24 @@ class DataContract:

         breaking_changes = list[BreakingChange]()

+        breaking_changes.extend(
+            info_breaking_changes(
+                old_info=old.info,
+                new_info=new.info,
+                new_path=other._data_contract_file,
+                include_severities=include_severities,
+            )
+        )
+
+        breaking_changes.extend(
+            terms_breaking_changes(
+                old_terms=old.terms,
+                new_terms=new.terms,
+                new_path=other._data_contract_file,
+                include_severities=include_severities,
+            )
+        )
+
         breaking_changes.extend(
             quality_breaking_changes(
                 old_quality=old.quality,
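
With the new imports above, the comparison path also diffs the info and terms sections, not just models and quality. A minimal, hedged sketch of the effect (file names are placeholders, and the changelog method name is assumed from the matching CLI command rather than shown in this diff):

from datacontract.data_contract import DataContract

# Placeholder file names; `changelog` is the assumed comparison entry point.
# The resulting BreakingChanges now also contain entries produced by
# info_breaking_changes and terms_breaking_changes.
old_contract = DataContract(data_contract_file="datacontract-v1.yaml")
new_contract = DataContract(data_contract_file="datacontract-v2.yaml")
changes = old_contract.changelog(new_contract)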

datacontract/engines/datacontract/check_that_datacontract_file_exists.py

@@ -1,6 +1,6 @@
 import os

-from datacontract.model.run import Run, Check
+from datacontract.model.run import Check, Run


 def check_that_datacontract_file_exists(run: Run, file_path: str):

datacontract/engines/fastjsonschema/check_jsonschema.py

@@ -1,31 +1,114 @@
 import json
 import logging
 import os
+import threading
+from typing import List, Optional

 import fastjsonschema
+from fastjsonschema import JsonSchemaValueException

 from datacontract.engines.fastjsonschema.s3.s3_read_files import yield_s3_files
 from datacontract.export.jsonschema_converter import to_jsonschema
 from datacontract.model.data_contract_specification import DataContractSpecification, Server
 from datacontract.model.exceptions import DataContractException
-from datacontract.model.run import Run, Check
+from datacontract.model.run import Check, Run

+# Thread-safe cache for primaryKey fields.
+_primary_key_cache = {}
+_cache_lock = threading.Lock()

-def validate_json_stream(model_name, validate, json_stream):
+
+def get_primary_key_field(schema: dict, model_name: str) -> Optional[str]:
+    # Check cache first.
+    with _cache_lock:
+        cached_value = _primary_key_cache.get(model_name)
+        if cached_value is not None:
+            return cached_value
+
+    # Find primaryKey field.
+    fields = schema.get("properties", {})
+    for field_name, attributes in fields.items():
+        if attributes.get("primaryKey", False):
+            # Cache the result before returning.
+            with _cache_lock:
+                _primary_key_cache[model_name] = field_name
+            return field_name
+
+    # Return None if no primary key was found.
+    return None
+
+
+def get_primary_key_value(schema: dict, model_name: str, json_object: dict) -> Optional[str]:
+    # Get the `primaryKey` field.
+    primary_key_field = get_primary_key_field(schema, model_name)
+    if not primary_key_field:
+        return None
+
+    # Return the value of the `primaryKey` field in the JSON object.
+    return json_object.get(primary_key_field)
+
+
+def process_exceptions(run, exceptions: List[DataContractException]):
+    if not exceptions:
+        return
+
+    # Define the maximum number of errors to process (can be adjusted by defining an ENV variable).
     try:
-        logging.info("Validating JSON")
-        for json_obj in json_stream:
+        error_limit = int(os.getenv("DATACONTRACT_MAX_ERRORS", 500))
+    except ValueError:
+        # Fallback to default if environment variable is invalid.
+        error_limit = 500
+
+    # Calculate the effective limit to avoid index out of range
+    limit = min(len(exceptions), error_limit)
+
+    # Add all exceptions up to the limit - 1 to `run.checks`.
+    DEFAULT_ERROR_MESSAGE = "An error occurred during validation phase. See the logs for more details."
+    run.checks.extend(
+        [
+            Check(
+                type=exception.type,
+                name=exception.name,
+                result=exception.result,
+                reason=exception.reason,
+                model=exception.model,
+                engine=exception.engine,
+                message=exception.message or DEFAULT_ERROR_MESSAGE,
+            )
+            for exception in exceptions[: limit - 1]
+        ]
+    )
+
+    # Raise the last exception within the limit.
+    last_exception = exceptions[limit - 1]
+    raise last_exception
+
+
+def validate_json_stream(
+    schema: dict, model_name: str, validate: callable, json_stream: list[dict]
+) -> List[DataContractException]:
+    logging.info(f"Validating JSON stream for model: '{model_name}'.")
+    exceptions: List[DataContractException] = []
+    for json_obj in json_stream:
+        try:
             validate(json_obj)
-        return True
-    except fastjsonschema.JsonSchemaValueException as e:
-        raise DataContractException(
-            type="schema",
-            name="Check that JSON has valid schema",
-            model=model_name,
-            reason=e.message,
-            engine="jsonschema",
-            original_exception=e,
-        )
+        except JsonSchemaValueException as e:
+            logging.warning(f"Validation failed for JSON object with type: '{model_name}'.")
+            primary_key_value = get_primary_key_value(schema, model_name, json_obj)
+            exceptions.append(
+                DataContractException(
+                    type="schema",
+                    name="Check that JSON has valid schema",
+                    result="failed",
+                    reason=f"{f'#{primary_key_value}: ' if primary_key_value is not None else ''}{e.message}",
+                    model=model_name,
+                    engine="jsonschema",
+                    message=e.message,
+                )
+            )
+    if not exceptions:
+        logging.info(f"All JSON objects in the stream passed validation for model: '{model_name}'.")
+    return exceptions


 def read_json_lines(file):
@@ -59,17 +142,22 @@ def read_json_file_content(file_content: str):
     yield json.loads(file_content)


-def process_json_file(run, model_name, validate, file, delimiter):
+def process_json_file(run, schema, model_name, validate, file, delimiter):
     if delimiter == "new_line":
         json_stream = read_json_lines(file)
     elif delimiter == "array":
         json_stream = read_json_array(file)
     else:
         json_stream = read_json_file(file)
-    validate_json_stream(model_name, validate, json_stream)

+    # Validate the JSON stream and collect exceptions.
+    exceptions = validate_json_stream(schema, model_name, validate, json_stream)

-def process_local_file(run, server, model_name, validate):
+    # Handle all errors from schema validation.
+    process_exceptions(run, exceptions)
+
+
+def process_local_file(run, server, schema, model_name, validate):
     path = server.path
     if "{model}" in path:
         path = path.format(model=model_name)
@@ -79,7 +167,7 @@ def process_local_file(run, server, model_name, validate):
     else:
         logging.info(f"Processing file {path}")
         with open(path, "r") as file:
-            process_json_file(run, model_name, validate, file, server.delimiter)
+            process_json_file(run, schema, model_name, validate, file, server.delimiter)


 def process_directory(run, path, server, model_name, validate):
@@ -94,7 +182,7 @@ def process_directory(run, path, server, model_name, validate):
     return success


-def process_s3_file(server, model_name, validate):
+def process_s3_file(run, server, schema, model_name, validate):
     s3_endpoint_url = server.endpointUrl
     s3_location = server.location
     if "{model}" in s3_location:
@@ -118,7 +206,11 @@ def process_s3_file(server, model_name, validate):
             engine="datacontract",
         )

-    return validate_json_stream(model_name, validate, json_stream)
+    # Validate the JSON stream and collect exceptions.
+    exceptions = validate_json_stream(schema, model_name, validate, json_stream)
+
+    # Handle all errors from schema validation.
+    process_exceptions(run, exceptions)


 def check_jsonschema(run: Run, data_contract: DataContractSpecification, server: Server):
@@ -155,9 +247,9 @@ def check_jsonschema(run: Run, data_contract: DataContractSpecification, server:

         # Process files based on server type
         if server.type == "local":
-            process_local_file(run, server, model_name, validate)
+            process_local_file(run, server, schema, model_name, validate)
         elif server.type == "s3":
-            process_s3_file(server, model_name, validate)
+            process_s3_file(run, server, schema, model_name, validate)
         elif server.type == "gcs":
            run.checks.append(
                Check(
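
Taken together, these check_jsonschema changes collect every schema violation instead of raising on the first one, cap how many failures are reported via the DATACONTRACT_MAX_ERRORS environment variable (default 500), and prefix each failure reason with the record's primary key when the schema marks one. A hedged sketch of the primary-key helpers against an illustrative schema dict (not taken from the release):

schema = {
    "properties": {
        "order_id": {"type": "string", "primaryKey": True},
        "amount": {"type": "number"},
    }
}

# get_primary_key_field() scans the properties once and caches the hit per model
# name; get_primary_key_value() then reads that field from a failing object so
# the check reason can be prefixed, e.g. "#1001: <validation message>".
assert get_primary_key_field(schema, "orders") == "order_id"
assert get_primary_key_value(schema, "orders", {"order_id": "1001", "amount": -5}) == "1001"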

datacontract/engines/soda/check_soda_execute.py

@@ -12,7 +12,7 @@ from datacontract.engines.soda.connections.sqlserver import to_sqlserver_soda_co
 from datacontract.engines.soda.connections.trino import to_trino_soda_configuration
 from datacontract.export.sodacl_converter import to_sodacl_yaml
 from datacontract.model.data_contract_specification import DataContractSpecification, Server
-from datacontract.model.run import Run, Check, Log
+from datacontract.model.run import Check, Log, Run


 def check_soda_execute(run: Run, data_contract: DataContractSpecification, server: Server, spark, tmp_dir):
@@ -176,9 +176,11 @@ def update_reason(check, c):
         if block["title"] == "Diagnostics":
             # Extract and print the 'text' value
             diagnostics_text = block["text"]
-            print(diagnostics_text)
+            # print(diagnostics_text)
             diagnostics_text_split = diagnostics_text.split(":icon-fail: ")
             if len(diagnostics_text_split) > 1:
                 check.reason = diagnostics_text_split[1].strip()
-                print(check.reason)
+                # print(check.reason)
             break  # Exit the loop once the desired block is found
+    if c["diagnostics"]["fail"] is not None:
+        check.reason = f"Got: {c['diagnostics']['value']} Expected: {c['diagnostics']['fail']}"

datacontract/engines/soda/connections/duckdb.py

@@ -1,6 +1,7 @@
 import os

 import duckdb
+
 from datacontract.export.csv_type_converter import convert_to_duckdb_csv_type
 from datacontract.model.run import Run


datacontract/engines/soda/connections/kafka.py

@@ -2,7 +2,7 @@ import logging
 import os

 from datacontract.export.avro_converter import to_avro_schema_json
-from datacontract.model.data_contract_specification import DataContractSpecification, Server, Field
+from datacontract.model.data_contract_specification import DataContractSpecification, Field, Server
 from datacontract.model.exceptions import DataContractException


@@ -69,8 +69,8 @@ def read_kafka_topic(spark, data_contract: DataContractSpecification, server: Se

 def process_avro_format(df, model_name, model):
     try:
-        from pyspark.sql.functions import col, expr
         from pyspark.sql.avro.functions import from_avro
+        from pyspark.sql.functions import col, expr
     except ImportError as e:
         raise DataContractException(
             type="schema",
@@ -112,17 +112,38 @@ def get_auth_options():
     """Retrieve Kafka authentication options from environment variables."""
     kafka_sasl_username = os.getenv("DATACONTRACT_KAFKA_SASL_USERNAME")
     kafka_sasl_password = os.getenv("DATACONTRACT_KAFKA_SASL_PASSWORD")
+    kafka_sasl_mechanism = os.getenv("DATACONTRACT_KAFKA_SASL_MECHANISM", "PLAIN").upper()

-    if kafka_sasl_username is None or kafka_sasl_username == "":
+    # Skip authentication if credentials are not provided
+    if not kafka_sasl_username or not kafka_sasl_password:
         return {}

-    return {
-        "kafka.sasl.mechanism": "PLAIN",
-        "kafka.security.protocol": "SASL_SSL",
-        "kafka.sasl.jaas.config": (
+    # SASL mechanisms supported by Kafka
+    jaas_config = {
+        "PLAIN": (
             f"org.apache.kafka.common.security.plain.PlainLoginModule required "
             f'username="{kafka_sasl_username}" password="{kafka_sasl_password}";'
         ),
+        "SCRAM-SHA-256": (
+            f"org.apache.kafka.common.security.scram.ScramLoginModule required "
+            f'username="{kafka_sasl_username}" password="{kafka_sasl_password}";'
+        ),
+        "SCRAM-SHA-512": (
+            f"org.apache.kafka.common.security.scram.ScramLoginModule required "
+            f'username="{kafka_sasl_username}" password="{kafka_sasl_password}";'
+        ),
+        # Add more mechanisms as needed
+    }
+
+    # Validate SASL mechanism
+    if kafka_sasl_mechanism not in jaas_config:
+        raise ValueError(f"Unsupported SASL mechanism: {kafka_sasl_mechanism}")
+
+    # Return config
+    return {
+        "kafka.sasl.mechanism": kafka_sasl_mechanism,
+        "kafka.security.protocol": "SASL_SSL",
+        "kafka.sasl.jaas.config": jaas_config[kafka_sasl_mechanism],
     }


@@ -146,21 +167,21 @@ def to_struct_type(fields):
 def to_struct_field(field_name: str, field: Field):
     try:
         from pyspark.sql.types import (
-            StructType,
-            StructField,
-            StringType,
+            ArrayType,
+            BinaryType,
+            BooleanType,
+            DataType,
+            DateType,
             DecimalType,
             DoubleType,
             IntegerType,
             LongType,
-            BooleanType,
-            TimestampType,
-            TimestampNTZType,
-            DateType,
-            BinaryType,
-            ArrayType,
             NullType,
-            DataType,
+            StringType,
+            StructField,
+            StructType,
+            TimestampNTZType,
+            TimestampType,
         )
     except ImportError as e:
         raise DataContractException(
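
get_auth_options() now selects the SASL mechanism from DATACONTRACT_KAFKA_SASL_MECHANISM instead of hard-coding PLAIN, and skips authentication when either credential is missing. A hedged sketch using placeholder values (get_auth_options is the function shown above):

import os

os.environ["DATACONTRACT_KAFKA_SASL_USERNAME"] = "svc-user"
os.environ["DATACONTRACT_KAFKA_SASL_PASSWORD"] = "svc-password"
os.environ["DATACONTRACT_KAFKA_SASL_MECHANISM"] = "scram-sha-512"  # upper-cased internally

options = get_auth_options()
assert options["kafka.sasl.mechanism"] == "SCRAM-SHA-512"
assert options["kafka.security.protocol"] == "SASL_SSL"
assert "ScramLoginModule" in options["kafka.sasl.jaas.config"]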

datacontract/export/avro_converter.py

@@ -81,9 +81,16 @@ def to_avro_type(field: Field, field_name: str) -> str | dict:
         return "null"
     if field.type in ["string", "varchar", "text"]:
         return "string"
-    elif field.type in ["number", "decimal", "numeric"]:
+    elif field.type in ["number", "numeric"]:
         # https://avro.apache.org/docs/1.11.1/specification/#decimal
         return "bytes"
+    elif field.type in ["decimal"]:
+        typeVal = {"type": "bytes", "logicalType": "decimal"}
+        if field.scale is not None:
+            typeVal["scale"] = field.scale
+        if field.precision is not None:
+            typeVal["precision"] = field.precision
+        return typeVal
     elif field.type in ["float", "double"]:
         return "double"
     elif field.type in ["integer", "int"]:
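
A hedged sketch of what the new decimal branch returns (the Field construction is illustrative; scale and precision are the attributes the branch reads):

field = Field(type="decimal", precision=10, scale=2)
assert to_avro_type(field, "price") == {
    "type": "bytes",
    "logicalType": "decimal",
    "scale": 2,
    "precision": 10,
}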

datacontract/export/avro_idl_converter.py

@@ -3,12 +3,11 @@ from dataclasses import dataclass
 from enum import Enum
 from io import StringIO

+from datacontract.export.exporter import Exporter
 from datacontract.lint.resolve import inline_definitions_into_data_contract
 from datacontract.model.data_contract_specification import DataContractSpecification, Field
 from datacontract.model.exceptions import DataContractException

-from datacontract.export.exporter import Exporter
-

 class AvroPrimitiveType(Enum):
     int = "int"
@@ -64,6 +63,7 @@ class AvroIDLProtocol:
     model_types: list[AvroModelType]


+# TODO use DATACONTRACT_TYPES from datacontract/model/data_contract_specification.py
 avro_primitive_types = set(
     [
         "string",

datacontract/export/bigquery_converter.py

@@ -2,10 +2,9 @@ import json
 import logging
 from typing import Dict, List

-from datacontract.model.data_contract_specification import Model, Field, Server
-from datacontract.model.exceptions import DataContractException
-
 from datacontract.export.exporter import Exporter, _check_models_for_export
+from datacontract.model.data_contract_specification import Field, Model, Server
+from datacontract.model.exceptions import DataContractException


 class BigQueryExporter(Exporter):
@@ -109,6 +108,8 @@ def map_type_to_bigquery(field: Field) -> str:
         return "NUMERIC"
     elif field_type.lower() == "double":
         return "BIGNUMERIC"
+    elif field_type.lower() in ["object", "record"] and not field.fields:
+        return "JSON"
     elif field_type.lower() in ["object", "record", "array"]:
         return "RECORD"
     elif field_type.lower() == "struct":
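
A hedged sketch of the new BigQuery mapping with illustrative Field values: an object or record field without nested fields now maps to JSON, while one with nested fields still falls through to RECORD.

assert map_type_to_bigquery(Field(type="object")) == "JSON"
assert map_type_to_bigquery(Field(type="object", fields={"id": Field(type="string")})) == "RECORD"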

datacontract/export/data_caterer_converter.py

@@ -3,7 +3,7 @@ from typing import Dict
 import yaml

 from datacontract.export.exporter import Exporter
-from datacontract.model.data_contract_specification import DataContractSpecification, Model, Field, Server
+from datacontract.model.data_contract_specification import DataContractSpecification, Field, Model, Server


 class DataCatererExporter(Exporter):

datacontract/export/dbml_converter.py

@@ -3,13 +3,11 @@ from importlib.metadata import version
 from typing import Tuple

 import pytz
-from datacontract.model.exceptions import DataContractException

 import datacontract.model.data_contract_specification as spec
-from datacontract.export.sql_type_converter import convert_to_sql_type
-
-
 from datacontract.export.exporter import Exporter
+from datacontract.export.sql_type_converter import convert_to_sql_type
+from datacontract.model.exceptions import DataContractException


 class DbmlExporter(Exporter):

datacontract/export/dbt_converter.py

@@ -2,10 +2,9 @@ from typing import Dict

 import yaml

-from datacontract.export.sql_type_converter import convert_to_sql_type
-from datacontract.model.data_contract_specification import DataContractSpecification, Model, Field
-
 from datacontract.export.exporter import Exporter, _check_models_for_export
+from datacontract.export.sql_type_converter import convert_to_sql_type
+from datacontract.model.data_contract_specification import DataContractSpecification, Field, Model


 class DbtExporter(Exporter):

datacontract/export/dcs_exporter.py

@@ -0,0 +1,6 @@
+from datacontract.export.exporter import Exporter
+
+
+class DcsExporter(Exporter):
+    def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
+        return data_contract.to_yaml()

datacontract/export/exporter.py

@@ -1,6 +1,6 @@
+import typing
 from abc import ABC, abstractmethod
 from enum import Enum
-import typing

 from datacontract.model.data_contract_specification import DataContractSpecification

@@ -10,7 +10,7 @@ class Exporter(ABC):
         self.export_format = export_format

     @abstractmethod
-    def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
+    def export(self, data_contract, model, server, sql_server_type, export_args) -> dict | str:
         pass


@@ -22,6 +22,8 @@ class ExportFormat(str, Enum):
     dbt_sources = "dbt-sources"
     dbt_staging_sql = "dbt-staging-sql"
     odcs = "odcs"
+    odcs_v2 = "odcs_v2"
+    odcs_v3 = "odcs_v3"
     rdf = "rdf"
     avro = "avro"
     protobuf = "protobuf"
@@ -37,6 +39,7 @@ class ExportFormat(str, Enum):
     spark = "spark"
     sqlalchemy = "sqlalchemy"
     data_caterer = "data-caterer"
+    dcs = "dcs"

     @classmethod
     def get_supported_formats(cls):

datacontract/export/exporter_factory.py

@@ -1,6 +1,7 @@
 import importlib
 import sys
-from datacontract.export.exporter import ExportFormat, Exporter
+
+from datacontract.export.exporter import Exporter, ExportFormat


 class ExporterFactory:
@@ -99,7 +100,15 @@ exporter_factory.register_lazy_exporter(
 )

 exporter_factory.register_lazy_exporter(
-    name=ExportFormat.odcs, module_path="datacontract.export.odcs_converter", class_name="OdcsExporter"
+    name=ExportFormat.odcs_v2, module_path="datacontract.export.odcs_v2_exporter", class_name="OdcsV2Exporter"
+)
+
+exporter_factory.register_lazy_exporter(
+    name=ExportFormat.odcs_v3, module_path="datacontract.export.odcs_v3_exporter", class_name="OdcsV3Exporter"
+)
+
+exporter_factory.register_lazy_exporter(
+    name=ExportFormat.odcs, module_path="datacontract.export.odcs_v3_exporter", class_name="OdcsV3Exporter"
 )

 exporter_factory.register_lazy_exporter(
@@ -109,7 +118,7 @@
 exporter_factory.register_lazy_exporter(
     name=ExportFormat.great_expectations,
     module_path="datacontract.export.great_expectations_converter",
-    class_name="GreateExpectationsExporter",
+    class_name="GreatExpectationsExporter",
 )

 exporter_factory.register_lazy_exporter(
@@ -155,3 +164,7 @@
     module_path="datacontract.export.sqlalchemy_converter",
     class_name="SQLAlchemyExporter",
 )
+
+exporter_factory.register_lazy_exporter(
+    name=ExportFormat.dcs, module_path="datacontract.export.dcs_exporter", class_name="DcsExporter"
+)
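
With these registrations, odcs now resolves to the ODCS v3 exporter, the previous output stays available as odcs_v2, and the new dcs format round-trips the contract as YAML via DcsExporter. A hedged sketch of the enum values involved (the CLI is assumed to accept them through its existing --format option):

from datacontract.export.exporter import ExportFormat

assert ExportFormat.odcs.value == "odcs"        # now served by OdcsV3Exporter
assert ExportFormat.odcs_v2.value == "odcs_v2"  # legacy ODCS v2 output
assert ExportFormat.odcs_v3.value == "odcs_v3"
assert ExportFormat.dcs.value == "dcs"          # Data Contract Specification YAML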

datacontract/export/go_converter.py

@@ -1,6 +1,7 @@
-import datacontract.model.data_contract_specification as spec
-from typing import List
 import re
+from typing import List
+
+import datacontract.model.data_contract_specification as spec
 from datacontract.export.exporter import Exporter
