datacontract-cli 0.10.4__py3-none-any.whl → 0.10.6__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic.

Files changed (29)
  1. datacontract/breaking/breaking.py +3 -3
  2. datacontract/catalog/catalog.py +1 -2
  3. datacontract/cli.py +11 -5
  4. datacontract/data_contract.py +32 -16
  5. datacontract/engines/soda/check_soda_execute.py +5 -0
  6. datacontract/engines/soda/connections/duckdb.py +1 -2
  7. datacontract/engines/soda/connections/sqlserver.py +43 -0
  8. datacontract/export/avro_converter.py +8 -1
  9. datacontract/export/bigquery_converter.py +17 -16
  10. datacontract/export/dbml_converter.py +118 -0
  11. datacontract/export/html_export.py +2 -3
  12. datacontract/export/jsonschema_converter.py +4 -5
  13. datacontract/export/rdf_converter.py +1 -2
  14. datacontract/export/sql_type_converter.py +88 -8
  15. datacontract/imports/avro_importer.py +32 -18
  16. datacontract/imports/bigquery_importer.py +28 -16
  17. datacontract/imports/jsonschema_importer.py +80 -82
  18. datacontract/imports/sql_importer.py +2 -2
  19. datacontract/lint/resolve.py +1 -2
  20. datacontract/model/data_contract_specification.py +10 -0
  21. datacontract/py.typed +0 -0
  22. datacontract/templates/index.html +6 -6
  23. datacontract/templates/style/output.css +19 -14
  24. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/METADATA +169 -75
  25. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/RECORD +29 -26
  26. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/LICENSE +0 -0
  27. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/WHEEL +0 -0
  28. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/entry_points.txt +0 -0
  29. {datacontract_cli-0.10.4.dist-info → datacontract_cli-0.10.6.dist-info}/top_level.txt +0 -0
datacontract/breaking/breaking.py CHANGED
@@ -256,13 +256,13 @@ def field_breaking_changes(
                 )
             )
             continue
-
-        if field_definition_field == "items" and old_field.type == 'array' and new_field.type == 'array':
+
+        if field_definition_field == "items" and old_field.type == "array" and new_field.type == "array":
            results.extend(
                field_breaking_changes(
                    old_field=old_value,
                    new_field=new_value,
-                   composition=composition + ['items'],
+                   composition=composition + ["items"],
                    new_path=new_path,
                    include_severities=include_severities,
                )
datacontract/catalog/catalog.py CHANGED
@@ -7,8 +7,7 @@ from jinja2 import PackageLoader, Environment, select_autoescape

 from datacontract.data_contract import DataContract
 from datacontract.export.html_export import get_version
-from datacontract.model.data_contract_specification import \
-    DataContractSpecification
+from datacontract.model.data_contract_specification import DataContractSpecification


 def create_data_contract_html(contracts, file: Path, path: Path):
datacontract/cli.py CHANGED
@@ -2,6 +2,7 @@ from enum import Enum
 from importlib import metadata
 from pathlib import Path
 from typing import Iterable, Optional
+from typing import List

 import typer
 from click import Context
@@ -10,15 +11,12 @@ from rich.console import Console
 from rich.table import Table
 from typer.core import TyperGroup
 from typing_extensions import Annotated
-from typing import List

 from datacontract.catalog.catalog import create_index_html, create_data_contract_html
 from datacontract.data_contract import DataContract
 from datacontract.init.download_datacontract_file import download_datacontract_file, FileExistsException
-
 from datacontract.publish.publish import publish_to_datamesh_manager

-
 console = Console()


@@ -162,6 +160,7 @@ class ExportFormat(str, Enum):
     html = "html"
     go = "go"
     bigquery = "bigquery"
+    dbml = "dbml"


 @app.command()
@@ -229,10 +228,17 @@ class ImportFormat(str, Enum):
 @app.command(name="import")
 def import_(
     format: Annotated[ImportFormat, typer.Option(help="The format of the source file.")],
-    source: Annotated[Optional[str], typer.Option(help="The path to the file or Glue Database that should be imported.")] = None,
+    source: Annotated[
+        Optional[str], typer.Option(help="The path to the file or Glue Database that should be imported.")
+    ] = None,
     bigquery_project: Annotated[Optional[str], typer.Option(help="The bigquery project id.")] = None,
     bigquery_dataset: Annotated[Optional[str], typer.Option(help="The bigquery dataset id.")] = None,
-    bigquery_table: Annotated[Optional[List[str]], typer.Option(help="List of table ids to import from the bigquery API (repeat for multiple table ids, leave empty for all tables in the dataset).")] = None,
+    bigquery_table: Annotated[
+        Optional[List[str]],
+        typer.Option(
+            help="List of table ids to import from the bigquery API (repeat for multiple table ids, leave empty for all tables in the dataset)."
+        ),
+    ] = None,
 ):
     """
     Create a data contract from the given source location. Prints to stdout.
datacontract/data_contract.py CHANGED
@@ -15,16 +15,15 @@ from datacontract.engines.soda.check_soda_execute import check_soda_execute
 from datacontract.export.avro_converter import to_avro_schema_json
 from datacontract.export.avro_idl_converter import to_avro_idl
 from datacontract.export.bigquery_converter import to_bigquery_json
-from datacontract.export.dbt_converter import to_dbt_models_yaml, \
-    to_dbt_sources_yaml, to_dbt_staging_sql
-from datacontract.export.great_expectations_converter import \
-    to_great_expectations
+from datacontract.export.dbml_converter import to_dbml_diagram
+from datacontract.export.dbt_converter import to_dbt_models_yaml, to_dbt_sources_yaml, to_dbt_staging_sql
+from datacontract.export.go_converter import to_go_types
+from datacontract.export.great_expectations_converter import to_great_expectations
 from datacontract.export.html_export import to_html
 from datacontract.export.jsonschema_converter import to_jsonschema_json
 from datacontract.export.odcs_converter import to_odcs_yaml
 from datacontract.export.protobuf_converter import to_protobuf
 from datacontract.export.pydantic_converter import to_pydantic_model_str
-from datacontract.export.go_converter import to_go_types
 from datacontract.export.rdf_converter import to_rdf_n3
 from datacontract.export.sodacl_converter import to_sodacl_yaml
 from datacontract.export.sql_converter import to_sql_ddl, to_sql_query
@@ -32,10 +31,9 @@ from datacontract.export.terraform_converter import to_terraform
 from datacontract.imports.avro_importer import import_avro
 from datacontract.imports.bigquery_importer import import_bigquery_from_api, import_bigquery_from_json
 from datacontract.imports.glue_importer import import_glue
-from datacontract.imports.sql_importer import import_sql
 from datacontract.imports.jsonschema_importer import import_jsonschema
-from datacontract.integration.publish_datamesh_manager import \
-    publish_datamesh_manager
+from datacontract.imports.sql_importer import import_sql
+from datacontract.integration.publish_datamesh_manager import publish_datamesh_manager
 from datacontract.integration.publish_opentelemetry import publish_opentelemetry
 from datacontract.lint import resolve
 from datacontract.lint.linters.description_linter import DescriptionLinter
@@ -330,10 +328,17 @@ class DataContract:
            model_name, model_value = self._check_models_for_export(data_contract, model, export_format)
            found_server = data_contract.servers.get(self._server)
            if found_server is None:
-               raise RuntimeError(f"Export to {export_format} requires selecting a bigquery server from the data contract.")
-           if found_server.type != 'bigquery':
-               raise RuntimeError(f"Export to {export_format} requires selecting a bigquery server from the data contract.")
+               raise RuntimeError(
+                   f"Export to {export_format} requires selecting a bigquery server from the data contract."
+               )
+           if found_server.type != "bigquery":
+               raise RuntimeError(
+                   f"Export to {export_format} requires selecting a bigquery server from the data contract."
+               )
            return to_bigquery_json(model_name, model_value, found_server)
+       if export_format == "dbml":
+           found_server = data_contract.servers.get(self._server)
+           return to_dbml_diagram(data_contract, found_server)
        else:
            print(f"Export format {export_format} not supported.")
            return ""
@@ -387,8 +392,10 @@ class DataContract:
            )
            run.log_info(f"Using {server} for testing the examples")
            return server
-
-   def _check_models_for_export(self, data_contract: DataContractSpecification, model: str, export_format: str) -> typing.Tuple[str, str]:
+
+   def _check_models_for_export(
+       self, data_contract: DataContractSpecification, model: str, export_format: str
+   ) -> typing.Tuple[str, str]:
        if data_contract.models is None:
            raise RuntimeError(f"Export to {export_format} requires models in the data contract.")

@@ -408,10 +415,17 @@ class DataContract:
            raise RuntimeError(
                f"Model {model_name} not found in the data contract. Available models: {model_names}"
            )
-
+
        return model_name, model_value

-   def import_from_source(self, format: str, source: typing.Optional[str] = None, bigquery_tables: typing.Optional[typing.List[str]] = None, bigquery_project: typing.Optional[str] = None, bigquery_dataset: typing.Optional[str] = None) -> DataContractSpecification:
+   def import_from_source(
+       self,
+       format: str,
+       source: typing.Optional[str] = None,
+       bigquery_tables: typing.Optional[typing.List[str]] = None,
+       bigquery_project: typing.Optional[str] = None,
+       bigquery_dataset: typing.Optional[str] = None,
+   ) -> DataContractSpecification:
        data_contract_specification = DataContract.init()

        if format == "sql":
@@ -426,7 +440,9 @@ class DataContract:
            if source is not None:
                data_contract_specification = import_bigquery_from_json(data_contract_specification, source)
            else:
-               data_contract_specification = import_bigquery_from_api(data_contract_specification, bigquery_tables, bigquery_project, bigquery_dataset)
+               data_contract_specification = import_bigquery_from_api(
+                   data_contract_specification, bigquery_tables, bigquery_project, bigquery_dataset
+               )
        else:
            print(f"Import format {format} not supported.")

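For the reformatted BigQuery import path, a hedged sketch of the equivalent Python call (project, dataset, and table ids below are placeholders; leaving `bigquery_tables` as `None` imports every table in the dataset, matching the CLI help text above):

    from datacontract.data_contract import DataContract

    # Placeholder ids; requires BigQuery credentials in the environment.
    spec = DataContract().import_from_source(
        format="bigquery",
        bigquery_project="my-project",
        bigquery_dataset="my_dataset",
        bigquery_tables=["orders"],
    )
    print(spec.to_yaml())
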
datacontract/engines/soda/check_soda_execute.py CHANGED
@@ -9,6 +9,7 @@ from datacontract.engines.soda.connections.duckdb import get_duckdb_connection
 from datacontract.engines.soda.connections.kafka import create_spark_session, read_kafka_topic
 from datacontract.engines.soda.connections.postgres import to_postgres_soda_configuration
 from datacontract.engines.soda.connections.snowflake import to_snowflake_soda_configuration
+from datacontract.engines.soda.connections.sqlserver import to_sqlserver_soda_configuration
 from datacontract.export.sodacl_converter import to_sodacl_yaml
 from datacontract.model.data_contract_specification import DataContractSpecification, Server
 from datacontract.model.run import Run, Check, Log
@@ -69,6 +70,10 @@ def check_soda_execute(
        read_kafka_topic(spark, data_contract, server, tmp_dir)
        scan.add_spark_session(spark, data_source_name=server.type)
        scan.set_data_source_name(server.type)
+   elif server.type == "sqlserver":
+       soda_configuration_str = to_sqlserver_soda_configuration(server)
+       scan.add_configuration_yaml_str(soda_configuration_str)
+       scan.set_data_source_name(server.type)

    else:
        run.checks.append(
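
This branch wires SQL Server into the test engine. A hedged sketch of triggering it from Python (the contract file name and server key are placeholders; assumes a reachable database and the credentials described in the new connection module below):

    from datacontract.data_contract import DataContract

    # Hypothetical contract file with a server of type "sqlserver".
    run = DataContract(data_contract_file="datacontract.yaml", server="my-sqlserver").test()
    print(run.result)
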
datacontract/engines/soda/connections/duckdb.py CHANGED
@@ -87,8 +87,7 @@ def setup_s3_connection(con, server):
    s3_endpoint = server.endpointUrl.removeprefix("http://").removeprefix("https://")
    if server.endpointUrl.startswith("http://"):
        use_ssl = "false"
-       url_style = 'path'
-
+       url_style = "path"

    if s3_access_key_id is not None:
        con.sql(f"""
datacontract/engines/soda/connections/sqlserver.py ADDED
@@ -0,0 +1,43 @@
+import os
+
+import yaml
+
+from datacontract.model.data_contract_specification import Server
+
+
+def to_sqlserver_soda_configuration(server: Server) -> str:
+    """Serialize server config to soda configuration.
+
+
+    ### Example:
+    type: sqlserver
+    host: host
+    port: '1433'
+    username: simple
+    password: simple_pass
+    database: database
+    schema: dbo
+    trusted_connection: false
+    encrypt: false
+    trust_server_certificate: false
+    driver: ODBC Driver 18 for SQL Server
+    """
+    # with service account key, using an external json file
+    soda_configuration = {
+        f"data_source {server.type}": {
+            "type": "sqlserver",
+            "host": server.host,
+            "port": str(server.port),
+            "username": os.getenv("DATACONTRACT_SQLSERVER_USERNAME", ""),
+            "password": os.getenv("DATACONTRACT_SQLSERVER_PASSWORD", ""),
+            "database": server.database,
+            "schema": server.schema_,
+            "trusted_connection": os.getenv("DATACONTRACT_SQLSERVER_TRUSTED_CONNECTION", False),
+            "trust_server_certificate": os.getenv("DATACONTRACT_SQLSERVER_TRUST_SERVER_CERTIFICATE", False),
+            "encrypt": os.getenv("DATACONTRACT_SQLSERVER_ENCRYPTED_CONNECTION", True),
+            "driver": server.driver,
+        }
+    }
+
+    soda_configuration_str = yaml.dump(soda_configuration)
+    return soda_configuration_str
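
The new connection module pulls credentials from environment variables rather than from the contract itself. A sketch of serializing a server to a Soda configuration (host, database, and credential values are placeholders; note the contract's `schema_` attribute maps to the `schema` key):

    import os

    from datacontract.engines.soda.connections.sqlserver import to_sqlserver_soda_configuration
    from datacontract.model.data_contract_specification import Server

    # Placeholder credentials, read by the converter from the environment.
    os.environ["DATACONTRACT_SQLSERVER_USERNAME"] = "simple"
    os.environ["DATACONTRACT_SQLSERVER_PASSWORD"] = "simple_pass"

    server = Server(
        type="sqlserver",
        host="localhost",
        port=1433,
        database="database",
        schema_="dbo",
        driver="ODBC Driver 18 for SQL Server",
    )
    print(to_sqlserver_soda_configuration(server))  # YAML under the "data_source sqlserver" key
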
datacontract/export/avro_converter.py CHANGED
@@ -38,6 +38,12 @@ def to_avro_field(field, field_name):
    if field.type in ["timestamp", "timestamp_tz", "timestamp_ntz", "date"]:
        avro_field["logicalType"] = to_avro_logical_type(field.type)

+   if field.config:
+       if "avroLogicalType" in field.config:
+           avro_field["logicalType"] = field.config["avroLogicalType"]
+       if "avroDefault" in field.config:
+           avro_field["default"] = field.config["avroDefault"]
+
    return avro_field

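The new `config` handling lets a contract override the derived logical type or attach an Avro default. A sketch exercising it directly (the field name `ordered_at` and the override values are placeholder assumptions):

    from datacontract.export.avro_converter import to_avro_field
    from datacontract.model.data_contract_specification import Field

    # avroLogicalType overrides the derived timestamp-millis; avroDefault passes through.
    field = Field(type="timestamp", required=True, config={"avroLogicalType": "timestamp-micros", "avroDefault": 0})
    print(to_avro_field(field, "ordered_at"))
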
@@ -77,6 +83,7 @@ def to_avro_type(field: Field, field_name: str) -> str | dict:
    else:
        return "bytes"

+
def to_avro_logical_type(type: str) -> str:
    if type in ["timestamp", "timestamp_tz"]:
        return "timestamp-millis"
@@ -85,4 +92,4 @@ def to_avro_logical_type(type: str) -> str:
    elif type in ["date"]:
        return "date"
    else:
-       return ""
+       return ""
datacontract/export/bigquery_converter.py CHANGED
@@ -5,24 +5,21 @@ from typing import Dict, List
 from datacontract.model.data_contract_specification import Model, Field, Server
 from datacontract.model.exceptions import DataContractException

+
 def to_bigquery_json(model_name: str, model_value: Model, server: Server) -> str:
     bigquery_table = to_bigquery_schema(model_name, model_value, server)
     return json.dumps(bigquery_table, indent=2)

+
 def to_bigquery_schema(model_name: str, model_value: Model, server: Server) -> dict:
     return {
         "kind": "bigquery#table",
-        "tableReference": {
-            "datasetId": server.dataset,
-            "projectId": server.project,
-            "tableId": model_name
-        },
+        "tableReference": {"datasetId": server.dataset, "projectId": server.project, "tableId": model_name},
         "description": model_value.description,
-        "schema": {
-            "fields": to_fields_array(model_value.fields)
-        }
+        "schema": {"fields": to_fields_array(model_value.fields)},
     }

+
 def to_fields_array(fields: Dict[str, Field]) -> List[Dict[str, Field]]:
     bq_fields = []
     for field_name, field in fields.items():
@@ -32,24 +29,25 @@ def to_fields_array(fields: Dict[str, Field]) -> List[Dict[str, Field]]:


 def to_field(field_name: str, field: Field) -> dict:
-
     bq_type = map_type_to_bigquery(field.type, field_name)
     bq_field = {
         "name": field_name,
         "type": bq_type,
         "mode": "REQUIRED" if field.required else "NULLABLE",
-        "description": field.description
+        "description": field.description,
     }

     # handle arrays
-    if field.type == 'array':
-        bq_field["mode"] = 'REPEATED'
-        if field.items.type == 'object':
+    if field.type == "array":
+        bq_field["mode"] = "REPEATED"
+        if field.items.type == "object":
            # in case the array type is a complex object, we want to copy all its fields
            bq_field["fields"] = to_fields_array(field.items.fields)
        else:
            # otherwise we make up a structure that gets us a single field of the specified type
-           bq_field["fields"] = to_fields_array({ f"{field_name}_1": Field(type=field.items.type, required=False, description="")})
+           bq_field["fields"] = to_fields_array(
+               {f"{field_name}_1": Field(type=field.items.type, required=False, description="")}
+           )
    # all of these can carry other fields
    elif bq_type.lower() in ["record", "struct"]:
        bq_field["fields"] = to_fields_array(field.fields)
@@ -65,6 +63,7 @@ def to_field(field_name: str, field: Field) -> dict:

     return bq_field

+
 def map_type_to_bigquery(type_str: str, field_name: str) -> str:
     logger = logging.getLogger(__name__)
     if type_str.lower() in ["string", "varchar", "text"]:
@@ -78,7 +77,7 @@ def map_type_to_bigquery(type_str: str, field_name: str) -> str:
    elif type_str == "float":
        return "FLOAT"
    elif type_str == "boolean":
-       return "BOOLEAN"
+       return "BOOL"
    elif type_str.lower() in ["timestamp", "timestamp_tz"]:
        return "TIMESTAMP"
    elif type_str == "date":
@@ -94,7 +93,9 @@ def map_type_to_bigquery(type_str: str, field_name: str) -> str:
    elif type_str == "struct":
        return "STRUCT"
    elif type_str == "null":
-       logger.info(f"Can't properly map {field_name} to bigquery Schema, as 'null' is not supported as a type. Mapping it to STRING.")
+       logger.info(
+           f"Can't properly map {field_name} to bigquery Schema, as 'null' is not supported as a type. Mapping it to STRING."
+       )
        return "STRING"
    else:
        raise DataContractException(
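
With the boolean mapping corrected from `BOOLEAN` to `BOOL`, a sketch of the converter on a minimal model (model, field, and server names are placeholders):

    from datacontract.export.bigquery_converter import to_bigquery_json
    from datacontract.model.data_contract_specification import Field, Model, Server

    # Placeholder model: "is_active" now maps to BOOL with mode REQUIRED.
    model = Model(description="Example model", fields={"is_active": Field(type="boolean", required=True)})
    server = Server(type="bigquery", project="my-project", dataset="my_dataset")
    print(to_bigquery_json("users", model, server))
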
datacontract/export/dbml_converter.py ADDED
@@ -0,0 +1,118 @@
+from datetime import datetime
+from importlib.metadata import version
+from typing import Tuple
+
+import pytz
+
+import datacontract.model.data_contract_specification as spec
+from datacontract.export.sql_type_converter import convert_to_sql_type
+
+
+def to_dbml_diagram(contract: spec.DataContractSpecification, server: spec.Server) -> str:
+    result = ""
+    result += add_generated_info(contract, server) + "\n"
+    result += generate_project_info(contract) + "\n"
+
+    for model_name, model in contract.models.items():
+        table_description = generate_table(model_name, model, server)
+        result += f"\n{table_description}\n"
+
+    return result
+
+
+def add_generated_info(contract: spec.DataContractSpecification, server: spec.Server) -> str:
+    tz = pytz.timezone("UTC")
+    now = datetime.now(tz)
+    formatted_date = now.strftime("%b %d %Y")
+    datacontract_cli_version = get_version()
+    dialect = "Logical Datacontract" if server is None else server.type
+
+    generated_info = """
+Generated at {0} by datacontract-cli version {1}
+for datacontract {2} ({3}) version {4}
+Using {5} Types for the field types
+""".format(
+        formatted_date, datacontract_cli_version, contract.info.title, contract.id, contract.info.version, dialect
+    )
+
+    comment = """/*
+{0}
+*/
+""".format(generated_info)
+
+    note = """Note project_info {{
+'''
+{0}
+'''
+}}
+""".format(generated_info)
+
+    return """{0}
+{1}
+""".format(comment, note)
+
+
+def get_version() -> str:
+    try:
+        return version("datacontract_cli")
+    except Exception:
+        return ""
+
+
+def generate_project_info(contract: spec.DataContractSpecification) -> str:
+    return """Project "{0}" {{
+Note: "{1}"
+}}\n
+""".format(contract.info.title, " ".join(contract.info.description.splitlines()))
+
+
+def generate_table(model_name: str, model: spec.Model, server: spec.Server) -> str:
+    result = """Table "{0}" {{
+Note: "{1}"
+""".format(model_name, " ".join(model.description.splitlines()))
+
+    references = []
+
+    # Add all the fields
+    for field_name, field in model.fields.items():
+        ref, field_string = generate_field(field_name, field, model_name, server)
+        if ref is not None:
+            references.append(ref)
+        result += "{0}\n".format(field_string)
+
+    result += "}\n"
+
+    # and if any: add the references
+    if len(references) > 0:
+        for ref in references:
+            result += "Ref: {0}\n".format(ref)
+
+        result += "\n"
+
+    return result
+
+
+def generate_field(field_name: str, field: spec.Field, model_name: str, server: spec.Server) -> Tuple[str, str]:
+    field_attrs = []
+    if field.primary:
+        field_attrs.append("pk")
+
+    if field.unique:
+        field_attrs.append("unique")
+
+    if field.required:
+        field_attrs.append("not null")
+    else:
+        field_attrs.append("null")
+
+    if field.description:
+        field_attrs.append('Note: "{0}"'.format(" ".join(field.description.splitlines())))
+
+    field_type = field.type if server is None else convert_to_sql_type(field, server.type)
+
+    field_str = '"{0}" "{1}" [{2}]'.format(field_name, field_type, ",".join(field_attrs))
+    ref_str = None
+    if (field.references) is not None:
+        # we always assume many to one, as datacontract doesn't really give us more info
+        ref_str = "{0}.{1} > {2}".format(model_name, field_name, field.references)
+    return (ref_str, field_str)
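
End to end, the exporter can be fed an in-memory contract. A sketch with placeholder names (passing `server=None` keeps the logical field types instead of SQL types):

    import datacontract.model.data_contract_specification as spec
    from datacontract.export.dbml_converter import to_dbml_diagram

    # Placeholder contract: one table with a primary key and a reference.
    contract = spec.DataContractSpecification(
        id="orders-contract",
        info=spec.Info(title="Orders", version="1.0.0", description="Order data."),
        models={
            "orders": spec.Model(
                description="All orders.",
                fields={
                    "order_id": spec.Field(type="string", required=True, primary=True),
                    "customer_id": spec.Field(type="string", references="customers.customer_id"),
                },
            ),
        },
    )
    print(to_dbml_diagram(contract, None))
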
datacontract/export/html_export.py CHANGED
@@ -2,13 +2,12 @@ import datetime
 import logging
 from importlib.metadata import version

+import jinja_partials
 import pytz
 import yaml
-import jinja_partials
 from jinja2 import Environment, PackageLoader, select_autoescape

-from datacontract.model.data_contract_specification import \
-    DataContractSpecification
+from datacontract.model.data_contract_specification import DataContractSpecification


 def to_html(data_contract_spec: DataContractSpecification) -> str:
datacontract/export/jsonschema_converter.py CHANGED
@@ -18,12 +18,11 @@ def to_jsonschema_json(model_key, model_value: Model) -> str:


 def to_jsonschema(model_key, model_value: Model) -> dict:
-
-    model = {
+    model = {
         "$schema": "http://json-schema.org/draft-07/schema#",
         "type": "object",
         "properties": to_properties(model_value.fields),
-        "required": to_required(model_value.fields)
+        "required": to_required(model_value.fields),
     }
     if model_value.title:
         model["title"] = model_value.title
@@ -54,7 +53,7 @@ def to_property(field: Field) -> dict:
        property["unique"] = True
    if json_type == "object":
        # TODO: any better idea to distinguish between properties and patternProperties?
-       if next(iter(field.fields.keys())).startswith("^"):
+       if field.fields.keys() and next(iter(field.fields.keys())).startswith("^"):
            property["patternProperties"] = to_properties(field.fields)
        else:
            property["properties"] = to_properties(field.fields)
@@ -89,10 +88,10 @@ def to_property(field: Field) -> dict:
    if field.classification:
        property["classification"] = field.classification

-
    # TODO: all constraints
    return property

+
def to_required(fields: Dict[str, Field]):
    required = []
    for field_name, field in fields.items():
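
The added guard prevents a `StopIteration` when an `object` field carries no nested fields. A sketch of the previously failing case (model and field names are placeholders):

    from datacontract.export.jsonschema_converter import to_jsonschema_json
    from datacontract.model.data_contract_specification import Field, Model

    # An object field with empty nested fields now falls through to "properties".
    model = Model(fields={"metadata": Field(type="object", fields={})})
    print(to_jsonschema_json("example", model))
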
datacontract/export/rdf_converter.py CHANGED
@@ -1,8 +1,7 @@
 from pydantic import BaseModel
 from rdflib import Graph, Literal, BNode, RDF, URIRef, Namespace

-from datacontract.model.data_contract_specification import \
-    DataContractSpecification
+from datacontract.model.data_contract_specification import DataContractSpecification


 def is_literal(property_name):