datacontract-cli 0.9.7__py3-none-any.whl → 0.9.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic; see the package registry's release page for more details.

Files changed (59)
  1. datacontract/breaking/breaking.py +48 -57
  2. datacontract/cli.py +98 -80
  3. datacontract/data_contract.py +156 -106
  4. datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +5 -1
  5. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +9 -8
  6. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +26 -22
  7. datacontract/engines/fastjsonschema/check_jsonschema.py +31 -25
  8. datacontract/engines/fastjsonschema/s3/s3_read_files.py +8 -6
  9. datacontract/engines/soda/check_soda_execute.py +46 -35
  10. datacontract/engines/soda/connections/bigquery.py +5 -3
  11. datacontract/engines/soda/connections/dask.py +0 -1
  12. datacontract/engines/soda/connections/databricks.py +2 -2
  13. datacontract/engines/soda/connections/duckdb.py +4 -4
  14. datacontract/engines/soda/connections/kafka.py +36 -17
  15. datacontract/engines/soda/connections/postgres.py +3 -3
  16. datacontract/engines/soda/connections/snowflake.py +4 -4
  17. datacontract/export/avro_converter.py +3 -7
  18. datacontract/export/avro_idl_converter.py +65 -42
  19. datacontract/export/dbt_converter.py +43 -32
  20. datacontract/export/great_expectations_converter.py +141 -0
  21. datacontract/export/jsonschema_converter.py +3 -1
  22. datacontract/export/odcs_converter.py +5 -7
  23. datacontract/export/protobuf_converter.py +12 -10
  24. datacontract/export/pydantic_converter.py +140 -0
  25. datacontract/export/rdf_converter.py +34 -11
  26. datacontract/export/sodacl_converter.py +24 -24
  27. datacontract/export/sql_converter.py +20 -9
  28. datacontract/export/sql_type_converter.py +44 -4
  29. datacontract/export/terraform_converter.py +4 -3
  30. datacontract/imports/avro_importer.py +32 -10
  31. datacontract/imports/sql_importer.py +0 -2
  32. datacontract/init/download_datacontract_file.py +2 -2
  33. datacontract/integration/publish_datamesh_manager.py +4 -9
  34. datacontract/integration/publish_opentelemetry.py +30 -16
  35. datacontract/lint/files.py +2 -2
  36. datacontract/lint/lint.py +26 -31
  37. datacontract/lint/linters/description_linter.py +12 -21
  38. datacontract/lint/linters/example_model_linter.py +28 -29
  39. datacontract/lint/linters/field_pattern_linter.py +8 -8
  40. datacontract/lint/linters/field_reference_linter.py +11 -10
  41. datacontract/lint/linters/notice_period_linter.py +18 -22
  42. datacontract/lint/linters/primary_field_linter.py +10 -12
  43. datacontract/lint/linters/quality_schema_linter.py +16 -20
  44. datacontract/lint/linters/valid_constraints_linter.py +42 -37
  45. datacontract/lint/resolve.py +7 -10
  46. datacontract/lint/schema.py +2 -3
  47. datacontract/lint/urls.py +4 -5
  48. datacontract/model/breaking_change.py +2 -1
  49. datacontract/model/data_contract_specification.py +8 -7
  50. datacontract/model/exceptions.py +13 -2
  51. datacontract/model/run.py +1 -1
  52. datacontract/web.py +3 -7
  53. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.8.dist-info}/METADATA +176 -37
  54. datacontract_cli-0.9.8.dist-info/RECORD +63 -0
  55. datacontract_cli-0.9.7.dist-info/RECORD +0 -61
  56. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.8.dist-info}/LICENSE +0 -0
  57. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.8.dist-info}/WHEEL +0 -0
  58. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.8.dist-info}/entry_points.txt +0 -0
  59. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.8.dist-info}/top_level.txt +0 -0
@@ -1,5 +1,5 @@
1
1
  from datacontract.breaking.breaking_rules import BreakingRules
2
- from datacontract.model.breaking_change import BreakingChanges, BreakingChange, Location, Severity
2
+ from datacontract.model.breaking_change import BreakingChange, Location, Severity
3
3
  from datacontract.model.data_contract_specification import Field, Model, Quality
4
4
 
5
5
 
@@ -22,10 +22,9 @@ def quality_breaking_changes(
22
22
  description=description,
23
23
  check_name=rule_name,
24
24
  severity=severity,
25
- location=Location(
26
- path=new_path,
27
- composition=["quality"]
28
- )))
25
+ location=Location(path=new_path, composition=["quality"]),
26
+ )
27
+ )
29
28
  elif old_quality and not new_quality:
30
29
  rule_name = "quality_removed"
31
30
  severity = _get_rule(rule_name)
@@ -37,10 +36,9 @@ def quality_breaking_changes(
37
36
  description=description,
38
37
  check_name=rule_name,
39
38
  severity=severity,
40
- location=Location(
41
- path=new_path,
42
- composition=["quality"]
43
- )))
39
+ location=Location(path=new_path, composition=["quality"]),
40
+ )
41
+ )
44
42
 
45
43
  elif old_quality and new_quality:
46
44
  if old_quality.type != new_quality.type:
@@ -54,10 +52,9 @@ def quality_breaking_changes(
54
52
  description=description,
55
53
  check_name=rule_name,
56
54
  severity=severity,
57
- location=Location(
58
- path=new_path,
59
- composition=["quality", "type"]
60
- )))
55
+ location=Location(path=new_path, composition=["quality", "type"]),
56
+ )
57
+ )
61
58
 
62
59
  if old_quality.specification != new_quality.specification:
63
60
  rule_name = "quality_specification_updated"
@@ -69,10 +66,9 @@ def quality_breaking_changes(
69
66
  description=description,
70
67
  check_name=rule_name,
71
68
  severity=severity,
72
- location=Location(
73
- path=new_path,
74
- composition=["quality", "specification"]
75
- )))
69
+ location=Location(path=new_path, composition=["quality", "specification"]),
70
+ )
71
+ )
76
72
 
77
73
  return results
78
74
 
@@ -96,10 +92,9 @@ def models_breaking_changes(
96
92
  description="added the model",
97
93
  check_name=rule_name,
98
94
  severity=severity,
99
- location=Location(
100
- path=new_path,
101
- composition=composition + [model_name]
102
- )))
95
+ location=Location(path=new_path, composition=composition + [model_name]),
96
+ )
97
+ )
103
98
 
104
99
  for model_name, old_model in old_models.items():
105
100
  if model_name not in new_models.keys():
@@ -111,10 +106,9 @@ def models_breaking_changes(
111
106
  description="removed the model",
112
107
  check_name=rule_name,
113
108
  severity=severity,
114
- location=Location(
115
- path=new_path,
116
- composition=composition + [model_name]
117
- )))
109
+ location=Location(path=new_path, composition=composition + [model_name]),
110
+ )
111
+ )
118
112
  continue
119
113
 
120
114
  results.extend(
@@ -124,17 +118,14 @@ def models_breaking_changes(
124
118
  new_path=new_path,
125
119
  composition=composition + [model_name],
126
120
  include_severities=include_severities,
127
- ))
121
+ )
122
+ )
128
123
 
129
124
  return results
130
125
 
131
126
 
132
127
  def model_breaking_changes(
133
- old_model: Model,
134
- new_model: Model,
135
- new_path: str,
136
- composition: list[str],
137
- include_severities: [Severity]
128
+ old_model: Model, new_model: Model, new_path: str, composition: list[str], include_severities: [Severity]
138
129
  ) -> list[BreakingChange]:
139
130
  results = list[BreakingChange]()
140
131
 
@@ -170,10 +161,9 @@ def model_breaking_changes(
170
161
  description=description,
171
162
  check_name=rule_name,
172
163
  severity=severity,
173
- location=Location(
174
- path=new_path,
175
- composition=composition + [model_definition_field]
176
- )))
164
+ location=Location(path=new_path, composition=composition + [model_definition_field]),
165
+ )
166
+ )
177
167
 
178
168
  results.extend(
179
169
  fields_breaking_changes(
@@ -182,7 +172,8 @@ def model_breaking_changes(
182
172
  new_path=new_path,
183
173
  composition=composition + ["fields"],
184
174
  include_severities=include_severities,
185
- ))
175
+ )
176
+ )
186
177
 
187
178
  return results
188
179
 
@@ -192,7 +183,7 @@ def fields_breaking_changes(
192
183
  new_fields: dict[str, Field],
193
184
  new_path: str,
194
185
  composition: list[str],
195
- include_severities: [Severity]
186
+ include_severities: [Severity],
196
187
  ) -> list[BreakingChange]:
197
188
  results = list[BreakingChange]()
198
189
 
@@ -206,10 +197,9 @@ def fields_breaking_changes(
206
197
  description="added the field",
207
198
  check_name=rule_name,
208
199
  severity=severity,
209
- location=Location(
210
- path=new_path,
211
- composition=composition + [field_name]
212
- )))
200
+ location=Location(path=new_path, composition=composition + [field_name]),
201
+ )
202
+ )
213
203
 
214
204
  for field_name, old_field in old_fields.items():
215
205
  if field_name not in new_fields.keys():
@@ -218,13 +208,12 @@ def fields_breaking_changes(
218
208
  if severity in include_severities:
219
209
  results.append(
220
210
  BreakingChange(
221
- description='removed the field',
211
+ description="removed the field",
222
212
  check_name=rule_name,
223
213
  severity=severity,
224
- location=Location(
225
- path=new_path,
226
- composition=composition + [field_name]
227
- )))
214
+ location=Location(path=new_path, composition=composition + [field_name]),
215
+ )
216
+ )
228
217
  continue
229
218
 
230
219
  results.extend(
@@ -234,7 +223,8 @@ def fields_breaking_changes(
234
223
  composition=composition + [field_name],
235
224
  new_path=new_path,
236
225
  include_severities=include_severities,
237
- ))
226
+ )
227
+ )
238
228
  return results
239
229
 
240
230
 
@@ -271,7 +261,7 @@ def field_breaking_changes(
271
261
  description = None
272
262
 
273
263
  # logic for enum, tags and other arrays
274
- if type(old_value) is list and type(new_value) is list:
264
+ if isinstance(old_value, list) and isinstance(new_value, list):
275
265
  if not old_value and new_value:
276
266
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
277
267
  description = f"added with value: `{new_value}`"
@@ -285,7 +275,7 @@ def field_breaking_changes(
285
275
  # logic for normal fields
286
276
  elif old_value is None and new_value is not None:
287
277
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
288
- description = f"added with value: `{str(new_value).lower() if type(new_value) is bool else new_value}`"
278
+ description = f"added with value: `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"
289
279
 
290
280
  elif old_value is not None and new_value is None:
291
281
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_removed"
@@ -293,8 +283,10 @@ def field_breaking_changes(
293
283
 
294
284
  elif old_value != new_value:
295
285
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_updated"
296
- description = (f"changed from `{str(old_value).lower() if type(old_value) is bool else old_value}` "
297
- f"to `{str(new_value).lower() if type(new_value) is bool else new_value}`")
286
+ description = (
287
+ f"changed from `{str(old_value).lower() if isinstance(old_value, bool) else old_value}` "
288
+ f"to `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"
289
+ )
298
290
 
299
291
  if rule_name is not None:
300
292
  severity = _get_rule(rule_name)
@@ -305,10 +297,9 @@ def field_breaking_changes(
305
297
  description=description,
306
298
  check_name=rule_name,
307
299
  severity=severity,
308
- location=Location(
309
- path=new_path,
310
- composition=composition + [field_schema_name]
311
- )))
300
+ location=Location(path=new_path, composition=composition + [field_schema_name]),
301
+ )
302
+ )
312
303
 
313
304
  return results
314
305
 
@@ -317,9 +308,9 @@ def _get_rule(rule_name) -> Severity:
317
308
  try:
318
309
  return getattr(BreakingRules, rule_name)
319
310
  except AttributeError:
320
- print(f'WARNING: Breaking Rule not found for {rule_name}!')
311
+ print(f"WARNING: Breaking Rule not found for {rule_name}!")
321
312
  return Severity.ERROR
322
313
 
323
314
 
324
315
  def _camel_to_snake(s):
325
- return ''.join(['_' + c.lower() if c.isupper() else c for c in s]).lstrip('_')
316
+ return "".join(["_" + c.lower() if c.isupper() else c for c in s]).lstrip("_")
datacontract/cli.py CHANGED
@@ -5,7 +5,7 @@ from typing import Iterable, Optional
5
5
  import typer
6
6
  from click import Context
7
7
  from rich import box
8
- from rich import print
8
+ from rich.console import Console
9
9
  from rich.table import Table
10
10
  from typer.core import TyperGroup
11
11
  from typing_extensions import Annotated
@@ -14,6 +14,7 @@ from datacontract.data_contract import DataContract
14
14
  from datacontract.init.download_datacontract_file import \
15
15
  download_datacontract_file, FileExistsException
16
16
 
17
+ console = Console()
17
18
 
18
19
  class OrderedCommands(TyperGroup):
19
20
  def list_commands(self, ctx: Context) -> Iterable[str]:
@@ -29,15 +30,16 @@ app = typer.Typer(
29
30
 
30
31
  def version_callback(value: bool):
31
32
  if value:
32
- print(metadata.version("datacontract-cli"))
33
+ console.print(metadata.version("datacontract-cli"))
33
34
  raise typer.Exit()
34
35
 
35
36
 
36
37
  @app.callback()
37
38
  def common(
38
39
  ctx: typer.Context,
39
- version: bool = typer.Option(None, "--version", help="Prints the current version.", callback=version_callback,
40
- is_eager=True),
40
+ version: bool = typer.Option(
41
+ None, "--version", help="Prints the current version.", callback=version_callback, is_eager=True
42
+ ),
41
43
  ):
42
44
  """
43
45
  The datacontract CLI is an open source command-line tool for working with Data Contracts (https://datacontract.com).
@@ -51,10 +53,12 @@ def common(
51
53
 
52
54
  @app.command()
53
55
  def init(
54
- location: Annotated[str, typer.Argument(
55
- help="The location (url or path) of the data contract yaml to create.")] = "datacontract.yaml",
56
- template: Annotated[str, typer.Option(
57
- help="URL of a template or data contract")] = "https://datacontract.com/datacontract.init.yaml",
56
+ location: Annotated[
57
+ str, typer.Argument(help="The location (url or path) of the data contract yaml to create.")
58
+ ] = "datacontract.yaml",
59
+ template: Annotated[
60
+ str, typer.Option(help="URL of a template or data contract")
61
+ ] = "https://datacontract.com/datacontract.init.yaml",
58
62
  overwrite: Annotated[bool, typer.Option(help="Replace the existing datacontract.yaml")] = False,
59
63
  ):
60
64
  """
@@ -63,19 +67,20 @@ def init(
63
67
  try:
64
68
  download_datacontract_file(location, template, overwrite)
65
69
  except FileExistsException:
66
- print("File already exists, use --overwrite to overwrite")
70
+ console.print("File already exists, use --overwrite to overwrite")
67
71
  raise typer.Exit(code=1)
68
72
  else:
69
- print("📄 data contract written to " + location)
73
+ console.print("📄 data contract written to " + location)
70
74
 
71
75
 
72
76
  @app.command()
73
77
  def lint(
74
78
  location: Annotated[
75
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
79
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
80
+ ] = "datacontract.yaml",
76
81
  schema: Annotated[
77
- str, typer.Option(
78
- help="The location (url or path) of the Data Contract Specification JSON Schema")] = "https://datacontract.com/datacontract.schema.json",
82
+ str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
83
+ ] = "https://datacontract.com/datacontract.schema.json",
79
84
  ):
80
85
  """
81
86
  Validate that the datacontract.yaml is correctly formatted.
@@ -87,37 +92,46 @@ def lint(
87
92
  @app.command()
88
93
  def test(
89
94
  location: Annotated[
90
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
95
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
96
+ ] = "datacontract.yaml",
91
97
  schema: Annotated[
92
- str, typer.Option(
93
- help="The location (url or path) of the Data Contract Specification JSON Schema")] = "https://datacontract.com/datacontract.schema.json",
94
- server: Annotated[str, typer.Option(
95
- help="The server configuration to run the schema and quality tests. "
96
- "Use the key of the server object in the data contract yaml file "
97
- "to refer to a server, e.g., `production`, or `all` for all "
98
- "servers (default).")] = "all",
99
- examples: Annotated[bool, typer.Option(
100
- help="Run the schema and quality tests on the example data within the data contract.")] = None,
101
- publish: Annotated[str, typer.Option(
102
- help="The url to publish the results after the test")] = None,
103
- publish_to_opentelemetry: Annotated[bool, typer.Option(
104
- help="Publish the results to opentelemetry. Use environment variables to configure the OTLP endpoint, headers, etc.")] = False,
105
- logs: Annotated[bool, typer.Option(
106
- help="Print logs")] = False,
98
+ str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
99
+ ] = "https://datacontract.com/datacontract.schema.json",
100
+ server: Annotated[
101
+ str,
102
+ typer.Option(
103
+ help="The server configuration to run the schema and quality tests. "
104
+ "Use the key of the server object in the data contract yaml file "
105
+ "to refer to a server, e.g., `production`, or `all` for all "
106
+ "servers (default)."
107
+ ),
108
+ ] = "all",
109
+ examples: Annotated[
110
+ bool, typer.Option(help="Run the schema and quality tests on the example data within the data contract.")
111
+ ] = None,
112
+ publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
113
+ publish_to_opentelemetry: Annotated[
114
+ bool,
115
+ typer.Option(
116
+ help="Publish the results to opentelemetry. Use environment variables to configure the OTLP endpoint, headers, etc."
117
+ ),
118
+ ] = False,
119
+ logs: Annotated[bool, typer.Option(help="Print logs")] = False,
107
120
  ):
108
121
  """
109
122
  Run schema and quality tests on configured servers.
110
123
  """
111
- print(f"Testing {location}")
124
+ console.print(f"Testing {location}")
112
125
  if server == "all":
113
126
  server = None
114
- run = DataContract(data_contract_file=location,
115
- schema_location=schema,
116
- publish_url=publish,
117
- publish_to_opentelemetry=publish_to_opentelemetry,
118
- server=server,
119
- examples=examples,
120
- ).test()
127
+ run = DataContract(
128
+ data_contract_file=location,
129
+ schema_location=schema,
130
+ publish_url=publish,
131
+ publish_to_opentelemetry=publish_to_opentelemetry,
132
+ server=server,
133
+ examples=examples,
134
+ ).test()
121
135
  if logs:
122
136
  _print_logs(run)
123
137
  _handle_result(run)
@@ -125,6 +139,7 @@ def test(
125
139
 
126
140
  class ExportFormat(str, Enum):
127
141
  jsonschema = "jsonschema"
142
+ pydantic_model = "pydantic-model"
128
143
  sodacl = "sodacl"
129
144
  dbt = "dbt"
130
145
  dbt_sources = "dbt-sources"
@@ -133,6 +148,7 @@ class ExportFormat(str, Enum):
133
148
  rdf = "rdf"
134
149
  avro = "avro"
135
150
  protobuf = "protobuf"
151
+ great_expectations = "great-expectations"
136
152
  terraform = "terraform"
137
153
  avro_idl = "avro-idl"
138
154
  sql = "sql"
@@ -143,16 +159,31 @@ class ExportFormat(str, Enum):
143
159
  def export(
144
160
  format: Annotated[ExportFormat, typer.Option(help="The export format.")],
145
161
  server: Annotated[str, typer.Option(help="The server name to export.")] = None,
146
- model: Annotated[str, typer.Option(help="Use the key of the model in the data contract yaml file "
147
- "to refer to a model, e.g., `orders`, or `all` for all "
148
- "models (default).")] = "all",
149
- rdf_base: Annotated[Optional[str], typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options")] = None,
150
- sql_server_type: Annotated[Optional[str], typer.Option(help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.", rich_help_panel="SQL Options")] = "auto",
162
+ model: Annotated[
163
+ str,
164
+ typer.Option(
165
+ help="Use the key of the model in the data contract yaml file "
166
+ "to refer to a model, e.g., `orders`, or `all` for all "
167
+ "models (default)."
168
+ ),
169
+ ] = "all",
170
+ rdf_base: Annotated[
171
+ Optional[str],
172
+ typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options"),
173
+ ] = None,
174
+ sql_server_type: Annotated[
175
+ Optional[str],
176
+ typer.Option(
177
+ help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.",
178
+ rich_help_panel="SQL Options",
179
+ ),
180
+ ] = "auto",
151
181
  location: Annotated[
152
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
182
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
183
+ ] = "datacontract.yaml",
153
184
  ):
154
185
  """
155
- Convert data contract to a specific format. Prints to stdout.
186
+ Convert data contract to a specific format. console.prints to stdout.
156
187
  """
157
188
  # TODO exception handling
158
189
  result = DataContract(data_contract_file=location, server=server).export(
@@ -161,7 +192,8 @@ def export(
161
192
  rdf_base=rdf_base,
162
193
  sql_server_type=sql_server_type,
163
194
  )
164
- print(result)
195
+ # Don't interpret console markup in output.
196
+ console.print(result, markup=False)
165
197
 
166
198
 
167
199
  class ImportFormat(str, Enum):
@@ -178,7 +210,7 @@ def import_(
178
210
  Create a data contract from the given source file. Prints to stdout.
179
211
  """
180
212
  result = DataContract().import_from_source(format, source)
181
- print(result.to_yaml())
213
+ console.print(result.to_yaml())
182
214
 
183
215
 
184
216
  @app.command()
@@ -191,16 +223,11 @@ def breaking(
191
223
  """
192
224
 
193
225
  # TODO exception handling
194
- result = DataContract(
195
- data_contract_file=location_old,
196
- inline_definitions=True
197
- ).breaking(
198
- DataContract(
199
- data_contract_file=location_new,
200
- inline_definitions=True
201
- ))
226
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).breaking(
227
+ DataContract(data_contract_file=location_new, inline_definitions=True)
228
+ )
202
229
 
203
- print(result.breaking_str())
230
+ console.print(result.breaking_str())
204
231
 
205
232
  if not result.passed_checks():
206
233
  raise typer.Exit(code=1)
@@ -216,16 +243,11 @@ def changelog(
216
243
  """
217
244
 
218
245
  # TODO exception handling
219
- result = DataContract(
220
- data_contract_file=location_old,
221
- inline_definitions=True
222
- ).changelog(
223
- DataContract(
224
- data_contract_file=location_new,
225
- inline_definitions=True
226
- ))
246
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
247
+ DataContract(data_contract_file=location_new, inline_definitions=True)
248
+ )
227
249
 
228
- print(result.changelog_str())
250
+ console.print(result.changelog_str())
229
251
 
230
252
 
231
253
  @app.command()
@@ -238,29 +260,25 @@ def diff(
238
260
  """
239
261
 
240
262
  # TODO change to diff output, not the changelog entries
241
- result = DataContract(
242
- data_contract_file=location_old,
243
- inline_definitions=True
244
- ).changelog(
245
- DataContract(
246
- data_contract_file=location_new,
247
- inline_definitions=True
248
- ))
263
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
264
+ DataContract(data_contract_file=location_new, inline_definitions=True)
265
+ )
249
266
 
250
- print(result.changelog_str())
267
+ console.print(result.changelog_str())
251
268
 
252
269
 
253
270
  def _handle_result(run):
254
271
  _print_table(run)
255
272
  if run.result == "passed":
256
- print(
257
- f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds.")
273
+ console.print(
274
+ f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
275
+ )
258
276
  else:
259
- print("🔴 data contract is invalid, found the following errors:")
277
+ console.print("🔴 data contract is invalid, found the following errors:")
260
278
  i = 1
261
279
  for check in run.checks:
262
280
  if check.result != "passed":
263
- print(str(++i) + ") " + check.reason)
281
+ console.print(str(++i) + ") " + check.reason)
264
282
  raise typer.Exit(code=1)
265
283
 
266
284
 
@@ -272,7 +290,7 @@ def _print_table(run):
272
290
  table.add_column("Details", max_width=50)
273
291
  for check in run.checks:
274
292
  table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
275
- print(table)
293
+ console.print(table)
276
294
 
277
295
 
278
296
  def to_field(run, check):
@@ -286,9 +304,9 @@ def to_field(run, check):
286
304
 
287
305
 
288
306
  def _print_logs(run):
289
- print("\nLogs:")
307
+ console.print("\nLogs:")
290
308
  for log in run.logs:
291
- print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
309
+ console.print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
292
310
 
293
311
 
294
312
  def with_markup(result):