datacontract-cli 0.9.7__py3-none-any.whl → 0.9.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

This version of datacontract-cli was flagged by the registry as a potentially problematic release.

Files changed (62)
  1. datacontract/breaking/breaking.py +48 -57
  2. datacontract/cli.py +100 -80
  3. datacontract/data_contract.py +178 -128
  4. datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +5 -1
  5. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +9 -8
  6. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +26 -22
  7. datacontract/engines/fastjsonschema/check_jsonschema.py +31 -25
  8. datacontract/engines/fastjsonschema/s3/s3_read_files.py +8 -6
  9. datacontract/engines/soda/check_soda_execute.py +58 -36
  10. datacontract/engines/soda/connections/bigquery.py +5 -3
  11. datacontract/engines/soda/connections/dask.py +0 -1
  12. datacontract/engines/soda/connections/databricks.py +2 -2
  13. datacontract/engines/soda/connections/duckdb.py +25 -8
  14. datacontract/engines/soda/connections/kafka.py +36 -17
  15. datacontract/engines/soda/connections/postgres.py +3 -3
  16. datacontract/engines/soda/connections/snowflake.py +4 -4
  17. datacontract/export/avro_converter.py +9 -11
  18. datacontract/export/avro_idl_converter.py +65 -42
  19. datacontract/export/csv_type_converter.py +36 -0
  20. datacontract/export/dbt_converter.py +43 -32
  21. datacontract/export/great_expectations_converter.py +141 -0
  22. datacontract/export/html_export.py +46 -0
  23. datacontract/export/jsonschema_converter.py +3 -1
  24. datacontract/export/odcs_converter.py +5 -7
  25. datacontract/export/protobuf_converter.py +12 -10
  26. datacontract/export/pydantic_converter.py +131 -0
  27. datacontract/export/rdf_converter.py +34 -11
  28. datacontract/export/sodacl_converter.py +118 -21
  29. datacontract/export/sql_converter.py +30 -8
  30. datacontract/export/sql_type_converter.py +44 -4
  31. datacontract/export/terraform_converter.py +4 -3
  32. datacontract/imports/avro_importer.py +65 -18
  33. datacontract/imports/sql_importer.py +0 -2
  34. datacontract/init/download_datacontract_file.py +2 -2
  35. datacontract/integration/publish_datamesh_manager.py +6 -12
  36. datacontract/integration/publish_opentelemetry.py +30 -16
  37. datacontract/lint/files.py +2 -2
  38. datacontract/lint/lint.py +26 -31
  39. datacontract/lint/linters/description_linter.py +12 -21
  40. datacontract/lint/linters/example_model_linter.py +28 -29
  41. datacontract/lint/linters/field_pattern_linter.py +8 -8
  42. datacontract/lint/linters/field_reference_linter.py +11 -10
  43. datacontract/lint/linters/notice_period_linter.py +18 -22
  44. datacontract/lint/linters/quality_schema_linter.py +16 -20
  45. datacontract/lint/linters/valid_constraints_linter.py +42 -37
  46. datacontract/lint/resolve.py +50 -14
  47. datacontract/lint/schema.py +2 -3
  48. datacontract/lint/urls.py +4 -5
  49. datacontract/model/breaking_change.py +2 -1
  50. datacontract/model/data_contract_specification.py +8 -7
  51. datacontract/model/exceptions.py +13 -2
  52. datacontract/model/run.py +3 -2
  53. datacontract/web.py +3 -7
  54. datacontract_cli-0.9.9.dist-info/METADATA +951 -0
  55. datacontract_cli-0.9.9.dist-info/RECORD +64 -0
  56. datacontract/lint/linters/primary_field_linter.py +0 -30
  57. datacontract_cli-0.9.7.dist-info/METADATA +0 -603
  58. datacontract_cli-0.9.7.dist-info/RECORD +0 -61
  59. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/LICENSE +0 -0
  60. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/WHEEL +0 -0
  61. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/entry_points.txt +0 -0
  62. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/top_level.txt +0 -0
datacontract/breaking/breaking.py CHANGED
@@ -1,5 +1,5 @@
  from datacontract.breaking.breaking_rules import BreakingRules
- from datacontract.model.breaking_change import BreakingChanges, BreakingChange, Location, Severity
+ from datacontract.model.breaking_change import BreakingChange, Location, Severity
  from datacontract.model.data_contract_specification import Field, Model, Quality


@@ -22,10 +22,9 @@ def quality_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=["quality"]
- )))
+ location=Location(path=new_path, composition=["quality"]),
+ )
+ )
  elif old_quality and not new_quality:
  rule_name = "quality_removed"
  severity = _get_rule(rule_name)
@@ -37,10 +36,9 @@ def quality_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=["quality"]
- )))
+ location=Location(path=new_path, composition=["quality"]),
+ )
+ )

  elif old_quality and new_quality:
  if old_quality.type != new_quality.type:
@@ -54,10 +52,9 @@ def quality_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=["quality", "type"]
- )))
+ location=Location(path=new_path, composition=["quality", "type"]),
+ )
+ )

  if old_quality.specification != new_quality.specification:
  rule_name = "quality_specification_updated"
@@ -69,10 +66,9 @@ def quality_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=["quality", "specification"]
- )))
+ location=Location(path=new_path, composition=["quality", "specification"]),
+ )
+ )

  return results

@@ -96,10 +92,9 @@ def models_breaking_changes(
  description="added the model",
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [model_name]
- )))
+ location=Location(path=new_path, composition=composition + [model_name]),
+ )
+ )

  for model_name, old_model in old_models.items():
  if model_name not in new_models.keys():
@@ -111,10 +106,9 @@ def models_breaking_changes(
  description="removed the model",
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [model_name]
- )))
+ location=Location(path=new_path, composition=composition + [model_name]),
+ )
+ )
  continue

  results.extend(
@@ -124,17 +118,14 @@ def models_breaking_changes(
  new_path=new_path,
  composition=composition + [model_name],
  include_severities=include_severities,
- ))
+ )
+ )

  return results


  def model_breaking_changes(
- old_model: Model,
- new_model: Model,
- new_path: str,
- composition: list[str],
- include_severities: [Severity]
+ old_model: Model, new_model: Model, new_path: str, composition: list[str], include_severities: [Severity]
  ) -> list[BreakingChange]:
  results = list[BreakingChange]()

@@ -170,10 +161,9 @@ def model_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [model_definition_field]
- )))
+ location=Location(path=new_path, composition=composition + [model_definition_field]),
+ )
+ )

  results.extend(
  fields_breaking_changes(
@@ -182,7 +172,8 @@ def model_breaking_changes(
  new_path=new_path,
  composition=composition + ["fields"],
  include_severities=include_severities,
- ))
+ )
+ )

  return results

@@ -192,7 +183,7 @@ def fields_breaking_changes(
  new_fields: dict[str, Field],
  new_path: str,
  composition: list[str],
- include_severities: [Severity]
+ include_severities: [Severity],
  ) -> list[BreakingChange]:
  results = list[BreakingChange]()

@@ -206,10 +197,9 @@ def fields_breaking_changes(
  description="added the field",
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [field_name]
- )))
+ location=Location(path=new_path, composition=composition + [field_name]),
+ )
+ )

  for field_name, old_field in old_fields.items():
  if field_name not in new_fields.keys():
@@ -218,13 +208,12 @@ def fields_breaking_changes(
  if severity in include_severities:
  results.append(
  BreakingChange(
- description='removed the field',
+ description="removed the field",
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [field_name]
- )))
+ location=Location(path=new_path, composition=composition + [field_name]),
+ )
+ )
  continue

  results.extend(
@@ -234,7 +223,8 @@ def fields_breaking_changes(
  composition=composition + [field_name],
  new_path=new_path,
  include_severities=include_severities,
- ))
+ )
+ )
  return results

@@ -271,7 +261,7 @@ def field_breaking_changes(
  description = None

  # logic for enum, tags and other arrays
- if type(old_value) is list and type(new_value) is list:
+ if isinstance(old_value, list) and isinstance(new_value, list):
  if not old_value and new_value:
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
  description = f"added with value: `{new_value}`"
@@ -285,7 +275,7 @@ def field_breaking_changes(
  # logic for normal fields
  elif old_value is None and new_value is not None:
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_added"
- description = f"added with value: `{str(new_value).lower() if type(new_value) is bool else new_value}`"
+ description = f"added with value: `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"

  elif old_value is not None and new_value is None:
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_removed"
@@ -293,8 +283,10 @@ def field_breaking_changes(

  elif old_value != new_value:
  rule_name = f"field_{_camel_to_snake(field_definition_field)}_updated"
- description = (f"changed from `{str(old_value).lower() if type(old_value) is bool else old_value}` "
- f"to `{str(new_value).lower() if type(new_value) is bool else new_value}`")
+ description = (
+ f"changed from `{str(old_value).lower() if isinstance(old_value, bool) else old_value}` "
+ f"to `{str(new_value).lower() if isinstance(new_value, bool) else new_value}`"
+ )

  if rule_name is not None:
  severity = _get_rule(rule_name)
@@ -305,10 +297,9 @@ def field_breaking_changes(
  description=description,
  check_name=rule_name,
  severity=severity,
- location=Location(
- path=new_path,
- composition=composition + [field_schema_name]
- )))
+ location=Location(path=new_path, composition=composition + [field_schema_name]),
+ )
+ )

  return results

@@ -317,9 +308,9 @@ def _get_rule(rule_name) -> Severity:
  try:
  return getattr(BreakingRules, rule_name)
  except AttributeError:
- print(f'WARNING: Breaking Rule not found for {rule_name}!')
+ print(f"WARNING: Breaking Rule not found for {rule_name}!")
  return Severity.ERROR


  def _camel_to_snake(s):
- return ''.join(['_' + c.lower() if c.isupper() else c for c in s]).lstrip('_')
+ return "".join(["_" + c.lower() if c.isupper() else c for c in s]).lstrip("_")
datacontract/cli.py CHANGED
@@ -5,7 +5,7 @@ from typing import Iterable, Optional
  import typer
  from click import Context
  from rich import box
- from rich import print
+ from rich.console import Console
  from rich.table import Table
  from typer.core import TyperGroup
  from typing_extensions import Annotated
@@ -14,6 +14,8 @@ from datacontract.data_contract import DataContract
  from datacontract.init.download_datacontract_file import \
  download_datacontract_file, FileExistsException

+ console = Console()
+

  class OrderedCommands(TyperGroup):
  def list_commands(self, ctx: Context) -> Iterable[str]:
@@ -29,15 +31,16 @@ app = typer.Typer(

  def version_callback(value: bool):
  if value:
- print(metadata.version("datacontract-cli"))
+ console.print(metadata.version("datacontract-cli"))
  raise typer.Exit()


  @app.callback()
  def common(
  ctx: typer.Context,
- version: bool = typer.Option(None, "--version", help="Prints the current version.", callback=version_callback,
- is_eager=True),
+ version: bool = typer.Option(
+ None, "--version", help="Prints the current version.", callback=version_callback, is_eager=True
+ ),
  ):
  """
  The datacontract CLI is an open source command-line tool for working with Data Contracts (https://datacontract.com).
@@ -51,10 +54,12 @@ def common(

  @app.command()
  def init(
- location: Annotated[str, typer.Argument(
- help="The location (url or path) of the data contract yaml to create.")] = "datacontract.yaml",
- template: Annotated[str, typer.Option(
- help="URL of a template or data contract")] = "https://datacontract.com/datacontract.init.yaml",
+ location: Annotated[
+ str, typer.Argument(help="The location (url or path) of the data contract yaml to create.")
+ ] = "datacontract.yaml",
+ template: Annotated[
+ str, typer.Option(help="URL of a template or data contract")
+ ] = "https://datacontract.com/datacontract.init.yaml",
  overwrite: Annotated[bool, typer.Option(help="Replace the existing datacontract.yaml")] = False,
  ):
  """
@@ -63,19 +68,20 @@ def init(
  try:
  download_datacontract_file(location, template, overwrite)
  except FileExistsException:
- print("File already exists, use --overwrite to overwrite")
+ console.print("File already exists, use --overwrite to overwrite")
  raise typer.Exit(code=1)
  else:
- print("📄 data contract written to " + location)
+ console.print("📄 data contract written to " + location)


  @app.command()
  def lint(
  location: Annotated[
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+ ] = "datacontract.yaml",
  schema: Annotated[
- str, typer.Option(
- help="The location (url or path) of the Data Contract Specification JSON Schema")] = "https://datacontract.com/datacontract.schema.json",
+ str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
+ ] = "https://datacontract.com/datacontract.schema.json",
  ):
  """
  Validate that the datacontract.yaml is correctly formatted.
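For context, the `init` path shown above boils down to a single call. A rough sketch using only what is visible in these hunks (positional order `location, template, overwrite` as in the call, default URLs taken from the option definitions):

```python
from datacontract.init.download_datacontract_file import (
    FileExistsException,
    download_datacontract_file,
)

try:
    # location, template, overwrite -- same positional order as in the diff
    download_datacontract_file(
        "datacontract.yaml",
        "https://datacontract.com/datacontract.init.yaml",
        False,
    )
except FileExistsException:
    print("File already exists, use --overwrite to overwrite")
```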
@@ -87,37 +93,46 @@ def lint(

  @app.command()
  def test(
  location: Annotated[
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+ ] = "datacontract.yaml",
  schema: Annotated[
- str, typer.Option(
- help="The location (url or path) of the Data Contract Specification JSON Schema")] = "https://datacontract.com/datacontract.schema.json",
- server: Annotated[str, typer.Option(
- help="The server configuration to run the schema and quality tests. "
- "Use the key of the server object in the data contract yaml file "
- "to refer to a server, e.g., `production`, or `all` for all "
- "servers (default).")] = "all",
- examples: Annotated[bool, typer.Option(
- help="Run the schema and quality tests on the example data within the data contract.")] = None,
- publish: Annotated[str, typer.Option(
- help="The url to publish the results after the test")] = None,
- publish_to_opentelemetry: Annotated[bool, typer.Option(
- help="Publish the results to opentelemetry. Use environment variables to configure the OTLP endpoint, headers, etc.")] = False,
- logs: Annotated[bool, typer.Option(
- help="Print logs")] = False,
+ str, typer.Option(help="The location (url or path) of the Data Contract Specification JSON Schema")
+ ] = "https://datacontract.com/datacontract.schema.json",
+ server: Annotated[
+ str,
+ typer.Option(
+ help="The server configuration to run the schema and quality tests. "
+ "Use the key of the server object in the data contract yaml file "
+ "to refer to a server, e.g., `production`, or `all` for all "
+ "servers (default)."
+ ),
+ ] = "all",
+ examples: Annotated[
+ bool, typer.Option(help="Run the schema and quality tests on the example data within the data contract.")
+ ] = None,
+ publish: Annotated[str, typer.Option(help="The url to publish the results after the test")] = None,
+ publish_to_opentelemetry: Annotated[
+ bool,
+ typer.Option(
+ help="Publish the results to opentelemetry. Use environment variables to configure the OTLP endpoint, headers, etc."
+ ),
+ ] = False,
+ logs: Annotated[bool, typer.Option(help="Print logs")] = False,
  ):
  """
  Run schema and quality tests on configured servers.
  """
- print(f"Testing {location}")
+ console.print(f"Testing {location}")
  if server == "all":
  server = None
- run = DataContract(data_contract_file=location,
- schema_location=schema,
- publish_url=publish,
- publish_to_opentelemetry=publish_to_opentelemetry,
- server=server,
- examples=examples,
- ).test()
+ run = DataContract(
+ data_contract_file=location,
+ schema_location=schema,
+ publish_url=publish,
+ publish_to_opentelemetry=publish_to_opentelemetry,
+ server=server,
+ examples=examples,
+ ).test()
  if logs:
  _print_logs(run)
@@ -125,6 +140,7 @@ def test(

  class ExportFormat(str, Enum):
  jsonschema = "jsonschema"
+ pydantic_model = "pydantic-model"
  sodacl = "sodacl"
  dbt = "dbt"
  dbt_sources = "dbt-sources"
@@ -133,26 +149,43 @@ class ExportFormat(str, Enum):
  rdf = "rdf"
  avro = "avro"
  protobuf = "protobuf"
+ great_expectations = "great-expectations"
  terraform = "terraform"
  avro_idl = "avro-idl"
  sql = "sql"
  sql_query = "sql-query"
+ html = "html"


  @app.command()
  def export(
  format: Annotated[ExportFormat, typer.Option(help="The export format.")],
  server: Annotated[str, typer.Option(help="The server name to export.")] = None,
- model: Annotated[str, typer.Option(help="Use the key of the model in the data contract yaml file "
- "to refer to a model, e.g., `orders`, or `all` for all "
- "models (default).")] = "all",
- rdf_base: Annotated[Optional[str], typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options")] = None,
- sql_server_type: Annotated[Optional[str], typer.Option(help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.", rich_help_panel="SQL Options")] = "auto",
+ model: Annotated[
+ str,
+ typer.Option(
+ help="Use the key of the model in the data contract yaml file "
+ "to refer to a model, e.g., `orders`, or `all` for all "
+ "models (default)."
+ ),
+ ] = "all",
+ rdf_base: Annotated[
+ Optional[str],
+ typer.Option(help="[rdf] The base URI used to generate the RDF graph.", rich_help_panel="RDF Options"),
+ ] = None,
+ sql_server_type: Annotated[
+ Optional[str],
+ typer.Option(
+ help="[sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified servers in the data contract.",
+ rich_help_panel="SQL Options",
+ ),
+ ] = "auto",
  location: Annotated[
- str, typer.Argument(help="The location (url or path) of the data contract yaml.")] = "datacontract.yaml",
+ str, typer.Argument(help="The location (url or path) of the data contract yaml.")
+ ] = "datacontract.yaml",
  ):
  """
- Convert data contract to a specific format. Prints to stdout.
+ Convert data contract to a specific format. console.prints to stdout.
  """
  # TODO exception handling
  result = DataContract(data_contract_file=location, server=server).export(
@@ -161,7 +194,8 @@ def export(
  rdf_base=rdf_base,
  sql_server_type=sql_server_type,
  )
- print(result)
+ # Don't interpret console markup in output.
+ console.print(result, markup=False)


  class ImportFormat(str, Enum):
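Two things worth noting here: `markup=False` stops Rich from treating square brackets in the exported text as console markup, and the same export is available programmatically. A sketch using one of the formats added in this release; the `export_format` and `model` keyword names are assumptions, since only `rdf_base` and `sql_server_type` are visible in the hunk above:

```python
from datacontract.data_contract import DataContract

# "html", "pydantic-model", and "great-expectations" are the export formats added in 0.9.9.
# export_format/model keyword names are assumed; rdf_base and sql_server_type are as shown in the diff.
result = DataContract(data_contract_file="datacontract.yaml", server=None).export(
    export_format="html",
    model="all",
    rdf_base=None,
    sql_server_type="auto",
)
print(result)
```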
@@ -178,7 +212,7 @@ def import_(
  Create a data contract from the given source file. Prints to stdout.
  """
  result = DataContract().import_from_source(format, source)
- print(result.to_yaml())
+ console.print(result.to_yaml())


  @app.command()
@@ -191,16 +225,11 @@ def breaking(
  """

  # TODO exception handling
- result = DataContract(
- data_contract_file=location_old,
- inline_definitions=True
- ).breaking(
- DataContract(
- data_contract_file=location_new,
- inline_definitions=True
- ))
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).breaking(
+ DataContract(data_contract_file=location_new, inline_definitions=True)
+ )

- print(result.breaking_str())
+ console.print(result.breaking_str())

  if not result.passed_checks():
  raise typer.Exit(code=1)
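The `breaking`, `changelog`, and `diff` commands now build the comparison in a single expression. A sketch of the underlying call, with placeholder file paths and the constructor and method names exactly as in the hunk:

```python
from datacontract.data_contract import DataContract

result = DataContract(data_contract_file="datacontract-v1.yaml", inline_definitions=True).breaking(
    DataContract(data_contract_file="datacontract-v2.yaml", inline_definitions=True)
)

print(result.breaking_str())
if not result.passed_checks():
    raise SystemExit(1)  # the CLI raises typer.Exit(code=1) at this point
```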
@@ -216,16 +245,11 @@ def changelog(
  """

  # TODO exception handling
- result = DataContract(
- data_contract_file=location_old,
- inline_definitions=True
- ).changelog(
- DataContract(
- data_contract_file=location_new,
- inline_definitions=True
- ))
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
+ DataContract(data_contract_file=location_new, inline_definitions=True)
+ )

- print(result.changelog_str())
+ console.print(result.changelog_str())


  @app.command()
@@ -238,29 +262,25 @@ def diff(
  """

  # TODO change to diff output, not the changelog entries
- result = DataContract(
- data_contract_file=location_old,
- inline_definitions=True
- ).changelog(
- DataContract(
- data_contract_file=location_new,
- inline_definitions=True
- ))
+ result = DataContract(data_contract_file=location_old, inline_definitions=True).changelog(
+ DataContract(data_contract_file=location_new, inline_definitions=True)
+ )

- print(result.changelog_str())
+ console.print(result.changelog_str())


  def _handle_result(run):
  _print_table(run)
  if run.result == "passed":
- print(
- f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds.")
+ console.print(
+ f"🟢 data contract is valid. Run {len(run.checks)} checks. Took {(run.timestampEnd - run.timestampStart).total_seconds()} seconds."
+ )
  else:
- print("🔴 data contract is invalid, found the following errors:")
+ console.print("🔴 data contract is invalid, found the following errors:")
  i = 1
  for check in run.checks:
  if check.result != "passed":
- print(str(++i) + ") " + check.reason)
+ console.print(str(++i) + ") " + check.reason)
  raise typer.Exit(code=1)

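One behavioral observation about the error loop above (only `print` changed to `console.print`): Python has no `++` operator, so `str(++i)` is parsed as `str(+(+i))` and the counter never advances; every failed check is therefore numbered "1)". A small demonstration:

```python
i = 1
for reason in ["missing field", "wrong type"]:  # stand-ins for failed check reasons
    print(str(++i) + ") " + reason)  # ++i is +(+i); i is never incremented
# Output:
# 1) missing field
# 1) wrong type
```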
@@ -272,7 +292,7 @@ def _print_table(run):
  table.add_column("Details", max_width=50)
  for check in run.checks:
  table.add_row(with_markup(check.result), check.name, to_field(run, check), check.reason)
- print(table)
+ console.print(table)


  def to_field(run, check):
@@ -286,9 +306,9 @@ def to_field(run, check):


  def _print_logs(run):
- print("\nLogs:")
+ console.print("\nLogs:")
  for log in run.logs:
- print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)
+ console.print(log.timestamp.strftime("%y-%m-%d %H:%M:%S"), log.level.ljust(5), log.message)


  def with_markup(result):