datacontract-cli 0.9.6.post2__py3-none-any.whl → 0.9.8__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of datacontract-cli might be problematic; details are available on the registry page.

Files changed (60)
  1. datacontract/breaking/breaking.py +139 -63
  2. datacontract/breaking/breaking_rules.py +71 -54
  3. datacontract/cli.py +138 -45
  4. datacontract/data_contract.py +316 -78
  5. datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +5 -1
  6. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +9 -8
  7. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +26 -22
  8. datacontract/engines/fastjsonschema/check_jsonschema.py +31 -25
  9. datacontract/engines/fastjsonschema/s3/s3_read_files.py +8 -6
  10. datacontract/engines/soda/check_soda_execute.py +46 -35
  11. datacontract/engines/soda/connections/bigquery.py +5 -3
  12. datacontract/engines/soda/connections/dask.py +0 -1
  13. datacontract/engines/soda/connections/databricks.py +2 -2
  14. datacontract/engines/soda/connections/duckdb.py +4 -4
  15. datacontract/engines/soda/connections/kafka.py +36 -17
  16. datacontract/engines/soda/connections/postgres.py +3 -3
  17. datacontract/engines/soda/connections/snowflake.py +4 -4
  18. datacontract/export/avro_converter.py +3 -7
  19. datacontract/export/avro_idl_converter.py +280 -0
  20. datacontract/export/dbt_converter.py +55 -80
  21. datacontract/export/great_expectations_converter.py +141 -0
  22. datacontract/export/jsonschema_converter.py +3 -1
  23. datacontract/export/odcs_converter.py +10 -12
  24. datacontract/export/protobuf_converter.py +99 -0
  25. datacontract/export/pydantic_converter.py +140 -0
  26. datacontract/export/rdf_converter.py +35 -12
  27. datacontract/export/sodacl_converter.py +24 -24
  28. datacontract/export/sql_converter.py +93 -0
  29. datacontract/export/sql_type_converter.py +131 -0
  30. datacontract/export/terraform_converter.py +71 -0
  31. datacontract/imports/avro_importer.py +106 -0
  32. datacontract/imports/sql_importer.py +0 -2
  33. datacontract/init/download_datacontract_file.py +2 -2
  34. datacontract/integration/publish_datamesh_manager.py +4 -9
  35. datacontract/integration/publish_opentelemetry.py +107 -0
  36. datacontract/lint/files.py +2 -2
  37. datacontract/lint/lint.py +46 -31
  38. datacontract/lint/linters/description_linter.py +34 -0
  39. datacontract/lint/linters/example_model_linter.py +67 -43
  40. datacontract/lint/linters/field_pattern_linter.py +34 -0
  41. datacontract/lint/linters/field_reference_linter.py +38 -0
  42. datacontract/lint/linters/notice_period_linter.py +55 -0
  43. datacontract/lint/linters/primary_field_linter.py +28 -0
  44. datacontract/lint/linters/quality_schema_linter.py +52 -0
  45. datacontract/lint/linters/valid_constraints_linter.py +99 -0
  46. datacontract/lint/resolve.py +53 -8
  47. datacontract/lint/schema.py +2 -3
  48. datacontract/lint/urls.py +4 -5
  49. datacontract/model/breaking_change.py +27 -5
  50. datacontract/model/data_contract_specification.py +45 -25
  51. datacontract/model/exceptions.py +13 -2
  52. datacontract/model/run.py +1 -1
  53. datacontract/web.py +5 -8
  54. {datacontract_cli-0.9.6.post2.dist-info → datacontract_cli-0.9.8.dist-info}/METADATA +207 -35
  55. datacontract_cli-0.9.8.dist-info/RECORD +63 -0
  56. {datacontract_cli-0.9.6.post2.dist-info → datacontract_cli-0.9.8.dist-info}/WHEEL +1 -1
  57. datacontract_cli-0.9.6.post2.dist-info/RECORD +0 -47
  58. {datacontract_cli-0.9.6.post2.dist-info → datacontract_cli-0.9.8.dist-info}/LICENSE +0 -0
  59. {datacontract_cli-0.9.6.post2.dist-info → datacontract_cli-0.9.8.dist-info}/entry_points.txt +0 -0
  60. {datacontract_cli-0.9.6.post2.dist-info → datacontract_cli-0.9.8.dist-info}/top_level.txt +0 -0
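Beyond the formatting cleanup visible in the hunks below, the list above adds a set of new modules: exporters for Avro IDL, Great Expectations, Protobuf, Pydantic, SQL (plus a SQL type converter) and Terraform, an Avro importer, several new linters, and an OpenTelemetry publisher. A minimal sketch of how these might surface through the package's Python entry point (datacontract/data_contract.py); the method names and format identifiers are assumptions based on the file names, not confirmed by this diff:

from datacontract.data_contract import DataContract

# Assumed facade from datacontract/data_contract.py; the constructor argument,
# method names and format identifiers here are illustrative, not taken from the diff.
dc = DataContract(data_contract_file="datacontract.yaml")

dc.lint()        # runs the linters added under datacontract/lint/linters/
run = dc.test()  # executes the jsonschema and soda checks shown in the hunks below

# New export targets in 0.9.8 (identifiers assumed to mirror the new converter modules)
for fmt in ["avro-idl", "protobuf", "pydantic-model", "great-expectations", "sql", "terraform"]:
    print(dc.export(export_format=fmt))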
datacontract/engines/fastjsonschema/check_jsonschema.py

@@ -25,7 +25,7 @@ def validate_json_stream(model_name, validate, json_stream):
             model=model_name,
             reason=e.message,
             engine="jsonschema",
-            original_exception=e
+            original_exception=e,
         )


@@ -79,16 +79,16 @@ def process_local_file(run, server, model_name, validate):
         return process_directory(run, path, server, model_name, validate)
     else:
         logging.info(f"Processing file {path}")
-        with open(path, 'r') as file:
+        with open(path, "r") as file:
             process_json_file(run, model_name, validate, file, server.delimiter)


 def process_directory(run, path, server, model_name, validate):
     success = True
     for filename in os.listdir(path):
-        if filename.endswith('.json'):  # or make this a parameter
+        if filename.endswith(".json"):  # or make this a parameter
             file_path = os.path.join(path, filename)
-            with open(file_path, 'r') as file:
+            with open(file_path, "r") as file:
                 if not process_json_file(run, model_name, validate, file, server.delimiter):
                     success = False
                     break
@@ -127,13 +127,15 @@ def check_jsonschema(run: Run, data_contract: DataContractSpecification, server:

     # Early exit conditions
     if server.format != "json":
-        run.checks.append(Check(
-            type="schema",
-            name="Check that JSON has valid schema",
-            result="warning",
-            reason="Server format is not 'json'. Skip validating jsonschema.",
-            engine="jsonschema",
-        ))
+        run.checks.append(
+            Check(
+                type="schema",
+                name="Check that JSON has valid schema",
+                result="warning",
+                reason="Server format is not 'json'. Skip validating jsonschema.",
+                engine="jsonschema",
+            )
+        )
         run.log_warn("jsonschema: Server format is not 'json'. Skip jsonschema checks.")
         return

@@ -155,21 +157,25 @@ def check_jsonschema(run: Run, data_contract: DataContractSpecification, server:
     elif server.type == "s3":
         process_s3_file(server, model_name, validate)
     else:
-        run.checks.append(Check(
+        run.checks.append(
+            Check(
+                type="schema",
+                name="Check that JSON has valid schema",
+                model=model_name,
+                result="warn",
+                reason=f"Server type {server.type} not supported",
+                engine="jsonschema",
+            )
+        )
+        return
+
+    run.checks.append(
+        Check(
             type="schema",
             name="Check that JSON has valid schema",
             model=model_name,
-            result="warn",
-            reason=f"Server type {server.type} not supported",
+            result="passed",
+            reason="All JSON entries are valid.",
             engine="jsonschema",
-        ))
-        return
-
-    run.checks.append(Check(
-        type="schema",
-        name="Check that JSON has valid schema",
-        model=model_name,
-        result="passed",
-        reason="All JSON entries are valid.",
-        engine="jsonschema",
-    ))
+        )
+    )
datacontract/engines/fastjsonschema/s3/s3_read_files.py

@@ -14,9 +14,11 @@ def yield_s3_files(s3_endpoint_url, s3_location):


 def s3_fs(s3_endpoint_url):
-    aws_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-    aws_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
-    return s3fs.S3FileSystem(key=aws_access_key_id,
-                             secret=aws_secret_access_key,
-                             anon=aws_access_key_id is None,
-                             client_kwargs={'endpoint_url': s3_endpoint_url})
+    aws_access_key_id = os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID")
+    aws_secret_access_key = os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY")
+    return s3fs.S3FileSystem(
+        key=aws_access_key_id,
+        secret=aws_secret_access_key,
+        anon=aws_access_key_id is None,
+        client_kwargs={"endpoint_url": s3_endpoint_url},
+    )
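As before, the S3 reader takes its credentials from environment variables and falls back to anonymous access when no key is configured; only the quoting and call layout changed. A small sketch of how a caller might wire this up (the values and endpoint URL are placeholders):

import os

from datacontract.engines.fastjsonschema.s3.s3_read_files import s3_fs

# Variable names come from s3_fs() above; the values here are placeholders.
os.environ["DATACONTRACT_S3_ACCESS_KEY_ID"] = "your-access-key-id"
os.environ["DATACONTRACT_S3_SECRET_ACCESS_KEY"] = "your-secret-access-key"

# With neither variable set, s3_fs() builds the S3FileSystem with anon=True instead.
fs = s3_fs("https://s3.eu-central-1.amazonaws.com")
print(fs.ls("my-bucket/path/"))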
datacontract/engines/soda/check_soda_execute.py

@@ -16,8 +16,7 @@ from datacontract.engines.soda.connections.snowflake import \
 from datacontract.export.sodacl_converter import to_sodacl_yaml
 from datacontract.model.data_contract_specification import \
     DataContractSpecification, Server
-from datacontract.model.run import \
-    Run, Check, Log
+from datacontract.model.run import Run, Check, Log


 def check_soda_execute(run: Run, data_contract: DataContractSpecification, server: Server, spark, tmp_dir):
@@ -34,13 +33,15 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
             scan.add_duckdb_connection(duckdb_connection=con, data_source_name=server.type)
             scan.set_data_source_name(server.type)
         else:
-            run.checks.append(Check(
-                type="general",
-                name="Check that format is supported",
-                result="warning",
-                reason=f"Format {server.format} not yet supported by datacontract CLI",
-                engine="datacontract",
-            ))
+            run.checks.append(
+                Check(
+                    type="general",
+                    name="Check that format is supported",
+                    result="warning",
+                    reason=f"Format {server.format} not yet supported by datacontract CLI",
+                    engine="datacontract",
+                )
+            )
             run.log_warn(f"Format {server.format} not yet supported by datacontract CLI")
             return
     elif server.type == "snowflake":
@@ -73,21 +74,24 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
         scan.set_data_source_name(server.type)

     else:
-        run.checks.append(Check(
-            type="general",
-            name="Check that server type is supported",
-            result="warning",
-            reason=f"Server type {server.type} not yet supported by datacontract CLI",
-            engine="datacontract-cli",
-        ))
+        run.checks.append(
+            Check(
+                type="general",
+                name="Check that server type is supported",
+                result="warning",
+                reason=f"Server type {server.type} not yet supported by datacontract CLI",
+                engine="datacontract-cli",
+            )
+        )
         run.log_warn(f"Server type {server.type} not yet supported by datacontract CLI")
         return

     # Don't check types for json format, as they are checked with json schema
     # Don't check types for avro format, as they are checked with avro schema
     # Don't check types for csv format, as they are hard to detect
+    server_type = server.type
     check_types = server.format != "json" and server.format != "csv" and server.format != "avro"
-    sodacl_yaml_str = to_sodacl_yaml(data_contract, check_types)
+    sodacl_yaml_str = to_sodacl_yaml(data_contract, server_type, check_types)
     # print("sodacl_yaml_str:\n" + sodacl_yaml_str)
     scan.add_sodacl_yaml_str(sodacl_yaml_str)

@@ -102,9 +106,12 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
     for c in scan_results.get("checks"):
         check = Check(
             type="schema",
-            result="passed" if c.get("outcome") == "pass" else "failed" if c.get("outcome") == "fail" else c.get(
-                "outcome"),
-            reason=', '.join(c.get("outcomeReasons")),
+            result="passed"
+            if c.get("outcome") == "pass"
+            else "failed"
+            if c.get("outcome") == "fail"
+            else c.get("outcome"),
+            reason=", ".join(c.get("outcomeReasons")),
             name=c.get("name"),
             model=c.get("table"),
             field=c.get("column"),
@@ -114,21 +121,25 @@ def check_soda_execute(run: Run, data_contract: DataContractSpecification, serve
         run.checks.append(check)

     for log in scan_results.get("logs"):
-        run.logs.append(Log(
-            timestamp=log.get("timestamp"),
-            level=log.get("level"),
-            message=log.get("message"),
-        ))
+        run.logs.append(
+            Log(
+                timestamp=log.get("timestamp"),
+                level=log.get("level"),
+                message=log.get("message"),
+            )
+        )

     if scan.has_error_logs():
         run.log_warn("Engine soda-core has errors. See the logs for details.")
-        run.checks.append(Check(
-            type="general",
-            name="Execute quality checks",
-            result="warning",
-            reason=f"Engine soda-core has errors. See the logs for details.",
-            engine="soda-core",
-        ))
+        run.checks.append(
+            Check(
+                type="general",
+                name="Execute quality checks",
+                result="warning",
+                reason="Engine soda-core has errors. See the logs for details.",
+                engine="soda-core",
+            )
+        )
         return


@@ -138,10 +149,10 @@ def update_reason(check, c):
         return
     if check.reason is not None and check.reason != "":
         return
-    for block in c['diagnostics']['blocks']:
-        if block['title'] == 'Diagnostics':
+    for block in c["diagnostics"]["blocks"]:
+        if block["title"] == "Diagnostics":
             # Extract and print the 'text' value
-            diagnostics_text = block['text']
+            diagnostics_text = block["text"]
             print(diagnostics_text)
             diagnostics_text_split = diagnostics_text.split(":icon-fail: ")
             if len(diagnostics_text_split) > 1:
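The most substantive change in this file is the converter call: to_sodacl_yaml() now receives the server type alongside the check_types flag, presumably so the generated SodaCL can resolve dialect-specific column types via the new sql_type_converter.py in the file list. A minimal sketch of the new call shape, using a hypothetical in-memory contract (the field layout is illustrative, not taken from this diff):

from datacontract.export.sodacl_converter import to_sodacl_yaml
from datacontract.model.data_contract_specification import (
    DataContractSpecification,
    Field,
    Model,
)

# Hypothetical one-model contract, only to exercise the new signature.
data_contract = DataContractSpecification(
    id="orders-contract",
    models={"orders": Model(fields={"order_id": Field(type="string", required=True)})},
)

# Positional call mirroring check_soda_execute() above: contract, server type, check_types.
# As the comments above note, check_types stays False for json, csv and avro formats.
sodacl_yaml_str = to_sodacl_yaml(data_contract, "postgres", True)
print(sodacl_yaml_str)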
datacontract/engines/soda/connections/bigquery.py

@@ -1,18 +1,20 @@
 import os
+
 import yaml

+
 # https://docs.soda.io/soda/connect-bigquery.html#authentication-methods
 def to_bigquery_soda_configuration(server):
     # with service account key, using an external json file
     soda_configuration = {
         f"data_source {server.type}": {
             "type": "bigquery",
-            "account_info_json_path": os.getenv('DATACONTRACT_BIGQUERY_ACCOUNT_INFO_JSON_PATH'),
+            "account_info_json_path": os.getenv("DATACONTRACT_BIGQUERY_ACCOUNT_INFO_JSON_PATH"),
             "auth_scopes": ["https://www.googleapis.com/auth/bigquery"],
             "project_id": server.project,
-            "dataset": server.dataset
+            "dataset": server.dataset,
         }
     }

     soda_configuration_str = yaml.dump(soda_configuration)
-    return soda_configuration_str
+    return soda_configuration_str
datacontract/engines/soda/connections/dask.py

@@ -1,4 +1,3 @@
-
 # def add_s3_connection_dask_json(data_contract, scan, server):
 #     s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
 #     s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
datacontract/engines/soda/connections/databricks.py

@@ -11,8 +11,8 @@ def to_databricks_soda_configuration(server):
             "host": server.host,
             "catalog": server.catalog,
             "schema": server.schema_,
-            "http_path": os.getenv('DATACONTRACT_DATABRICKS_HTTP_PATH'),
-            "token": os.getenv('DATACONTRACT_DATABRICKS_TOKEN'),
+            "http_path": os.getenv("DATACONTRACT_DATABRICKS_HTTP_PATH"),
+            "token": os.getenv("DATACONTRACT_DATABRICKS_TOKEN"),
         }
     }

datacontract/engines/soda/connections/duckdb.py

@@ -15,7 +15,7 @@ def get_duckdb_connection(data_contract, server):
     for model_name in data_contract.models:
         model_path = path
         if "{model}" in model_path:
-            model_path = model_path.format(model = model_name)
+            model_path = model_path.format(model=model_name)
         logging.info(f"Creating table {model_name} for {model_path}")

         if server.format == "json":
@@ -39,9 +39,9 @@ def get_duckdb_connection(data_contract, server):


 def setup_s3_connection(con, server):
-    s3_region = os.getenv('DATACONTRACT_S3_REGION')
-    s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-    s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
+    s3_region = os.getenv("DATACONTRACT_S3_REGION")
+    s3_access_key_id = os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID")
+    s3_secret_access_key = os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY")
     # con.install_extension("httpfs")
     # con.load_extension("httpfs")
     if server.endpointUrl is not None:
datacontract/engines/soda/connections/kafka.py

@@ -4,7 +4,23 @@ import pyspark.sql.functions as fn
 from pyspark.sql import SparkSession
 from pyspark.sql.avro.functions import from_avro
 from pyspark.sql.functions import from_json, col
-from pyspark.sql.types import *
+from pyspark.sql.types import (
+    StructType,
+    DataType,
+    NullType,
+    ArrayType,
+    BinaryType,
+    DateType,
+    TimestampNTZType,
+    TimestampType,
+    BooleanType,
+    LongType,
+    IntegerType,
+    DoubleType,
+    DecimalType,
+    StringType,
+    StructField,
+)

 from datacontract.export.avro_converter import to_avro_schema_json
 from datacontract.model.data_contract_specification import \
@@ -15,14 +31,18 @@ from datacontract.model.exceptions import DataContractException
 def create_spark_session(tmp_dir) -> SparkSession:
     # TODO: Update dependency versions when updating pyspark
     # TODO: add protobuf library
-    spark = SparkSession.builder.appName("datacontract") \
-        .config("spark.sql.warehouse.dir", tmp_dir + "/spark-warehouse") \
-        .config("spark.streaming.stopGracefullyOnShutdown", True) \
-        .config('spark.jars.packages',
-                'org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.0,org.apache.spark:spark-avro_2.12:3.5.0') \
+    spark = (
+        SparkSession.builder.appName("datacontract")
+        .config("spark.sql.warehouse.dir", tmp_dir + "/spark-warehouse")
+        .config("spark.streaming.stopGracefullyOnShutdown", True)
+        .config(
+            "spark.jars.packages",
+            "org.apache.spark:spark-sql-kafka-0-10_2.12:3.5.0,org.apache.spark:spark-avro_2.12:3.5.0",
+        )
         .getOrCreate()
+    )
     spark.sparkContext.setLogLevel("WARN")
-    print(f'Using PySpark version {spark.version}')
+    print(f"Using PySpark version {spark.version}")
     return spark


@@ -32,14 +52,14 @@ def read_kafka_topic(spark: SparkSession, data_contract: DataContractSpecificati
     auth_options = get_auth_options()

     # read full kafka topic
-    df = spark \
-        .read \
-        .format("kafka") \
-        .options(**auth_options) \
-        .option("kafka.bootstrap.servers", host) \
-        .option("subscribe", topic) \
-        .option("startingOffsets", "earliest") \
+    df = (
+        spark.read.format("kafka")
+        .options(**auth_options)
+        .option("kafka.bootstrap.servers", host)
+        .option("subscribe", topic)
+        .option("startingOffsets", "earliest")
         .load()
+    )
     # TODO a warning if none or multiple models
     model_name, model = next(iter(data_contract.models.items()))
     if server.format == "avro":
@@ -73,8 +93,8 @@ def read_kafka_topic(spark: SparkSession, data_contract: DataContractSpecificati


 def get_auth_options():
-    kafka_sasl_username = os.getenv('DATACONTRACT_KAFKA_SASL_USERNAME')
-    kafka_sasl_password = os.getenv('DATACONTRACT_KAFKA_SASL_PASSWORD')
+    kafka_sasl_username = os.getenv("DATACONTRACT_KAFKA_SASL_USERNAME")
+    kafka_sasl_password = os.getenv("DATACONTRACT_KAFKA_SASL_PASSWORD")
     if kafka_sasl_username is None:
         auth_options = {}
     else:
@@ -130,4 +150,3 @@ def to_struct_field(field_name: str, field: Field) -> StructField:
         data_type = DataType()

     return StructField(field_name, data_type, nullable=not field.required)
-
datacontract/engines/soda/connections/postgres.py

@@ -10,12 +10,12 @@ def to_postgres_soda_configuration(server):
             "type": "postgres",
             "host": server.host,
             "port": str(server.port),
-            "username": os.getenv('DATACONTRACT_POSTGRES_USERNAME'),
-            "password": os.getenv('DATACONTRACT_POSTGRES_PASSWORD'),
+            "username": os.getenv("DATACONTRACT_POSTGRES_USERNAME"),
+            "password": os.getenv("DATACONTRACT_POSTGRES_PASSWORD"),
             "database": server.database,
             "schema": server.schema_,
         }
     }

     soda_configuration_str = yaml.dump(soda_configuration)
-    return soda_configuration_str
+    return soda_configuration_str
datacontract/engines/soda/connections/snowflake.py

@@ -7,13 +7,13 @@ def to_snowflake_soda_configuration(server):
     soda_configuration = {
         f"data_source {server.type}": {
             "type": "snowflake",
-            "username": os.getenv('DATACONTRACT_SNOWFLAKE_USERNAME'),
-            "password": os.getenv('DATACONTRACT_SNOWFLAKE_PASSWORD'),
-            "role": os.getenv('DATACONTRACT_SNOWFLAKE_ROLE'),
+            "username": os.getenv("DATACONTRACT_SNOWFLAKE_USERNAME"),
+            "password": os.getenv("DATACONTRACT_SNOWFLAKE_PASSWORD"),
+            "role": os.getenv("DATACONTRACT_SNOWFLAKE_ROLE"),
             "account": server.account,
             "database": server.database,
             "schema": server.schema_,
-            "warehouse": os.getenv('DATACONTRACT_SNOWFLAKE_WAREHOUSE'),
+            "warehouse": os.getenv("DATACONTRACT_SNOWFLAKE_WAREHOUSE"),
             "connection_timeout": 5,  # minutes
         }
     }
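As with the Postgres and Databricks connections above, Snowflake credentials are never stored in the contract; they are read from environment variables when the checks run. A hedged sketch of wiring them up before invoking a test run (values are placeholders; the DataContract facade lives in datacontract/data_contract.py and its exact API is not part of this diff):

import os

from datacontract.data_contract import DataContract

# Variable names come from to_snowflake_soda_configuration() above; values are placeholders.
os.environ["DATACONTRACT_SNOWFLAKE_USERNAME"] = "datacontract_user"
os.environ["DATACONTRACT_SNOWFLAKE_PASSWORD"] = "change-me"
os.environ["DATACONTRACT_SNOWFLAKE_ROLE"] = "DATACONTRACT_ROLE"
os.environ["DATACONTRACT_SNOWFLAKE_WAREHOUSE"] = "COMPUTE_WH"

# Assumes datacontract.yaml declares a snowflake server; test() drives check_soda_execute().
run = DataContract(data_contract_file="datacontract.yaml").test()
print(run)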
datacontract/export/avro_converter.py

@@ -6,16 +6,14 @@ from datacontract.model.data_contract_specification import Field
 def to_avro_schema(model_name, model) -> dict:
     return to_avro_record(model_name, model.fields, model.description)

+
 def to_avro_schema_json(model_name, model) -> str:
     schema = to_avro_schema(model_name, model)
     return json.dumps(schema, indent=2, sort_keys=False)


 def to_avro_record(name, fields, description) -> dict:
-    schema = {
-        "type": "record",
-        "name": name
-    }
+    schema = {"type": "record", "name": name}
     if description is not None:
         schema["doc"] = description
     schema["fields"] = to_avro_fields(fields)
@@ -30,9 +28,7 @@ def to_avro_fields(fields):


 def to_avro_field(field, field_name):
-    avro_field = {
-        "name": field_name
-    }
+    avro_field = {"name": field_name}
     if field.description is not None:
         avro_field["doc"] = field.description
     avro_field["type"] = to_avro_type(field, field_name)
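The Avro converter itself is unchanged apart from formatting, so its output can still be inspected directly. A small sketch using a hypothetical one-field model (Model and Field come from datacontract/model/data_contract_specification.py; the field layout here is illustrative):

from datacontract.export.avro_converter import to_avro_schema_json
from datacontract.model.data_contract_specification import Field, Model

# Hypothetical model, just to show the record/doc/fields structure built above.
model = Model(
    description="Orders placed in the webshop",
    fields={"order_id": Field(type="string", required=True, description="Business key")},
)
print(to_avro_schema_json("orders", model))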