datacontract-cli 0.10.31__py3-none-any.whl → 0.10.33__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic. Click here for more details.

@@ -45,6 +45,7 @@ class ExportFormat(str, Enum):
45
45
  markdown = "markdown"
46
46
  iceberg = "iceberg"
47
47
  custom = "custom"
48
+ excel = "excel"
48
49
 
49
50
  @classmethod
50
51
  def get_supported_formats(cls):
@@ -204,3 +204,7 @@ exporter_factory.register_lazy_exporter(
204
204
  exporter_factory.register_lazy_exporter(
205
205
  name=ExportFormat.custom, module_path="datacontract.export.custom_converter", class_name="CustomExporter"
206
206
  )
207
+
208
+ exporter_factory.register_lazy_exporter(
209
+ name=ExportFormat.excel, module_path="datacontract.export.excel_exporter", class_name="ExcelExporter"
210
+ )
@@ -1,5 +1,6 @@
1
1
  import logging
2
2
  import os
3
+ from decimal import Decimal
3
4
  from typing import Any, Dict, List, Optional
4
5
 
5
6
  import openpyxl
@@ -80,8 +81,16 @@ def import_excel_as_odcs(excel_file_path: str) -> OpenDataContractStandard:
80
81
  if tags_str:
81
82
  tags = [tag.strip() for tag in tags_str.split(",") if tag.strip()]
82
83
 
83
- # Import other components
84
+ # Import quality data first (standalone from schemas)
85
+ quality_map = import_quality(workbook)
86
+
87
+ # Import schemas
84
88
  schemas = import_schemas(workbook)
89
+
90
+ # Attach quality to schemas and properties
91
+ schemas_with_quality = attach_quality_to_schemas(schemas, quality_map)
92
+
93
+ # Import other components
85
94
  support = import_support(workbook)
86
95
  team = import_team(workbook)
87
96
  roles = import_roles(workbook)
@@ -103,7 +112,7 @@ def import_excel_as_odcs(excel_file_path: str) -> OpenDataContractStandard:
103
112
  tenant=get_cell_value_by_name(workbook, "tenant"),
104
113
  description=description,
105
114
  tags=tags,
106
- schema=schemas,
115
+ schema=schemas_with_quality,
107
116
  support=support,
108
117
  price=price,
109
118
  team=team,
@@ -150,7 +159,7 @@ def import_schemas(workbook) -> Optional[List[SchemaObject]]:
150
159
  dataGranularityDescription=get_cell_value_by_name_in_sheet(sheet, "schema.dataGranularityDescription"),
151
160
  authoritativeDefinitions=None,
152
161
  properties=import_properties(sheet),
153
- quality=None,
162
+ quality=None, # Quality will be attached later
154
163
  customProperties=None,
155
164
  tags=None,
156
165
  )
@@ -230,16 +239,8 @@ def import_properties(sheet) -> Optional[List[SchemaProperty]]:
230
239
  )
231
240
  ]
232
241
 
233
- # Quality
234
- quality_type = get_cell_value(row, headers.get("quality type"))
235
- quality_description = get_cell_value(row, headers.get("quality description"))
236
- if quality_type and quality_description:
237
- property_obj.quality = [
238
- DataQuality(
239
- type=quality_type,
240
- description=quality_description,
241
- )
242
- ]
242
+ # Quality will be attached later via quality_map
243
+ property_obj.quality = None
243
244
 
244
245
  # Transform sources
245
246
  transform_sources = get_cell_value(row, headers.get("transform sources"))
@@ -414,7 +415,8 @@ def get_cell_value_by_name(workbook: Workbook, name: str) -> str | None:
414
415
  try:
415
416
  cell = get_cell_by_name_in_workbook(workbook, name)
416
417
  if cell.value is not None:
417
- return str(cell.value)
418
+ value = str(cell.value).strip()
419
+ return value if value else None
418
420
  except Exception as e:
419
421
  logger.warning(f"Error getting cell value by name {name}: {str(e)}")
420
422
  return None
@@ -430,7 +432,8 @@ def get_cell_value_by_name_in_sheet(sheet: Worksheet, name: str) -> str | None:
430
432
  if sheet_title == sheet.title:
431
433
  cell = sheet[coordinate]
432
434
  if cell.value is not None:
433
- return str(cell.value)
435
+ value = str(cell.value).strip()
436
+ return value if value else None
434
437
  except Exception as e:
435
438
  logger.warning(f"Error getting cell value by name {name} in sheet {sheet.title}: {str(e)}")
436
439
  return None
@@ -442,7 +445,10 @@ def get_cell_value(row, col_idx):
442
445
  return None
443
446
  try:
444
447
  cell = row[col_idx]
445
- return str(cell.value) if cell.value is not None else None
448
+ if cell.value is not None:
449
+ value = str(cell.value).strip()
450
+ return value if value else None
451
+ return None
446
452
  except (IndexError, AttributeError):
447
453
  return None
448
454
 
@@ -451,7 +457,10 @@ def get_cell_value_by_position(sheet, row_idx, col_idx):
451
457
  """Get cell value by row and column indices (0-based)"""
452
458
  try:
453
459
  cell = sheet.cell(row=row_idx + 1, column=col_idx + 1) # Convert to 1-based indices
454
- return str(cell.value) if cell.value is not None else None
460
+ if cell.value is not None:
461
+ value = str(cell.value).strip()
462
+ return value if value else None
463
+ return None
455
464
  except Exception as e:
456
465
  logger.warning(f"Error getting cell value by position ({row_idx}, {col_idx}): {str(e)}")
457
466
  return None
@@ -822,7 +831,7 @@ def import_custom_properties(workbook: Workbook) -> List[CustomProperty]:
822
831
  except Exception as e:
823
832
  logger.warning(f"Error importing custom properties: {str(e)}")
824
833
 
825
- return custom_properties
834
+ return custom_properties if custom_properties else None
826
835
 
827
836
 
828
837
  def parse_property_value(value: str) -> Any:
@@ -853,3 +862,250 @@ def parse_property_value(value: str) -> Any:
853
862
  except (ValueError, TypeError, AttributeError):
854
863
  # If conversion fails, return original string
855
864
  return value
865
+
866
+
867
+ def import_quality(workbook: Workbook) -> Dict[str, List[DataQuality]]:
868
+ """
869
+ Import quality data from Quality sheet and organize by schema.property key
870
+
871
+ Returns:
872
+ Dictionary mapping schema.property keys to lists of DataQuality objects
873
+ """
874
+ try:
875
+ quality_sheet = workbook["Quality"]
876
+ if not quality_sheet:
877
+ return {}
878
+ except KeyError:
879
+ logger.warning("Quality sheet not found")
880
+ return {}
881
+
882
+ try:
883
+ quality_range = get_range_by_name_in_workbook(workbook, "quality")
884
+ if not quality_range:
885
+ logger.warning("Quality range not found")
886
+ return {}
887
+
888
+ quality_header_row_index = quality_range[0] - 1
889
+ headers = get_headers_from_header_row(quality_sheet, quality_header_row_index)
890
+
891
+ quality_map = {}
892
+
893
+ for row_idx in range(quality_range[0], quality_range[1]):
894
+ if len(list(quality_sheet.rows)) < row_idx + 1:
895
+ break
896
+ row = list(quality_sheet.rows)[row_idx]
897
+
898
+ # Extract quality fields from row
899
+ schema_name = get_cell_value(row, headers.get("schema"))
900
+ property_name = get_cell_value(row, headers.get("property"))
901
+ quality_type = get_cell_value(row, headers.get("quality type"))
902
+ description = get_cell_value(row, headers.get("description"))
903
+ rule = get_cell_value(row, headers.get("rule (library)"))
904
+ query = get_cell_value(row, headers.get("query (sql)"))
905
+ engine = get_cell_value(row, headers.get("quality engine (custom)"))
906
+ implementation = get_cell_value(row, headers.get("implementation (custom)"))
907
+ severity = get_cell_value(row, headers.get("severity"))
908
+ scheduler = get_cell_value(row, headers.get("scheduler"))
909
+ schedule = get_cell_value(row, headers.get("schedule"))
910
+ threshold_operator = get_cell_value(row, headers.get("threshold operator"))
911
+ threshold_value = get_cell_value(row, headers.get("threshold value"))
912
+
913
+ # Skip if no schema name or insufficient quality data
914
+ if not schema_name or (not quality_type and not description and not rule):
915
+ continue
916
+
917
+ # Parse threshold values based on operator
918
+ threshold_dict = parse_threshold_values(threshold_operator, threshold_value)
919
+
920
+ # Create DataQuality object with parsed thresholds
921
+ quality = DataQuality(
922
+ name=None,
923
+ description=description,
924
+ type=quality_type,
925
+ rule=rule,
926
+ unit=None,
927
+ validValues=None,
928
+ query=query,
929
+ engine=engine,
930
+ implementation=implementation,
931
+ dimension=None,
932
+ method=None,
933
+ severity=severity,
934
+ businessImpact=None,
935
+ customProperties=None,
936
+ authoritativeDefinitions=None,
937
+ tags=None,
938
+ scheduler=scheduler,
939
+ schedule=schedule,
940
+ **threshold_dict, # Unpack threshold values
941
+ )
942
+
943
+ # Create key for mapping - use schema.property format
944
+ key = schema_name if not property_name else f"{schema_name}.{property_name}"
945
+
946
+ if key not in quality_map:
947
+ quality_map[key] = []
948
+ quality_map[key].append(quality)
949
+
950
+ except Exception as e:
951
+ logger.warning(f"Error importing quality: {str(e)}")
952
+ return {}
953
+
954
+ return quality_map
955
+
956
+
957
+ def parse_threshold_values(threshold_operator: str, threshold_value: str) -> Dict[str, Any]:
958
+ """
959
+ Parse threshold operator and value into DataQuality threshold fields
960
+
961
+ Args:
962
+ threshold_operator: The threshold operator (e.g., "mustBe", "mustBeBetween")
963
+ threshold_value: The threshold value (string representation)
964
+
965
+ Returns:
966
+ Dictionary with appropriate threshold fields set
967
+ """
968
+ threshold_dict = {}
969
+
970
+ if not threshold_operator or not threshold_value:
971
+ return threshold_dict
972
+
973
+ # Parse threshold values based on operator
974
+ if threshold_operator in ["mustBeBetween", "mustNotBeBetween"]:
975
+ # Parse "[value1, value2]" format
976
+ if threshold_value.startswith("[") and threshold_value.endswith("]"):
977
+ content = threshold_value[1:-1] # Remove brackets
978
+ try:
979
+ values = [Decimal(v.strip()) for v in content.split(",") if v.strip()]
980
+ if len(values) >= 2:
981
+ threshold_dict[threshold_operator] = values[:2] # Take first two values
982
+ except (ValueError, TypeError) as e:
983
+ logger.warning(f"Failed to parse between values: {threshold_value}, error: {e}")
984
+ else:
985
+ # Single value for other operators
986
+ try:
987
+ # Try to parse as number
988
+ if threshold_value.replace(".", "").replace("-", "").isdigit():
989
+ value = Decimal(threshold_value)
990
+ threshold_dict[threshold_operator] = value
991
+ except (ValueError, TypeError) as e:
992
+ logger.warning(f"Failed to parse threshold value: {threshold_value}, error: {e}")
993
+
994
+ return threshold_dict
995
+
996
+
997
+ def attach_quality_to_schemas(
998
+ schemas: Optional[List[SchemaObject]], quality_map: Dict[str, List[DataQuality]]
999
+ ) -> Optional[List[SchemaObject]]:
1000
+ """
1001
+ Attach quality attributes to schemas and their properties based on quality_map
1002
+
1003
+ Args:
1004
+ schemas: List of schema objects
1005
+ quality_map: Dictionary mapping schema.property keys to quality lists
1006
+
1007
+ Returns:
1008
+ List of schema objects with quality attached
1009
+ """
1010
+ if not schemas:
1011
+ return None
1012
+
1013
+ updated_schemas = []
1014
+
1015
+ for schema in schemas:
1016
+ schema_name = schema.name
1017
+ if not schema_name:
1018
+ updated_schemas.append(schema)
1019
+ continue
1020
+
1021
+ # Get schema-level quality attributes
1022
+ schema_quality = quality_map.get(schema_name)
1023
+ if schema_quality:
1024
+ schema.quality = schema_quality
1025
+
1026
+ # Attach quality to properties
1027
+ if schema.properties:
1028
+ schema.properties = attach_quality_to_properties(schema.properties, schema_name, quality_map)
1029
+
1030
+ updated_schemas.append(schema)
1031
+
1032
+ return updated_schemas
1033
+
1034
+
1035
+ def attach_quality_to_properties(
1036
+ properties: List[SchemaProperty], schema_name: str, quality_map: Dict[str, List[DataQuality]], prefix: str = ""
1037
+ ) -> List[SchemaProperty]:
1038
+ """
1039
+ Recursively attach quality attributes to properties and nested properties
1040
+
1041
+ Args:
1042
+ properties: List of property objects
1043
+ schema_name: Name of the parent schema
1044
+ quality_map: Dictionary mapping schema.property keys to quality lists
1045
+ prefix: Current property path prefix for nested properties
1046
+
1047
+ Returns:
1048
+ List of property objects with quality attached
1049
+ """
1050
+ updated_properties = []
1051
+
1052
+ for prop in properties:
1053
+ property_name = prop.name
1054
+ if not property_name:
1055
+ updated_properties.append(prop)
1056
+ continue
1057
+
1058
+ # Build full property path
1059
+ full_property_name = f"{prefix}.{property_name}" if prefix else property_name
1060
+ quality_key = f"{schema_name}.{full_property_name}"
1061
+
1062
+ # Get quality for this property
1063
+ property_quality = quality_map.get(quality_key)
1064
+ if property_quality:
1065
+ prop.quality = property_quality
1066
+
1067
+ # Handle nested properties
1068
+ if prop.properties:
1069
+ prop.properties = attach_quality_to_properties(
1070
+ prop.properties, schema_name, quality_map, full_property_name
1071
+ )
1072
+
1073
+ # Handle array items
1074
+ if prop.items:
1075
+ items_quality_key = f"{schema_name}.{full_property_name}.items"
1076
+ items_quality = quality_map.get(items_quality_key)
1077
+ if items_quality:
1078
+ prop.items.quality = items_quality
1079
+
1080
+ # Handle nested properties in array items
1081
+ if prop.items.properties:
1082
+ prop.items.properties = attach_quality_to_properties(
1083
+ prop.items.properties, schema_name, quality_map, f"{full_property_name}.items"
1084
+ )
1085
+
1086
+ updated_properties.append(prop)
1087
+
1088
+ return updated_properties
1089
+
1090
+
1091
+ def get_headers_from_header_row(sheet: Worksheet, header_row_index: int) -> Dict[str, int]:
1092
+ """
1093
+ Get headers from the first row and map them to column indices
1094
+
1095
+ Args:
1096
+ sheet: The worksheet
1097
+ header_row_index: 0-based row index of the header row
1098
+
1099
+ Returns:
1100
+ Dictionary mapping header names (lowercase) to column indices
1101
+ """
1102
+ headers = {}
1103
+ try:
1104
+ header_row = list(sheet.rows)[header_row_index]
1105
+ for i, cell in enumerate(header_row):
1106
+ if cell.value:
1107
+ headers[str(cell.value).lower().strip()] = i
1108
+ except (IndexError, AttributeError) as e:
1109
+ logger.warning(f"Error getting headers from row {header_row_index}: {e}")
1110
+
1111
+ return headers
@@ -56,19 +56,19 @@ def write_junit_test_results(run: Run, console, output_path: Path):
56
56
  type=check.category if check.category else "General",
57
57
  )
58
58
  error.text = to_failure_text(check)
59
- elif check.result is ResultEnum.warning:
59
+ elif check.result == ResultEnum.warning:
60
60
  skipped = ET.SubElement(
61
61
  testcase,
62
62
  "skipped",
63
63
  message=check.reason if check.reason else "Warning",
64
64
  type=check.category if check.category else "General",
65
65
  )
66
- skipped.skipped = to_failure_text(check)
66
+ skipped.text = to_failure_text(check)
67
67
  else:
68
68
  ET.SubElement(
69
69
  testcase,
70
70
  "skipped",
71
- message=check.reason if check.reason else "None",
71
+ message=check.reason if check.reason else "Skipped",
72
72
  type=check.category if check.category else "General",
73
73
  )
74
74
 
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datacontract-cli
3
- Version: 0.10.31
3
+ Version: 0.10.33
4
4
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
5
5
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
6
6
  License-Expression: MIT
@@ -42,7 +42,7 @@ Provides-Extra: databricks
42
42
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
43
43
  Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
44
44
  Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
45
- Requires-Dist: databricks-sdk<0.59.0; extra == "databricks"
45
+ Requires-Dist: databricks-sdk<0.61.0; extra == "databricks"
46
46
  Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
47
47
  Provides-Extra: iceberg
48
48
  Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
@@ -214,9 +214,15 @@ $ datacontract export --format odcs datacontract.yaml --output odcs.yaml
214
214
  # import ODCS to data contract
215
215
  $ datacontract import --format odcs odcs.yaml --output datacontract.yaml
216
216
 
217
- # import sql (other formats: avro, glue, bigquery, jsonschema ...)
217
+ # import sql (other formats: avro, glue, bigquery, jsonschema, excel ...)
218
218
  $ datacontract import --format sql --source my-ddl.sql --dialect postgres --output datacontract.yaml
219
219
 
220
+ # import from Excel template
221
+ $ datacontract import --format excel --source odcs.xlsx --output datacontract.yaml
222
+
223
+ # export to Excel template
224
+ $ datacontract export --format excel --output odcs.xlsx datacontract.yaml
225
+
220
226
  # find differences between two data contracts
221
227
  $ datacontract diff datacontract-v1.yaml datacontract-v2.yaml
222
228
 
@@ -933,7 +939,7 @@ models:
933
939
  │ terraform|avro-idl|sql|sql-query|mer │
934
940
  │ maid|html|go|bigquery|dbml|spark|sql │
935
941
  │ alchemy|data-caterer|dcs|markdown|ic │
936
- │ eberg|custom]
942
+ │ eberg|custom|excel]
937
943
  │ --output PATH Specify the file path where the │
938
944
  │ exported data will be saved. If no │
939
945
  │ path is provided, the output will be │
@@ -1003,6 +1009,7 @@ Available export options:
1003
1009
  | `dcs` | Export to Data Contract Specification in YAML format | ✅ |
1004
1010
  | `markdown` | Export to Markdown | ✅ |
1005
1011
  | `iceberg` | Export to an Iceberg JSON Schema Definition | partial |
1012
+ | `excel` | Export to ODCS Excel Template | ✅ |
1006
1013
  | `custom` | Export to Custom format with Jinja | ✅ |
1007
1014
  | Missing something? | Please create an issue on GitHub | TBD |
1008
1015
 
@@ -1274,6 +1281,22 @@ FROM
1274
1281
  {{ ref('orders') }}
1275
1282
  ```
1276
1283
 
1284
+ #### ODCS Excel Template
1285
+
1286
+ The `export` function converts a data contract into an ODCS (Open Data Contract Standard) Excel template. This creates a user-friendly Excel spreadsheet that can be used for authoring, sharing, and managing data contracts using the familiar Excel interface.
1287
+
1288
+ ```shell
1289
+ datacontract export --format excel --output datacontract.xlsx datacontract.yaml
1290
+ ```
1291
+
1292
+ The Excel format enables:
1293
+ - **User-friendly authoring**: Create and edit data contracts in Excel's familiar interface
1294
+ - **Easy sharing**: Distribute data contracts as standard Excel files
1295
+ - **Collaboration**: Enable non-technical stakeholders to contribute to data contract definitions
1296
+ - **Round-trip conversion**: Import Excel templates back to YAML data contracts
1297
+
1298
+ For more information about the Excel template structure, visit the [ODCS Excel Template repository](https://github.com/datacontract/open-data-contract-standard-excel-template).
1299
+
1277
1300
  ### import
1278
1301
  ```
1279
1302
 
@@ -1392,6 +1415,7 @@ Available import options:
1392
1415
  | `spark` | Import from Spark StructTypes, Variant | ✅ |
1393
1416
  | `sql` | Import from SQL DDL | ✅ |
1394
1417
  | `unity` | Import from Databricks Unity Catalog | partial |
1418
+ | `excel` | Import from ODCS Excel Template | ✅ |
1395
1419
  | Missing something? | Please create an issue on GitHub | TBD |
1396
1420
 
1397
1421
 
@@ -1,7 +1,7 @@
1
1
  datacontract/__init__.py,sha256=ThDdxDJsd7qNErLoh628nK5M7RzhJNYCmN-C6BAJFoo,405
2
2
  datacontract/api.py,sha256=Ze6pVD3Ub0oyMJI3iYSNXH78K2nPKbXKKHA-0DerJ48,8175
3
- datacontract/cli.py,sha256=KSXii4MsrdmEwFTDN9F7A-OC250gdY0R914FBqA2RuY,18614
4
- datacontract/data_contract.py,sha256=yU0Ys4-MK16tTm5RAnALGaNfqpvFmAjfabZg7ePqV5Y,15074
3
+ datacontract/cli.py,sha256=MxtTI15tnkPieSbHdqtU-wCiwj1oCiEnlMHFGzB4OUg,19364
4
+ datacontract/data_contract.py,sha256=Jlgkbzj6UN8RtFDK5VFcqm7v8oitVs-q10msU8W3Uo8,15183
5
5
  datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
7
7
  datacontract/breaking/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
@@ -34,8 +34,9 @@ datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nb
34
34
  datacontract/export/dbt_converter.py,sha256=U2x7rtEnq1s3pHhM0L2B6D6OQtKdCdm4PBSqNCHczHk,10577
35
35
  datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
36
36
  datacontract/export/duckdb_type_converter.py,sha256=hUAAbImhJUMJOXEG-UoOKQqYGrJM6UILpn2YjUuAUOw,2216
37
- datacontract/export/exporter.py,sha256=P_6J5d7k5GPm-DUyfrbgEXmU_o45FHu5nlOTT2CdkUk,3049
38
- datacontract/export/exporter_factory.py,sha256=PWA2j82Vjenj4hXlYXUISTzttrMIILdx8LxNv6hM0cg,6014
37
+ datacontract/export/excel_exporter.py,sha256=ySZL93oaENIjaLyctwoXOiT3yWf311YG3vYtLttjImI,38274
38
+ datacontract/export/exporter.py,sha256=Xo4RyPq9W42hH3xfAX2v8FeQdMFoW0eVzgahY6JjlWI,3069
39
+ datacontract/export/exporter_factory.py,sha256=JRrfcQ9CXiZCw56nFNu9uPSLjlDJLfUC7xPdVTyk6K8,6164
39
40
  datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
40
41
  datacontract/export/great_expectations_converter.py,sha256=Wx0mESRy4xAf8y7HjESsGsQaaei8k9xOVu3RbC6BlQM,12257
41
42
  datacontract/export/html_exporter.py,sha256=EyTMj25_Df3irZiYw1hxVZeLYWp6YSG6z3IuFUviP14,3066
@@ -59,7 +60,7 @@ datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056s
59
60
  datacontract/imports/csv_importer.py,sha256=mBsmyTvfB8q64Z3NYqv4zTDUOvoXG896hZvp3oLt5YM,5330
60
61
  datacontract/imports/dbml_importer.py,sha256=o0IOgvXN34lU1FICDHm_QUTv0DKsgwbHPHUDxQhIapE,3872
61
62
  datacontract/imports/dbt_importer.py,sha256=hQwqD9vbvwLLc6Yj3tQbar5ldI0pV-ynSiz7CZZ0JCc,8290
62
- datacontract/imports/excel_importer.py,sha256=TsyGlF4LE3ABps9UJt7ozwEXwHLgmOhqyJhh6l--xk0,37116
63
+ datacontract/imports/excel_importer.py,sha256=eBLc9VS9OYVFYFcHFHq9HYOStAPBDfVHwmgnBHjxOmc,46415
63
64
  datacontract/imports/glue_importer.py,sha256=fiJPkvfwOCsaKKCGW19-JM5CCGXZ2mkNrVtUzp2iw6g,8370
64
65
  datacontract/imports/iceberg_importer.py,sha256=vadGJVqQKgG-j8swUytZALFB8QjbGRqZPCcPcCy0vco,5923
65
66
  datacontract/imports/importer.py,sha256=NRhR_9AWPWDNq2ac_DVUHGoJuvkVpwwaao8nDfJG_l0,1257
@@ -92,7 +93,7 @@ datacontract/model/odcs.py,sha256=9PXwm72FASjNwteF1Jn591iP3-St0aq16Cpsk0PkEW8,38
92
93
  datacontract/model/run.py,sha256=4UdEUaJl5RxEpN9S3swSu1vGJUVyNhOpRkdfbBZhh90,3146
93
94
  datacontract/model/data_contract_specification/__init__.py,sha256=lO7ywraknlDwJNUaSd2B9FWFsWhE8v5S-kob_shW_lg,47
94
95
  datacontract/output/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
95
- datacontract/output/junit_test_results.py,sha256=hAT7UgxwbfNzA0f5uuGzM4OIwLfXbmA4dgsfxsclWh0,4822
96
+ datacontract/output/junit_test_results.py,sha256=ZjevRMTxNSiR0HMr3bEvqv4olozPw2zEutbuLloInww,4822
96
97
  datacontract/output/output_format.py,sha256=_ZokDBo7-HXBs6czUv7kLLf9cYft_q5QaKzthsVnc58,212
97
98
  datacontract/output/test_results_writer.py,sha256=PWNLs3R_LQMH4xp5WDxLkQgY3xvj8Eyzw1jnfgkQxlc,2713
98
99
  datacontract/schemas/datacontract-1.1.0.init.yaml,sha256=RXRqtMiRsm1oqxXrKAzqQ_eScIr9D8td8-NWWmpYvM0,1828
@@ -112,9 +113,9 @@ datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG
112
113
  datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
113
114
  datacontract/templates/partials/server.html,sha256=dHFJtonMjhiUHtT69RUgTpkoRwmNdTRzkCdH0LtGg_4,6279
114
115
  datacontract/templates/style/output.css,sha256=ioIo1f96VW7LHhDifj6QI8QbRChJl-LlQ59EwM8MEmA,28692
115
- datacontract_cli-0.10.31.dist-info/licenses/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
116
- datacontract_cli-0.10.31.dist-info/METADATA,sha256=T5pyKhT8TWagt86un_b7JSYYN2BLtpLKTvK5kRpOCeg,110144
117
- datacontract_cli-0.10.31.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
118
- datacontract_cli-0.10.31.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
119
- datacontract_cli-0.10.31.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
120
- datacontract_cli-0.10.31.dist-info/RECORD,,
116
+ datacontract_cli-0.10.33.dist-info/licenses/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
117
+ datacontract_cli-0.10.33.dist-info/METADATA,sha256=gqEgdS3X0NnbsbAuATDP2YmwNVcqQMgXEJmVayqQbVA,111469
118
+ datacontract_cli-0.10.33.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
119
+ datacontract_cli-0.10.33.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
120
+ datacontract_cli-0.10.33.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
121
+ datacontract_cli-0.10.33.dist-info/RECORD,,