datacontract-cli 0.10.29__py3-none-any.whl → 0.10.31__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic; review the registry's advisory for more details.

@@ -4,7 +4,7 @@ import typing
4
4
  from open_data_contract_standard.model import CustomProperty, OpenDataContractStandard
5
5
 
6
6
  from datacontract.export.odcs_v3_exporter import to_odcs_v3
7
- from datacontract.imports.importer import Spec
7
+ from datacontract.imports.importer import ImportFormat, Spec
8
8
  from datacontract.imports.odcs_v3_importer import import_from_odcs
9
9
 
10
10
  if typing.TYPE_CHECKING:
@@ -300,7 +300,7 @@ class DataContract:
300
300
  id = kwargs.get("id")
301
301
  owner = kwargs.get("owner")
302
302
 
303
- if spec == Spec.odcs:
303
+ if spec == Spec.odcs or format == ImportFormat.excel:
304
304
  data_contract_specification_initial = DataContract.init(template=template, schema=schema)
305
305
 
306
306
  odcs_imported = importer_factory.create(format).import_source(
@@ -14,9 +14,6 @@ from datacontract.export.exporter import (
14
14
  Exporter,
15
15
  _check_models_for_export,
16
16
  )
17
- from datacontract.export.pandas_type_converter import convert_to_pandas_type
18
- from datacontract.export.spark_converter import to_spark_data_type
19
- from datacontract.export.sql_type_converter import convert_to_sql_type
20
17
  from datacontract.model.data_contract_specification import (
21
18
  DataContractSpecification,
22
19
  DeprecatedQuality,
@@ -167,10 +164,16 @@ def add_field_expectations(
167
164
  """
168
165
  if field.type is not None:
169
166
  if engine == GreatExpectationsEngine.spark.value:
167
+ from datacontract.export.spark_converter import to_spark_data_type
168
+
170
169
  field_type = to_spark_data_type(field).__class__.__name__
171
170
  elif engine == GreatExpectationsEngine.pandas.value:
171
+ from datacontract.export.pandas_type_converter import convert_to_pandas_type
172
+
172
173
  field_type = convert_to_pandas_type(field)
173
174
  elif engine == GreatExpectationsEngine.sql.value:
175
+ from datacontract.export.sql_type_converter import convert_to_sql_type
176
+
174
177
  field_type = convert_to_sql_type(field, sql_server_type)
175
178
  else:
176
179
  field_type = field.type
@@ -153,7 +153,7 @@ def field_to_markdown(field_name: str, field: Field, level: int = 0) -> str:
153
153
  Returns:
154
154
  str: A Markdown table rows for the field.
155
155
  """
156
- tabs = " " * level
156
+ tabs = " " * level
157
157
  arrow = "↳" if level > 0 else ""
158
158
  column_name = f"{tabs}{arrow} {field_name}"
159
159
 
@@ -126,6 +126,8 @@ def to_spark_data_type(field: Field) -> types.DataType:
126
126
  return types.StructType(to_struct_type(field.fields))
127
127
  if field_type == "map":
128
128
  return types.MapType(to_spark_data_type(field.keys), to_spark_data_type(field.values))
129
+ if field_type == "variant":
130
+ return types.VariantType()
129
131
  if field_type in ["string", "varchar", "text"]:
130
132
  return types.StringType()
131
133
  if field_type in ["number", "decimal", "numeric"]:
@@ -150,7 +152,7 @@ def to_spark_data_type(field: Field) -> types.DataType:
150
152
  return types.DateType()
151
153
  if field_type == "bytes":
152
154
  return types.BinaryType()
153
- return types.BinaryType()
155
+ return types.StringType() # default if no condition is met
154
156
 
155
157
 
156
158
  def print_schema(dtype: types.DataType) -> str:
@@ -680,7 +680,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
680
680
  elif server_type == "databricks":
681
681
  server.catalog = get_server_cell_value(workbook, sheet, "servers.databricks.catalog", index)
682
682
  server.host = get_server_cell_value(workbook, sheet, "servers.databricks.host", index)
683
- server.schema = get_server_cell_value(workbook, sheet, "servers.databricks.schema", index)
683
+ server.schema_ = get_server_cell_value(workbook, sheet, "servers.databricks.schema", index)
684
684
  elif server_type == "glue":
685
685
  server.account = get_server_cell_value(workbook, sheet, "servers.glue.account", index)
686
686
  server.database = get_server_cell_value(workbook, sheet, "servers.glue.database", index)
@@ -694,7 +694,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
694
694
  server.database = get_server_cell_value(workbook, sheet, "servers.postgres.database", index)
695
695
  server.host = get_server_cell_value(workbook, sheet, "servers.postgres.host", index)
696
696
  server.port = get_server_cell_value(workbook, sheet, "servers.postgres.port", index)
697
- server.schema = get_server_cell_value(workbook, sheet, "servers.postgres.schema", index)
697
+ server.schema_ = get_server_cell_value(workbook, sheet, "servers.postgres.schema", index)
698
698
  elif server_type == "s3":
699
699
  server.delimiter = get_server_cell_value(workbook, sheet, "servers.s3.delimiter", index)
700
700
  server.endpointUrl = get_server_cell_value(workbook, sheet, "servers.s3.endpointUrl", index)
@@ -705,13 +705,13 @@ def import_servers(workbook) -> Optional[List[Server]]:
705
705
  server.database = get_server_cell_value(workbook, sheet, "servers.snowflake.database", index)
706
706
  server.host = get_server_cell_value(workbook, sheet, "servers.snowflake.host", index)
707
707
  server.port = get_server_cell_value(workbook, sheet, "servers.snowflake.port", index)
708
- server.schema = get_server_cell_value(workbook, sheet, "servers.snowflake.schema", index)
708
+ server.schema_ = get_server_cell_value(workbook, sheet, "servers.snowflake.schema", index)
709
709
  server.warehouse = get_server_cell_value(workbook, sheet, "servers.snowflake.warehouse", index)
710
710
  elif server_type == "sqlserver":
711
711
  server.database = get_server_cell_value(workbook, sheet, "servers.sqlserver.database", index)
712
712
  server.host = get_server_cell_value(workbook, sheet, "servers.sqlserver.host", index)
713
713
  server.port = get_server_cell_value(workbook, sheet, "servers.sqlserver.port", index)
714
- server.schema = get_server_cell_value(workbook, sheet, "servers.sqlserver.schema", index)
714
+ server.schema_ = get_server_cell_value(workbook, sheet, "servers.sqlserver.schema", index)
715
715
  else:
716
716
  # Custom server type - grab all possible fields
717
717
  server.account = get_server_cell_value(workbook, sheet, "servers.custom.account", index)
@@ -726,7 +726,7 @@ def import_servers(workbook) -> Optional[List[Server]]:
726
726
  server.path = get_server_cell_value(workbook, sheet, "servers.custom.path", index)
727
727
  server.port = get_server_cell_value(workbook, sheet, "servers.custom.port", index)
728
728
  server.project = get_server_cell_value(workbook, sheet, "servers.custom.project", index)
729
- server.schema = get_server_cell_value(workbook, sheet, "servers.custom.schema", index)
729
+ server.schema_ = get_server_cell_value(workbook, sheet, "servers.custom.schema", index)
730
730
  server.stagingDir = get_server_cell_value(workbook, sheet, "servers.custom.stagingDir", index)
731
731
  server.table = get_server_cell_value(workbook, sheet, "servers.custom.table", index)
732
732
  server.view = get_server_cell_value(workbook, sheet, "servers.custom.view", index)
@@ -230,6 +230,8 @@ def map_type_from_sql(sql_type: str) -> str | None:
230
230
  return "int"
231
231
  elif sql_type_normed.startswith("float"):
232
232
  return "float"
233
+ elif sql_type_normed.startswith("double"):
234
+ return "double"
233
235
  elif sql_type_normed.startswith("decimal"):
234
236
  return "decimal"
235
237
  elif sql_type_normed.startswith("numeric"):
@@ -200,7 +200,8 @@ def import_table_fields(columns: List[ColumnInfo]) -> dict[str, Field]:
200
200
 
201
201
  def _to_field(column: ColumnInfo) -> Field:
202
202
  field = Field()
203
- if column.type_name is not None:
203
+ # The second condition evaluates for complex types (e.g. variant)
204
+ if column.type_name is not None or (column.type_name is None and column.type_text is not None):
204
205
  sql_type = str(column.type_text)
205
206
  field.type = map_type_from_sql(sql_type)
206
207
  physical_type_key = to_physical_type_key("databricks")
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: datacontract-cli
3
- Version: 0.10.29
3
+ Version: 0.10.31
4
4
  Summary: The datacontract CLI is an open source command-line tool for working with Data Contracts. It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
5
5
  Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>, Simon Harrer <simon.harrer@innoq.com>
6
6
  License-Expression: MIT
@@ -11,7 +11,7 @@ Classifier: Operating System :: OS Independent
11
11
  Requires-Python: >=3.10
12
12
  Description-Content-Type: text/markdown
13
13
  License-File: LICENSE
14
- Requires-Dist: typer<0.16,>=0.15.1
14
+ Requires-Dist: typer<0.17,>=0.15.1
15
15
  Requires-Dist: pydantic<2.12.0,>=2.8.2
16
16
  Requires-Dist: pyyaml~=6.0.1
17
17
  Requires-Dist: requests<2.33,>=2.31
@@ -42,7 +42,7 @@ Provides-Extra: databricks
42
42
  Requires-Dist: soda-core-spark-df<3.6.0,>=3.3.20; extra == "databricks"
43
43
  Requires-Dist: soda-core-spark[databricks]<3.6.0,>=3.3.20; extra == "databricks"
44
44
  Requires-Dist: databricks-sql-connector<4.1.0,>=3.7.0; extra == "databricks"
45
- Requires-Dist: databricks-sdk<0.58.0; extra == "databricks"
45
+ Requires-Dist: databricks-sdk<0.59.0; extra == "databricks"
46
46
  Requires-Dist: pyspark<4.0.0,>=3.5.5; extra == "databricks"
47
47
  Provides-Extra: iceberg
48
48
  Requires-Dist: pyiceberg==0.9.1; extra == "iceberg"
@@ -54,7 +54,7 @@ Provides-Extra: postgres
54
54
  Requires-Dist: soda-core-postgres<3.6.0,>=3.3.20; extra == "postgres"
55
55
  Provides-Extra: s3
56
56
  Requires-Dist: s3fs<2026.0.0,>=2025.2.0; extra == "s3"
57
- Requires-Dist: aiobotocore<2.23.0,>=2.17.0; extra == "s3"
57
+ Requires-Dist: aiobotocore<2.24.0,>=2.17.0; extra == "s3"
58
58
  Provides-Extra: snowflake
59
59
  Requires-Dist: snowflake-connector-python[pandas]<3.16,>=3.6; extra == "snowflake"
60
60
  Requires-Dist: soda-core-snowflake<3.6.0,>=3.3.20; extra == "snowflake"
@@ -71,7 +71,7 @@ Requires-Dist: pyarrow>=18.1.0; extra == "parquet"
71
71
  Provides-Extra: rdf
72
72
  Requires-Dist: rdflib==7.0.0; extra == "rdf"
73
73
  Provides-Extra: api
74
- Requires-Dist: fastapi==0.115.14; extra == "api"
74
+ Requires-Dist: fastapi==0.116.1; extra == "api"
75
75
  Requires-Dist: uvicorn==0.35.0; extra == "api"
76
76
  Provides-Extra: protobuf
77
77
  Requires-Dist: grpcio-tools>=1.53; extra == "protobuf"
@@ -86,7 +86,7 @@ Requires-Dist: pandas>=2.1.0; extra == "dev"
86
86
  Requires-Dist: pre-commit<4.3.0,>=3.7.1; extra == "dev"
87
87
  Requires-Dist: pytest; extra == "dev"
88
88
  Requires-Dist: pytest-xdist; extra == "dev"
89
- Requires-Dist: pymssql==2.3.6; extra == "dev"
89
+ Requires-Dist: pymssql==2.3.7; extra == "dev"
90
90
  Requires-Dist: ruff; extra == "dev"
91
91
  Requires-Dist: testcontainers[kafka,minio,mssql,postgres]==4.10.0; extra == "dev"
92
92
  Requires-Dist: trino==0.335.0; extra == "dev"
@@ -1018,8 +1018,6 @@ If using Databricks, and an error is thrown when trying to deploy the SQL DDLs w
1018
1018
 
1019
1019
  ```shell
1020
1020
  spark.conf.set(“spark.databricks.delta.schema.typeCheck.enabled”, “false”)
1021
- from datacontract.model import data_contract_specification
1022
- data_contract_specification.DATACONTRACT_TYPES.append(“variant”)
1023
1021
  ```
1024
1022
 
1025
1023
  #### Great Expectations
@@ -2083,7 +2081,6 @@ We are happy to receive your contributions. Propose your change in an issue or d
2083
2081
  - [INNOQ](https://innoq.com)
2084
2082
  - [Data Catering](https://data.catering/)
2085
2083
  - [Oliver Wyman](https://www.oliverwyman.com/)
2086
- - [dmTECH](https://www.dmtech.tech/de)
2087
2084
  - And many more. To add your company, please create a pull request.
2088
2085
 
2089
2086
  ## Related Tools
@@ -1,7 +1,7 @@
1
1
  datacontract/__init__.py,sha256=ThDdxDJsd7qNErLoh628nK5M7RzhJNYCmN-C6BAJFoo,405
2
2
  datacontract/api.py,sha256=Ze6pVD3Ub0oyMJI3iYSNXH78K2nPKbXKKHA-0DerJ48,8175
3
3
  datacontract/cli.py,sha256=KSXii4MsrdmEwFTDN9F7A-OC250gdY0R914FBqA2RuY,18614
4
- datacontract/data_contract.py,sha256=-nIf68Uo2bclJpqGUz_AkLesGqwrlgpn59WVSFwoi3I,15028
4
+ datacontract/data_contract.py,sha256=yU0Ys4-MK16tTm5RAnALGaNfqpvFmAjfabZg7ePqV5Y,15074
5
5
  datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
6
6
  datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
7
7
  datacontract/breaking/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
@@ -37,11 +37,11 @@ datacontract/export/duckdb_type_converter.py,sha256=hUAAbImhJUMJOXEG-UoOKQqYGrJM
37
37
  datacontract/export/exporter.py,sha256=P_6J5d7k5GPm-DUyfrbgEXmU_o45FHu5nlOTT2CdkUk,3049
38
38
  datacontract/export/exporter_factory.py,sha256=PWA2j82Vjenj4hXlYXUISTzttrMIILdx8LxNv6hM0cg,6014
39
39
  datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
40
- datacontract/export/great_expectations_converter.py,sha256=WvEa4HNUMyzn2a5YNkcI9mjROzGkKRYOS2gFPjRRHNc,12218
40
+ datacontract/export/great_expectations_converter.py,sha256=Wx0mESRy4xAf8y7HjESsGsQaaei8k9xOVu3RbC6BlQM,12257
41
41
  datacontract/export/html_exporter.py,sha256=EyTMj25_Df3irZiYw1hxVZeLYWp6YSG6z3IuFUviP14,3066
42
42
  datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
43
43
  datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
44
- datacontract/export/markdown_converter.py,sha256=chtaZX4vXTee7JCMYmWiDQ9m55gwJjHPw6SEM3UOwpQ,6467
44
+ datacontract/export/markdown_converter.py,sha256=ioTFe3mHKLYwpo7wasOr-4TBlHIjowDHSonDgXV4uOQ,6468
45
45
  datacontract/export/mermaid_exporter.py,sha256=wBFqRKgy3Y5FZnzD0RCR1UAlT9ZqYW7dkXWSJa6eHiQ,3465
46
46
  datacontract/export/odcs_v3_exporter.py,sha256=b__AiPAnCUuFQE5DPHsvXBrMeEl1t_mJ1vzTx84TMlI,13931
47
47
  datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
@@ -49,7 +49,7 @@ datacontract/export/protobuf_converter.py,sha256=DHLl8BW26xqltBsd7Qhz0RhTl9YZQKC
49
49
  datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
50
50
  datacontract/export/rdf_converter.py,sha256=1aTe_fwBRBnYUrJZzhEQ8eVnl0mQ1hcF45aKVdgvSIc,6435
51
51
  datacontract/export/sodacl_converter.py,sha256=75vQ2TnoLfjiDtWT2x8opumvotXVRs1YaIu1NLYz05M,1473
52
- datacontract/export/spark_converter.py,sha256=LCue-rLan3ki7HgzUFyBaO8YUlc6CrDNBZD-QVgUv-U,7190
52
+ datacontract/export/spark_converter.py,sha256=c4C_QZFRQLMW6Cz35WYdHqYGKyHSF7bBVmJua2YF0Cs,7290
53
53
  datacontract/export/sql_converter.py,sha256=vyLbDqzt_J3LRXpPv2W2HqUIyAtQx_S-jviBiSxh14A,5087
54
54
  datacontract/export/sql_type_converter.py,sha256=4r8S7nb4z2ODgXTwHxdtU158HWE8uxXHydiZ_y4-N_E,13592
55
55
  datacontract/export/sqlalchemy_converter.py,sha256=0DMncvA811lTtd5q4ZORREQ9YH1vQm1lJeqMWsFvloE,6463
@@ -59,7 +59,7 @@ datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056s
59
59
  datacontract/imports/csv_importer.py,sha256=mBsmyTvfB8q64Z3NYqv4zTDUOvoXG896hZvp3oLt5YM,5330
60
60
  datacontract/imports/dbml_importer.py,sha256=o0IOgvXN34lU1FICDHm_QUTv0DKsgwbHPHUDxQhIapE,3872
61
61
  datacontract/imports/dbt_importer.py,sha256=hQwqD9vbvwLLc6Yj3tQbar5ldI0pV-ynSiz7CZZ0JCc,8290
62
- datacontract/imports/excel_importer.py,sha256=6HJcbRxyZ0PTFL0f0Ww-VAMdUnnph_e7JqZWwOeAK6Y,37111
62
+ datacontract/imports/excel_importer.py,sha256=TsyGlF4LE3ABps9UJt7ozwEXwHLgmOhqyJhh6l--xk0,37116
63
63
  datacontract/imports/glue_importer.py,sha256=fiJPkvfwOCsaKKCGW19-JM5CCGXZ2mkNrVtUzp2iw6g,8370
64
64
  datacontract/imports/iceberg_importer.py,sha256=vadGJVqQKgG-j8swUytZALFB8QjbGRqZPCcPcCy0vco,5923
65
65
  datacontract/imports/importer.py,sha256=NRhR_9AWPWDNq2ac_DVUHGoJuvkVpwwaao8nDfJG_l0,1257
@@ -71,8 +71,8 @@ datacontract/imports/odcs_v3_importer.py,sha256=smtFSoywNojUCdB8sDu0e_3yHN4CmUw5
71
71
  datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
72
72
  datacontract/imports/protobuf_importer.py,sha256=rlUIskv9PNi5rFQ4Hobt9zlnKpahGsb4dy5G5UJoVAw,10840
73
73
  datacontract/imports/spark_importer.py,sha256=vqGM3kisQGSiHS1AvuC6gp0unj2-6TT3R-ejG96j-Tw,8618
74
- datacontract/imports/sql_importer.py,sha256=a5Ym4qkjjvT1g1P7lIn0KpvIxL45zGPn_qPKqg570ro,9472
75
- datacontract/imports/unity_importer.py,sha256=Aa-Rqjzfen5chUB4EUt9bOL0QOAbj4j7ZoU-chdhtvk,8554
74
+ datacontract/imports/sql_importer.py,sha256=CfHap1zpyy-NVol_i21bDai3l4PD8OLuJQwaVlihbqg,9543
75
+ datacontract/imports/unity_importer.py,sha256=iW4CJNbHNoQR5fqVlBJfdHnbbDasPgkwHHmyV9FKGkI,8687
76
76
  datacontract/init/init_template.py,sha256=nc-B2ZkwDQ3GNFqxNmSDcStQMDbBTxysgTZDLw15izo,721
77
77
  datacontract/integration/datamesh_manager.py,sha256=FT9eadzFz181lg54b49_c_x2caGJT7mR3drlZBSBJLo,3375
78
78
  datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
@@ -112,9 +112,9 @@ datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG
112
112
  datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
113
113
  datacontract/templates/partials/server.html,sha256=dHFJtonMjhiUHtT69RUgTpkoRwmNdTRzkCdH0LtGg_4,6279
114
114
  datacontract/templates/style/output.css,sha256=ioIo1f96VW7LHhDifj6QI8QbRChJl-LlQ59EwM8MEmA,28692
115
- datacontract_cli-0.10.29.dist-info/licenses/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
116
- datacontract_cli-0.10.29.dist-info/METADATA,sha256=qYg7wS67rf8uW1mgGwlIGTaQxs5cIz2-3Lw5wLTW-kw,110312
117
- datacontract_cli-0.10.29.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
118
- datacontract_cli-0.10.29.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
119
- datacontract_cli-0.10.29.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
120
- datacontract_cli-0.10.29.dist-info/RECORD,,
115
+ datacontract_cli-0.10.31.dist-info/licenses/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
116
+ datacontract_cli-0.10.31.dist-info/METADATA,sha256=T5pyKhT8TWagt86un_b7JSYYN2BLtpLKTvK5kRpOCeg,110144
117
+ datacontract_cli-0.10.31.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
118
+ datacontract_cli-0.10.31.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
119
+ datacontract_cli-0.10.31.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
120
+ datacontract_cli-0.10.31.dist-info/RECORD,,