datacontract-cli 0.10.0__py3-none-any.whl → 0.10.37__py3-none-any.whl
This diff compares two publicly available versions of this package as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the package versions exactly as they appear in the public registry.
- datacontract/__init__.py +13 -0
- datacontract/api.py +260 -0
- datacontract/breaking/breaking.py +242 -12
- datacontract/breaking/breaking_rules.py +37 -1
- datacontract/catalog/catalog.py +80 -0
- datacontract/cli.py +387 -117
- datacontract/data_contract.py +216 -353
- datacontract/engines/data_contract_checks.py +1041 -0
- datacontract/engines/data_contract_test.py +113 -0
- datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +2 -3
- datacontract/engines/datacontract/check_that_datacontract_file_exists.py +1 -1
- datacontract/engines/fastjsonschema/check_jsonschema.py +176 -42
- datacontract/engines/fastjsonschema/s3/s3_read_files.py +16 -1
- datacontract/engines/soda/check_soda_execute.py +100 -56
- datacontract/engines/soda/connections/athena.py +79 -0
- datacontract/engines/soda/connections/bigquery.py +8 -1
- datacontract/engines/soda/connections/databricks.py +12 -3
- datacontract/engines/soda/connections/duckdb_connection.py +241 -0
- datacontract/engines/soda/connections/kafka.py +206 -113
- datacontract/engines/soda/connections/snowflake.py +8 -5
- datacontract/engines/soda/connections/sqlserver.py +43 -0
- datacontract/engines/soda/connections/trino.py +26 -0
- datacontract/export/avro_converter.py +72 -8
- datacontract/export/avro_idl_converter.py +31 -25
- datacontract/export/bigquery_converter.py +130 -0
- datacontract/export/custom_converter.py +40 -0
- datacontract/export/data_caterer_converter.py +161 -0
- datacontract/export/dbml_converter.py +148 -0
- datacontract/export/dbt_converter.py +141 -54
- datacontract/export/dcs_exporter.py +6 -0
- datacontract/export/dqx_converter.py +126 -0
- datacontract/export/duckdb_type_converter.py +57 -0
- datacontract/export/excel_exporter.py +923 -0
- datacontract/export/exporter.py +100 -0
- datacontract/export/exporter_factory.py +216 -0
- datacontract/export/go_converter.py +105 -0
- datacontract/export/great_expectations_converter.py +257 -36
- datacontract/export/html_exporter.py +86 -0
- datacontract/export/iceberg_converter.py +188 -0
- datacontract/export/jsonschema_converter.py +71 -16
- datacontract/export/markdown_converter.py +337 -0
- datacontract/export/mermaid_exporter.py +110 -0
- datacontract/export/odcs_v3_exporter.py +375 -0
- datacontract/export/pandas_type_converter.py +40 -0
- datacontract/export/protobuf_converter.py +168 -68
- datacontract/export/pydantic_converter.py +6 -0
- datacontract/export/rdf_converter.py +13 -6
- datacontract/export/sodacl_converter.py +36 -188
- datacontract/export/spark_converter.py +245 -0
- datacontract/export/sql_converter.py +37 -3
- datacontract/export/sql_type_converter.py +269 -8
- datacontract/export/sqlalchemy_converter.py +170 -0
- datacontract/export/terraform_converter.py +7 -2
- datacontract/imports/avro_importer.py +246 -26
- datacontract/imports/bigquery_importer.py +221 -0
- datacontract/imports/csv_importer.py +143 -0
- datacontract/imports/dbml_importer.py +112 -0
- datacontract/imports/dbt_importer.py +240 -0
- datacontract/imports/excel_importer.py +1111 -0
- datacontract/imports/glue_importer.py +288 -0
- datacontract/imports/iceberg_importer.py +172 -0
- datacontract/imports/importer.py +51 -0
- datacontract/imports/importer_factory.py +128 -0
- datacontract/imports/json_importer.py +325 -0
- datacontract/imports/jsonschema_importer.py +146 -0
- datacontract/imports/odcs_importer.py +60 -0
- datacontract/imports/odcs_v3_importer.py +516 -0
- datacontract/imports/parquet_importer.py +81 -0
- datacontract/imports/protobuf_importer.py +264 -0
- datacontract/imports/spark_importer.py +262 -0
- datacontract/imports/sql_importer.py +274 -35
- datacontract/imports/unity_importer.py +219 -0
- datacontract/init/init_template.py +20 -0
- datacontract/integration/datamesh_manager.py +86 -0
- datacontract/lint/resolve.py +271 -49
- datacontract/lint/resources.py +21 -0
- datacontract/lint/schema.py +53 -17
- datacontract/lint/urls.py +32 -12
- datacontract/model/data_contract_specification/__init__.py +1 -0
- datacontract/model/exceptions.py +4 -1
- datacontract/model/odcs.py +24 -0
- datacontract/model/run.py +49 -29
- datacontract/output/__init__.py +0 -0
- datacontract/output/junit_test_results.py +135 -0
- datacontract/output/output_format.py +10 -0
- datacontract/output/test_results_writer.py +79 -0
- datacontract/py.typed +0 -0
- datacontract/schemas/datacontract-1.1.0.init.yaml +91 -0
- datacontract/schemas/datacontract-1.1.0.schema.json +1975 -0
- datacontract/schemas/datacontract-1.2.0.init.yaml +91 -0
- datacontract/schemas/datacontract-1.2.0.schema.json +2029 -0
- datacontract/schemas/datacontract-1.2.1.init.yaml +91 -0
- datacontract/schemas/datacontract-1.2.1.schema.json +2058 -0
- datacontract/schemas/odcs-3.0.1.schema.json +2634 -0
- datacontract/schemas/odcs-3.0.2.schema.json +2382 -0
- datacontract/templates/datacontract.html +139 -294
- datacontract/templates/datacontract_odcs.html +685 -0
- datacontract/templates/index.html +236 -0
- datacontract/templates/partials/datacontract_information.html +86 -0
- datacontract/templates/partials/datacontract_servicelevels.html +253 -0
- datacontract/templates/partials/datacontract_terms.html +51 -0
- datacontract/templates/partials/definition.html +25 -0
- datacontract/templates/partials/example.html +27 -0
- datacontract/templates/partials/model_field.html +144 -0
- datacontract/templates/partials/quality.html +49 -0
- datacontract/templates/partials/server.html +211 -0
- datacontract/templates/style/output.css +491 -72
- datacontract_cli-0.10.37.dist-info/METADATA +2235 -0
- datacontract_cli-0.10.37.dist-info/RECORD +119 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/WHEEL +1 -1
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info/licenses}/LICENSE +1 -1
- datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +0 -48
- datacontract/engines/soda/connections/dask.py +0 -28
- datacontract/engines/soda/connections/duckdb.py +0 -76
- datacontract/export/csv_type_converter.py +0 -36
- datacontract/export/html_export.py +0 -66
- datacontract/export/odcs_converter.py +0 -102
- datacontract/init/download_datacontract_file.py +0 -17
- datacontract/integration/publish_datamesh_manager.py +0 -33
- datacontract/integration/publish_opentelemetry.py +0 -107
- datacontract/lint/lint.py +0 -141
- datacontract/lint/linters/description_linter.py +0 -34
- datacontract/lint/linters/example_model_linter.py +0 -91
- datacontract/lint/linters/field_pattern_linter.py +0 -34
- datacontract/lint/linters/field_reference_linter.py +0 -38
- datacontract/lint/linters/notice_period_linter.py +0 -55
- datacontract/lint/linters/quality_schema_linter.py +0 -52
- datacontract/lint/linters/valid_constraints_linter.py +0 -99
- datacontract/model/data_contract_specification.py +0 -141
- datacontract/web.py +0 -14
- datacontract_cli-0.10.0.dist-info/METADATA +0 -951
- datacontract_cli-0.10.0.dist-info/RECORD +0 -66
- /datacontract/{model → breaking}/breaking_change.py +0 -0
- /datacontract/{lint/linters → export}/__init__.py +0 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.0.dist-info → datacontract_cli-0.10.37.dist-info}/top_level.txt +0 -0

datacontract_cli-0.10.37.dist-info/RECORD (added)
@@ -0,0 +1,119 @@
+datacontract/__init__.py,sha256=ThDdxDJsd7qNErLoh628nK5M7RzhJNYCmN-C6BAJFoo,405
+datacontract/api.py,sha256=nFmrJOhC5AygY9YS1VXsbvKNtW92B8AF-lXdhuCvcPE,8578
+datacontract/cli.py,sha256=ix1iwdo8bGfUW_tf1qiSoLhGBKCLcIgpVgn6diV9MB0,20742
+datacontract/data_contract.py,sha256=24QE2ym5dfwTP6vJ0OmW37GNfGCCV6y4x4-J5Ouvfjk,13248
+datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
+datacontract/breaking/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
+datacontract/breaking/breaking_rules.py,sha256=M9IdzVJSA7oOr1fvLQl0y9MoBKeItPz42Db2U2cjH2Y,4063
+datacontract/catalog/catalog.py,sha256=U5TpDyT9kcF086DoDSS3bWBE4Q8uj6HVuCSFaxN5kMw,2830
+datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/engines/data_contract_checks.py,sha256=LNI0OZPrzrf7sn6lPgTL4uAHSqMH2VBrmaBo8etuSE0,37453
+datacontract/engines/data_contract_test.py,sha256=8qg0SkwtTmayfzNL2U_0xgx5Hi_DUePaMt2q_JiCqX8,4543
+datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=zrDn-_EJJ5kv0kZWAA-toeEPuBd3YQ0-U7Jb8euNUS8,1558
+datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
+datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=EKPkFM8iGyiWsHw8peErhQvbt9gu_zJ2S-NDQnupfeM,10921
+datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=0sTDWvuu0AzSgn7fKWJxGaTmPww00TFYyDK-X0s5T3c,1193
+datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/engines/soda/check_soda_execute.py,sha256=qc56ZNKyHIoSFgoXzBRioOhnhgsFSJ6L-nyBU7d0fW8,8902
+datacontract/engines/soda/connections/athena.py,sha256=wjrJA9CHhl6FbSW0HulWcYlkT2_nY1s19Y2MFe4lbCU,3028
+datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
+datacontract/engines/soda/connections/databricks.py,sha256=cMRasuO0MrSKVgHPB-9uFTGTZPFg6z9Kpk3tJ0SdR0s,943
+datacontract/engines/soda/connections/duckdb_connection.py,sha256=wGiB6EKr-OZosEFvT2gkutFgAzAxFMKicfpjbIJUZwQ,9332
+datacontract/engines/soda/connections/kafka.py,sha256=lnj_-3-CnJ6stetGqm6HOzN1Qatlw7xoCQU2zKBIXxU,8725
+datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
+datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
+datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
+datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
+datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/export/avro_converter.py,sha256=MnfeW2x-Eko9dK6_fpdQYWtEzLkFWXfKABAUSJqiDpo,5381
+datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF6dUU9roVqnFo,9878
+datacontract/export/bigquery_converter.py,sha256=9mm-XP3klu1C5J87L9EL5ZyMCJhLBwsixo3aAw9QmRI,4738
+datacontract/export/custom_converter.py,sha256=xb8KbkRRgHmT4ewwC7XxtnKpe_ZMSJWBjYOaKjmO_KQ,1216
+datacontract/export/data_caterer_converter.py,sha256=eSEuy3TbqUIG_lHYEBOydAgp_CJNoGArXrcJvh81wcw,5984
+datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nbw3Wq0JDo,4777
+datacontract/export/dbt_converter.py,sha256=58bub8n22dfL8w6bKdFe28BLF0e4PHbrxO_H3rZ9wfk,11840
+datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
+datacontract/export/dqx_converter.py,sha256=5UevFPE8RdFIeu4CgeVnXMNDfWU7DhR34DW7O1aVIFs,4105
+datacontract/export/duckdb_type_converter.py,sha256=hUAAbImhJUMJOXEG-UoOKQqYGrJM6UILpn2YjUuAUOw,2216
+datacontract/export/excel_exporter.py,sha256=qDVcwO38aWPMTYyXo2qJd1HJLkmj_Izhi1hdVOQ8o_w,38424
+datacontract/export/exporter.py,sha256=DfvMHDWmdqhJswLkQ5oMNojgYDblXDuRgFJRHuFSawM,3085
+datacontract/export/exporter_factory.py,sha256=UvP3_U7xj-GEjaifi__Jri6eYKx9SFXtmSrnkSbWuP0,6318
+datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
+datacontract/export/great_expectations_converter.py,sha256=Wx0mESRy4xAf8y7HjESsGsQaaei8k9xOVu3RbC6BlQM,12257
+datacontract/export/html_exporter.py,sha256=EyTMj25_Df3irZiYw1hxVZeLYWp6YSG6z3IuFUviP14,3066
+datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
+datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
+datacontract/export/markdown_converter.py,sha256=J6QEGuopR9AUyEhux1GpjmJaQa1iihsbNMAmGRQ63BQ,10430
+datacontract/export/mermaid_exporter.py,sha256=Hg2yc5DYDTEZ7etoIhB1LU6rob_sGlouDtkPxUtf6kQ,4008
+datacontract/export/odcs_v3_exporter.py,sha256=b__AiPAnCUuFQE5DPHsvXBrMeEl1t_mJ1vzTx84TMlI,13931
+datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
+datacontract/export/protobuf_converter.py,sha256=DHLl8BW26xqltBsd7Qhz0RhTl9YZQKCbkmjNpECgubg,7928
+datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
+datacontract/export/rdf_converter.py,sha256=zY2BZrRxM0J6C2cgf5zA8c7FxDRImFjZUrJ4ksmvSTw,6435
+datacontract/export/sodacl_converter.py,sha256=75vQ2TnoLfjiDtWT2x8opumvotXVRs1YaIu1NLYz05M,1473
+datacontract/export/spark_converter.py,sha256=aol9ygEq29mjrZNiaK3Vdm8kEZhCgFFphuFiFDX-pOE,7953
+datacontract/export/sql_converter.py,sha256=vyLbDqzt_J3LRXpPv2W2HqUIyAtQx_S-jviBiSxh14A,5087
+datacontract/export/sql_type_converter.py,sha256=eWHRHJNeg6oOT2uUPjmcVjEf6H_qXZvDhvSCk-_iBAM,13890
+datacontract/export/sqlalchemy_converter.py,sha256=0DMncvA811lTtd5q4ZORREQ9YH1vQm1lJeqMWsFvloE,6463
+datacontract/export/terraform_converter.py,sha256=ExFoEvErVk-gBnWJiqC38SxDUmUEydpACWc917l5RyM,2163
+datacontract/imports/avro_importer.py,sha256=isfAnMq9bk-Yo5zSyTnqMegu7JIujn_sTGSTOYAc8-0,11847
+datacontract/imports/bigquery_importer.py,sha256=7TcP9FDsIas5LwJZ-HrOPXZ-NuR056sxLfDDh3vjo8E,8419
+datacontract/imports/csv_importer.py,sha256=mBsmyTvfB8q64Z3NYqv4zTDUOvoXG896hZvp3oLt5YM,5330
+datacontract/imports/dbml_importer.py,sha256=o0IOgvXN34lU1FICDHm_QUTv0DKsgwbHPHUDxQhIapE,3872
+datacontract/imports/dbt_importer.py,sha256=hQwqD9vbvwLLc6Yj3tQbar5ldI0pV-ynSiz7CZZ0JCc,8290
+datacontract/imports/excel_importer.py,sha256=eBLc9VS9OYVFYFcHFHq9HYOStAPBDfVHwmgnBHjxOmc,46415
+datacontract/imports/glue_importer.py,sha256=fiJPkvfwOCsaKKCGW19-JM5CCGXZ2mkNrVtUzp2iw6g,8370
+datacontract/imports/iceberg_importer.py,sha256=vadGJVqQKgG-j8swUytZALFB8QjbGRqZPCcPcCy0vco,5923
+datacontract/imports/importer.py,sha256=NRhR_9AWPWDNq2ac_DVUHGoJuvkVpwwaao8nDfJG_l0,1257
+datacontract/imports/importer_factory.py,sha256=RS7uwkkT7rIKGeMKgPmZhE3GVC9IfZxZhm8XN0ooa3U,4124
+datacontract/imports/json_importer.py,sha256=JeGbqAC_wAO0u8HeMA5H-KJBfs6gpp1oGIpxt6nxSZI,12641
+datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
+datacontract/imports/odcs_importer.py,sha256=ZP2u3kJsgULANTbbqkP3joOlU9cUneZOPy6Ak3oTMgs,2140
+datacontract/imports/odcs_v3_importer.py,sha256=8mWFn4Ntf0jk0DKqmrp_DN7pKlSCO88Ol_UBWzHkY20,20467
+datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
+datacontract/imports/protobuf_importer.py,sha256=rlUIskv9PNi5rFQ4Hobt9zlnKpahGsb4dy5G5UJoVAw,10840
+datacontract/imports/spark_importer.py,sha256=OxX9hJhi8e1o1pZGOKh5zWsK96SX13r0WV04kKDD61M,8964
+datacontract/imports/sql_importer.py,sha256=AdbBe7RrOEDMwdDt4huF5XmOV2EDpOP-k_m8kFQRlJg,10130
+datacontract/imports/unity_importer.py,sha256=ZoWVMPffYNAXxPa0E8d6gRBtx3l-KSx0fPqnQx81DX0,9067
+datacontract/init/init_template.py,sha256=sLCxvXHqoeW-Qes9W8GSVPfDmmu7pfnVOm-puI1-wsQ,721
+datacontract/integration/datamesh_manager.py,sha256=FT9eadzFz181lg54b49_c_x2caGJT7mR3drlZBSBJLo,3375
+datacontract/lint/files.py,sha256=tg0vq_w4LQsEr_8A5qr4hUJmHeGalUpsXJXC1t-OGC0,471
+datacontract/lint/resolve.py,sha256=TjCS0wX4OIkQsV1fXpgGwfnDRyBLrFwePyLsEVO8Qs0,15339
+datacontract/lint/resources.py,sha256=nfeZmORh1aP7EKpMKCmfbS04Te8pQ0nz64vJVkHOq3c,647
+datacontract/lint/schema.py,sha256=wijp3Ix7WNqA2gnIQ6_IxbjB6fe35nYvMNM16dtAEA4,2220
+datacontract/lint/urls.py,sha256=giac0eAYa6hha8exleL3KsiPtiFlOq8l53axtAmCilw,2529
+datacontract/model/exceptions.py,sha256=5BMuEH2qWuckNP4FTfpUEeEu6rjgGcLOD0GQugKRQ1U,1242
+datacontract/model/odcs.py,sha256=Ku-n2xLC0_5EX7KxxLWrQ5ei5kugQiJy7AChKMmRWTc,782
+datacontract/model/run.py,sha256=4UdEUaJl5RxEpN9S3swSu1vGJUVyNhOpRkdfbBZhh90,3146
+datacontract/model/data_contract_specification/__init__.py,sha256=lO7ywraknlDwJNUaSd2B9FWFsWhE8v5S-kob_shW_lg,47
+datacontract/output/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/output/junit_test_results.py,sha256=ZjevRMTxNSiR0HMr3bEvqv4olozPw2zEutbuLloInww,4822
+datacontract/output/output_format.py,sha256=_ZokDBo7-HXBs6czUv7kLLf9cYft_q5QaKzthsVnc58,212
+datacontract/output/test_results_writer.py,sha256=PWNLs3R_LQMH4xp5WDxLkQgY3xvj8Eyzw1jnfgkQxlc,2713
+datacontract/schemas/datacontract-1.1.0.init.yaml,sha256=ij_-ZEJP4A7ekeJfoqGpRbiCys7_YjkClNluVVo4C6E,1828
+datacontract/schemas/datacontract-1.1.0.schema.json,sha256=3Bu2rxEjkF6dNLcqi1GF4KoXBnEIopaJ87Qb8S4zUvg,62872
+datacontract/schemas/datacontract-1.2.0.init.yaml,sha256=ij_-ZEJP4A7ekeJfoqGpRbiCys7_YjkClNluVVo4C6E,1828
+datacontract/schemas/datacontract-1.2.0.schema.json,sha256=sk7oL06cug9-WozCrLH8v8MuR3a8MaV1Ztkm1P-7UFk,64226
+datacontract/schemas/datacontract-1.2.1.init.yaml,sha256=ij_-ZEJP4A7ekeJfoqGpRbiCys7_YjkClNluVVo4C6E,1828
+datacontract/schemas/datacontract-1.2.1.schema.json,sha256=Ha6F8i2jaL3BKOV5kjWgaxzykAiaSLqjIq-OEOojnx4,65233
+datacontract/schemas/odcs-3.0.1.schema.json,sha256=bRZsSXA0fV0EmV_8f1K68PlXu1m4K7JcuHpLnY3ESwQ,72933
+datacontract/schemas/odcs-3.0.2.schema.json,sha256=_J13Tqc9E7RzpSho645meE86AxeU0dIt2U12-MnAfHk,69968
+datacontract/templates/datacontract.html,sha256=dksPEnY3c66jaaVS5r5vWfG6LzyXPjA4nO_yLUirJWg,17394
+datacontract/templates/datacontract_odcs.html,sha256=u4bpcgQVqwGmR0QjijJqecOClV2ZhpDvnNMAMzj4Ezc,32659
+datacontract/templates/index.html,sha256=EY2LYSTFCwMMcezsDbYsJwM5A7As7oOEiqTBpK_MXe8,12683
+datacontract/templates/partials/datacontract_information.html,sha256=7ZBxgEgi2XndKBypeOpe03oCSRPOujC6NVlN7zexGNM,6221
+datacontract/templates/partials/datacontract_servicelevels.html,sha256=ed3QgB11B0Qq2h_NwaroGZ4pQMBPEhfeQaoS-qEipqY,11401
+datacontract/templates/partials/datacontract_terms.html,sha256=1cnJcOTpxwot2BCuZmkLF_SPfiVloLs3c8mj9WfE4sc,1865
+datacontract/templates/partials/definition.html,sha256=gZEmNvwNGGxA_Fnzx_0L6tXlAMk_EAPWr5ziRIThb_o,1005
+datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4xX5j6suasXHNM88,1204
+datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG-H_FuW4JK1znkCaYCac,7625
+datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
+datacontract/templates/partials/server.html,sha256=dHFJtonMjhiUHtT69RUgTpkoRwmNdTRzkCdH0LtGg_4,6279
+datacontract/templates/style/output.css,sha256=ioIo1f96VW7LHhDifj6QI8QbRChJl-LlQ59EwM8MEmA,28692
+datacontract_cli-0.10.37.dist-info/licenses/LICENSE,sha256=0hcS8X51AL0UvEsx1ZM6WQcxiy9d0j5iOfzdPYM6ONU,2205
+datacontract_cli-0.10.37.dist-info/METADATA,sha256=xG1Rilzh_eBYa-gBTG4b9AreKjV5-lcZ41sVqkHtHoc,115131
+datacontract_cli-0.10.37.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
+datacontract_cli-0.10.37.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+datacontract_cli-0.10.37.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+datacontract_cli-0.10.37.dist-info/RECORD,,
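
For reference, each entry in the wheel RECORD above follows the `path,hash,size` format, where the hash is the urlsafe base64-encoded SHA-256 digest of the file with the trailing `=` padding stripped. A minimal sketch (not part of this package; the helper name is illustrative) for recomputing such an entry:

```python
import base64
import hashlib
from pathlib import Path


def record_entry(path: Path) -> str:
    """Return a RECORD-style entry (path,hash,size) for a file.

    Wheel RECORD hashes are the urlsafe base64 encoding of the SHA-256
    digest, with trailing '=' padding removed.
    """
    data = path.read_bytes()
    digest = base64.urlsafe_b64encode(hashlib.sha256(data).digest()).rstrip(b"=")
    return f"{path},sha256={digest.decode('ascii')},{len(data)}"


# A zero-byte file such as datacontract/py.typed yields
# sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU, matching the entries above.
```

Note that empty files (`py.typed` and the `__init__.py` modules listed with size 0) all share that same hash, since it is simply the encoded SHA-256 of empty input.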

datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py (removed)
@@ -1,48 +0,0 @@
-import logging
-
-import fastjsonschema
-import yaml
-from fastjsonschema import JsonSchemaValueException
-
-from datacontract.lint.schema import fetch_schema
-from datacontract.model.run import Check, Run
-
-
-def check_that_datacontract_str_is_valid(run: Run, data_contract_str: str):
-    schema = fetch_schema()
-    data_contract_yaml = yaml.safe_load(data_contract_str)
-    try:
-        fastjsonschema.validate(schema, data_contract_yaml)
-        logging.debug("YAML data is valid.")
-        run.checks.append(
-            Check(
-                type="lint",
-                result="passed",
-                name="Check that data contract YAML is valid",
-                engine="datacontract",
-            )
-        )
-    except JsonSchemaValueException as e:
-        logging.warning("YAML data is invalid.")
-        logging.warning(f"Validation error: {e.message}")
-        run.checks.append(
-            Check(
-                type="lint",
-                result="failed",
-                name="Check that data contract YAML is valid",
-                reason=e.message,
-                engine="datacontract",
-            )
-        )
-    except Exception as e:
-        logging.warning("YAML data is invalid.")
-        logging.warning(f"Validation error: {str(e)}")
-        run.checks.append(
-            Check(
-                type="lint",
-                result="failed",
-                name="Check that data contract YAML is valid",
-                reason=str(e),
-                engine="datacontract",
-            )
-        )

datacontract/engines/soda/connections/dask.py (removed)
@@ -1,28 +0,0 @@
-# def add_s3_connection_dask_json(data_contract, scan, server):
-#     s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-#     s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
-#     lines = server.delimiter == "new_line"
-#     for model_name in data_contract.models:
-#         logging.info(f"Connecting to {server.location}")
-#         df = dd.read_json(
-#             server.location,
-#             lines=lines,
-#             storage_options={'key': s3_access_key_id,
-#                              'secret': s3_secret_access_key,
-#                              'client_kwargs': {'endpoint_url': server.endpointUrl}
-#                              })
-#         scan.add_dask_dataframe(dataset_name=model_name, dask_df=df, data_source_name=server.type)
-
-# def add_s3_connection_dask_csv(data_contract, scan, server):
-#     s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-#     s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
-#     for model_name in data_contract.models:
-#         logging.info(f"Connecting to {server.location}")
-#         df = dd.read_csv(
-#             server.location,
-#             storage_options={'key': s3_access_key_id,
-#                              'secret': s3_secret_access_key,
-#                              'client_kwargs': {'endpoint_url': server.endpointUrl}
-#                              })
-#         scan.add_dask_dataframe(dataset_name=model_name, dask_df=df, data_source_name=server.type)
-

datacontract/engines/soda/connections/duckdb.py (removed)
@@ -1,76 +0,0 @@
-import logging
-import os
-
-import duckdb
-from datacontract.export.csv_type_converter import convert_to_duckdb_csv_type
-
-
-def get_duckdb_connection(data_contract, server):
-    con = duckdb.connect(database=":memory:")
-    path: str = ""
-    if server.type == "local":
-        path = server.path
-    if server.type == "s3":
-        path = server.location
-        setup_s3_connection(con, server)
-    for model_name, model in data_contract.models.items():
-        model_path = path
-        if "{model}" in model_path:
-            model_path = model_path.format(model=model_name)
-        logging.info(f"Creating table {model_name} for {model_path}")
-
-        if server.format == "json":
-            format = "auto"
-            if server.delimiter == "new_line":
-                format = "newline_delimited"
-            elif server.delimiter == "array":
-                format = "array"
-            con.sql(f"""
-                CREATE VIEW "{model_name}" AS SELECT * FROM read_json_auto('{model_path}', format='{format}', hive_partitioning=1);
-                """)
-        elif server.format == "parquet":
-            con.sql(f"""
-                CREATE VIEW "{model_name}" AS SELECT * FROM read_parquet('{model_path}', hive_partitioning=1);
-                """)
-        elif server.format == "csv":
-            columns = to_csv_types(model)
-            if columns is None:
-                con.sql(
-                    f"""CREATE VIEW "{model_name}" AS SELECT * FROM read_csv('{model_path}', hive_partitioning=1);"""
-                )
-            else:
-                con.sql(
-                    f"""CREATE VIEW "{model_name}" AS SELECT * FROM read_csv('{model_path}', hive_partitioning=1, columns={columns});"""
-                )
-    return con
-
-
-def to_csv_types(model) -> dict:
-    if model is None:
-        return None
-    columns = {}
-    # ['SQLNULL', 'BOOLEAN', 'BIGINT', 'DOUBLE', 'TIME', 'DATE', 'TIMESTAMP', 'VARCHAR']
-    for field_name, field in model.fields.items():
-        columns[field_name] = convert_to_duckdb_csv_type(field)
-    return columns
-
-
-def setup_s3_connection(con, server):
-    s3_region = os.getenv("DATACONTRACT_S3_REGION")
-    s3_access_key_id = os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID")
-    s3_secret_access_key = os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY")
-    # con.install_extension("httpfs")
-    # con.load_extension("httpfs")
-    if server.endpointUrl is not None:
-        s3_endpoint = server.endpointUrl.removeprefix("http://").removeprefix("https://")
-        if server.endpointUrl.startswith("http://"):
-            con.sql("SET s3_use_ssl = 0; SET s3_url_style = 'path';")
-        con.sql(f"""
-            SET s3_endpoint = '{s3_endpoint}';
-            """)
-    if s3_access_key_id is not None:
-        con.sql(f"""
-            SET s3_region = '{s3_region}';
-            SET s3_access_key_id = '{s3_access_key_id}';
-            SET s3_secret_access_key = '{s3_secret_access_key}';
-            """)

datacontract/export/csv_type_converter.py (removed)
@@ -1,36 +0,0 @@
-# https://duckdb.org/docs/data/csv/overview.html
-# ['SQLNULL', 'BOOLEAN', 'BIGINT', 'DOUBLE', 'TIME', 'DATE', 'TIMESTAMP', 'VARCHAR']
-def convert_to_duckdb_csv_type(field) -> None | str:
-    type = field.type
-    if type is None:
-        return "VARCHAR"
-    if type.lower() in ["string", "varchar", "text"]:
-        return "VARCHAR"
-    if type.lower() in ["timestamp", "timestamp_tz"]:
-        return "TIMESTAMP"
-    if type.lower() in ["timestamp_ntz"]:
-        return "TIMESTAMP"
-    if type.lower() in ["date"]:
-        return "DATE"
-    if type.lower() in ["time"]:
-        return "TIME"
-    if type.lower() in ["number", "decimal", "numeric"]:
-        # precision and scale not supported by data contract
-        return "VARCHAR"
-    if type.lower() in ["float", "double"]:
-        return "DOUBLE"
-    if type.lower() in ["integer", "int", "long", "bigint"]:
-        return "BIGINT"
-    if type.lower() in ["boolean"]:
-        return "BOOLEAN"
-    if type.lower() in ["object", "record", "struct"]:
-        # not supported in CSV
-        return "VARCHAR"
-    if type.lower() in ["bytes"]:
-        # not supported in CSV
-        return "VARCHAR"
-    if type.lower() in ["array"]:
-        return "VARCHAR"
-    if type.lower() in ["null"]:
-        return "SQLNULL"
-    return "VARCHAR"

datacontract/export/html_export.py (removed)
@@ -1,66 +0,0 @@
-import datetime
-import logging
-from importlib.metadata import version
-
-import pytz
-import yaml
-from jinja2 import Environment, PackageLoader, select_autoescape
-
-from datacontract.model.data_contract_specification import \
-    DataContractSpecification
-
-
-def to_html(data_contract_spec: DataContractSpecification) -> str:
-    # Load templates from templates folder
-    package_loader = PackageLoader("datacontract", "templates")
-    env = Environment(
-        loader=package_loader,
-        autoescape=select_autoescape(
-            enabled_extensions="html",
-            default_for_string=True,
-        ),
-    )
-
-    # Load the required template
-    template = env.get_template("datacontract.html")
-
-    if data_contract_spec.quality is not None and isinstance(data_contract_spec.quality.specification, str):
-        quality_specification = data_contract_spec.quality.specification
-    elif data_contract_spec.quality is not None and isinstance(data_contract_spec.quality.specification, object):
-        if data_contract_spec.quality.type == "great-expectations":
-            quality_specification = yaml.dump(
-                data_contract_spec.quality.specification, sort_keys=False, default_style="|"
-            )
-        else:
-            quality_specification = yaml.dump(data_contract_spec.quality.specification, sort_keys=False)
-    else:
-        quality_specification = None
-
-    style_content, _, _ = package_loader.get_source(env, "style/output.css")
-
-    datacontract_yaml = data_contract_spec.to_yaml()
-
-    tz = pytz.timezone('UTC')
-    now = datetime.datetime.now(tz)
-    formatted_date = now.strftime('%d %b %Y %H:%M:%S UTC')
-    datacontract_cli_version = get_version()
-
-    # Render the template with necessary data
-    html_string = template.render(
-        datacontract=data_contract_spec,
-        quality_specification=quality_specification,
-        style=style_content,
-        datacontract_yaml=datacontract_yaml,
-        formatted_date=formatted_date,
-        datacontract_cli_version=datacontract_cli_version,
-    )
-
-    return html_string
-
-
-def get_version() -> str:
-    try:
-        return version("datacontract_cli")
-    except Exception as e:
-        logging.debug("Ignoring exception", e)
-        return ""

datacontract/export/odcs_converter.py (removed)
@@ -1,102 +0,0 @@
-from typing import Dict
-
-import yaml
-
-from datacontract.model.data_contract_specification import \
-    DataContractSpecification, Model, Field
-
-
-def to_odcs_yaml(data_contract_spec: DataContractSpecification):
-    odcs = {
-        "kind": "DataContract",
-        "apiVersion": "2.3.0",
-        "uuid": data_contract_spec.id,
-        "version": data_contract_spec.info.version,
-        "datasetDomain": data_contract_spec.info.owner,
-        "quantumName": data_contract_spec.info.title,
-        "status": "unknown",
-    }
-
-    if data_contract_spec.info.contact is not None:
-        if data_contract_spec.info.contact.email is not None:
-            odcs["productDl"] = data_contract_spec.info.contact.email
-        if data_contract_spec.info.contact.email is not None:
-            odcs["productFeedbackUrl"] = data_contract_spec.info.contact.url
-
-    if data_contract_spec.terms is not None:
-        odcs["description"] = {
-            "purpose": None,
-            "usage": data_contract_spec.terms.usage.strip() if data_contract_spec.terms.usage is not None else None,
-            "limitations": data_contract_spec.terms.limitations.strip()
-            if data_contract_spec.terms.limitations is not None
-            else None,
-        }
-
-    odcs["type"] = "tables"  # required, TODO read from models.type?
-    odcs["dataset"] = []
-
-    for model_key, model_value in data_contract_spec.models.items():
-        odcs_table = to_odcs_table(model_key, model_value)
-        odcs["dataset"].append(odcs_table)
-    return yaml.dump(odcs, indent=2, sort_keys=False, allow_unicode=True)
-
-
-def to_odcs_table(model_key, model_value: Model) -> dict:
-    odcs_table = {
-        "table": model_key,
-        "physicalName": model_key,
-        "columns": [],
-    }
-    if model_value.description is not None:
-        odcs_table["description"] = model_value.description
-    columns = to_columns(model_value.fields)
-    if columns:
-        odcs_table["columns"] = columns
-    return odcs_table
-
-
-def to_columns(fields: Dict[str, Field]) -> list:
-    columns = []
-    for field_name, field in fields.items():
-        column = to_column(field_name, field)
-        columns.append(column)
-    return columns
-
-
-def to_column(field_name: str, field: Field) -> dict:
-    column = {"column": field_name}
-    if field.type is not None:
-        column["logicalType"] = field.type
-        column["physicalType"] = field.type
-    if field.description is not None:
-        column["description"] = field.description
-    if field.required is not None:
-        column["isNullable"] = not field.required
-    if field.unique is not None:
-        column["isUnique"] = field.unique
-    if field.classification is not None:
-        column["classification"] = field.classification
-    column["tags"] = []
-    if field.tags is not None:
-        column["tags"].extend(field.tags)
-    if field.pii is not None:
-        column["tags"].append(f"pii:{str(field.pii).lower()}")
-    if field.minLength is not None:
-        column["tags"].append(f"minLength:{field.minLength}")
-    if field.maxLength is not None:
-        column["tags"].append(f"maxLength:{field.maxLength}")
-    if field.pattern is not None:
-        column["tags"].append(f"pattern:{field.pattern}")
-    if field.minimum is not None:
-        column["tags"].append(f"minimum:{field.minimum}")
-    if field.maximum is not None:
-        column["tags"].append(f"maximum:{field.maximum}")
-    if field.exclusiveMinimum is not None:
-        column["tags"].append(f"exclusiveMinimum:{field.exclusiveMinimum}")
-    if field.exclusiveMaximum is not None:
-        column["tags"].append(f"exclusiveMaximum:{field.exclusiveMaximum}")
-    if not column["tags"]:
-        del column["tags"]
-
-    # todo enum
-    return column

datacontract/init/download_datacontract_file.py (removed)
@@ -1,17 +0,0 @@
-import os
-
-import requests
-
-
-def download_datacontract_file(file_path: str, from_url: str, overwrite_file: bool):
-    if not overwrite_file and os.path.exists(file_path):
-        raise FileExistsException()
-
-    with requests.get(from_url) as response:
-        response.raise_for_status()
-        with open(file_path, "w") as f:
-            f.write(response.text)
-
-
-class FileExistsException(Exception):
-    pass

datacontract/integration/publish_datamesh_manager.py (removed)
@@ -1,33 +0,0 @@
-import os
-
-import requests
-
-from datacontract.model.run import Run
-
-
-def publish_datamesh_manager(run: Run, publish_url: str):
-    try:
-        if publish_url is None:
-            url = "https://api.datamesh-manager.com/api/runs"
-        else:
-            url = publish_url
-        datamesh_manager_api_key = os.getenv("DATAMESH_MANAGER_API_KEY")
-
-        if run.dataContractId is None:
-            raise Exception("Cannot publish run results, as data contract ID is unknown")
-
-        if datamesh_manager_api_key is None:
-            raise Exception("Cannot publish run results, as DATAMESH_MANAGER_API_KEY is not set")
-
-        headers = {"Content-Type": "application/json", "x-api-key": datamesh_manager_api_key}
-        request_body = run.model_dump_json()
-        # print("Request Body:", request_body)
-        response = requests.post(url, data=request_body, headers=headers)
-        # print("Status Code:", response.status_code)
-        # print("Response Body:", response.text)
-        if response.status_code != 200:
-            run.log_error(f"Error publishing test results to Data Mesh Manager: {response.text}")
-            return
-        run.log_info(f"Published test results to {url}")
-    except Exception as e:
-        run.log_error(f"Failed publishing test results. Error: {str(e)}")

datacontract/integration/publish_opentelemetry.py (removed)
@@ -1,107 +0,0 @@
-import logging
-import math
-import os
-from importlib import metadata
-
-from opentelemetry import metrics
-from opentelemetry.exporter.otlp.proto.grpc.metric_exporter import \
-    OTLPMetricExporter as OTLPgRPCMetricExporter
-from opentelemetry.exporter.otlp.proto.http.metric_exporter import \
-    OTLPMetricExporter
-from opentelemetry.metrics import Observation
-from opentelemetry.sdk.metrics import MeterProvider
-from opentelemetry.sdk.metrics.export import ConsoleMetricExporter, \
-    PeriodicExportingMetricReader
-
-from datacontract.model.run import Run
-
-
-# Publishes metrics of a test run.
-# Metric contains the values:
-# 0 == test run passed,
-# 1 == test run has warnings
-# 2 == test run failed
-# 3 == test run not possible due to an error
-# 4 == test status unknown
-#
-# Tested with these environment variables:
-#
-# OTEL_SERVICE_NAME=datacontract-cli
-# OTEL_EXPORTER_OTLP_ENDPOINT=https://YOUR_ID.apm.westeurope.azure.elastic-cloud.com:443
-# OTEL_EXPORTER_OTLP_HEADERS=Authorization=Bearer%20secret (Optional, when using SaaS Products)
-# OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf and OTEL_EXPORTER_OTLP_PROTOCOL=grpc
-#
-# Current limitations:
-# - no gRPC support
-# - currently, only ConsoleExporter and OTLP Exporter
-# - Metrics only, no logs yet (but loosely planned)
-
-
-def publish_opentelemetry(run: Run):
-    try:
-        if run.dataContractId is None:
-            raise Exception("Cannot publish run results, as data contract ID is unknown")
-
-        endpoint = os.getenv("OTEL_EXPORTER_OTLP_ENDPOINT")
-        logging.info(f"Publishing test results to opentelemetry at {endpoint}")
-
-        telemetry = Telemetry()
-        provider = metrics.get_meter_provider()
-        meter = provider.get_meter("com.datacontract.cli", metadata.version("datacontract-cli"))
-        meter.create_observable_gauge(
-            name="datacontract.cli.test",
-            callbacks=[lambda x: _to_observation_callback(run)],
-            unit="result",
-            description="The overall result of the data contract test run",
-        )
-
-        telemetry.publish()
-    except Exception as e:
-        logging.error(f"Failed publishing test results. Error: {str(e)}")
-
-
-def _to_observation_callback(run):
-    yield _to_observation(run)
-
-
-def _to_observation(run):
-    attributes = {
-        "datacontract.id": run.dataContractId,
-        "datacontract.version": run.dataContractVersion,
-    }
-
-    if run.result == "passed":
-        result_value = 0  # think of exit codes
-    elif run.result == "warning":
-        result_value = 1
-    elif run.result == "failed":
-        result_value = 2
-    elif run.result == "error":
-        result_value = 3
-    else:
-        result_value = 4
-    return Observation(value=result_value, attributes=attributes)
-
-
-class Telemetry:
-    def __init__(self):
-        protocol = os.getenv("OTEL_EXPORTER_OTLP_PROTOCOL")
-
-        # lower to allow grpc, GRPC and alike values.
-        if protocol and protocol.lower() == "grpc":
-            self.remote_exporter = OTLPgRPCMetricExporter()
-        else:
-            # Fallback to default OTEL http/protobuf which is used when the variable is not set.
-            # This Exporter also works for http/json.
-            self.remote_exporter = OTLPMetricExporter()
-
-        self.console_exporter = ConsoleMetricExporter()
-        # using math.inf so it does not collect periodically. we do this in collect ourselves, one-time.
-        self.reader = PeriodicExportingMetricReader(self.console_exporter, export_interval_millis=math.inf)
-        self.remote_reader = PeriodicExportingMetricReader(self.remote_exporter, export_interval_millis=math.inf)
-        provider = MeterProvider(metric_readers=[self.reader, self.remote_reader])
-        metrics.set_meter_provider(provider)
-
-    def publish(self):
-        self.reader.collect()
-        self.remote_reader.collect()