datacontract-cli 0.10.20__py3-none-any.whl → 0.10.22__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of datacontract-cli might be problematic.
- datacontract/{web.py → api.py} +55 -3
- datacontract/breaking/breaking.py +1 -1
- datacontract/breaking/breaking_rules.py +1 -1
- datacontract/cli.py +32 -10
- datacontract/data_contract.py +14 -100
- datacontract/engines/data_contract_checks.py +735 -0
- datacontract/engines/data_contract_test.py +51 -0
- datacontract/engines/soda/check_soda_execute.py +36 -30
- datacontract/engines/soda/connections/kafka.py +8 -3
- datacontract/export/avro_converter.py +2 -0
- datacontract/export/custom_converter.py +40 -0
- datacontract/export/exporter.py +1 -2
- datacontract/export/exporter_factory.py +4 -12
- datacontract/export/sodacl_converter.py +22 -294
- datacontract/export/sql_type_converter.py +7 -2
- datacontract/imports/odcs_importer.py +6 -3
- datacontract/imports/odcs_v3_importer.py +2 -0
- datacontract/imports/sql_importer.py +229 -29
- datacontract/lint/urls.py +4 -4
- datacontract/model/data_contract_specification.py +130 -129
- datacontract/model/exceptions.py +4 -1
- datacontract/model/run.py +25 -18
- datacontract/templates/datacontract.html +16 -2
- datacontract/templates/partials/definition.html +3 -95
- datacontract/templates/partials/model_field.html +13 -0
- datacontract/templates/partials/quality.html +49 -0
- datacontract/templates/style/output.css +151 -152
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/METADATA +238 -184
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/RECORD +34 -34
- datacontract/engines/soda/connections/dask.py +0 -28
- datacontract/export/odcs_v2_exporter.py +0 -124
- datacontract/imports/odcs_v2_importer.py +0 -177
- datacontract/lint/linters/example_model_linter.py +0 -91
- /datacontract/{model → breaking}/breaking_change.py +0 -0
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/LICENSE +0 -0
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/WHEEL +0 -0
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/entry_points.txt +0 -0
- {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/top_level.txt +0 -0
{datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/RECORD

@@ -1,54 +1,56 @@
 datacontract/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/
-datacontract/
+datacontract/api.py,sha256=qZJr8I5MI4wZlvjUEAvqna9Xj5Ic2GCBxSyogBlKEbE,8166
+datacontract/cli.py,sha256=cxEC9DNwqJCt8b7ZcmKsee87cBnnPb_gR3rT8AXbL-g,18182
+datacontract/data_contract.py,sha256=H9ogUTj41javVW8gcLWYDKZDJMJQHyc_Q4LxLqFrRoo,10686
 datacontract/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/
-datacontract/breaking/
-datacontract/breaking/breaking_rules.py,sha256=
+datacontract/breaking/breaking.py,sha256=DnqgxUjD-EAZcg5RBizOP9a2WxsFTaQBik0AB_m3K00,20431
+datacontract/breaking/breaking_change.py,sha256=BIDEUo1U2CQLVT2-I5PyFttxAj6zQPI1UUkEoOOQXMY,2249
+datacontract/breaking/breaking_rules.py,sha256=M9IdzVJSA7oOr1fvLQl0y9MoBKeItPz42Db2U2cjH2Y,4063
 datacontract/catalog/catalog.py,sha256=wmv_2BBxHhNBlilAmQHHhNe4tK14DowkyIOVaQW2DWU,2691
 datacontract/engines/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+datacontract/engines/data_contract_checks.py,sha256=wjnKh1FxG9qXB1zOPjrGmjW7uCpRsxXICMIvyFwpVt4,26622
+datacontract/engines/data_contract_test.py,sha256=ZboKW0AmTp6YL8IYAEskj-ffF4J87LXx3W0stHUrbvI,2360
 datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py,sha256=TXO47ON3NjwYI4Y2eBYklMOCo7vAtYzqLPAhZhii6dg,1565
 datacontract/engines/datacontract/check_that_datacontract_file_exists.py,sha256=Vw-7U0GmQT2127tybxggZfpRFiZVgoIh6ndkTGM0FP4,665
 datacontract/engines/fastjsonschema/check_jsonschema.py,sha256=-knTZ-NsHpBWCoR7r1JP5iYSWx697mugijmqUPx0pEY,10307
 datacontract/engines/fastjsonschema/s3/s3_read_files.py,sha256=vuz_hLF2VD8LR_prjQpPLBU8Is-iHLAvqp4KwclOv9I,1157
 datacontract/engines/soda/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/engines/soda/check_soda_execute.py,sha256=
+datacontract/engines/soda/check_soda_execute.py,sha256=E-3OwREXdQx4Y8mdp87bl2uLrc-x2CPUCMksVdNiURc,8284
 datacontract/engines/soda/connections/bigquery.py,sha256=C-8kxmzpYe88bJp80ObHFLMh4rpnIjnUQ7XOj0Ke7lk,903
-datacontract/engines/soda/connections/dask.py,sha256=Yy6Et2n_vDVsdjtqyBWDSZt7mnjPzPk_MZ-92VZHfnY,1496
 datacontract/engines/soda/connections/databricks.py,sha256=cMRasuO0MrSKVgHPB-9uFTGTZPFg6z9Kpk3tJ0SdR0s,943
 datacontract/engines/soda/connections/duckdb.py,sha256=9AbupEzP-9dGlhRnO9GLY2HMFlz08XazdmYK9PGIeEQ,6705
-datacontract/engines/soda/connections/kafka.py,sha256=
+datacontract/engines/soda/connections/kafka.py,sha256=j6I9loXixzaV-SU56dIodCykPDAnWnsNRfKG4wOtzvs,8570
 datacontract/engines/soda/connections/postgres.py,sha256=9GTF4Es3M5vb7ocSGqAxXmslvkS5CjsPQGIuo020CFc,626
 datacontract/engines/soda/connections/snowflake.py,sha256=rfG2ysuqNM6TkvyqQKcGHFsTGJ6AROmud5VleUDRrb0,749
 datacontract/engines/soda/connections/sqlserver.py,sha256=RzGLbCUdRyfmDcqtM_AB9WZ-Xk-XYX91nkXpVNpYbvc,1440
 datacontract/engines/soda/connections/trino.py,sha256=JvKUP9aFg_n095oWE0-bGmfbETSWEOURGEZdQuG8txA,718
 datacontract/export/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
-datacontract/export/avro_converter.py,sha256=
+datacontract/export/avro_converter.py,sha256=ImAACOoQkZPN-Yhs7zwtlHb38nE0NsTr5y1SrUvZJII,4626
 datacontract/export/avro_idl_converter.py,sha256=SGO7JfI9UGXLYFR5wMGNUH1qf6kt9lF6dUU9roVqnFo,9878
 datacontract/export/bigquery_converter.py,sha256=VSBdVGWrlar18ETzgNArxDzk8Zt5JcAc_wKjfwLpG_A,4734
 datacontract/export/csv_type_converter.py,sha256=ZZuJwBgQnafZC7PPvAXsBf2IajPJq8TYZ1l8Qq0GYeI,1290
+datacontract/export/custom_converter.py,sha256=xb8KbkRRgHmT4ewwC7XxtnKpe_ZMSJWBjYOaKjmO_KQ,1216
 datacontract/export/data_caterer_converter.py,sha256=eSEuy3TbqUIG_lHYEBOydAgp_CJNoGArXrcJvh81wcw,5984
 datacontract/export/dbml_converter.py,sha256=f_OZEFwRUyL-Kg2yn_G58I8iz1VfFrZh8Nbw3Wq0JDo,4777
 datacontract/export/dbt_converter.py,sha256=BPvcKyT-v1C_zcYh8ocn5qeNVUjTjYIcMVxnq8BY-Us,10342
 datacontract/export/dcs_exporter.py,sha256=RALQ7bLAjak7EsoFFL2GFX2Oju7pnCDPCdRN_wo9wHM,210
-datacontract/export/exporter.py,sha256=
-datacontract/export/exporter_factory.py,sha256=
+datacontract/export/exporter.py,sha256=XrNmoIH_5Myb8jx-vaS1ZCF11RTw5zf5JATkqXWunXE,3025
+datacontract/export/exporter_factory.py,sha256=0XmU51fQNZVQdC78gDy_82CatazhioMmcd6qdCl8muU,5847
 datacontract/export/go_converter.py,sha256=Ttvbfu3YU-3GBwRD6nwCsFyZuc_hiIvJD-Jg2sT5WLw,3331
 datacontract/export/great_expectations_converter.py,sha256=zMaHaj5DLj_Q_q-iFEa7EZHW-qHdFMxWL4MiMIFKV80,10505
 datacontract/export/html_export.py,sha256=ojazWrb0AwSc7Vr72M_otMo-3PA8mfi8tfIy9BCXk9o,2578
 datacontract/export/iceberg_converter.py,sha256=ArcQ_Y3z_W4_kGDU_8jPRx2-pHpP3Nhx1zYoETOL3c4,6804
 datacontract/export/jsonschema_converter.py,sha256=2MT82MurcQQbrVDRj1kFsxnmFd9scNSfYI1upQSecl4,5631
 datacontract/export/markdown_converter.py,sha256=chtaZX4vXTee7JCMYmWiDQ9m55gwJjHPw6SEM3UOwpQ,6467
-datacontract/export/odcs_v2_exporter.py,sha256=0nMI-zTENNs94bllm_Qv3V-8-QyS8jnBW1Be9fEJCmU,4679
 datacontract/export/odcs_v3_exporter.py,sha256=52WggPBpUi9AwGrVjlScojSJ2DhhgMuFIxRQokIvQ_o,12542
 datacontract/export/pandas_type_converter.py,sha256=464pQ3JQKFQa1TO0HBNcEoZvQye_yUbY6jQtiBaphSc,1117
 datacontract/export/protobuf_converter.py,sha256=9K0fzBGbqlj9AhQumw2oq53hyn_QDCT3UlyH2uXJdC0,3192
 datacontract/export/pydantic_converter.py,sha256=1Lt9F8i6zyQYb44MyQtsXwCWWXYxZ47SmzArr_uPqsU,5579
 datacontract/export/rdf_converter.py,sha256=4gnKus37Geth4MJ3Ruc8AbnpD_Ll9OCx8oTIEKScvh8,6435
-datacontract/export/sodacl_converter.py,sha256=
+datacontract/export/sodacl_converter.py,sha256=lQCOcNiT7i6KGaJ1Ua4MYBYGm-EyktTGrL4FLZDi14c,1102
 datacontract/export/spark_converter.py,sha256=-6P2_VRFqGfSF7n_lJcD-fuY9Pv8qoH-ud6g8Zimpz4,7190
 datacontract/export/sql_converter.py,sha256=BGjmOAlzB5QfzJiXP61ajV0wj4M5oJrmNZZe_4Lo1Ik,4821
-datacontract/export/sql_type_converter.py,sha256=
+datacontract/export/sql_type_converter.py,sha256=qjm8Fdyihq3VBL4x2D7RHdWoOm6HWIJe28U4XboYCk8,13436
 datacontract/export/sqlalchemy_converter.py,sha256=0DMncvA811lTtd5q4ZORREQ9YH1vQm1lJeqMWsFvloE,6463
 datacontract/export/terraform_converter.py,sha256=ExFoEvErVk-gBnWJiqC38SxDUmUEydpACWc917l5RyM,2163
 datacontract/imports/avro_importer.py,sha256=hpGvO6uv2zcupJC8-wC-c-vbjNb83IQ560a5F3MsEFA,9937

@@ -61,12 +63,11 @@ datacontract/imports/iceberg_importer.py,sha256=vadGJVqQKgG-j8swUytZALFB8QjbGRqZ
 datacontract/imports/importer.py,sha256=X4M0SZiRQsz8TGm-FSRuh6bs6qny7V8odyQ7l_1DwNg,876
 datacontract/imports/importer_factory.py,sha256=8ed9-ceJlFMf-AIAyuI6Uzs05CqEs1nPlLA-tvfI0bU,3639
 datacontract/imports/jsonschema_importer.py,sha256=67H__XLugV4vguHrIqzW02dtx27zYTWnOms4D1ma3bk,4961
-datacontract/imports/odcs_importer.py,sha256=
-datacontract/imports/
-datacontract/imports/odcs_v3_importer.py,sha256=NB16EQjVuywFsI-Ti0AfMXWZj__fdHu0iKA1IPSBhmo,13019
+datacontract/imports/odcs_importer.py,sha256=vv2dHLGL0Cdivv1CdKn5euJwGNKmiZmXCoxUYAXsHX8,2126
+datacontract/imports/odcs_v3_importer.py,sha256=65QXcUYpIHkc51_3BriDoFg5Im1_xJK80Kj9CqA3jMY,13065
 datacontract/imports/parquet_importer.py,sha256=W_0_16mX4stwDUt4GM2L7dnGmTpAySab5k13-OlTCCc,3095
 datacontract/imports/spark_importer.py,sha256=h2na1YtdJYu9Oz07tSvwx8L4RX6aLCCDVkAv-RTKyVA,5100
-datacontract/imports/sql_importer.py,sha256=
+datacontract/imports/sql_importer.py,sha256=ElFS2LILDOvWzW-X4emSIKltFV42i78TEoyg0bvn3II,9322
 datacontract/imports/unity_importer.py,sha256=UcPYABhLZaWNl5IkCazwAuMoVDdujsu_QteuV_Q9hgI,6737
 datacontract/init/init_template.py,sha256=BMawR-AF_vXyn_-Co-XoT8Dxj9b55V8xKk2KkJK-c1o,721
 datacontract/integration/datamesh_manager.py,sha256=6_mHRQAqw-KCfVtejNxULN4ihBZTZikf_5p0pYHer7g,2849

@@ -75,36 +76,35 @@ datacontract/lint/lint.py,sha256=Ew0n3ooXxmCVnUxJ_cDoacsD82QdMZYnKrxnG9J0sWQ,507
 datacontract/lint/resolve.py,sha256=_QtoAReUhiJ8I_4fyNimyUnHhpZGYmmDHFA59wUTeFw,10684
 datacontract/lint/resources.py,sha256=nfeZmORh1aP7EKpMKCmfbS04Te8pQ0nz64vJVkHOq3c,647
 datacontract/lint/schema.py,sha256=4pYX6JX6SkASftyqaWTodKFRVPi2qV0_Z60tvaCOk80,1813
-datacontract/lint/urls.py,sha256=
+datacontract/lint/urls.py,sha256=giac0eAYa6hha8exleL3KsiPtiFlOq8l53axtAmCilw,2529
 datacontract/lint/linters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 datacontract/lint/linters/description_linter.py,sha256=7fla7FQwDa-1UrLFCFKFoeUzkR91e4o9W6ySKSW6_U8,1555
-datacontract/lint/linters/example_model_linter.py,sha256=tmgxGxC-GzMUxFumTOuuASdz9ZlncBZHasPGJsWnDT8,3973
 datacontract/lint/linters/field_pattern_linter.py,sha256=lreGvOW3v_Glah_SriVe9ejZ7EuR6_gJsdr2tEORB_8,1084
 datacontract/lint/linters/field_reference_linter.py,sha256=65GnbBtazn55dXslujOho3YIHCwNy9DDp0m56pNMkUk,2021
 datacontract/lint/linters/notice_period_linter.py,sha256=6r413aEVOVHWJHb33-68ecVTAUNzbxL4me6ebmPcgpE,2130
 datacontract/lint/linters/quality_schema_linter.py,sha256=ZXFHlMLFV1GZejizbUdfW6-msffFECoDGNsdynaPnog,2182
 datacontract/lint/linters/valid_constraints_linter.py,sha256=qTFh1X3I9wOtAxuXlvbGesCQ3GQ6iWc-MT_ttIybRsw,4916
-datacontract/model/
-datacontract/model/
-datacontract/model/exceptions.py,sha256=zW9NoyzwsND-c9UqgyTVuezUVGEc6KK1Uc2zl12loyo,1178
+datacontract/model/data_contract_specification.py,sha256=1bEE7hQJEjzoL4XtCQBmPe6VWclU_D_YiUJW2oNRNos,8655
+datacontract/model/exceptions.py,sha256=5BMuEH2qWuckNP4FTfpUEeEu6rjgGcLOD0GQugKRQ1U,1242
 datacontract/model/odcs.py,sha256=9PXwm72FASjNwteF1Jn591iP3-St0aq16Cpsk0PkEW8,389
-datacontract/model/run.py,sha256=
+datacontract/model/run.py,sha256=2HTISHW6gvu7NTke76hLw4wkH_kQAig9Z176OuttYSA,3004
 datacontract/schemas/datacontract-1.1.0.init.yaml,sha256=_WQX6NRwimXlPVKcKeHLd4mFL0TJ2vYH0WnyMXZeQ8Y,1828
 datacontract/schemas/datacontract-1.1.0.schema.json,sha256=3Bu2rxEjkF6dNLcqi1GF4KoXBnEIopaJ87Qb8S4zUvg,62872
 datacontract/schemas/odcs-3.0.1.schema.json,sha256=bRZsSXA0fV0EmV_8f1K68PlXu1m4K7JcuHpLnY3ESwQ,72933
-datacontract/templates/datacontract.html,sha256=
+datacontract/templates/datacontract.html,sha256=9rm5hLkST8EnOFwj0al9c0xPajzlyWlnfxT2iicSRCM,15179
 datacontract/templates/index.html,sha256=nyi9nrEsSPuOrXszKzqkqwZGveXsdy1PLJp2g9oDw0A,12517
 datacontract/templates/partials/datacontract_information.html,sha256=7ZBxgEgi2XndKBypeOpe03oCSRPOujC6NVlN7zexGNM,6221
 datacontract/templates/partials/datacontract_servicelevels.html,sha256=ed3QgB11B0Qq2h_NwaroGZ4pQMBPEhfeQaoS-qEipqY,11401
 datacontract/templates/partials/datacontract_terms.html,sha256=1cnJcOTpxwot2BCuZmkLF_SPfiVloLs3c8mj9WfE4sc,1865
-datacontract/templates/partials/definition.html,sha256=
+datacontract/templates/partials/definition.html,sha256=gZEmNvwNGGxA_Fnzx_0L6tXlAMk_EAPWr5ziRIThb_o,1005
 datacontract/templates/partials/example.html,sha256=F1dWbHDIXQScgfs4OVgqM1lR4uV4xX5j6suasXHNM88,1204
-datacontract/templates/partials/model_field.html,sha256=
+datacontract/templates/partials/model_field.html,sha256=2YBF95ypNCPFYuYKoeilRnDG-H_FuW4JK1znkCaYCac,7625
+datacontract/templates/partials/quality.html,sha256=ynEDWRn8I90Uje-xhGYgFcfwOgKI1R-CDki-EvTsauQ,1785
 datacontract/templates/partials/server.html,sha256=WkWFbz1ZvhIAUQQhH5Lkwb0HZRW907ehEnFmJSkpquQ,6235
-datacontract/templates/style/output.css,sha256=
-datacontract_cli-0.10.
-datacontract_cli-0.10.
-datacontract_cli-0.10.
-datacontract_cli-0.10.
-datacontract_cli-0.10.
-datacontract_cli-0.10.
+datacontract/templates/style/output.css,sha256=V1k6smSvlz07W2UNOkhcDFUb0HLmoas7DnNg_o8XUcA,25759
+datacontract_cli-0.10.22.dist-info/LICENSE,sha256=23h64qnSeIZ0DKeziWAKC-zBCt328iSbRbWBrXoYRb4,2210
+datacontract_cli-0.10.22.dist-info/METADATA,sha256=5jcshDEXyDlu3uH1I6c4XliezNFDjZScfWR1yTEul4g,101762
+datacontract_cli-0.10.22.dist-info/WHEEL,sha256=In9FTNxeP60KnTkGw7wk6mJPYd_dQSjEZmXdBdMCI-8,91
+datacontract_cli-0.10.22.dist-info/entry_points.txt,sha256=D3Eqy4q_Z6bHauGd4ppIyQglwbrm1AJnLau4Ppbw9Is,54
+datacontract_cli-0.10.22.dist-info/top_level.txt,sha256=VIRjd8EIUrBYWjEXJJjtdUgc0UAJdPZjmLiOR8BRBYM,13
+datacontract_cli-0.10.22.dist-info/RECORD,,

datacontract/engines/soda/connections/dask.py (removed)

@@ -1,28 +0,0 @@
-# def add_s3_connection_dask_json(data_contract, scan, server):
-#     s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-#     s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
-#     lines = server.delimiter == "new_line"
-#     for model_name in data_contract.models:
-#         logging.info(f"Connecting to {server.location}")
-#         df = dd.read_json(
-#             server.location,
-#             lines=lines,
-#             storage_options={'key': s3_access_key_id,
-#                              'secret': s3_secret_access_key,
-#                              'client_kwargs': {'endpoint_url': server.endpointUrl}
-#                              })
-#         scan.add_dask_dataframe(dataset_name=model_name, dask_df=df, data_source_name=server.type)
-
-# def add_s3_connection_dask_csv(data_contract, scan, server):
-#     s3_access_key_id = os.getenv('DATACONTRACT_S3_ACCESS_KEY_ID')
-#     s3_secret_access_key = os.getenv('DATACONTRACT_S3_SECRET_ACCESS_KEY')
-#     for model_name in data_contract.models:
-#         logging.info(f"Connecting to {server.location}")
-#         df = dd.read_csv(
-#             server.location,
-#             storage_options={'key': s3_access_key_id,
-#                              'secret': s3_secret_access_key,
-#                              'client_kwargs': {'endpoint_url': server.endpointUrl}
-#                              })
-#         scan.add_dask_dataframe(dataset_name=model_name, dask_df=df, data_source_name=server.type)
-
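
The removed dask.py module contained nothing but commented-out helpers that loaded S3 objects into a Soda scan through Dask (scan.add_dask_dataframe). For reference, a standalone sketch of the same pattern, reading newline-delimited JSON from S3 into a Dask DataFrame, is shown below; the environment variable names come from the removed code, while the bucket location and endpoint are placeholders.

```python
# Sketch only: mirrors the commented-out add_s3_connection_dask_json helper
# removed in 0.10.22. Requires dask[dataframe] and s3fs. The env var names
# are taken from the removed code; bucket URL and endpoint are placeholders.
import os

import dask.dataframe as dd

s3_access_key_id = os.getenv("DATACONTRACT_S3_ACCESS_KEY_ID")
s3_secret_access_key = os.getenv("DATACONTRACT_S3_SECRET_ACCESS_KEY")

df = dd.read_json(
    "s3://example-bucket/orders/*.jsonl",  # placeholder location
    lines=True,  # one JSON object per line (the "new_line" delimiter case)
    storage_options={
        "key": s3_access_key_id,
        "secret": s3_secret_access_key,
        "client_kwargs": {"endpoint_url": "https://s3.example.com"},  # placeholder
    },
)
print(df.head())  # triggers a small read to verify the connection
```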

datacontract/export/odcs_v2_exporter.py (removed)

@@ -1,124 +0,0 @@
-from typing import Dict
-
-import yaml
-
-from datacontract.export.exporter import Exporter
-from datacontract.model.data_contract_specification import DataContractSpecification, Field, Model
-
-
-class OdcsV2Exporter(Exporter):
-    def export(self, data_contract, model, server, sql_server_type, export_args) -> dict:
-        return to_odcs_v2_yaml(data_contract)
-
-
-def to_odcs_v2_yaml(data_contract_spec: DataContractSpecification):
-    odcs = {
-        "kind": "DataContract",
-        "apiVersion": "2.3.0",
-        "uuid": data_contract_spec.id,
-        "version": data_contract_spec.info.version,
-        "datasetDomain": data_contract_spec.info.owner,
-        "quantumName": data_contract_spec.info.title,
-        "status": "unknown",
-    }
-
-    if data_contract_spec.info.contact is not None:
-        if data_contract_spec.info.contact.email is not None:
-            odcs["productDl"] = data_contract_spec.info.contact.email
-        if data_contract_spec.info.contact.url is not None:
-            odcs["productFeedbackUrl"] = data_contract_spec.info.contact.url
-
-    if data_contract_spec.terms is not None:
-        odcs["description"] = {
-            "purpose": data_contract_spec.terms.description.strip()
-            if data_contract_spec.terms.description is not None
-            else None,
-            "usage": data_contract_spec.terms.usage.strip() if data_contract_spec.terms.usage is not None else None,
-            "limitations": data_contract_spec.terms.limitations.strip()
-            if data_contract_spec.terms.limitations is not None
-            else None,
-        }
-
-    if data_contract_spec.servicelevels is not None:
-        slas = []
-        if data_contract_spec.servicelevels.availability is not None:
-            slas.append(
-                {
-                    "property": "generalAvailability",
-                    "value": data_contract_spec.servicelevels.availability.description,
-                }
-            )
-        if data_contract_spec.servicelevels.retention is not None:
-            slas.append({"property": "retention", "value": data_contract_spec.servicelevels.retention.period})
-
-        if len(slas) > 0:
-            odcs["slaProperties"] = slas
-
-    odcs["type"] = "tables"  # required, TODO read from models.type?
-    odcs["dataset"] = []
-
-    for model_key, model_value in data_contract_spec.models.items():
-        odcs_table = to_odcs_table(model_key, model_value)
-        odcs["dataset"].append(odcs_table)
-    return yaml.dump(odcs, indent=2, sort_keys=False, allow_unicode=True)
-
-
-def to_odcs_table(model_key, model_value: Model) -> dict:
-    odcs_table = {
-        "table": model_key,
-        "physicalName": model_key,
-        "columns": [],
-    }
-    if model_value.description is not None:
-        odcs_table["description"] = model_value.description
-    columns = to_columns(model_value.fields)
-    if columns:
-        odcs_table["columns"] = columns
-    return odcs_table
-
-
-def to_columns(fields: Dict[str, Field]) -> list:
-    columns = []
-    for field_name, field in fields.items():
-        column = to_column(field_name, field)
-        columns.append(column)
-    return columns
-
-
-def to_column(field_name: str, field: Field) -> dict:
-    column = {"column": field_name}
-    if field.type is not None:
-        column["logicalType"] = field.type
-        column["physicalType"] = field.type
-    if field.description is not None:
-        column["description"] = field.description
-    if field.required is not None:
-        column["isNullable"] = not field.required
-    if field.unique is not None:
-        column["isUnique"] = field.unique
-    if field.classification is not None:
-        column["classification"] = field.classification
-    column["tags"] = []
-    if field.tags is not None:
-        column["tags"].extend(field.tags)
-    if field.pii is not None:
-        column["tags"].append(f"pii:{str(field.pii).lower()}")
-    if field.minLength is not None:
-        column["tags"].append(f"minLength:{field.minLength}")
-    if field.maxLength is not None:
-        column["tags"].append(f"maxLength:{field.maxLength}")
-    if field.pattern is not None:
-        column["tags"].append(f"pattern:{field.pattern}")
-    if field.minimum is not None:
-        column["tags"].append(f"minimum:{field.minimum}")
-    if field.maximum is not None:
-        column["tags"].append(f"maximum:{field.maximum}")
-    if field.exclusiveMinimum is not None:
-        column["tags"].append(f"exclusiveMinimum:{field.exclusiveMinimum}")
-    if field.exclusiveMaximum is not None:
-        column["tags"].append(f"exclusiveMaximum:{field.exclusiveMaximum}")
-    if not column["tags"]:
-        del column["tags"]
-
-    # todo enum
-    return column
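
With the ODCS v2 exporter removed, ODCS output is served by the remaining odcs_v3_exporter.py. The deleted module is still useful as documentation of the Exporter plug-in interface (export(self, data_contract, model, server, sql_server_type, export_args)), which is presumably also what the new custom_converter.py builds on. Below is a minimal sketch of a custom exporter written against that interface, assuming it is unchanged in 0.10.22; the SummaryExporter name and its output are hypothetical.

```python
# Minimal sketch of an Exporter implementation, modelled on the interface
# visible in the removed OdcsV2Exporter. Assumes the Exporter base class and
# the export(...) signature are unchanged in 0.10.22. "SummaryExporter" and
# its YAML output are hypothetical, not part of the package.
import yaml

from datacontract.export.exporter import Exporter
from datacontract.model.data_contract_specification import DataContractSpecification


class SummaryExporter(Exporter):
    def export(self, data_contract, model, server, sql_server_type, export_args) -> str:
        # The removed v2 exporter annotated -> dict but returned a YAML string;
        # a string return type is used here.
        return to_summary_yaml(data_contract)


def to_summary_yaml(spec: DataContractSpecification) -> str:
    # Emit a compact model/field overview derived from the specification.
    summary = {
        "id": spec.id,
        "title": spec.info.title if spec.info is not None else None,
        "models": {name: sorted(model.fields.keys()) for name, model in spec.models.items()},
    }
    return yaml.dump(summary, sort_keys=False, allow_unicode=True)
```

Wiring such a class into the CLI goes through exporter_factory.py, which this release also reworks; the registration call itself is not shown here.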

datacontract/imports/odcs_v2_importer.py (removed)

@@ -1,177 +0,0 @@
-import datetime
-import logging
-from typing import Any, Dict, List
-
-import yaml
-
-from datacontract.imports.importer import Importer
-from datacontract.model.data_contract_specification import (
-    DATACONTRACT_TYPES,
-    Availability,
-    Contact,
-    DataContractSpecification,
-    Field,
-    Info,
-    Model,
-    Retention,
-    ServiceLevel,
-    Terms,
-)
-from datacontract.model.exceptions import DataContractException
-
-
-class OdcsImporter(Importer):
-    def import_source(
-        self, data_contract_specification: DataContractSpecification, source: str, import_args: dict
-    ) -> DataContractSpecification:
-        return import_odcs_v2(data_contract_specification, source)
-
-
-def import_odcs_v2(data_contract_specification: DataContractSpecification, source: str) -> DataContractSpecification:
-    try:
-        with open(source, "r") as file:
-            odcs_contract = yaml.safe_load(file.read())
-
-    except Exception as e:
-        raise DataContractException(
-            type="schema",
-            name="Parse ODCS contract",
-            reason=f"Failed to parse odcs contract from {source}",
-            engine="datacontract",
-            original_exception=e,
-        )
-
-    data_contract_specification.id = odcs_contract["uuid"]
-    data_contract_specification.info = import_info(odcs_contract)
-    data_contract_specification.terms = import_terms(odcs_contract)
-    data_contract_specification.servicelevels = import_servicelevels(odcs_contract)
-    data_contract_specification.models = import_models(odcs_contract)
-
-    return data_contract_specification
-
-
-def import_info(odcs_contract: Dict[str, Any]) -> Info:
-    info = Info(title=odcs_contract.get("quantumName"), version=odcs_contract.get("version"))
-
-    if odcs_contract.get("description").get("purpose") is not None:
-        info.description = odcs_contract.get("description").get("purpose")
-
-    if odcs_contract.get("datasetDomain") is not None:
-        info.owner = odcs_contract.get("datasetDomain")
-
-    if odcs_contract.get("productDl") is not None or odcs_contract.get("productFeedbackUrl") is not None:
-        contact = Contact()
-        if odcs_contract.get("productDl") is not None:
-            contact.name = odcs_contract.get("productDl")
-        if odcs_contract.get("productFeedbackUrl") is not None:
-            contact.url = odcs_contract.get("productFeedbackUrl")
-
-        info.contact = contact
-
-    return info
-
-
-def import_terms(odcs_contract: Dict[str, Any]) -> Terms | None:
-    if (
-        odcs_contract.get("description").get("usage") is not None
-        or odcs_contract.get("description").get("limitations") is not None
-        or odcs_contract.get("price") is not None
-    ):
-        terms = Terms()
-        if odcs_contract.get("description").get("usage") is not None:
-            terms.usage = odcs_contract.get("description").get("usage")
-        if odcs_contract.get("description").get("limitations") is not None:
-            terms.limitations = odcs_contract.get("description").get("limitations")
-        if odcs_contract.get("price") is not None:
-            terms.billing = f"{odcs_contract.get('price').get('priceAmount')} {odcs_contract.get('price').get('priceCurrency')} / {odcs_contract.get('price').get('priceUnit')}"
-
-        return terms
-    else:
-        return None
-
-
-def import_servicelevels(odcs_contract: Dict[str, Any]) -> ServiceLevel:
-    # find the two properties we can map (based on the examples)
-    sla_properties = odcs_contract.get("slaProperties") if odcs_contract.get("slaProperties") is not None else []
-    availability = next((p for p in sla_properties if p["property"] == "generalAvailability"), None)
-    retention = next((p for p in sla_properties if p["property"] == "retention"), None)
-
-    if availability is not None or retention is not None:
-        servicelevel = ServiceLevel()
-
-        if availability is not None:
-            value = availability.get("value")
-            if isinstance(value, datetime.datetime):
-                value = value.isoformat()
-            servicelevel.availability = Availability(description=value)
-
-        if retention is not None:
-            servicelevel.retention = Retention(period=f"{retention.get('value')}{retention.get('unit')}")
-
-        return servicelevel
-    else:
-        return None
-
-
-def import_models(odcs_contract: Dict[str, Any]) -> Dict[str, Model]:
-    custom_type_mappings = get_custom_type_mappings(odcs_contract.get("customProperties"))
-
-    odcs_tables = odcs_contract.get("dataset") if odcs_contract.get("dataset") is not None else []
-    result = {}
-
-    for table in odcs_tables:
-        description = table.get("description") if table.get("description") is not None else ""
-        model = Model(description=" ".join(description.splitlines()), type="table")
-        model.fields = import_fields(table.get("columns"), custom_type_mappings)
-        result[table.get("table")] = model
-
-    return result
-
-
-def import_fields(odcs_columns: Dict[str, Any], custom_type_mappings: Dict[str, str]) -> Dict[str, Field]:
-    logger = logging.getLogger(__name__)
-    result = {}
-
-    for column in odcs_columns:
-        mapped_type = map_type(column.get("logicalType"), custom_type_mappings)
-        if mapped_type is not None:
-            description = column.get("description") if column.get("description") is not None else ""
-            field = Field(
-                description=" ".join(description.splitlines()),
-                type=mapped_type,
-                title=column.get("businessName") if column.get("businessName") is not None else "",
-                required=not column.get("isNullable") if column.get("isNullable") is not None else False,
-                primaryKey=column.get("isPrimary") if column.get("isPrimary") is not None else False,
-                unique=column.get("isUnique") if column.get("isUnique") is not None else False,
-                classification=column.get("classification") if column.get("classification") is not None else "",
-                tags=column.get("tags") if column.get("tags") is not None else [],
-            )
-            result[column["column"]] = field
-        else:
-            logger.info(
-                f"Can't properly map {column.get('column')} to the Datacontract Mapping types, as there is no equivalent or special mapping. Consider introducing a customProperty 'dc_mapping_{column.get('logicalName')}' that defines your expected type as the 'value'"
-            )
-
-    return result
-
-
-def map_type(odcs_type: str, custom_mappings: Dict[str, str]) -> str | None:
-    t = odcs_type.lower()
-    if t in DATACONTRACT_TYPES:
-        return t
-    elif custom_mappings.get(t) is not None:
-        return custom_mappings.get(t)
-    else:
-        return None
-
-
-def get_custom_type_mappings(odcs_custom_properties: List[Any]) -> Dict[str, str]:
-    result = {}
-    if odcs_custom_properties is not None:
-        for prop in odcs_custom_properties:
-            if prop["property"].startswith("dc_mapping_"):
-                odcs_type_name = prop["property"].substring(11)
-                datacontract_type = prop["value"]
-                result[odcs_type_name] = datacontract_type
-
-    return result
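
One detail worth flagging in the removed importer: get_custom_type_mappings calls prop["property"].substring(11), but Python strings have no substring method, so the dc_mapping_ custom-property feature would have raised an AttributeError whenever it was exercised. A corrected standalone sketch of that helper, using slicing instead:

```python
# Standalone sketch of the custom-type-mapping helper from the removed
# odcs_v2_importer.py, with the non-existent str.substring(11) call replaced
# by slicing. The example property list below is made up.
from typing import Any, Dict, List

PREFIX = "dc_mapping_"  # len(PREFIX) == 11, the offset the original code intended


def get_custom_type_mappings(odcs_custom_properties: List[Any]) -> Dict[str, str]:
    result: Dict[str, str] = {}
    if odcs_custom_properties is not None:
        for prop in odcs_custom_properties:
            if prop["property"].startswith(PREFIX):
                # "dc_mapping_varchar2" -> "varchar2"
                result[prop["property"][len(PREFIX):]] = prop["value"]
    return result


props = [{"property": "dc_mapping_varchar2", "value": "string"}]
assert get_custom_type_mappings(props) == {"varchar2": "string"}
```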

datacontract/lint/linters/example_model_linter.py (removed)

@@ -1,91 +0,0 @@
-import csv
-import io
-import json
-
-import yaml
-
-from datacontract.model.data_contract_specification import DataContractSpecification, Example
-
-from ..lint import Linter, LinterResult
-
-
-class ExampleModelLinter(Linter):
-    @property
-    def name(self) -> str:
-        return "Example(s) match model"
-
-    @property
-    def id(self) -> str:
-        return "example-model"
-
-    @staticmethod
-    def get_example_headers(example: Example) -> list[str]:
-        if isinstance(example.data, str):
-            match example.type:
-                case "csv":
-                    dialect = csv.Sniffer().sniff(example.data)
-                    data = io.StringIO(example.data)
-                    reader = csv.reader(data, dialect=dialect)
-                    return next(reader)
-                case "yaml":
-                    data = yaml.safe_load(example.data)
-                    return data.keys()
-                case "json":
-                    data = json.loads(example.data)
-                    return data.keys()
-                case _:
-                    # This is checked in lint_implementation, so shouldn't happen.
-                    raise NotImplementedError(f"Unknown type {example.type}")
-        else:
-            # Checked in lint_implementation, shouldn't happen.
-            raise NotImplementedError("Can't lint object examples.")
-
-    def lint_implementation(self, contract: DataContractSpecification) -> LinterResult:
-        """Check whether the example(s) headers match the model.
-
-        This linter checks whether the example's fields match the model
-        fields, and whether all required fields of the model are present in
-        the example.
-        """
-        result = LinterResult()
-        examples = contract.examples
-        models = contract.models
-        examples_with_model = []
-        for index, example in enumerate(examples):
-            if example.model not in models:
-                result = result.with_error(f"Example {index + 1} has non-existent model '{example.model}'")
-            else:
-                examples_with_model.append((index, example, models.get(example.model)))
-        for index, example, model in examples_with_model:
-            if example.type == "custom":
-                result = result.with_warning(
-                    f"Example {index + 1} has type" ' "custom", cannot check model' " conformance"
-                )
-            elif not isinstance(example.data, str):
-                result = result.with_warning(
-                    f"Example {index + 1} is not a " "string example, can only lint string examples for now."
-                )
-            elif model.type == "object":
-                result = result.with_warning(
-                    f"Example {index + 1} uses a "
-                    f"model '{example.model}' with type 'object'. Linting is "
-                    "currently only supported for 'table' models"
-                )
-            else:
-                if example.type in ("csv", "yaml", "json"):
-                    headers = self.get_example_headers(example)
-                    for example_header in headers:
-                        if example_header not in model.fields:
-                            result = result.with_error(
-                                f"Example {index + 1} has field '{example_header}'"
-                                f" that's not contained in model '{example.model}'"
-                            )
-                    for field_name, field_value in model.fields.items():
-                        if field_name not in headers and field_value.required:
-                            result = result.with_error(
-                                f"Example {index + 1} is missing field '{field_name}'"
-                                f" required by model '{example.model}'"
-                            )
-                else:
-                    result = result.with_error(f"Example {index + 1} has unknown type" f"{example.type}")
-        return result
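
The core of the removed linter is a two-way comparison: extract the header names from a string example (csv.Sniffer for CSV, yaml.safe_load or json.loads otherwise) and check them against the model's fields in both directions, unknown headers and missing required fields. A self-contained sketch of that check for the CSV case, with made-up example data and field definitions:

```python
# Self-contained sketch of the header-vs-model check performed by the removed
# ExampleModelLinter, CSV case only. The example data and field set are made up.
import csv
import io

csv_example = "order_id,amount\n1001,9.99\n1002,19.50\n"
model_fields = {"order_id": True, "amount": True, "currency": False}  # field -> required?

dialect = csv.Sniffer().sniff(csv_example)
headers = next(csv.reader(io.StringIO(csv_example), dialect=dialect))

unknown_fields = [h for h in headers if h not in model_fields]
missing_required = [f for f, required in model_fields.items() if required and f not in headers]

print("fields not in the model:", unknown_fields)    # []
print("missing required fields:", missing_required)  # []
```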

File without changes: /datacontract/{model → breaking}/breaking_change.py
File without changes: {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/LICENSE
File without changes: {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/WHEEL
File without changes: {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/entry_points.txt
File without changes: {datacontract_cli-0.10.20.dist-info → datacontract_cli-0.10.22.dist-info}/top_level.txt