datacontract-cli 0.9.7__py3-none-any.whl → 0.9.9__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of datacontract-cli might be problematic. Click here for more details.

Files changed (62) hide show
  1. datacontract/breaking/breaking.py +48 -57
  2. datacontract/cli.py +100 -80
  3. datacontract/data_contract.py +178 -128
  4. datacontract/engines/datacontract/check_that_datacontract_contains_valid_servers_configuration.py +5 -1
  5. datacontract/engines/datacontract/check_that_datacontract_file_exists.py +9 -8
  6. datacontract/engines/datacontract/check_that_datacontract_str_is_valid.py +26 -22
  7. datacontract/engines/fastjsonschema/check_jsonschema.py +31 -25
  8. datacontract/engines/fastjsonschema/s3/s3_read_files.py +8 -6
  9. datacontract/engines/soda/check_soda_execute.py +58 -36
  10. datacontract/engines/soda/connections/bigquery.py +5 -3
  11. datacontract/engines/soda/connections/dask.py +0 -1
  12. datacontract/engines/soda/connections/databricks.py +2 -2
  13. datacontract/engines/soda/connections/duckdb.py +25 -8
  14. datacontract/engines/soda/connections/kafka.py +36 -17
  15. datacontract/engines/soda/connections/postgres.py +3 -3
  16. datacontract/engines/soda/connections/snowflake.py +4 -4
  17. datacontract/export/avro_converter.py +9 -11
  18. datacontract/export/avro_idl_converter.py +65 -42
  19. datacontract/export/csv_type_converter.py +36 -0
  20. datacontract/export/dbt_converter.py +43 -32
  21. datacontract/export/great_expectations_converter.py +141 -0
  22. datacontract/export/html_export.py +46 -0
  23. datacontract/export/jsonschema_converter.py +3 -1
  24. datacontract/export/odcs_converter.py +5 -7
  25. datacontract/export/protobuf_converter.py +12 -10
  26. datacontract/export/pydantic_converter.py +131 -0
  27. datacontract/export/rdf_converter.py +34 -11
  28. datacontract/export/sodacl_converter.py +118 -21
  29. datacontract/export/sql_converter.py +30 -8
  30. datacontract/export/sql_type_converter.py +44 -4
  31. datacontract/export/terraform_converter.py +4 -3
  32. datacontract/imports/avro_importer.py +65 -18
  33. datacontract/imports/sql_importer.py +0 -2
  34. datacontract/init/download_datacontract_file.py +2 -2
  35. datacontract/integration/publish_datamesh_manager.py +6 -12
  36. datacontract/integration/publish_opentelemetry.py +30 -16
  37. datacontract/lint/files.py +2 -2
  38. datacontract/lint/lint.py +26 -31
  39. datacontract/lint/linters/description_linter.py +12 -21
  40. datacontract/lint/linters/example_model_linter.py +28 -29
  41. datacontract/lint/linters/field_pattern_linter.py +8 -8
  42. datacontract/lint/linters/field_reference_linter.py +11 -10
  43. datacontract/lint/linters/notice_period_linter.py +18 -22
  44. datacontract/lint/linters/quality_schema_linter.py +16 -20
  45. datacontract/lint/linters/valid_constraints_linter.py +42 -37
  46. datacontract/lint/resolve.py +50 -14
  47. datacontract/lint/schema.py +2 -3
  48. datacontract/lint/urls.py +4 -5
  49. datacontract/model/breaking_change.py +2 -1
  50. datacontract/model/data_contract_specification.py +8 -7
  51. datacontract/model/exceptions.py +13 -2
  52. datacontract/model/run.py +3 -2
  53. datacontract/web.py +3 -7
  54. datacontract_cli-0.9.9.dist-info/METADATA +951 -0
  55. datacontract_cli-0.9.9.dist-info/RECORD +64 -0
  56. datacontract/lint/linters/primary_field_linter.py +0 -30
  57. datacontract_cli-0.9.7.dist-info/METADATA +0 -603
  58. datacontract_cli-0.9.7.dist-info/RECORD +0 -61
  59. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/LICENSE +0 -0
  60. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/WHEEL +0 -0
  61. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/entry_points.txt +0 -0
  62. {datacontract_cli-0.9.7.dist-info → datacontract_cli-0.9.9.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,951 @@
1
+ Metadata-Version: 2.1
2
+ Name: datacontract-cli
3
+ Version: 0.9.9
4
+ Summary: Test data contracts
5
+ Author-email: Jochen Christ <jochen.christ@innoq.com>, Stefan Negele <stefan.negele@innoq.com>
6
+ Project-URL: Homepage, https://cli.datacontract.com
7
+ Project-URL: Issues, https://github.com/datacontract/cli/issues
8
+ Classifier: Programming Language :: Python :: 3
9
+ Classifier: License :: OSI Approved :: MIT License
10
+ Classifier: Operating System :: OS Independent
11
+ Requires-Python: >=3.10
12
+ Description-Content-Type: text/markdown
13
+ License-File: LICENSE
14
+ Requires-Dist: typer[all] <0.13,>=0.9
15
+ Requires-Dist: pydantic <2.8.0,>=2.5.3
16
+ Requires-Dist: pyyaml ~=6.0.1
17
+ Requires-Dist: requests ~=2.31.0
18
+ Requires-Dist: fastapi ==0.110.1
19
+ Requires-Dist: fastparquet ==2024.2.0
20
+ Requires-Dist: python-multipart ==0.0.9
21
+ Requires-Dist: rich ~=13.7.0
22
+ Requires-Dist: simple-ddl-parser ==1.0.4
23
+ Requires-Dist: soda-core-bigquery <3.4.0,>=3.3.1
24
+ Requires-Dist: soda-core-duckdb <3.4.0,>=3.3.1
25
+ Requires-Dist: soda-core-postgres <3.4.0,>=3.3.1
26
+ Requires-Dist: soda-core-snowflake <3.4.0,>=3.3.1
27
+ Requires-Dist: soda-core-spark[databricks] <3.4.0,>=3.3.1
28
+ Requires-Dist: soda-core-spark-df <3.4.0,>=3.3.1
29
+ Requires-Dist: snowflake-connector-python[pandas] <3.8,>=3.6
30
+ Requires-Dist: duckdb ==0.10.1
31
+ Requires-Dist: fastjsonschema ~=2.19.1
32
+ Requires-Dist: python-dotenv ~=1.0.0
33
+ Requires-Dist: s3fs ==2024.3.1
34
+ Requires-Dist: rdflib ==7.0.0
35
+ Requires-Dist: avro ==1.11.3
36
+ Requires-Dist: opentelemetry-exporter-otlp-proto-grpc ~=1.16.0
37
+ Requires-Dist: opentelemetry-exporter-otlp-proto-http ~=1.16.0
38
+ Provides-Extra: dev
39
+ Requires-Dist: httpx ==0.27.0 ; extra == 'dev'
40
+ Requires-Dist: ruff ; extra == 'dev'
41
+ Requires-Dist: pytest ; extra == 'dev'
42
+ Requires-Dist: pytest-xdist ; extra == 'dev'
43
+ Requires-Dist: testcontainers <4.0 ; extra == 'dev'
44
+ Requires-Dist: testcontainers-minio ; extra == 'dev'
45
+ Requires-Dist: testcontainers-postgres ; extra == 'dev'
46
+ Requires-Dist: testcontainers-kafka ; extra == 'dev'
47
+
48
+ # Data Contract CLI
49
+
50
+ <p>
51
+ <a href="https://github.com/datacontract/cli/actions/workflows/ci.yaml?query=branch%3Amain">
52
+ <img alt="Test Workflow" src="https://img.shields.io/github/actions/workflow/status/datacontract/cli/ci.yaml?branch=main"></a>
53
+ <a href="https://github.com/datacontract/cli">
54
+ <img alt="Stars" src="https://img.shields.io/github/stars/datacontract/cli" /></a>
55
+ <a href="https://datacontract.com/slack" rel="nofollow"><img src="https://camo.githubusercontent.com/5ade1fd1e76a6ab860802cdd2941fe2501e2ca2cb534e5d8968dbf864c13d33d/68747470733a2f2f696d672e736869656c64732e696f2f62616467652f736c61636b2d6a6f696e5f636861742d77686974652e7376673f6c6f676f3d736c61636b267374796c653d736f6369616c" alt="Slack Status" data-canonical-src="https://img.shields.io/badge/slack-join_chat-white.svg?logo=slack&amp;style=social" style="max-width: 100%;"></a>
56
+ </p>
57
+
58
+ The `datacontract` CLI is an open source command-line tool for working with [Data Contracts](https://datacontract.com/).
59
+ It uses data contract YAML files to lint the data contract, connect to data sources and execute schema and quality tests, detect breaking changes, and export to different formats. The tool is written in Python. It can be used as a standalone CLI tool, in a CI/CD pipeline, or directly as a Python library.
60
+
61
+ ![Main features of the Data Contract CLI](datacontractcli.png)
62
+
63
+
64
+ ## Getting started
65
+
66
+ Let's look at this data contract:
67
+ [https://datacontract.com/examples/orders-latest/datacontract.yaml](https://datacontract.com/examples/orders-latest/datacontract.yaml)
68
+
69
+ We have a _servers_ section with endpoint details to the S3 bucket, _models_ for the structure of the data, _servicelevels_ and _quality_ attributes that describe the expected freshness and number of rows.
70
+
71
+ This data contract contains all information to connect to S3 and check that the actual data meets the defined schema and quality requirements. We can use this information to test if the actual data set in S3 is compliant to the data contract.
72
+
73
+ Let's use [pip](https://pip.pypa.io/en/stable/getting-started/) to install the CLI (or use the [Docker image](#docker), if you prefer).
74
+ ```bash
75
+ $ python3 -m pip install datacontract-cli
76
+ ```
77
+
78
+ We run the tests:
79
+
80
+ ```bash
81
+ $ datacontract test https://datacontract.com/examples/orders-latest/datacontract.yaml
82
+
83
+ # returns:
84
+ Testing https://datacontract.com/examples/orders-latest/datacontract.yaml
85
+ ╭────────┬─────────────────────────────────────────────────────────────────────┬───────────────────────────────┬─────────╮
86
+ │ Result │ Check │ Field │ Details │
87
+ ├────────┼─────────────────────────────────────────────────────────────────────┼───────────────────────────────┼─────────┤
88
+ │ passed │ Check that JSON has valid schema │ orders │ │
89
+ │ passed │ Check that JSON has valid schema │ line_items │ │
90
+ │ passed │ Check that field order_id is present │ orders │ │
91
+ │ passed │ Check that field order_timestamp is present │ orders │ │
92
+ │ passed │ Check that field order_total is present │ orders │ │
93
+ │ passed │ Check that field customer_id is present │ orders │ │
94
+ │ passed │ Check that field customer_email_address is present │ orders │ │
95
+ │ passed │ row_count >= 5000 │ orders │ │
96
+ │ passed │ Check that required field order_id has no null values │ orders.order_id │ │
97
+ │ passed │ Check that unique field order_id has no duplicate values │ orders.order_id │ │
98
+ │ passed │ duplicate_count(order_id) = 0 │ orders.order_id │ │
99
+ │ passed │ Check that required field order_timestamp has no null values │ orders.order_timestamp │ │
100
+ │ passed │ freshness(order_timestamp) < 24h │ orders.order_timestamp │ │
101
+ │ passed │ Check that required field order_total has no null values │ orders.order_total │ │
102
+ │ passed │ Check that required field customer_email_address has no null values │ orders.customer_email_address │ │
103
+ │ passed │ Check that field lines_item_id is present │ line_items │ │
104
+ │ passed │ Check that field order_id is present │ line_items │ │
105
+ │ passed │ Check that field sku is present │ line_items │ │
106
+ │ passed │ values in (order_id) must exist in orders (order_id) │ line_items.order_id │ │
107
+ │ passed │ row_count >= 5000 │ line_items │ │
108
+ │ passed │ Check that required field lines_item_id has no null values │ line_items.lines_item_id │ │
109
+ │ passed │ Check that unique field lines_item_id has no duplicate values │ line_items.lines_item_id │ │
110
+ ╰────────┴─────────────────────────────────────────────────────────────────────┴───────────────────────────────┴─────────╯
111
+ 🟢 data contract is valid. Run 22 checks. Took 6.739514 seconds.
112
+ ```
113
+
114
+ Voilà, the CLI tested that the _datacontract.yaml_ itself is valid, all records comply with the schema, and all quality attributes are met.
115
+
116
+ We can also use the datacontract.yaml to export in many [formats](#format), e.g., to SQL:
117
+
118
+ ```bash
119
+ $ datacontract export --format sql https://datacontract.com/examples/orders-latest/datacontract.yaml
120
+
121
+ # returns:
122
+ -- Data Contract: urn:datacontract:checkout:orders-latest
123
+ -- SQL Dialect: snowflake
124
+ CREATE TABLE orders (
125
+ order_id TEXT not null primary key,
126
+ order_timestamp TIMESTAMP_TZ not null,
127
+ order_total NUMBER not null,
128
+ customer_id TEXT,
129
+ customer_email_address TEXT not null,
130
+ processed_timestamp TIMESTAMP_TZ not null
131
+ );
132
+ CREATE TABLE line_items (
133
+ lines_item_id TEXT not null primary key,
134
+ order_id TEXT,
135
+ sku TEXT
136
+ );
137
+ ```
138
+
139
+ Or generate this [HTML page](https://datacontract.com/examples/orders-latest/datacontract.html).
140
+
141
+ ## Usage
142
+
143
+ ```bash
144
+ # create a new data contract from example and write it to datacontract.yaml
145
+ $ datacontract init datacontract.yaml
146
+
147
+ # lint the datacontract.yaml
148
+ $ datacontract lint datacontract.yaml
149
+
150
+ # execute schema and quality checks
151
+ $ datacontract test datacontract.yaml
152
+
153
+ # execute schema and quality checks on the examples within the contract
154
+ $ datacontract test --examples datacontract.yaml
155
+
156
+ # export data contract as html (other formats: avro, dbt, dbt-sources, dbt-staging-sql, jsonschema, odcs, rdf, sql, sodacl, terraform, ...)
157
+ $ datacontract export --format html datacontract.yaml > datacontract.html
158
+
159
+ # import avro (other formats: sql, ...)
160
+ $ datacontract import --format avro --source avro_schema.avsc
161
+
162
+ # find differences between two data contracts
163
+ $ datacontract diff datacontract-v1.yaml datacontract-v2.yaml
164
+
165
+ # find differences between two data contracts categorized into error, warning, and info.
166
+ $ datacontract changelog datacontract-v1.yaml datacontract-v2.yaml
167
+
168
+ # fail pipeline on breaking changes. Uses changelog internally and shows only errors and warnings.
169
+ $ datacontract breaking datacontract-v1.yaml datacontract-v2.yaml
170
+ ```
171
+
172
+ ## Programmatic (Python)
173
+ ```python
174
+ from datacontract.data_contract import DataContract
175
+
176
+ data_contract = DataContract(data_contract_file="datacontract.yaml")
177
+ run = data_contract.test()
178
+ if not run.has_passed():
179
+ print("Data quality validation failed.")
180
+ # Abort pipeline, alert, or take corrective actions...
181
+ ```
182
+
183
+
184
+ ## Installation
185
+
186
+ Choose the most appropriate installation method for your needs:
187
+
188
+ ### pip
189
+ Python 3.11 recommended.
190
+ Python 3.12 available as a release candidate for 0.9.3
191
+
192
+ ```bash
193
+ python3 -m pip install datacontract-cli
194
+ ```
195
+
196
+ ### pipx
197
+ pipx installs into an isolated environment.
198
+ ```bash
199
+ pipx install datacontract-cli
200
+ ```
201
+
202
+ ### Docker
203
+
204
+ ```bash
205
+ docker pull datacontract/cli
206
+ docker run --rm -v ${PWD}:/home/datacontract datacontract/cli
207
+ ```
208
+
209
+ Or via an alias that automatically uses the latest version:
210
+
211
+ ```bash
212
+ alias datacontract='docker run --rm -v "${PWD}:/home/datacontract" datacontract/cli:latest'
213
+ ```
214
+
215
+ ## Documentation
216
+
217
+ Commands
218
+
219
+ - [init](#init)
220
+ - [lint](#lint)
221
+ - [test](#test)
222
+ - [export](#export)
223
+ - [import](#import)
224
+ - [breaking](#breaking)
225
+ - [changelog](#changelog)
226
+ - [diff](#diff)
227
+
228
+ ### init
229
+
230
+ ```
231
+ Usage: datacontract init [OPTIONS] [LOCATION]
232
+
233
+ Download a datacontract.yaml template and write it to file.
234
+
235
+ ╭─ Arguments ──────────────────────────────────────────────────────────────────────────────────╮
236
+ │ location [LOCATION] The location (url or path) of the data contract yaml to create. │
237
+ │ [default: datacontract.yaml] │
238
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────╯
239
+ ╭─ Options ────────────────────────────────────────────────────────────────────────────────────╮
240
+ │ --template TEXT URL of a template or data contract │
241
+ │ [default: │
242
+ │ https://datacontract.com/datacontract.init.yaml] │
243
+ │ --overwrite --no-overwrite Replace the existing datacontract.yaml │
244
+ │ [default: no-overwrite] │
245
+ │ --help Show this message and exit. │
246
+ ╰──────────────────────────────────────────────────────────────────────────────────────────────╯
247
+ ```
248
+
249
+ ### lint
250
+
251
+ ```
252
+ Usage: datacontract lint [OPTIONS] [LOCATION]
253
+
254
+ Validate that the datacontract.yaml is correctly formatted.
255
+
256
+ ╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
257
+ │ location [LOCATION] The location (url or path) of the data contract yaml. [default: datacontract.yaml] │
258
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
259
+ ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
260
+ │ --schema TEXT The location (url or path) of the Data Contract Specification JSON Schema │
261
+ │ [default: https://datacontract.com/datacontract.schema.json] │
262
+ │ --help Show this message and exit. │
263
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
264
+ ```
265
+
266
+ ### test
267
+
268
+ ```
269
+ Usage: datacontract test [OPTIONS] [LOCATION]
270
+
271
+ Run schema and quality tests on configured servers.
272
+
273
+ ╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
274
+ │ location [LOCATION] The location (url or path) of the data contract yaml. [default: datacontract.yaml] │
275
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
276
+ ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
277
+ │ --schema TEXT The location (url or path) of the Data Contract │
278
+ │ Specification JSON Schema │
279
+ │ [default: │
280
+ │ https://datacontract.com/datacontract.schema.json] │
281
+ │ --server TEXT The server configuration to run the schema and quality │
282
+ │ tests. Use the key of the server object in the data │
283
+ │ contract yaml file to refer to a server, e.g., │
284
+ │ `production`, or `all` for all servers (default). │
285
+ │ [default: all] │
286
+ │ --examples --no-examples Run the schema and quality tests on the example data │
287
+ │ within the data contract. │
288
+ │ [default: no-examples] │
289
+ │ --publish TEXT The url to publish the results after the test │
290
+ │ [default: None] │
291
+ │ --publish-to-opentelemetry --no-publish-to-opentelemetry Publish the results to opentelemetry. Use environment │
292
+ │ variables to configure the OTLP endpoint, headers, etc. │
293
+ │ [default: no-publish-to-opentelemetry] │
294
+ │ --logs --no-logs Print logs [default: no-logs] │
295
+ │ --help Show this message and exit. │
296
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
297
+ ```
298
+
299
+ Data Contract CLI can connect to data sources and run schema and quality tests to verify that the data contract is valid.
300
+
301
+ ```bash
302
+ $ datacontract test --server production datacontract.yaml
303
+ ```
304
+
305
+ To connect to the databases the `server` block in the datacontract.yaml is used to set up the connection. In addition, credentials, such as username and passwords, may be defined with environment variables.
306
+
307
+ The application uses different engines, based on the server `type`.
308
+ Internally, it connects with DuckDB, Spark, or a native connection and executes the most tests with soda-core and fastjsonschema.
309
+ Credentials are read from the environment variables.
310
+
311
+ Supported server types:
312
+
313
+ | Type | Format | Status |
314
+ |--------------|------------|--------------------------------------------------------------------|
315
+ | `s3` | `parquet` | ✅ |
316
+ | `s3` | `json` | ✅ |
317
+ | `s3` | `csv` | ✅ |
318
+ | `s3` | `delta` | Coming soon ([#24](https://github.com/datacontract/cli/issues/24)) |
319
+ | `s3` | `iceberg` | Coming soon |
320
+ | `postgres` | n/a | ✅ |
321
+ | `snowflake` | n/a | ✅ |
322
+ | `bigquery` | n/a | ✅ |
323
+ | `redshift` | n/a | Coming soon |
324
+ | `databricks` | n/a | ✅ |
325
+ | `kafka` | `json` | ✅ |
326
+ | `kafka` | `avro` | Coming soon |
327
+ | `kafka` | `protobuf` | Coming soon |
328
+ | `local` | `parquet` | ✅ |
329
+ | `local` | `json` | ✅ |
330
+ | `local` | `csv` | ✅ |
331
+
332
+ Feel free to create an issue, if you need support for an additional type.
333
+
334
+ ### S3
335
+
336
+ Data Contract CLI can test data that is stored in S3 buckets or any S3-compliant endpoints in various formats.
337
+
338
+ #### Example
339
+
340
+ datacontract.yaml
341
+ ```yaml
342
+ servers:
343
+ production:
344
+ type: s3
345
+ endpointUrl: https://minio.example.com # not needed with AWS S3
346
+ location: s3://bucket-name/path/*/*.json
347
+ format: json
348
+ delimiter: new_line # new_line, array, or none
349
+ ```
350
+
351
+ #### Environment Variables
352
+
353
+ | Environment Variable | Example | Description |
354
+ |-----------------------------------|-------------------------------|-----------------------|
355
+ | `DATACONTRACT_S3_REGION` | `eu-central-1` | Region of S3 bucket |
356
+ | `DATACONTRACT_S3_ACCESS_KEY_ID` | `AKIAXV5Q5QABCDEFGH` | AWS Access Key ID |
357
+ | `DATACONTRACT_S3_SECRET_ACCESS_KEY` | `93S7LRrJcqLaaaa/XXXXXXXXXXXXX` | AWS Secret Access Key |
358
+
359
+
360
+ ### Postgres
361
+
362
+ Data Contract CLI can test data in Postgres or Postgres-compliant databases (e.g., RisingWave).
363
+
364
+ #### Example
365
+
366
+ datacontract.yaml
367
+ ```yaml
368
+ servers:
369
+ postgres:
370
+ type: postgres
371
+ host: localhost
372
+ port: 5432
373
+ database: postgres
374
+ schema: public
375
+ models:
376
+ my_table_1: # corresponds to a table
377
+ type: table
378
+ fields:
379
+ my_column_1: # corresponds to a column
380
+ type: varchar
381
+ ```
382
+
383
+ #### Environment Variables
384
+
385
+ | Environment Variable | Example | Description |
386
+ |----------------------------------|--------------------|-------------|
387
+ | `DATACONTRACT_POSTGRES_USERNAME` | `postgres` | Username |
388
+ | `DATACONTRACT_POSTGRES_PASSWORD` | `mysecretpassword` | Password |
389
+
390
+
391
+ ### Snowflake
392
+
393
+ Data Contract CLI can test data in Snowflake.
394
+
395
+ #### Example
396
+
397
+ datacontract.yaml
398
+ ```yaml
399
+
400
+ servers:
401
+ snowflake:
402
+ type: snowflake
403
+ account: abcdefg-xn12345
404
+ database: ORDER_DB
405
+ schema: ORDERS_PII_V2
406
+ models:
407
+ my_table_1: # corresponds to a table
408
+ type: table
409
+ fields:
410
+ my_column_1: # corresponds to a column
411
+ type: varchar
412
+ ```
413
+
414
+ #### Environment Variables
415
+
416
+ | Environment Variable | Example | Description |
417
+ |------------------------------------|--------------------|-----------------------------------------------------|
418
+ | `DATACONTRACT_SNOWFLAKE_USERNAME` | `datacontract` | Username |
419
+ | `DATACONTRACT_SNOWFLAKE_PASSWORD` | `mysecretpassword` | Password |
420
+ | `DATACONTRACT_SNOWFLAKE_ROLE` | `DATAVALIDATION` | The snowflake role to use. |
421
+ | `DATACONTRACT_SNOWFLAKE_WAREHOUSE` | `COMPUTE_WH` | The Snowflake Warehouse to use executing the tests. |
422
+
423
+
424
+ ### BigQuery
425
+
426
+ We support authentication to BigQuery using Service Account Key. The used Service Account should include the roles:
427
+ * BigQuery Job User
428
+ * BigQuery Data Viewer
429
+
430
+
431
+ #### Example
432
+
433
+ datacontract.yaml
434
+ ```yaml
435
+ servers:
436
+ production:
437
+ type: bigquery
438
+ project: datameshexample-product
439
+ dataset: datacontract_cli_test_dataset
440
+ models:
441
+ datacontract_cli_test_table: # corresponds to a BigQuery table
442
+ type: table
443
+ fields: ...
444
+ ```
445
+
446
+ #### Environment Variables
447
+
448
+ | Environment Variable | Example | Description |
449
+ |----------------------------------------------|---------------------------|---------------------------------------------------------|
450
+ | `DATACONTRACT_BIGQUERY_ACCOUNT_INFO_JSON_PATH` | `~/service-access-key.json` | Service Access key as saved on key creation by BigQuery |
451
+
452
+
453
+ ### Databricks
454
+
455
+ Works with Unity Catalog and Hive metastore.
456
+
457
+ Needs a running SQL warehouse or compute cluster.
458
+
459
+ #### Example
460
+
461
+ datacontract.yaml
462
+ ```yaml
463
+ servers:
464
+ production:
465
+ type: databricks
466
+ host: dbc-abcdefgh-1234.cloud.databricks.com
467
+ catalog: acme_catalog_prod
468
+ schema: orders_latest
469
+ models:
470
+ orders: # corresponds to a table
471
+ type: table
472
+ fields: ...
473
+ ```
474
+
475
+ #### Environment Variables
476
+
477
+ | Environment Variable | Example | Description |
478
+ |----------------------------------------------|--------------------------------------|-------------------------------------------------------|
479
+ | `DATACONTRACT_DATABRICKS_TOKEN` | `dapia00000000000000000000000000000` | The personal access token to authenticate |
480
+ | `DATACONTRACT_DATABRICKS_HTTP_PATH` | `/sql/1.0/warehouses/b053a3ffffffff` | The HTTP path to the SQL warehouse or compute cluster |
481
+
482
+
483
+ ### Databricks (programmatic)
484
+
485
+ Works with Unity Catalog and Hive metastore.
486
+ When running in a notebook or pipeline, the provided `spark` session can be used.
487
+ An additional authentication is not required.
488
+
489
+ Requires a Databricks Runtime with Python >= 3.10.
490
+
491
+ #### Example
492
+
493
+ datacontract.yaml
494
+ ```yaml
495
+ servers:
496
+ production:
497
+ type: databricks
498
+ host: dbc-abcdefgh-1234.cloud.databricks.com # ignored, always use current host
499
+ catalog: acme_catalog_prod
500
+ schema: orders_latest
501
+ models:
502
+ orders: # corresponds to a table
503
+ type: table
504
+ fields: ...
505
+ ```
506
+
507
+ Notebook
508
+ ```python
509
+ %pip install datacontract-cli
510
+ dbutils.library.restartPython()
511
+
512
+ from datacontract.data_contract import DataContract
513
+
514
+ data_contract = DataContract(
515
+ data_contract_file="/Volumes/acme_catalog_prod/orders_latest/datacontract/datacontract.yaml",
516
+ spark=spark)
517
+ run = data_contract.test()
518
+ run.result
519
+ ```
520
+
521
+ ### Kafka
522
+
523
+ Kafka support is currently considered experimental.
524
+
525
+ #### Example
526
+
527
+ datacontract.yaml
528
+ ```yaml
529
+ servers:
530
+ production:
531
+ type: kafka
532
+ host: abc-12345.eu-central-1.aws.confluent.cloud:9092
533
+ topic: my-topic-name
534
+ format: json
535
+ ```
536
+
537
+ #### Environment Variables
538
+
539
+ | Environment Variable | Example | Description |
540
+ |------------------------------------|---------|-----------------------------|
541
+ | `DATACONTRACT_KAFKA_SASL_USERNAME` | `xxx` | The SASL username (key). |
542
+ | `DATACONTRACT_KAFKA_SASL_PASSWORD` | `xxx` | The SASL password (secret). |
543
+
544
+
545
+
546
+ ### export
547
+
548
+ ```
549
+ Usage: datacontract export [OPTIONS] [LOCATION]
550
+
551
+   Convert data contract to a specific format. Prints to stdout.
552
+
553
+ ╭─ Arguments ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
554
+ │ location [LOCATION] The location (url or path) of the data contract yaml. [default: datacontract.yaml] │
555
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
556
+ ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
557
+ │ * --format [html|jsonschema|pydantic-model|sodacl|dbt|dbt-sources|dbt-staging-sql|odcs|rd The export format. [default: None] [required] │
558
+ │ f|avro|protobuf|great-expectations|terraform|avro-idl|sql|sql-query] │
559
+ │ --server TEXT The server name to export. [default: None] │
560
+ │ --model TEXT Use the key of the model in the data contract yaml file to refer to a │
561
+ │ model, e.g., `orders`, or `all` for all models (default). │
562
+ │ [default: all] │
563
+ │ --help Show this message and exit. │
564
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
565
+ ╭─ RDF Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
566
+ │ --rdf-base TEXT [rdf] The base URI used to generate the RDF graph. [default: None] │
567
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
568
+ ╭─ SQL Options ───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
569
+ │ --sql-server-type TEXT [sql] The server type to determine the sql dialect. By default, it uses 'auto' to automatically detect the sql dialect via the specified │
570
+ │ servers in the data contract. │
571
+ │ [default: auto] │
572
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
573
+
574
+ ```
575
+
576
+ ```bash
577
+ # Example export data contract as HTML
578
+ datacontract export --format html > datacontract.html
579
+ ```
580
+
581
+ Available export options:
582
+
583
+ | Type | Description | Status |
584
+ |----------------------|---------------------------------------------------------|--------|
585
+ | `html` | Export to HTML | ✅ |
586
+ | `jsonschema` | Export to JSON Schema | ✅ |
587
+ | `odcs` | Export to Open Data Contract Standard (ODCS) | ✅ |
588
+ | `sodacl` | Export to SodaCL quality checks in YAML format | ✅ |
589
+ | `dbt` | Export to dbt models in YAML format | ✅ |
590
+ | `dbt-sources` | Export to dbt sources in YAML format | ✅ |
591
+ | `dbt-staging-sql` | Export to dbt staging SQL models | ✅ |
592
+ | `rdf` | Export data contract to RDF representation in N3 format | ✅ |
593
+ | `avro` | Export to AVRO models | ✅ |
594
+ | `protobuf` | Export to Protobuf | ✅ |
595
+ | `terraform` | Export to terraform resources | ✅ |
596
+ | `sql` | Export to SQL DDL | ✅ |
597
+ | `sql-query` | Export to SQL Query | ✅ |
598
+ | `great-expectations` | Export to Great Expectations Suites in JSON Format | ✅ |
599
+ | `bigquery` | Export to BigQuery Schemas | TBD |
600
+ | `pydantic` | Export to pydantic models | TBD |
601
+ | Missing something? | Please create an issue on GitHub | TBD |
602
+
603
+ #### Great Expectations
604
+
605
+ The export function transforms a specified data contract into a comprehensive Great Expectations JSON suite.
606
+ If the contract includes multiple models, you need to specify the name of the model you wish to export.
607
+
608
+ ```shell
609
+ datacontract export datacontract.yaml --format great-expectations --model orders
610
+ ```
611
+
612
+ The export creates a list of expectations by utilizing:
613
+
614
+ - The data from the Model definition with a fixed mapping
615
+ - The expectations provided in the quality field for each model (find here the expectations gallery https://greatexpectations.io/expectations/)
616
+
617
+ #### RDF
618
+
619
+ The export function converts a given data contract into an RDF representation. You have the option to
620
+ add a base_url which will be used as the default prefix to resolve relative IRIs inside the document.
621
+
622
+ ```shell
623
+ datacontract export --format rdf --rdf-base https://www.example.com/ datacontract.yaml
624
+ ```
625
+
626
+ The data contract is mapped onto the following concepts of a yet to be defined Data Contract
627
+ Ontology named https://datacontract.com/DataContractSpecification/ :
628
+ - DataContract
629
+ - Server
630
+ - Model
631
+
632
+ Having the data contract inside an RDF Graph gives us access to the following use cases:
633
+ - Interoperability with other data contract specification formats
634
+ - Store data contracts inside a knowledge graph
635
+ - Enhance a semantic search to find and retrieve data contracts
636
+ - Linking model elements to already established ontologies and knowledge
637
+ - Using full power of OWL to reason about the graph structure of data contracts
638
+ - Apply graph algorithms on multiple data contracts (Find similar data contracts, find "gatekeeper"
639
+ data products, find the true domain owner of a field attribute)
640
+
641
+ ### import
642
+
643
+ ```
644
+ Usage: datacontract import [OPTIONS]
645
+
646
+ Create a data contract from the given source file. Prints to stdout.
647
+
648
+ ╭─ Options ───────────────────────────────────────────────────────────────────────────────────────────────────────╮
649
+ │ * --format [sql|avro] The format of the source file. [default: None] [required] │
650
+ │ * --source TEXT The path to the file that should be imported. [default: None] [required] │
651
+ │ --help Show this message and exit. │
652
+ ╰─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
653
+ ```
654
+
655
+ Example:
656
+ ```bash
657
+ # Example import from SQL DDL
658
+ datacontract import --format sql --source my_ddl.sql
659
+ ```
660
+
661
+ Available import options:
662
+
663
+ | Type | Description | Status |
664
+ |--------------------|------------------------------------------------|---------|
665
+ | `sql` | Import from SQL DDL | ✅ |
666
+ | `avro` | Import from AVRO schemas | ✅ |
667
+ | `protobuf` | Import from Protobuf schemas | TBD |
668
+ | `jsonschema` | Import from JSON Schemas | TBD |
669
+ | `bigquery` | Import from BigQuery Schemas | TBD |
670
+ | `dbt` | Import from dbt models | TBD |
671
+ | `odcs` | Import from Open Data Contract Standard (ODCS) | TBD |
672
+ | Missing something? | Please create an issue on GitHub | TBD |
673
+
674
+
675
+ ### breaking
676
+
677
+ ```
678
+ Usage: datacontract breaking [OPTIONS] LOCATION_OLD LOCATION_NEW
679
+
680
+ Identifies breaking changes between data contracts. Prints to stdout.
681
+
682
+ ╭─ Arguments ───────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
683
+ │ * location_old TEXT The location (url or path) of the old data contract yaml. [default: None] [required] │
684
+ │ * location_new TEXT The location (url or path) of the new data contract yaml. [default: None] [required] │
685
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
686
+ ╭─ Options ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
687
+ │ --help Show this message and exit. │
688
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
689
+ ```
690
+
691
+ ### changelog
692
+
693
+ ```
694
+ Usage: datacontract changelog [OPTIONS] LOCATION_OLD LOCATION_NEW
695
+
696
+ Generate a changelog between data contracts. Prints to stdout.
697
+
698
+ ╭─ Arguments ───────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
699
+ │ * location_old TEXT The location (url or path) of the old data contract yaml. [default: None] [required] │
700
+ │ * location_new TEXT The location (url or path) of the new data contract yaml. [default: None] [required] │
701
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
702
+ ╭─ Options ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
703
+ │ --help Show this message and exit. │
704
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
705
+ ```
706
+
707
+ ### diff
708
+
709
+ ```
710
+ Usage: datacontract diff [OPTIONS] LOCATION_OLD LOCATION_NEW
711
+
712
+ PLACEHOLDER. Currently works as 'changelog' does.
713
+
714
+ ╭─ Arguments ───────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
715
+ │ * location_old TEXT The location (url or path) of the old data contract yaml. [default: None] [required] │
716
+ │ * location_new TEXT The location (url or path) of the new data contract yaml. [default: None] [required] │
717
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
718
+ ╭─ Options ─────────────────────────────────────────────────────────────────────────────────────────────────────────────────╮
719
+ │ --help Show this message and exit. │
720
+ ╰───────────────────────────────────────────────────────────────────────────────────────────────────────────────────────────╯
721
+ ```
722
+
723
+
724
+ ## Integrations
725
+
726
+ | Integration | Option | Description |
727
+ |-------------------|------------------------------|-------------------------------------------------------------------------------------------------------|
728
+ | Data Mesh Manager | `--publish` | Push full results to the [Data Mesh Manager API](https://api.datamesh-manager.com/swagger/index.html) |
729
+ | OpenTelemetry | `--publish-to-opentelemetry` | Push result as gauge metrics |
730
+
731
+ ### Integration with Data Mesh Manager
732
+
733
+ If you use [Data Mesh Manager](https://datamesh-manager.com/), you can use the data contract URL and append the `--publish` option to send and display the test results. Set an environment variable for your API key.
734
+
735
+ ```bash
736
+ # Fetch current data contract, execute tests on production, and publish result to data mesh manager
737
+ $ export DATAMESH_MANAGER_API_KEY=xxx
738
+ $ datacontract test https://demo.datamesh-manager.com/demo279750347121/datacontracts/4df9d6ee-e55d-4088-9598-b635b2fdcbbc/datacontract.yaml --server production --publish
739
+ ```
740
+
741
+ ### Integration with OpenTelemetry
742
+
743
+ If you use OpenTelemetry, you can use the data contract URL and append the `--publish-to-opentelemetry` option to send the test results to your OTLP-compatible instance, e.g., Prometheus.
744
+
745
+ The metric name is "datacontract.cli.test.result" and it uses the following encoding for the result:
746
+
747
+ | datacontract.cli.test.result | Description |
748
+ |-------|---------------------------------------|
749
+ | 0 | test run passed, no warnings |
750
+ | 1 | test run has warnings |
751
+ | 2 | test run failed |
752
+ | 3 | test run not possible due to an error |
753
+ | 4 | test status unknown |
754
+
755
+
756
+ ```bash
757
+ # Fetch current data contract, execute tests on production, and publish result to open telemetry
758
+ $ export OTEL_SERVICE_NAME=datacontract-cli
759
+ $ export OTEL_EXPORTER_OTLP_ENDPOINT=https://YOUR_ID.apm.westeurope.azure.elastic-cloud.com:443
760
+ $ export OTEL_EXPORTER_OTLP_HEADERS=Authorization=Bearer%20secret # Optional, when using SaaS Products
761
+ $ export OTEL_EXPORTER_OTLP_PROTOCOL=http/protobuf # Optional, default is http/protobuf - use value grpc to use the gRPC protocol instead
762
+ # Send to OpenTelemetry
763
+ $ datacontract test https://demo.datamesh-manager.com/demo279750347121/datacontracts/4df9d6ee-e55d-4088-9598-b635b2fdcbbc/datacontract.yaml --server production --publish-to-opentelemetry
764
+ ```
765
+
766
+ Current limitations:
767
+ - currently, only ConsoleExporter and OTLP Exporter
768
+ - Metrics only, no logs yet (but loosely planned)
769
+
770
+
771
+ ## Best Practices
772
+
773
+ We share best practices in using the Data Contract CLI.
774
+
775
+ ### Data-first Approach
776
+
777
+ Create a data contract based on the actual data. This is the fastest way to get started and to get feedback from the data consumers.
778
+
779
+ 1. Use an existing physical schema (e.g., SQL DDL) as a starting point to define your logical data model in the contract. Double check right after the import whether the actual data meets the imported logical data model. Just to be sure.
780
+ ```bash
781
+ $ datacontract import --format sql ddl.sql
782
+ $ datacontract test
783
+ ```
784
+
785
+ 2. Add examples to the `datacontract.yaml`. If you can, use actual data and anonymize. Make sure that the examples match the imported logical data model.
786
+ ```bash
787
+ $ datacontract test --examples
788
+ ```
789
+
790
+
791
+ 3. Add quality checks and additional type constraints one by one to the contract and make sure the examples and the actual data still adhere to the contract. Check against examples for a very fast feedback loop.
792
+ ```bash
793
+ $ datacontract test --examples
794
+ $ datacontract test
795
+ ```
796
+
797
+ 4. Make sure that all the best practices for a `datacontract.yaml` are met using the linter. You probably forgot to document some fields and add the terms and conditions.
798
+ ```bash
799
+ $ datacontract lint
800
+ ```
801
+
802
+ 5. Set up a CI pipeline that executes daily and reports the results to the [Data Mesh Manager](https://datamesh-manager.com). Or to some place else. You can even publish to any opentelemetry compatible system.
803
+ ```bash
804
+ $ datacontract test --publish https://api.datamesh-manager.com/api/runs
805
+ ```
806
+
807
+ ### Contract-First
808
+
809
+ Create a data contract based on the requirements from use cases.
810
+
811
+ 1. Start with a `datacontract.yaml` template.
812
+ ```bash
813
+ $ datacontract init
814
+ ```
815
+
816
+ 2. Add examples to the `datacontract.yaml`. Do not start with the data model, although you are probably tempted to do that. Examples are the fastest way to get feedback from everybody and not lose someone in the discussion.
817
+
818
+ 3. Create the model based on the examples. Test the model against the examples to double-check whether the model matches the examples.
819
+ ```bash
820
+ $ datacontract test --examples
821
+ ```
822
+
823
+ 4. Add quality checks and additional type constraints one by one to the contract and make sure the examples and the actual data still adhere to the contract. Check against examples for a very fast feedback loop.
824
+ ```bash
825
+ $ datacontract test --examples
826
+ ```
827
+
828
+ 5. Fill in the terms, descriptions, etc. Make sure you follow all best practices for a `datacontract.yaml` using the linter.
829
+ ```bash
830
+ $ datacontract lint
831
+ ```
832
+
833
+ 6. Set up a CI pipeline that lints and tests the examples so you make sure that any changes later do not decrease the quality of the contract.
834
+ ```bash
835
+ $ datacontract lint
836
+ $ datacontract test --examples
837
+ ```
838
+
839
+ 7. Use the export function to start building the providing data product as well as the integration into the consuming data products.
840
+ ```bash
841
+ # data provider
842
+ $ datacontract export --format dbt
843
+ # data consumer
844
+ $ datacontract export --format dbt-sources
845
+ $ datacontract export --format dbt-staging-sql
846
+ ```
847
+
848
+ ### Schema Evolution
849
+
850
+ #### Non-breaking Changes
851
+ Examples: adding models or fields
852
+
853
+ - Add the models or fields in the datacontract.yaml
854
+ - Increment the minor version of the datacontract.yaml on any change. Simply edit the datacontract.yaml for this.
855
+ - You need a policy that these changes are non-breaking. That means that one cannot use the star expression in SQL to query a table under contract. Make the consequences known.
856
+ - Fail the build in the Pull Request if a datacontract.yaml accidentally adds a breaking change despite only a minor version change
857
+ ```bash
858
+ $ datacontract breaking datacontract-from-pr.yaml datacontract-from-main.yaml
859
+ ```
860
+ - Create a changelog of this minor change.
861
+ ```bash
862
+ $ datacontract changelog datacontract-from-pr.yaml datacontract-from-main.yaml
863
+ ```
864
+ #### Breaking Changes
865
+ Examples: Removing or renaming models and fields.
866
+
867
+ - Remove or rename models and fields in the datacontract.yaml, and any other change that might be part of this new major version of this data contract.
868
+ - Increment the major version of the datacontract.yaml for this and create a new file for the major version. The reason being, that one needs to offer an upgrade path for the data consumers from the old to the new major version.
869
+ - As data consumers need to migrate, try to reduce the frequency of major versions by making multiple breaking changes together if possible.
870
+ - Be aware of the notice period in the data contract as this is the minimum amount of time you have to offer both the old and the new version for a migration path.
871
+ - Do not fear making breaking changes with data contracts. It's okay to do them in this controlled way. Really!
872
+ - Create a changelog of this major change.
873
+ ```bash
874
+ $ datacontract changelog datacontract-from-pr.yaml datacontract-from-main.yaml
875
+ ```
876
+
877
+ ## Development Setup
878
+
879
+ Python base interpreter should be 3.11.x (unless working on 3.12 release candidate).
880
+
881
+ ```bash
882
+ # create venv
883
+ python3 -m venv venv
884
+ source venv/bin/activate
885
+
886
+ # Install Requirements
887
+ pip install --upgrade pip setuptools wheel
888
+ pip install -e '.[dev]'
889
+ ruff check --fix
890
+ ruff format --check
891
+ pytest
892
+ ```
893
+
894
+
895
+ ### Docker Build
896
+
897
+ ```bash
898
+ docker build -t datacontract/cli .
899
+ docker run --rm -v ${PWD}:/home/datacontract datacontract/cli
900
+ ```
901
+
902
+ #### Docker compose integration
903
+
904
+ We've included a [docker-compose.yml](./docker-compose.yml) configuration to simplify the build, test, and deployment of the image.
905
+
906
+ ##### Building the Image with Docker Compose
907
+
908
+ To build the Docker image using Docker Compose, run the following command:
909
+
910
+ ```bash
911
+ docker compose build
912
+ ```
913
+
914
+ This command utilizes the `docker-compose.yml` to build the image, leveraging predefined settings such as the build context and Dockerfile location. This approach streamlines the image creation process, avoiding the need for manual build specifications each time.
915
+
916
+ #### Testing the Image
917
+
918
+ After building the image, you can test it directly with Docker Compose:
919
+
920
+ ```bash
921
+ docker compose run --rm datacontract --version
922
+ ```
923
+
924
+ This command runs the container momentarily to check the version of the `datacontract` CLI. The `--rm` flag ensures that the container is automatically removed after the command executes, keeping your environment clean.
925
+
926
+
927
+
928
+ ## Release Steps
929
+
930
+ 1. Update the version in `pyproject.toml`
931
+ 2. Have a look at the `CHANGELOG.md`
932
+ 3. Create release commit manually
933
+ 4. Execute `./release`
934
+ 5. Wait until GitHub Release is created
935
+ 6. Add the release notes to the GitHub Release
936
+
937
+ ## Contribution
938
+
939
+ We are happy to receive your contributions. Propose your change in an issue or directly create a pull request with your improvements.
940
+
941
+ ## License
942
+
943
+ [MIT License](LICENSE)
944
+
945
+ ## Credits
946
+
947
+ Created by [Stefan Negele](https://www.linkedin.com/in/stefan-negele-573153112/) and [Jochen Christ](https://www.linkedin.com/in/jochenchrist/).
948
+
949
+
950
+
951
+ <a href="https://github.com/datacontract/cli" class="github-corner" aria-label="View source on GitHub"><svg width="80" height="80" viewBox="0 0 250 250" style="fill:#151513; color:#fff; position: absolute; top: 0; border: 0; right: 0;" aria-hidden="true"><path d="M0,0 L115,115 L130,115 L142,142 L250,250 L250,0 Z"></path><path d="M128.3,109.0 C113.8,99.7 119.0,89.6 119.0,89.6 C122.0,82.7 120.5,78.6 120.5,78.6 C119.2,72.0 123.4,76.3 123.4,76.3 C127.3,80.9 125.5,87.3 125.5,87.3 C122.9,97.6 130.6,101.9 134.4,103.2" fill="currentColor" style="transform-origin: 130px 106px;" class="octo-arm"></path><path d="M115.0,115.0 C114.9,115.1 118.7,116.5 119.8,115.4 L133.7,101.6 C136.9,99.2 139.9,98.4 142.2,98.6 C133.8,88.0 127.5,74.4 143.8,58.0 C148.5,53.4 154.0,51.2 159.7,51.0 C160.3,49.4 163.2,43.6 171.4,40.1 C171.4,40.1 176.1,42.5 178.8,56.2 C183.1,58.6 187.2,61.8 190.9,65.4 C194.5,69.0 197.7,73.2 200.1,77.6 C213.8,80.2 216.3,84.9 216.3,84.9 C212.7,93.1 206.9,96.0 205.4,96.6 C205.1,102.4 203.0,107.8 198.3,112.5 C181.9,128.9 168.3,122.5 157.7,114.1 C157.9,116.9 156.7,120.9 152.7,124.9 L141.0,136.5 C139.8,137.7 141.6,141.9 141.8,141.8 Z" fill="currentColor" class="octo-body"></path></svg></a><style>.github-corner:hover .octo-arm{animation:octocat-wave 560ms ease-in-out}@keyframes octocat-wave{0%,100%{transform:rotate(0)}20%,60%{transform:rotate(-25deg)}40%,80%{transform:rotate(10deg)}}@media (max-width:500px){.github-corner:hover .octo-arm{animation:none}.github-corner .octo-arm{animation:octocat-wave 560ms ease-in-out}}</style>