dagster-datacontract 0.3.1__py3-none-any.whl → 0.4.1__py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
--- a/dagster_datacontract/__init__.py
+++ b/dagster_datacontract/__init__.py
@@ -2,7 +2,6 @@ from datetime import timedelta
 from typing import Any
 
 import dagster as dg
-from dagster import TableColumnLineage, TableSchema
 from datacontract.data_contract import DataContract
 from datacontract.model.run import ResultEnum
 from loguru import logger
@@ -10,22 +9,34 @@ from loguru import logger
 from dagster_datacontract.description import get_description
 from dagster_datacontract.metadata import (
     get_column_lineage,
+    get_links,
     get_server_information,
     get_table_column,
 )
+from dagster_datacontract.owners import get_owner
 from dagster_datacontract.tags import get_tags
-from dagster_datacontract.utils import normalize_path
+from dagster_datacontract.utils import combine_parts, normalize_path
 
 
 class DataContractLoader:
     def __init__(
         self,
         asset_name: str,
-        data_contract: DataContract,
+        data_contract: DataContract | None = None,
+        data_contract_path: str | None = None,
     ):
+        if data_contract is None and data_contract_path is None:
+            raise ValueError(
+                "Either 'data_contract' or 'data_contract_path' must be provided."
+            )
+
         self.asset_name = asset_name
         self.asset_key = dg.AssetKey(path=self.asset_name)
-        self.data_contract = data_contract
+        self.data_contract = (
+            data_contract
+            if data_contract
+            else DataContract(data_contract_file=data_contract_path)
+        )
         self.data_contract_specification = (
             self.data_contract.get_data_contract_specification()
         )
@@ -35,16 +46,24 @@ class DataContractLoader:
             self.asset_name,
             self.data_contract_specification,
         )
-        self.owner = self._load_owner()
+        self.owner = get_owner(self.data_contract_specification)
         self.version = self._load_version()
         self.cron_schedule = self._load_cron_schedule()
+        self.asset_spec = dg.AssetSpec(
+            key=asset_name,
+            description=self.description,
+            metadata=self.metadata,
+            code_version=self.version,
+            owners=self.owner,
+            tags=self.tags,
+        )
 
     def _load_metadata(
         self,
-    ) -> dict[str, TableColumnLineage | TableSchema | Any] | None:
+    ) -> dict[str, dg.TableColumnLineage | dg.TableSchema | Any] | None:
         metadata = (
             {
-                "data contract path": dg.MetadataValue.url(
+                "datacontract/path": dg.MetadataValue.url(
                     normalize_path(self.data_contract._data_contract_file)
                 ),
             }
@@ -54,19 +73,24 @@ class DataContractLoader:
         columns = []
         deps_by_column = {}
 
-        fields = self.data_contract_specification.models.get(self.asset_name).fields
+        try:
+            fields = self.data_contract_specification.models.get(self.asset_name).fields
 
-        for column_name, column_field in fields.items():
-            table_column = get_table_column(column_name, column_field)
-            columns.append(table_column)
+            for column_name, column_field in fields.items():
+                table_column = get_table_column(column_name, column_field)
+                columns.append(table_column)
 
-            table_column_lineage = get_column_lineage(column_field)
-            deps_by_column[column_name] = table_column_lineage
+                table_column_lineage = get_column_lineage(column_field)
+                deps_by_column[column_name] = table_column_lineage
 
-        metadata["dagster/column_schema"] = dg.TableSchema(columns=columns)
-        metadata["dagster/column_lineage"] = dg.TableColumnLineage(
-            deps_by_column=deps_by_column
-        )
+            metadata["dagster/column_schema"] = dg.TableSchema(columns=columns)
+            metadata["dagster/column_lineage"] = dg.TableColumnLineage(
+                deps_by_column=deps_by_column
+            )
+        except AttributeError as e:
+            logger.warning(
+                f"No field named {self.asset_name} found in data contract.\n{e}"
+            )
 
         server_information = get_server_information(
             self.data_contract_specification,
@@ -75,12 +99,10 @@ class DataContractLoader:
         )
         metadata.update(server_information)
 
-        return metadata
-
-    def _load_owner(self) -> list[str] | None:
-        owner = self.data_contract_specification.info.owner
+        links = get_links(self.data_contract_specification.links)
+        metadata.update(links)
 
-        return [f"team:{owner}"] if owner else None
+        return metadata
 
     def _load_version(self) -> str | None:
         version = self.data_contract_specification.info.version
@@ -158,3 +180,55 @@ class DataContractLoader:
         )
 
         return freshness_checks
+
+    def combine_asset_specs(
+        self,
+        asset_spec: dg.AssetSpec,
+    ) -> dg.AssetSpec:
+        """Merge the given AssetSpec with the current object's attributes to produce a new AssetSpec.
+
+        This method combines metadata, descriptions, code versions, owners, and tags from the
+        provided `asset_spec` and the current instance. Preference is generally given to the
+        current instance's values where appropriate. Fields like dependencies, skippability,
+        group name, automation condition, kinds, and partition definitions are taken directly
+        from the input `asset_spec`.
+
+        Args:
+            asset_spec (dg.AssetSpec): The base asset specification to merge with the current one.
+
+        Returns:
+            dg.AssetSpec: A new AssetSpec instance containing the combined data.
+
+        Notes:
+            - Descriptions are joined with double newlines (`"\n\n"`).
+            - Code versions are joined with an underscore (`"_"`).
+            - Owners are concatenated.
+            - Metadata and tags are merged with the current instance taking precedence.
+        """
+        description = combine_parts(
+            [asset_spec.description, self.description], delimiter="\n\n"
+        )
+        metadata = {
+            **asset_spec.metadata,
+            **self.metadata,
+        }
+        code_version = combine_parts(
+            [asset_spec.code_version, self.version], delimiter="_"
+        )
+        owners = list(asset_spec.owners) + self.owner
+        tags = {**asset_spec.tags, **self.tags}
+
+        return dg.AssetSpec(
+            key=self.asset_name,
+            deps=asset_spec.deps,
+            description=description,
+            metadata=metadata,
+            skippable=asset_spec.skippable,
+            group_name=asset_spec.group_name,
+            code_version=code_version,
+            automation_condition=asset_spec.automation_condition,
+            owners=owners,
+            tags=tags,
+            kinds=asset_spec.kinds,
+            partitions_def=asset_spec.partitions_def,
+        )
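Taken together, the `__init__.py` changes mean the loader can now be built from a contract path alone and exposes a ready-made `AssetSpec`. A minimal usage sketch, assuming the package root exports `DataContractLoader` (as in the README example further down); the asset name, contract path, and base spec are illustrative:

```python
import dagster as dg

from dagster_datacontract import DataContractLoader

# New in 0.4.x: pass a path instead of a pre-built DataContract object.
loader = DataContractLoader(
    asset_name="orders",                     # illustrative asset name
    data_contract_path="datacontract.yaml",  # illustrative contract path
)

# The constructor now pre-builds an AssetSpec from the contract's
# description, metadata, version, owner, and tags.
spec_from_contract = loader.asset_spec

# Or merge the contract-derived fields into an existing AssetSpec.
base_spec = dg.AssetSpec(key="orders", group_name="sales", code_version="v3")
merged_spec = loader.combine_asset_specs(base_spec)
```

Per the docstring above, `combine_asset_specs` keeps structural fields (deps, group name, partitions, automation condition) from the base spec, while contract-derived metadata and tags take precedence.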
--- a/dagster_datacontract/metadata/__init__.py
+++ b/dagster_datacontract/metadata/__init__.py
@@ -1,7 +1,13 @@
+from dagster_datacontract.metadata.links import get_links
 from dagster_datacontract.metadata.server_information import get_server_information
 from dagster_datacontract.metadata.table_colums import (
     get_column_lineage,
     get_table_column,
 )
 
-__all__ = ["get_table_column", "get_column_lineage", "get_server_information"]
+__all__ = [
+    "get_column_lineage",
+    "get_links",
+    "get_table_column",
+    "get_server_information",
+]
--- /dev/null
+++ b/dagster_datacontract/metadata/links.py
@@ -0,0 +1,17 @@
+import dagster as dg
+
+
+def get_links(links: dict[str, str]) -> dict[str, str]:
+    """Return a dictionary with keys prefixed by 'link/' and values as Dagster URL metadata.
+
+    Args:
+        links (dict[str, str]): A dictionary where each key is a name/label and each
+            value is a URL string.
+
+    Returns:
+        dict[str, str]: A dictionary where each key is prefixed with 'link/' and
+            each value is a `MetadataValue.url`.
+    """
+    links = {f"link/{key}": dg.MetadataValue.url(value) for key, value in links.items()}
+
+    return links
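A quick sketch of how the new `get_links` helper behaves; the link names and URLs are made up:

```python
from dagster_datacontract.metadata import get_links

# Link names and URLs are illustrative.
links = get_links(
    {
        "wiki": "https://example.com/wiki/orders",
        "catalog": "https://example.com/catalog/orders",
    }
)
# Result: {"link/wiki": MetadataValue.url(...), "link/catalog": MetadataValue.url(...)},
# which _load_metadata() merges into the asset's metadata (see the __init__.py hunk above).
```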
--- a/dagster_datacontract/metadata/server_information.py
+++ b/dagster_datacontract/metadata/server_information.py
@@ -15,6 +15,8 @@ def get_server_information(
     the specified server by name and constructs a dictionary with keys such as
     "dagster/uri" and "dagster/table_name" depending on the server type.
 
+    Server information can be obtained from: https://datacontract.com/#server-object
+
     Parameters:
         data_contract_specification (DataContractSpecification):
             The data contract specification containing server configurations.
@@ -36,43 +38,71 @@ def get_server_information(
     match server.type:
         case "azure":
             server_information["dagster/uri"] = server.location
+            server_information["azure/storage_account"] = server.storageAccount
+            server_information["file/format"] = server.format
+            server_information["file/delimiter"] = server.delimiter
+        case "bigquery":
+            server_information["bigquery/project"] = server.project
+            server_information["bigquery/dataset"] = server.dataset
         case "databricks":
             server_information["dagster/uri"] = server.host
             server_information["dagster/table_name"] = (
                 f"{server.catalog}.{server.schema}.{asset_name}"
             )
+        case "glue":
+            server_information = {}
         case "kafka":
             server_information["dagster/uri"] = server.host
+            server_information["kafka/topic"] = server.topic
+            server_information["kafka/format"] = server.format
         case "kinesis":
-            server_information = {}
+            server_information["kinesis/stream"] = server.stream
+            server_information["kinesis/region"] = server.region
+            server_information["kinesis/format"] = server.format
         case "local":
            server_information["dagster/uri"] = normalize_path(server.path)
+            server_information["file/format"] = server.format
         case "oracle":
             server_information["dagster/uri"] = f"{server.host}:{server.port}"
+            server_information["oracle/service_name"] = server.serviceName
         case "postgres":
             server_information["dagster/uri"] = f"{server.host}:{server.port}"
             server_information["dagster/table_name"] = (
                 f"{server.database}.{server.schema}.{asset_name}"
             )
         case "pubsub":
-            server_information = {}
+            server_information["pubsub/project"] = server.project
+            server_information["pubsub/topic"] = server.topic
         case "redshift":
             server_information["dagster/uri"] = server.endpoint
             server_information["dagster/table_name"] = (
                 f"{server.database}.{server.schema}.{asset_name}"
             )
+            server_information["redshift/account"] = server.account
+            server_information["redshift/host"] = server.host
+            server_information["redshift/port"] = server.port
+            server_information["redshift/cluster"] = server.clusterIdentifier
         case "s3":
             server_information["dagster/uri"] = server.location
+            server_information["s3/endpoint"] = server.endpointUrl
+            server_information["file/format"] = server.format
+            server_information["file/delimiter"] = server.delimiter
         case "sftp":
             server_information["dagster/uri"] = server.location
+            server_information["file/format"] = server.format
+            server_information["file/delimiter"] = server.delimiter
         case "snowflake":
             server_information["dagster/table_name"] = (
                 f"{server.database}.{server.schema}.{asset_name}"
             )
+            server_information["snowflake/account"] = server.account
         case "sqlserver":
             server_information["dagster/table_name"] = (
                 f"{server.database}.{server.schema}.{asset_name}"
             )
+            server_information["sqlserver/host"] = server.host
+            server_information["sqlserver/port"] = server.port
+            server_information["sqlserver/driver"] = server.driver
         case "trino":
             server_information["dagster/uri"] = f"{server.host}:{server.port}"
             server_information["dagster/table_name"] = (
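To illustrate what the expanded `match server.type` block now emits, here is the rough shape of the metadata for two server types; the keys follow the branches above, the values are invented:

```python
# Rough shape only; values are illustrative, not taken from a real contract.
postgres_example = {
    "dagster/uri": "db.example.internal:5432",
    "dagster/table_name": "analytics.public.orders",
}

s3_example = {
    "dagster/uri": "s3://example-bucket/orders/",
    "s3/endpoint": "https://s3.eu-west-1.amazonaws.com",  # from server.endpointUrl
    "file/format": "parquet",                             # from server.format
    "file/delimiter": ",",                                # from server.delimiter
}
```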
--- /dev/null
+++ b/dagster_datacontract/owners/__init__.py
@@ -0,0 +1,23 @@
+from datacontract.data_contract import DataContractSpecification
+
+
+def get_owner(
+    data_contract_specification: DataContractSpecification,
+    is_team: bool = True,
+) -> list[str] | None:
+    """Return the owner of a data contract, optionally formatted as a team identifier.
+
+    Args:
+        data_contract_specification (DataContractSpecification): The data contract specification containing ownership metadata.
+        is_team (bool, optional): If True, formats the owner as a team identifier (e.g., 'team:owner').
+            If False, returns the raw owner string. Defaults to True.
+
+    Returns:
+        list[str] | None: A list containing the owner string, formatted depending on `is_team`, or None if no owner is found.
+    """
+    owner = data_contract_specification.info.owner
+
+    if is_team:
+        return [f"team:{owner}"]
+
+    return [owner]
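A minimal sketch of the new `get_owner` helper, which replaces the loader's private `_load_owner` method; the contract path and owner value are assumed for illustration:

```python
from datacontract.data_contract import DataContract

from dagster_datacontract.owners import get_owner

# Assume datacontract.yaml declares `info.owner: checkout-team`.
spec = DataContract(
    data_contract_file="datacontract.yaml"
).get_data_contract_specification()

get_owner(spec)                 # ["team:checkout-team"]
get_owner(spec, is_team=False)  # ["checkout-team"]
```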
--- a/dagster_datacontract/utils/__init__.py
+++ b/dagster_datacontract/utils/__init__.py
@@ -1,28 +1,8 @@
-import os
-import urllib.parse
-
-
-def normalize_path(path: str) -> str:
-    """Normalizes a file path to ensure it is returned in a consistent URI format.
-
-    This function checks if the provided path is a local file path (with no scheme
-    or with the 'file' scheme) and converts it into a fully qualified file URI.
-    If the path already has a non-'file' scheme (e.g., 's3://', 'http://'),
-    it is returned unchanged.
-
-    Parameters:
-        path (str): The input file path. This can be a relative or absolute local path,
-            a path starting with `~`, or a URI with a supported scheme.
-
-    Returns:
-        str: A normalized path string:
-            - If the input is a local path or has a "file" scheme, returns it in the form "file:///absolute/path".
-            - If the input has another scheme (e.g., "s3://", "http://"), returns it unchanged.
-    """
-    parsed = urllib.parse.urlparse(path)
-
-    if not parsed.scheme or parsed.scheme == "file":
-        full_path = os.path.abspath(os.path.expanduser(path))
-        return f"file://{full_path}"
-    else:
-        return path
+from dagster_datacontract.utils.combine_strings import combine_parts
+from dagster_datacontract.utils.paths import get_absolute_path, normalize_path
+
+__all__ = [
+    "combine_parts",
+    "get_absolute_path",
+    "normalize_path",
+]
--- /dev/null
+++ b/dagster_datacontract/utils/combine_strings.py
@@ -0,0 +1,29 @@
+from collections.abc import Iterable
+
+
+def combine_parts(parts: Iterable[str | None], delimiter: str = "_") -> str:
+    """
+    Combine multiple optional strings using a specified delimiter.
+
+    This function takes an iterable of optional strings and joins the non-None,
+    non-empty strings using the given delimiter. None values and empty strings
+    are ignored. If all values are None or empty, the result is an empty string.
+
+    Args:
+        parts (Iterable[Optional[str]]): An iterable of strings or None values to combine.
+        delimiter (str): A string used to separate the non-None parts. Defaults to "_".
+
+    Returns:
+        str: A single combined string of all non-None, non-empty parts separated by the delimiter.
+
+    Examples:
+        >>> combine_parts(["v1", "2023", None])
+        'v1_2023'
+
+        >>> combine_parts([None, None])
+        ''
+
+        >>> combine_parts(["", "alpha", None])
+        'alpha'
+    """
+    return delimiter.join(filter(None, parts))
--- /dev/null
+++ b/dagster_datacontract/utils/paths.py
@@ -0,0 +1,51 @@
+import os
+from pathlib import Path
+from urllib.parse import urlparse
+
+import dagster as dg
+
+
+def normalize_path(path: str) -> str:
+    """Normalizes a file path to ensure it is returned in a consistent URI format.
+
+    This function checks if the provided path is a local file path (with no scheme
+    or with the 'file' scheme) and converts it into a fully qualified file URI.
+    If the path already has a non-'file' scheme (e.g., 's3://', 'http://'),
+    it is returned unchanged.
+
+    Parameters:
+        path (str): The input file path. This can be a relative or absolute local path,
+            a path starting with `~`, or a URI with a supported scheme.
+
+    Returns:
+        str: A normalized path string:
+            - If the input is a local path or has a "file" scheme, returns it in the form "file:///absolute/path".
+            - If the input has another scheme (e.g., "s3://", "http://"), returns it unchanged.
+    """
+    parsed = urlparse(path)
+
+    if not parsed.scheme or parsed.scheme == "file":
+        full_path = os.path.abspath(os.path.expanduser(path))
+        return f"file://{full_path}"
+    else:
+        return path
+
+
+def get_absolute_path(
+    context_path: Path,
+    full_path: str,
+) -> Path:
+    """TODO."""
+    if isinstance(full_path, dg.UrlMetadataValue):
+        full_path = full_path.url
+
+    parsed_path = urlparse(full_path)
+    if parsed_path.scheme == "file":
+        full_path = Path(parsed_path.path)
+    else:
+        full_path = Path(full_path)
+
+    if full_path.is_absolute():
+        return full_path
+
+    return Path(context_path, full_path).absolute()
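A short sketch of the relocated path helpers, re-exported from `dagster_datacontract.utils`; the paths are illustrative, and the `normalize_path` output depends on the local home directory:

```python
from pathlib import Path

from dagster_datacontract.utils import get_absolute_path, normalize_path

normalize_path("~/contracts/orders.yaml")
# -> "file:///home/<user>/contracts/orders.yaml" (expanded to an absolute file URI)

normalize_path("s3://example-bucket/contracts/orders.yaml")
# -> returned unchanged, since the scheme is not "file"

get_absolute_path(Path("/workspace"), "file:///workspace/contracts/orders.yaml")
# -> Path("/workspace/contracts/orders.yaml")

get_absolute_path(Path("/workspace"), "contracts/orders.yaml")
# -> Path("/workspace/contracts/orders.yaml"), resolved against the context path
```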
--- a/dagster_datacontract-0.3.1.dist-info/METADATA
+++ b/dagster_datacontract-0.4.1.dist-info/METADATA
@@ -1,13 +1,14 @@
 Metadata-Version: 2.4
 Name: dagster-datacontract
-Version: 0.3.1
+Version: 0.4.1
 Summary: Load metadata and asset check spesifications from data contracts.
 Author-email: Fredrik Bakken <fredrik@dataheim.io>
-Requires-Python: >=3.10.0
+Requires-Python: >=3.10
 Description-Content-Type: text/markdown
 License-File: LICENSE
 Requires-Dist: dagster>=1.10.10
-Requires-Dist: datacontract-cli>=0.10.23
+Requires-Dist: dagster-dg>=0.26.11
+Requires-Dist: datacontract-cli>=0.10.24
 Requires-Dist: loguru>=0.7.3
 Dynamic: license-file
 
@@ -25,7 +26,7 @@ uv add dagster-datacontract
 
 ## Simple Example
 
-The following example can be found inside the [example](https://github.com/dataheim-io/dagster-datacontract/tree/main/example) directory:
+The following example can be found inside the [examples/simple](https://github.com/dataheim-io/dagster-datacontract/tree/main/examples/simple)-directory:
 
 ```python
 from datetime import timedelta
--- /dev/null
+++ b/dagster_datacontract-0.4.1.dist-info/RECORD
@@ -0,0 +1,18 @@
+dagster_datacontract/__init__.py,sha256=Tv6_G45c16Yq5Sh9A5Xw9rxkkIiAm9BBxuK79kkvSvE,8585
+dagster_datacontract/description/__init__.py,sha256=ulWqPp5jIPvCzaDFZcjLjcDkljJ5j_FRsE0dXhK8Wlc,104
+dagster_datacontract/description/description.py,sha256=FmjgCYDpJ9UHrvAv0sAthfRohDjdG0lL1XcMKK8QMmI,1646
+dagster_datacontract/metadata/__init__.py,sha256=Gj7Htl3rYRXsE-631yr4LWqL7Tf5bZKFEFjTeglxZek,359
+dagster_datacontract/metadata/links.py,sha256=ckSTZLhj6ZETrJMeXDEKVFIoZz871lhL8ZgRtsEIIvs,562
+dagster_datacontract/metadata/server_information.py,sha256=Wd7hPZ8IfeISBe5VThz0pBGkmiPv4Ia2MGNhBJ-aWik,5397
+dagster_datacontract/metadata/table_colums.py,sha256=Q7ZCiMReWU4-T2YfBvtt5vvoVXEoUgzK5OPMxQEgzpQ,4013
+dagster_datacontract/owners/__init__.py,sha256=c0AhLQRzfw-QPmsF9rPXRyE6VoLmgDRRNgVqG8JUvFs,882
+dagster_datacontract/tags/__init__.py,sha256=2Ph-M0WbBKUjJWIzM_cEBW3SQZh7Nq8oy5MbD5bt_lc,76
+dagster_datacontract/tags/tags.py,sha256=aZ_HTkc-vjJ_rofT32fT_zrLCt9x1ZGn8XoihhOMhfU,1414
+dagster_datacontract/utils/__init__.py,sha256=GSQ2Zry9pKlLhWI5Vjoj3X7iZiEgt-SjqnQRwfplbHM,231
+dagster_datacontract/utils/combine_strings.py,sha256=nWy3unX6yuPi8YmvDTl_mO10K6MbJP8cxQWPOMDj6G4,987
+dagster_datacontract/utils/paths.py,sha256=Rh-l5GSmxZwhIVQ_aBJPHTKU5afEma8hlA5RT31EhbY,1611
+dagster_datacontract-0.4.1.dist-info/licenses/LICENSE,sha256=9ULsEM1ICzCaGoso40plwO-d_SCQ7nsU6ZA4xgfaRq8,11338
+dagster_datacontract-0.4.1.dist-info/METADATA,sha256=KAjnQQkr1UUNBAPcQ44mJcqUqmcEqxHDvr3pU-5YyWU,3078
+dagster_datacontract-0.4.1.dist-info/WHEEL,sha256=pxyMxgL8-pra_rKaQ4drOZAegBVuX-G_4nRHjjgWbmo,91
+dagster_datacontract-0.4.1.dist-info/top_level.txt,sha256=_HUQ6OJ50Q0VZxEkdocTtxk1QkJpztb1QY7A0rcvtCE,21
+dagster_datacontract-0.4.1.dist-info/RECORD,,
--- a/dagster_datacontract-0.3.1.dist-info/WHEEL
+++ b/dagster_datacontract-0.4.1.dist-info/WHEEL
@@ -1,5 +1,5 @@
 Wheel-Version: 1.0
-Generator: setuptools (78.1.0)
+Generator: setuptools (79.0.0)
 Root-Is-Purelib: true
 Tag: py3-none-any
 
--- a/dagster_datacontract-0.3.1.dist-info/RECORD
+++ /dev/null
@@ -1,14 +0,0 @@
-dagster_datacontract/__init__.py,sha256=Pzq4AblLnDhW0QBmV8ntV5KLD0a7D0-mA2Lw9hjS_V8,5685
-dagster_datacontract/description/__init__.py,sha256=ulWqPp5jIPvCzaDFZcjLjcDkljJ5j_FRsE0dXhK8Wlc,104
-dagster_datacontract/description/description.py,sha256=FmjgCYDpJ9UHrvAv0sAthfRohDjdG0lL1XcMKK8QMmI,1646
-dagster_datacontract/metadata/__init__.py,sha256=e-xmcWWoAhmKTwosshsxnyrjI1j-UyY6YpdpzA2ggF4,269
-dagster_datacontract/metadata/server_information.py,sha256=jk_H8aI5PdGzIeoYThlhhZMSOtBh-6xc8QAFz0BFesU,3512
-dagster_datacontract/metadata/table_colums.py,sha256=Q7ZCiMReWU4-T2YfBvtt5vvoVXEoUgzK5OPMxQEgzpQ,4013
-dagster_datacontract/tags/__init__.py,sha256=2Ph-M0WbBKUjJWIzM_cEBW3SQZh7Nq8oy5MbD5bt_lc,76
-dagster_datacontract/tags/tags.py,sha256=aZ_HTkc-vjJ_rofT32fT_zrLCt9x1ZGn8XoihhOMhfU,1414
-dagster_datacontract/utils/__init__.py,sha256=Zfbuf20Eorf7BD8gSDASiPqgPwWu8Mz03r3aa2zE4NA,1106
-dagster_datacontract-0.3.1.dist-info/licenses/LICENSE,sha256=9ULsEM1ICzCaGoso40plwO-d_SCQ7nsU6ZA4xgfaRq8,11338
-dagster_datacontract-0.3.1.dist-info/METADATA,sha256=P98oLsqB-GCJz4uanki-xtG3UnZCHD0biDMUYIZMEhc,3029
-dagster_datacontract-0.3.1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-dagster_datacontract-0.3.1.dist-info/top_level.txt,sha256=_HUQ6OJ50Q0VZxEkdocTtxk1QkJpztb1QY7A0rcvtCE,21
-dagster_datacontract-0.3.1.dist-info/RECORD,,