acryl-datahub 1.1.0rc3__py3-none-any.whl → 1.1.0.1rc6__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of acryl-datahub has been flagged as potentially problematic.
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/METADATA +2515 -2513
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/RECORD +87 -70
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/WHEEL +1 -1
- datahub/_version.py +1 -1
- datahub/api/entities/dataset/dataset.py +9 -8
- datahub/api/entities/external/__init__.py +0 -0
- datahub/api/entities/external/external_entities.py +239 -0
- datahub/api/entities/external/external_tag.py +145 -0
- datahub/api/entities/external/restricted_text.py +247 -0
- datahub/api/entities/external/unity_catalog_external_entites.py +170 -0
- datahub/api/entities/structuredproperties/structuredproperties.py +2 -2
- datahub/cli/delete_cli.py +4 -4
- datahub/cli/ingest_cli.py +9 -1
- datahub/emitter/mce_builder.py +3 -1
- datahub/emitter/response_helper.py +86 -1
- datahub/emitter/rest_emitter.py +1 -1
- datahub/ingestion/graph/client.py +3 -3
- datahub/ingestion/source/apply/datahub_apply.py +4 -4
- datahub/ingestion/source/data_lake_common/data_lake_utils.py +22 -10
- datahub/ingestion/source/data_lake_common/object_store.py +644 -0
- datahub/ingestion/source/datahub/config.py +11 -0
- datahub/ingestion/source/datahub/datahub_database_reader.py +186 -33
- datahub/ingestion/source/datahub/datahub_source.py +1 -1
- datahub/ingestion/source/dbt/dbt_common.py +30 -11
- datahub/ingestion/source/gcs/gcs_source.py +22 -7
- datahub/ingestion/source/gcs/gcs_utils.py +36 -9
- datahub/ingestion/source/hex/query_fetcher.py +9 -3
- datahub/ingestion/source/openapi.py +12 -0
- datahub/ingestion/source/openapi_parser.py +56 -37
- datahub/ingestion/source/s3/source.py +65 -6
- datahub/ingestion/source/snowflake/snowflake_config.py +13 -0
- datahub/ingestion/source/snowflake/snowflake_queries.py +44 -21
- datahub/ingestion/source/snowflake/snowflake_query.py +0 -7
- datahub/ingestion/source/snowflake/snowflake_v2.py +17 -6
- datahub/ingestion/source/sql/athena.py +1 -0
- datahub/ingestion/source/sql/hive.py +2 -3
- datahub/ingestion/source/sql/sql_common.py +98 -34
- datahub/ingestion/source/sql/sql_types.py +5 -2
- datahub/ingestion/source/unity/config.py +5 -0
- datahub/ingestion/source/unity/proxy.py +117 -0
- datahub/ingestion/source/unity/source.py +167 -15
- datahub/ingestion/source/unity/tag_entities.py +295 -0
- datahub/metadata/_internal_schema_classes.py +667 -522
- datahub/metadata/_urns/urn_defs.py +1804 -1748
- datahub/metadata/com/linkedin/pegasus2avro/application/__init__.py +19 -0
- datahub/metadata/schema.avsc +17358 -17584
- datahub/metadata/schemas/ApplicationKey.avsc +31 -0
- datahub/metadata/schemas/ApplicationProperties.avsc +72 -0
- datahub/metadata/schemas/Applications.avsc +38 -0
- datahub/metadata/schemas/ChartKey.avsc +1 -0
- datahub/metadata/schemas/ContainerKey.avsc +1 -0
- datahub/metadata/schemas/DashboardKey.avsc +1 -0
- datahub/metadata/schemas/DataFlowKey.avsc +1 -0
- datahub/metadata/schemas/DataHubIngestionSourceKey.avsc +2 -1
- datahub/metadata/schemas/DataJobKey.avsc +1 -0
- datahub/metadata/schemas/DataProductKey.avsc +1 -0
- datahub/metadata/schemas/DataProductProperties.avsc +1 -1
- datahub/metadata/schemas/DatasetKey.avsc +1 -0
- datahub/metadata/schemas/ExecutionRequestInput.avsc +5 -0
- datahub/metadata/schemas/GlossaryTermKey.avsc +1 -0
- datahub/metadata/schemas/MLFeatureKey.avsc +1 -0
- datahub/metadata/schemas/MLFeatureTableKey.avsc +1 -0
- datahub/metadata/schemas/MLModelGroupKey.avsc +1 -0
- datahub/metadata/schemas/MLModelKey.avsc +1 -0
- datahub/metadata/schemas/MLPrimaryKeyKey.avsc +1 -0
- datahub/metadata/schemas/NotebookKey.avsc +1 -0
- datahub/metadata/schemas/__init__.py +3 -3
- datahub/sdk/__init__.py +6 -0
- datahub/sdk/_all_entities.py +11 -0
- datahub/sdk/_shared.py +118 -1
- datahub/sdk/chart.py +315 -0
- datahub/sdk/container.py +7 -0
- datahub/sdk/dashboard.py +432 -0
- datahub/sdk/dataflow.py +309 -0
- datahub/sdk/datajob.py +342 -0
- datahub/sdk/dataset.py +8 -2
- datahub/sdk/entity_client.py +90 -2
- datahub/sdk/lineage_client.py +681 -82
- datahub/sdk/main_client.py +27 -8
- datahub/sdk/mlmodel.py +101 -38
- datahub/sdk/mlmodelgroup.py +7 -0
- datahub/sql_parsing/sql_parsing_aggregator.py +1 -1
- datahub/testing/mce_helpers.py +421 -0
- datahub/testing/sdk_v2_helpers.py +18 -0
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.1.0rc3.dist-info → acryl_datahub-1.1.0.1rc6.dist-info}/top_level.txt +0 -0
datahub/sdk/main_client.py
CHANGED
@@ -7,15 +7,16 @@ from datahub.ingestion.graph.client import DataHubGraph, get_default_graph
 from datahub.ingestion.graph.config import ClientMode, DatahubClientConfig
 from datahub.sdk.entity_client import EntityClient
 from datahub.sdk.lineage_client import LineageClient
-from datahub.sdk.resolver_client import ResolverClient
 from datahub.sdk.search_client import SearchClient
 
 try:
     from acryl_datahub_cloud._sdk_extras import (  # type: ignore[import-not-found]
-
+        ResolverClient,
     )
 except ImportError:
-
+    from datahub.sdk.resolver_client import (  # type: ignore[assignment]  # If the client is not installed, use the one from the SDK
+        ResolverClient,
+    )
 
 
 class DataHubClient:
@@ -112,9 +113,27 @@ class DataHubClient:
         return LineageClient(self)
 
     @property
-    def assertions(self)
-
-
-
-    )
+    def assertions(self):  # type: ignore[report-untyped-call]  # Not available due to circular import issues
+        try:
+            from acryl_datahub_cloud._sdk_extras import AssertionsClient
+        except ImportError as e:
+            if "acryl_datahub_cloud" in str(e):
+                raise SdkUsageError(
+                    "AssertionsClient is not installed, please install it with `pip install acryl-datahub-cloud`"
+                ) from e
+            else:
+                raise e
         return AssertionsClient(self)
+
+    @property
+    def subscriptions(self):  # type: ignore[report-untyped-call]  # Not available due to circular import issues
+        try:
+            from acryl_datahub_cloud._sdk_extras import SubscriptionClient
+        except ImportError as e:
+            if "acryl_datahub_cloud" in str(e):
+                raise SdkUsageError(
+                    "SubscriptionClient is not installed, please install it with `pip install acryl-datahub-cloud`"
+                ) from e
+            else:
+                raise e
+        return SubscriptionClient(self)
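
Net effect of these hunks: ResolverClient and the new assertions/subscriptions properties now resolve from the optional acryl-datahub-cloud package, with ResolverClient falling back to the OSS implementation when the cloud package is absent. A minimal usage sketch under those assumptions (the server URL is made up; everything else comes from the diff):

from datahub.errors import SdkUsageError
from datahub.sdk.main_client import DataHubClient

client = DataHubClient(server="http://localhost:8080")

try:
    # Lazily imports AssertionsClient from acryl_datahub_cloud on first access.
    assertions = client.assertions
except SdkUsageError as e:
    # Raised when acryl-datahub-cloud is not installed.
    print(e)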
datahub/sdk/mlmodel.py
CHANGED
@@ -24,6 +24,7 @@ from datahub.sdk._shared import (
     HasInstitutionalMemory,
     HasOwnership,
     HasPlatformInstance,
+    HasStructuredProperties,
     HasTags,
     HasTerms,
     HasVersion,
@@ -31,6 +32,7 @@ from datahub.sdk._shared import (
     LinksInputType,
     MLTrainingJobInputType,
     OwnersInputType,
+    StructuredPropertyInputType,
     TagsInputType,
     TermsInputType,
     TrainingMetricsInputType,
@@ -50,6 +52,7 @@ class MLModel(
     HasTerms,
     HasDomain,
     HasVersion,
+    HasStructuredProperties,
     Entity,
 ):
     __slots__ = ()
@@ -82,53 +85,43 @@ class MLModel(
         model_group: Optional[Union[str, MlModelGroupUrn]] = None,
         training_jobs: Optional[MLTrainingJobInputType] = None,
         downstream_jobs: Optional[MLTrainingJobInputType] = None,
+        structured_properties: Optional[StructuredPropertyInputType] = None,
         extra_aspects: ExtraAspectsType = None,
     ):
         urn = MlModelUrn(platform=platform, name=id, env=env)
         super().__init__(urn)
         self._set_extra_aspects(extra_aspects)
-
         self._set_platform_instance(urn.platform, platform_instance)
-
         self._ensure_model_props()
 
-
-
-
-
-
-
-
-
-
-
-
-        self.set_hyper_params(hyper_params)
-        if external_url is not None:
-            self.set_external_url(external_url)
-        if custom_properties is not None:
-            self.set_custom_properties(custom_properties)
-        if created is not None:
-            self.set_created(created)
-        if last_modified is not None:
-            self.set_last_modified(last_modified)
+        # Initialize properties in logical groups
+        self._init_basic_properties(
+            version=version,
+            name=name,
+            aliases=aliases,
+            description=description,
+            external_url=external_url,
+            custom_properties=custom_properties,
+            created=created,
+            last_modified=last_modified,
+        )
 
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        self._init_ml_specific_properties(
+            training_metrics=training_metrics,
+            hyper_params=hyper_params,
+            model_group=model_group,
+            training_jobs=training_jobs,
+            downstream_jobs=downstream_jobs,
+        )
+
+        self._init_metadata_properties(
+            owners=owners,
+            links=links,
+            tags=tags,
+            terms=terms,
+            domain=domain,
+            structured_properties=structured_properties,
+        )
 
     @classmethod
     def _new_from_graph(cls, urn: Urn, current_aspects: AspectBag) -> Self:
@@ -299,3 +292,73 @@ class MLModel(
             props.downstreamJobs = [
                 job for job in props.downstreamJobs if job != job_str
             ]
+
+    def _init_basic_properties(
+        self,
+        version: Optional[str] = None,
+        name: Optional[str] = None,
+        aliases: Optional[List[str]] = None,
+        description: Optional[str] = None,
+        external_url: Optional[str] = None,
+        custom_properties: Optional[Dict[str, str]] = None,
+        created: Optional[datetime] = None,
+        last_modified: Optional[datetime] = None,
+    ) -> None:
+        if version is not None:
+            self.set_version(version)
+        if name is not None:
+            self.set_name(name)
+        if aliases is not None:
+            self.set_version_aliases(aliases)
+        if description is not None:
+            self.set_description(description)
+        if external_url is not None:
+            self.set_external_url(external_url)
+        if custom_properties is not None:
+            self.set_custom_properties(custom_properties)
+        if created is not None:
+            self.set_created(created)
+        if last_modified is not None:
+            self.set_last_modified(last_modified)
+
+    def _init_ml_specific_properties(
+        self,
+        training_metrics: Optional[TrainingMetricsInputType] = None,
+        hyper_params: Optional[HyperParamsInputType] = None,
+        model_group: Optional[Union[str, MlModelGroupUrn]] = None,
+        training_jobs: Optional[MLTrainingJobInputType] = None,
+        downstream_jobs: Optional[MLTrainingJobInputType] = None,
+    ) -> None:
+        if training_metrics is not None:
+            self.set_training_metrics(training_metrics)
+        if hyper_params is not None:
+            self.set_hyper_params(hyper_params)
+        if model_group is not None:
+            self.set_model_group(model_group)
+        if training_jobs is not None:
+            self.set_training_jobs(training_jobs)
+        if downstream_jobs is not None:
+            self.set_downstream_jobs(downstream_jobs)
+
+    def _init_metadata_properties(
+        self,
+        owners: Optional[OwnersInputType] = None,
+        links: Optional[LinksInputType] = None,
+        tags: Optional[TagsInputType] = None,
+        terms: Optional[TermsInputType] = None,
+        domain: Optional[DomainInputType] = None,
+        structured_properties: Optional[StructuredPropertyInputType] = None,
+    ) -> None:
+        if owners is not None:
+            self.set_owners(owners)
+        if links is not None:
+            self.set_links(links)
+        if tags is not None:
+            self.set_tags(tags)
+        if terms is not None:
+            self.set_terms(terms)
+        if domain is not None:
+            self.set_domain(domain)
+        if structured_properties is not None:
+            for key, value in structured_properties.items():
+                self.set_structured_property(property_urn=key, values=value)
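
The constructor now accepts structured_properties and applies each URN/values pair via set_structured_property. A sketch of the new parameter in use (the id, platform, and property URN are made up for illustration):

from datahub.sdk.mlmodel import MLModel

model = MLModel(
    id="my-recommendations-model",
    platform="mlflow",
    name="My Recommendations Model",
    structured_properties={
        # Hypothetical structured property URN; values are passed as a list.
        "urn:li:structuredProperty:io.example.retentionDays": [90],
    },
)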
datahub/sdk/mlmodelgroup.py
CHANGED
@@ -17,10 +17,12 @@ from datahub.sdk._shared import (
     HasInstitutionalMemory,
     HasOwnership,
     HasPlatformInstance,
+    HasStructuredProperties,
     HasTags,
     HasTerms,
     LinksInputType,
     OwnersInputType,
+    StructuredPropertyInputType,
     TagsInputType,
     TermsInputType,
     make_time_stamp,
@@ -36,6 +38,7 @@ class MLModelGroup(
     HasTags,
     HasTerms,
     HasDomain,
+    HasStructuredProperties,
     Entity,
 ):
     __slots__ = ()
@@ -66,6 +69,7 @@ class MLModelGroup(
         domain: Optional[DomainInputType] = None,
         training_jobs: Optional[Sequence[Union[str, DataProcessInstanceUrn]]] = None,
         downstream_jobs: Optional[Sequence[Union[str, DataProcessInstanceUrn]]] = None,
+        structured_properties: Optional[StructuredPropertyInputType] = None,
         extra_aspects: ExtraAspectsType = None,
     ):
         urn = MlModelGroupUrn(platform=platform, name=id, env=env)
@@ -105,6 +109,9 @@ class MLModelGroup(
             self.set_training_jobs(training_jobs)
         if downstream_jobs is not None:
             self.set_downstream_jobs(downstream_jobs)
+        if structured_properties is not None:
+            for key, value in structured_properties.items():
+                self.set_structured_property(property_urn=key, values=value)
 
     @classmethod
     def _new_from_graph(cls, urn: Urn, current_aspects: AspectBag) -> Self:
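
MLModelGroup gains the same structured_properties parameter with identical semantics. A parallel sketch (identifiers again made up):

from datahub.sdk.mlmodelgroup import MLModelGroup

group = MLModelGroup(
    id="my-model-group",
    platform="mlflow",
    structured_properties={
        "urn:li:structuredProperty:io.example.retentionDays": [90],
    },
)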
datahub/sql_parsing/sql_parsing_aggregator.py
CHANGED
@@ -109,7 +109,7 @@ class ObservedQuery:
     query_hash: Optional[str] = None
     usage_multiplier: int = 1
 
-    # Use this to store
+    # Use this to store additional key-value information about the query for debugging.
     extra_info: Optional[dict] = None
 
 
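
The completed comment documents extra_info as a free-form debug payload. A sketch of attaching such context when recording a query (the dict keys are illustrative; only extra_info is confirmed by the diff):

from datahub.sql_parsing.sql_parsing_aggregator import ObservedQuery

observed = ObservedQuery(
    query="SELECT id FROM db.users",
    # Free-form key-value context, useful when debugging lineage extraction.
    extra_info={"source_file": "jobs/daily_users.sql", "job_run_id": "1234"},
)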
datahub/testing/mce_helpers.py
ADDED
@@ -0,0 +1,421 @@
+import json
+import logging
+import os
+import pathlib
+import re
+import tempfile
+from typing import (
+    Any,
+    Callable,
+    Dict,
+    List,
+    Optional,
+    Sequence,
+    Set,
+    Tuple,
+    Type,
+    Union,
+)
+
+import pytest
+
+from datahub.emitter.mcp import MetadataChangeProposalWrapper
+from datahub.ingestion.sink.file import write_metadata_file
+from datahub.metadata.schema_classes import MetadataChangeEventClass
+from datahub.metadata.urns import Urn
+from datahub.testing.compare_metadata_json import (
+    assert_metadata_files_equal,
+    load_json_file,
+)
+
+logger = logging.getLogger(__name__)
+
+IGNORE_PATH_TIMESTAMPS = [
+    # Ignore timestamps from the ETL pipeline. A couple examples:
+    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['created'\]\['time'\]",
+    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['lastModified'\]\['time'\]",
+    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['createStamp'\]\['time'\]",
+    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['auditStamp'\]\['time'\]",
+]
+
+
+class MCEConstants:
+    PROPOSED_SNAPSHOT = "proposedSnapshot"
+    DATASET_SNAPSHOT_CLASS = (
+        "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot"
+    )
+
+
+class MCPConstants:
+    CHANGE_TYPE = "changeType"
+    ENTITY_URN = "entityUrn"
+    ENTITY_TYPE = "entityType"
+    ASPECT_NAME = "aspectName"
+    ASPECT_VALUE = "aspect"
+
+
+class EntityType:
+    DATASET = "dataset"
+    PIPELINE = "dataFlow"
+    FLOW = "dataFlow"
+    TASK = "dataJob"
+    JOB = "dataJob"
+    USER = "corpuser"
+    GROUP = "corpGroup"
+
+
+def clean_nones(value):
+    """
+    Recursively remove all None values from dictionaries and lists, and returns
+    the result as a new dictionary or list.
+    """
+    if isinstance(value, list):
+        return [clean_nones(x) for x in value if x is not None]
+    elif isinstance(value, dict):
+        return {key: clean_nones(val) for key, val in value.items() if val is not None}
+    else:
+        return value
+
+
+def check_golden_file(
+    pytestconfig: pytest.Config,
+    output_path: Union[str, os.PathLike],
+    golden_path: Union[str, os.PathLike],
+    ignore_paths: Sequence[str] = (),
+    ignore_paths_v2: Sequence[str] = (),
+    ignore_order: bool = True,
+) -> None:
+    # TODO: Remove the pytestconfig parameter since it's redundant.
+    # Or more straightforward - we can remove the `check_golden_file` method
+    # and use assert_metadata_files_equal directly. Maybe call it "check_golden_metadata"?
+    # In a lot of cases, the output_path is also just annoying - our pytest setup
+    # should be responsible for figuring out where to put the temp file.
+    assert_metadata_files_equal(
+        output_path=output_path,
+        golden_path=golden_path,
+        ignore_paths=ignore_paths,
+        ignore_paths_v2=ignore_paths_v2,
+        ignore_order=ignore_order,
+    )
+
+
+def check_goldens_stream(
+    outputs: List,
+    golden_path: Union[str, os.PathLike],
+    ignore_paths: Sequence[str] = (),
+    ignore_order: bool = True,
+) -> None:
+    with tempfile.NamedTemporaryFile() as f:
+        write_metadata_file(pathlib.Path(f.name), outputs)
+
+        assert_metadata_files_equal(
+            output_path=f.name,
+            golden_path=golden_path,
+            ignore_paths=ignore_paths,
+            ignore_order=ignore_order,
+        )
+
+
+def _get_field_for_entity_type_in_mce(entity_type: str) -> str:
+    """Returns the field to look for depending on the type of entity in the MCE"""
+    if entity_type == EntityType.DATASET:
+        return MCEConstants.DATASET_SNAPSHOT_CLASS
+    raise Exception(f"Not implemented for entity_type {entity_type}")
+
+
+def _get_filter(
+    mce: bool = False, mcp: bool = False, entity_type: Optional[str] = None
+) -> Callable[[Dict], bool]:
+    if mce:
+        # cheap way to determine if we are working with an MCE for the appropriate entity_type
+        if entity_type:
+            return (
+                lambda x: MCEConstants.PROPOSED_SNAPSHOT in x
+                and _get_field_for_entity_type_in_mce(str(entity_type))
+                in x[MCEConstants.PROPOSED_SNAPSHOT]
+            )
+        else:
+            return lambda x: MCEConstants.PROPOSED_SNAPSHOT in x
+    if mcp:
+        # cheap way to determine if we are working with an MCP
+        return lambda x: MCPConstants.CHANGE_TYPE in x and (
+            x[MCPConstants.ENTITY_TYPE] == entity_type if entity_type else True
+        )
+    return lambda _: False
+
+
+def _get_element(event: Dict[str, Any], path_spec: List[str]) -> Any:
+    try:
+        for p in path_spec:
+            if p not in event:
+                return None
+            else:
+                event = event.get(p, {})
+        return event
+    except Exception as e:
+        print(event)
+        raise e
+
+
+def _element_matches_pattern(
+    event: Dict[str, Any], path_spec: List[str], pattern: str
+) -> Tuple[bool, bool]:
+    import re
+
+    element = _get_element(event, path_spec)
+    if element is None:
+        return (False, False)
+    else:
+        return (True, re.search(pattern, str(element)) is not None)
+
+
+def get_entity_urns(events_file: str) -> Set[str]:
+    events = load_json_file(events_file)
+    assert isinstance(events, list)
+    return _get_entity_urns(events)
+
+
+def _get_entity_urns(events_list: List[Dict]) -> Set[str]:
+    entity_type = "dataset"
+    # mce urns
+    mce_urns = {
+        _get_element(x, _get_mce_urn_path_spec(entity_type))
+        for x in events_list
+        if _get_filter(mce=True, entity_type=entity_type)(x)
+    }
+    mcp_urns = {
+        _get_element(x, _get_mcp_urn_path_spec())
+        for x in events_list
+        if _get_filter(mcp=True, entity_type=entity_type)(x)
+    }
+    all_urns = mce_urns.union(mcp_urns)
+    return all_urns
+
+
+def assert_mcp_entity_urn(
+    filter: str, entity_type: str, regex_pattern: str, file: str
+) -> int:
+    def get_path_spec_for_urn() -> List[str]:
+        return [MCPConstants.ENTITY_URN]
+
+    test_output = load_json_file(file)
+    if isinstance(test_output, list):
+        path_spec = get_path_spec_for_urn()
+        filter_operator = _get_filter(mcp=True, entity_type=entity_type)
+        filtered_events = [
+            (x, _element_matches_pattern(x, path_spec, regex_pattern))
+            for x in test_output
+            if filter_operator(x)
+        ]
+        failed_events = [y for y in filtered_events if not y[1][0] or not y[1][1]]
+        if failed_events:
+            raise Exception("Failed to match events", failed_events)
+        return len(filtered_events)
+    else:
+        raise Exception(
+            f"Did not expect the file {file} to not contain a list of items"
+        )
+
+
+def _get_mce_urn_path_spec(entity_type: str) -> List[str]:
+    if entity_type == EntityType.DATASET:
+        return [
+            MCEConstants.PROPOSED_SNAPSHOT,
+            MCEConstants.DATASET_SNAPSHOT_CLASS,
+            "urn",
+        ]
+    raise Exception(f"Not implemented for entity_type: {entity_type}")
+
+
+def _get_mcp_urn_path_spec() -> List[str]:
+    return [MCPConstants.ENTITY_URN]
+
+
+def assert_mce_entity_urn(
+    filter: str, entity_type: str, regex_pattern: str, file: str
+) -> int:
+    """Assert that all mce entity urns must match the regex pattern passed in. Return the number of events matched"""
+
+    test_output = load_json_file(file)
+    if isinstance(test_output, list):
+        path_spec = _get_mce_urn_path_spec(entity_type)
+        filter_operator = _get_filter(mce=True)
+        filtered_events = [
+            (x, _element_matches_pattern(x, path_spec, regex_pattern))
+            for x in test_output
+            if filter_operator(x)
+        ]
+        failed_events = [y for y in filtered_events if not y[1][0] or not y[1][1]]
+        if failed_events:
+            raise Exception(
+                f"Failed to match events: {json.dumps(failed_events, indent=2)}"
+            )
+        return len(filtered_events)
+    else:
+        raise Exception(
+            f"Did not expect the file {file} to not contain a list of items"
+        )
+
+
+def assert_for_each_entity(
+    entity_type: str,
+    aspect_name: str,
+    aspect_field_matcher: Dict[str, Any],
+    file: str,
+    exception_urns: Optional[List[str]] = None,
+) -> int:
+    """Assert that an aspect name with the desired fields exists for each entity urn"""
+    if exception_urns is None:
+        exception_urns = []
+    test_output = load_json_file(file)
+    assert isinstance(test_output, list)
+    # mce urns
+    mce_urns = {
+        _get_element(x, _get_mce_urn_path_spec(entity_type))
+        for x in test_output
+        if _get_filter(mce=True, entity_type=entity_type)(x)
+    }
+    mcp_urns = {
+        _get_element(x, _get_mcp_urn_path_spec())
+        for x in test_output
+        if _get_filter(mcp=True, entity_type=entity_type)(x)
+    }
+    all_urns = mce_urns.union(mcp_urns)
+    # there should not be any None urns
+    assert None not in all_urns
+    aspect_map = {urn: None for urn in all_urns}
+    # iterate over all mcps
+    for o in [
+        mcp
+        for mcp in test_output
+        if _get_filter(mcp=True, entity_type=entity_type)(mcp)
+    ]:
+        if o.get(MCPConstants.ASPECT_NAME) == aspect_name:
+            # load the inner aspect payload and assign to this urn
+            aspect_map[o[MCPConstants.ENTITY_URN]] = o.get(
+                MCPConstants.ASPECT_VALUE, {}
+            ).get("json")
+
+    success: List[str] = []
+    failures: List[str] = []
+    for urn, aspect_val in aspect_map.items():
+        if aspect_val is not None:
+            for f in aspect_field_matcher:
+                assert aspect_field_matcher[f] == _get_element(aspect_val, [f]), (
+                    f"urn: {urn} -> Field {f} must match value {aspect_field_matcher[f]}, found {_get_element(aspect_val, [f])}"
+                )
+            success.append(urn)
+        elif urn not in exception_urns:
+            print(f"Adding {urn} to failures")
+            failures.append(urn)
+
+    if success:
+        print(f"Succeeded on assertion for urns {success}")
+    if failures:
+        raise AssertionError(
+            f"Failed to find aspect_name {aspect_name} for urns {json.dumps(failures, indent=2)}"
+        )
+
+    return len(success)
+
+
+def assert_entity_mce_aspect(
+    entity_urn: str, aspect: Any, aspect_type: Type, file: str
+) -> int:
+    # TODO: Replace with read_metadata_file()
+    test_output = load_json_file(file)
+    entity_type = Urn.from_string(entity_urn).entity_type
+    assert isinstance(test_output, list)
+    # mce urns
+    mces: List[MetadataChangeEventClass] = [
+        MetadataChangeEventClass.from_obj(x)
+        for x in test_output
+        if _get_filter(mce=True, entity_type=entity_type)(x)
+        and _get_element(x, _get_mce_urn_path_spec(entity_type)) == entity_urn
+    ]
+    matches = 0
+    for mce in mces:
+        for a in mce.proposedSnapshot.aspects:
+            if isinstance(a, aspect_type):
+                assert a == aspect
+                matches = matches + 1
+    return matches
+
+
+def assert_entity_mcp_aspect(
+    entity_urn: str, aspect_field_matcher: Dict[str, Any], aspect_name: str, file: str
+) -> int:
+    # TODO: Replace with read_metadata_file()
+    test_output = load_json_file(file)
+    entity_type = Urn.from_string(entity_urn).entity_type
+    assert isinstance(test_output, list)
+    # mcps that match entity_urn
+    mcps: List[MetadataChangeProposalWrapper] = [
+        MetadataChangeProposalWrapper.from_obj_require_wrapper(x)
+        for x in test_output
+        if _get_filter(mcp=True, entity_type=entity_type)(x)
+        and _get_element(x, _get_mcp_urn_path_spec()) == entity_urn
+    ]
+    matches = 0
+    for mcp in mcps:
+        if mcp.aspectName == aspect_name:
+            assert mcp.aspect
+            aspect_val = mcp.aspect.to_obj()
+            for f in aspect_field_matcher:
+                assert aspect_field_matcher[f] == _get_element(aspect_val, [f]), (
+                    f"urn: {mcp.entityUrn} -> Field {f} must match value {aspect_field_matcher[f]}, found {_get_element(aspect_val, [f])}"
+                )
+            matches = matches + 1
+    return matches
+
+
+def assert_entity_urn_not_like(entity_type: str, regex_pattern: str, file: str) -> int:
+    """Assert that there are no entity urns that match the regex pattern passed in. Returns the total number of events in the file"""
+
+    # TODO: Refactor common code with assert_entity_urn_like.
+    test_output = load_json_file(file)
+    assert isinstance(test_output, list)
+    # mce urns
+    mce_urns = {
+        _get_element(x, _get_mce_urn_path_spec(entity_type))
+        for x in test_output
+        if _get_filter(mce=True, entity_type=entity_type)(x)
+    }
+    mcp_urns = {
+        _get_element(x, _get_mcp_urn_path_spec())
+        for x in test_output
+        if _get_filter(mcp=True, entity_type=entity_type)(x)
+    }
+    all_urns = mce_urns.union(mcp_urns)
+    print(all_urns)
+    matched_urns = [u for u in all_urns if re.match(regex_pattern, u)]
+    if matched_urns:
+        raise AssertionError(f"urns found that match the deny list {matched_urns}")
+    return len(test_output)
+
+
+def assert_entity_urn_like(entity_type: str, regex_pattern: str, file: str) -> int:
+    """Assert that there exist entity urns that match the regex pattern passed in. Returns the total number of events in the file"""
+
+    test_output = load_json_file(file)
+    assert isinstance(test_output, list)
+    # mce urns
+    mce_urns = {
+        _get_element(x, _get_mce_urn_path_spec(entity_type))
+        for x in test_output
+        if _get_filter(mce=True, entity_type=entity_type)(x)
+    }
+    mcp_urns = {
+        _get_element(x, _get_mcp_urn_path_spec())
+        for x in test_output
+        if _get_filter(mcp=True, entity_type=entity_type)(x)
+    }
+    all_urns = mce_urns.union(mcp_urns)
+    print(all_urns)
+    matched_urns = [u for u in all_urns if re.match(regex_pattern, u)]
+    if matched_urns:
+        return len(matched_urns)
+    else:
+        raise AssertionError(
+            f"No urns found that match the pattern {regex_pattern}. Full list is {all_urns}"
+        )