acryl-datahub 1.1.0rc4__py3-none-any.whl → 1.1.1rc2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of acryl-datahub might be problematic. Click here for more details.
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/METADATA +2480 -2480
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/RECORD +19 -16
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/WHEEL +1 -1
- datahub/_version.py +1 -1
- datahub/ingestion/source/apply/datahub_apply.py +4 -4
- datahub/ingestion/source/data_lake_common/data_lake_utils.py +22 -10
- datahub/ingestion/source/data_lake_common/object_store.py +644 -0
- datahub/ingestion/source/gcs/gcs_source.py +22 -7
- datahub/ingestion/source/gcs/gcs_utils.py +36 -9
- datahub/ingestion/source/s3/source.py +65 -6
- datahub/ingestion/source/snowflake/snowflake_queries.py +44 -21
- datahub/ingestion/source/snowflake/snowflake_query.py +0 -7
- datahub/ingestion/source/sql/hive.py +2 -3
- datahub/sql_parsing/sql_parsing_aggregator.py +1 -1
- datahub/testing/mce_helpers.py +421 -0
- datahub/testing/sdk_v2_helpers.py +12 -0
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.1.0rc4.dist-info → acryl_datahub-1.1.1rc2.dist-info}/top_level.txt +0 -0
|
@@ -0,0 +1,421 @@
|
|
|
1
|
+
import json
|
|
2
|
+
import logging
|
|
3
|
+
import os
|
|
4
|
+
import pathlib
|
|
5
|
+
import re
|
|
6
|
+
import tempfile
|
|
7
|
+
from typing import (
|
|
8
|
+
Any,
|
|
9
|
+
Callable,
|
|
10
|
+
Dict,
|
|
11
|
+
List,
|
|
12
|
+
Optional,
|
|
13
|
+
Sequence,
|
|
14
|
+
Set,
|
|
15
|
+
Tuple,
|
|
16
|
+
Type,
|
|
17
|
+
Union,
|
|
18
|
+
)
|
|
19
|
+
|
|
20
|
+
import pytest
|
|
21
|
+
|
|
22
|
+
from datahub.emitter.mcp import MetadataChangeProposalWrapper
|
|
23
|
+
from datahub.ingestion.sink.file import write_metadata_file
|
|
24
|
+
from datahub.metadata.schema_classes import MetadataChangeEventClass
|
|
25
|
+
from datahub.metadata.urns import Urn
|
|
26
|
+
from datahub.testing.compare_metadata_json import (
|
|
27
|
+
assert_metadata_files_equal,
|
|
28
|
+
load_json_file,
|
|
29
|
+
)
|
|
30
|
+
|
|
31
|
+
logger = logging.getLogger(__name__)
|
|
32
|
+
|
|
33
|
+
# Regex paths identifying volatile timestamp fields inside serialized MCE
# snapshots, used to exclude them when diffing output against golden files
# (presumably consumed by the metadata-file comparator — confirm at call sites).
IGNORE_PATH_TIMESTAMPS = [
    # Ignore timestamps from the ETL pipeline. A couple examples:
    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['created'\]\['time'\]",
    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['lastModified'\]\['time'\]",
    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['createStamp'\]\['time'\]",
    r"root\[\d+\]\['proposedSnapshot'\].+\['aspects'\].+\['auditStamp'\]\['time'\]",
]
|
|
40
|
+
|
|
41
|
+
|
|
42
|
+
class MCEConstants:
    """JSON keys used when inspecting raw MetadataChangeEvent (MCE) payloads."""

    # Top-level key that marks a serialized event as an MCE.
    PROPOSED_SNAPSHOT = "proposedSnapshot"
    # Fully-qualified avro record name of the dataset snapshot inside an MCE.
    DATASET_SNAPSHOT_CLASS = (
        "com.linkedin.pegasus2avro.metadata.snapshot.DatasetSnapshot"
    )
|
|
47
|
+
|
|
48
|
+
|
|
49
|
+
class MCPConstants:
    """JSON keys used when inspecting raw MetadataChangeProposal (MCP) payloads."""

    # Top-level key that marks a serialized event as an MCP.
    CHANGE_TYPE = "changeType"
    ENTITY_URN = "entityUrn"
    ENTITY_TYPE = "entityType"
    ASPECT_NAME = "aspectName"
    # Key holding the aspect payload wrapper (the inner value lives under "json").
    ASPECT_VALUE = "aspect"
|
|
55
|
+
|
|
56
|
+
|
|
57
|
+
class EntityType:
    """Entity-type strings as they appear in serialized metadata events."""

    DATASET = "dataset"
    # PIPELINE/FLOW and TASK/JOB are deliberate aliases for the same
    # underlying entity types ("dataFlow" and "dataJob" respectively).
    PIPELINE = "dataFlow"
    FLOW = "dataFlow"
    TASK = "dataJob"
    JOB = "dataJob"
    USER = "corpuser"
    GROUP = "corpGroup"
|
|
65
|
+
|
|
66
|
+
|
|
67
|
+
def clean_nones(value):
    """Strip None entries from nested containers.

    Produces a new dict/list with every None value (and None list element)
    removed, recursing into nested dicts and lists. Non-container inputs are
    returned unchanged, including None itself.
    """
    if isinstance(value, dict):
        return {k: clean_nones(v) for k, v in value.items() if v is not None}
    if isinstance(value, list):
        return [clean_nones(item) for item in value if item is not None]
    return value
|
|
78
|
+
|
|
79
|
+
|
|
80
|
+
def check_golden_file(
    pytestconfig: pytest.Config,
    output_path: Union[str, os.PathLike],
    golden_path: Union[str, os.PathLike],
    ignore_paths: Sequence[str] = (),
    ignore_paths_v2: Sequence[str] = (),
    ignore_order: bool = True,
) -> None:
    """Compare a generated metadata file against its golden counterpart.

    Thin wrapper over assert_metadata_files_equal; raises on any mismatch.
    """
    # TODO: Remove the pytestconfig parameter since it's redundant.
    # Or more straightforward - we can remove the `check_golden_file` method
    # and use assert_metadata_files_equal directly. Maybe call it "check_golden_metadata"?
    # In a lot of cases, the output_path is also just annoying - our pytest setup
    # should be responsible for figuring out where to put the temp file.
    comparison_kwargs = dict(
        output_path=output_path,
        golden_path=golden_path,
        ignore_paths=ignore_paths,
        ignore_paths_v2=ignore_paths_v2,
        ignore_order=ignore_order,
    )
    assert_metadata_files_equal(**comparison_kwargs)
|
|
100
|
+
|
|
101
|
+
|
|
102
|
+
def check_goldens_stream(
    outputs: List,
    golden_path: Union[str, os.PathLike],
    ignore_paths: Sequence[str] = (),
    ignore_order: bool = True,
) -> None:
    """Serialize in-memory metadata events to a temp file and diff them
    against the golden file; raises on any mismatch."""
    with tempfile.NamedTemporaryFile() as tmp:
        write_metadata_file(pathlib.Path(tmp.name), outputs)

        assert_metadata_files_equal(
            output_path=tmp.name,
            golden_path=golden_path,
            ignore_paths=ignore_paths,
            ignore_order=ignore_order,
        )
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
def _get_field_for_entity_type_in_mce(entity_type: str) -> str:
    """Returns the field to look for depending on the type of entity in the MCE"""
    # Only dataset snapshots are supported today; anything else is an error.
    if entity_type != EntityType.DATASET:
        raise Exception(f"Not implemented for entity_type {entity_type}")
    return MCEConstants.DATASET_SNAPSHOT_CLASS
|
|
124
|
+
|
|
125
|
+
|
|
126
|
+
def _get_filter(
    mce: bool = False, mcp: bool = False, entity_type: Optional[str] = None
) -> Callable[[Dict], bool]:
    """Build a predicate that recognizes raw MCE or MCP events.

    Args:
        mce: match MetadataChangeEvent payloads (detected via "proposedSnapshot").
        mcp: match MetadataChangeProposal payloads (detected via "changeType").
        entity_type: optionally restrict matches to this entity type.

    Returns:
        A predicate over raw event dicts; always-False if neither flag is set.
    """
    if mce:
        # cheap way to determine if we are working with an MCE for the appropriate entity_type
        if entity_type:
            return (
                lambda x: MCEConstants.PROPOSED_SNAPSHOT in x
                and _get_field_for_entity_type_in_mce(str(entity_type))
                in x[MCEConstants.PROPOSED_SNAPSHOT]
            )
        else:
            return lambda x: MCEConstants.PROPOSED_SNAPSHOT in x
    if mcp:
        # cheap way to determine if we are working with an MCP.
        # BUG FIX: use .get() so an MCP-shaped event missing "entityType"
        # fails the filter instead of raising KeyError.
        return lambda x: MCPConstants.CHANGE_TYPE in x and (
            x.get(MCPConstants.ENTITY_TYPE) == entity_type if entity_type else True
        )
    return lambda _: False
|
|
145
|
+
|
|
146
|
+
|
|
147
|
+
def _get_element(event: Dict[str, Any], path_spec: List[str]) -> Any:
|
|
148
|
+
try:
|
|
149
|
+
for p in path_spec:
|
|
150
|
+
if p not in event:
|
|
151
|
+
return None
|
|
152
|
+
else:
|
|
153
|
+
event = event.get(p, {})
|
|
154
|
+
return event
|
|
155
|
+
except Exception as e:
|
|
156
|
+
print(event)
|
|
157
|
+
raise e
|
|
158
|
+
|
|
159
|
+
|
|
160
|
+
def _element_matches_pattern(
    event: Dict[str, Any], path_spec: List[str], pattern: str
) -> Tuple[bool, bool]:
    """Check whether the element at *path_spec* matches *pattern*.

    Returns:
        (exists, matches) — (False, False) when the path is absent; otherwise
        (True, whether `re.search` finds *pattern* in str(element)).
    """
    # NOTE: the redundant function-local `import re` was removed; the module
    # already imports `re` at the top.
    element = _get_element(event, path_spec)
    if element is None:
        return (False, False)
    return (True, re.search(pattern, str(element)) is not None)
|
|
170
|
+
|
|
171
|
+
|
|
172
|
+
def get_entity_urns(events_file: str) -> Set[str]:
    """Load a metadata events file and return the set of entity urns it contains."""
    loaded = load_json_file(events_file)
    assert isinstance(loaded, list)
    return _get_entity_urns(loaded)
|
|
176
|
+
|
|
177
|
+
|
|
178
|
+
def _get_entity_urns(events_list: List[Dict]) -> Set[str]:
    """Collect dataset urns from both MCE and MCP events in *events_list*."""
    entity_type = "dataset"
    is_mce = _get_filter(mce=True, entity_type=entity_type)
    is_mcp = _get_filter(mcp=True, entity_type=entity_type)
    mce_path = _get_mce_urn_path_spec(entity_type)
    mcp_path = _get_mcp_urn_path_spec()

    urns: Set[str] = set()
    for event in events_list:
        if is_mce(event):
            urns.add(_get_element(event, mce_path))
        if is_mcp(event):
            urns.add(_get_element(event, mcp_path))
    return urns
|
|
193
|
+
|
|
194
|
+
|
|
195
|
+
def assert_mcp_entity_urn(
    filter: str, entity_type: str, regex_pattern: str, file: str
) -> int:
    """Assert every MCP of *entity_type* in *file* has an entityUrn matching
    *regex_pattern*. Returns the number of MCP events inspected.

    The ``filter`` parameter is unused but kept for backward compatibility.
    Raises an Exception listing the failing events, or if the file does not
    contain a list.
    """
    test_output = load_json_file(file)
    if isinstance(test_output, list):
        # Use the shared helper instead of a duplicated local definition.
        path_spec = _get_mcp_urn_path_spec()
        filter_operator = _get_filter(mcp=True, entity_type=entity_type)
        filtered_events = [
            (x, _element_matches_pattern(x, path_spec, regex_pattern))
            for x in test_output
            if filter_operator(x)
        ]
        # An event fails when the urn is absent OR does not match the pattern.
        failed_events = [y for y in filtered_events if not y[1][0] or not y[1][1]]
        if failed_events:
            raise Exception("Failed to match events", failed_events)
        return len(filtered_events)
    else:
        raise Exception(
            f"Did not expect the file {file} to not contain a list of items"
        )
|
|
218
|
+
|
|
219
|
+
|
|
220
|
+
def _get_mce_urn_path_spec(entity_type: str) -> List[str]:
    """Path to the urn field inside a raw MCE of the given entity type."""
    # Only dataset snapshots are supported today.
    if entity_type != EntityType.DATASET:
        raise Exception(f"Not implemented for entity_type: {entity_type}")
    return [
        MCEConstants.PROPOSED_SNAPSHOT,
        MCEConstants.DATASET_SNAPSHOT_CLASS,
        "urn",
    ]
|
|
228
|
+
|
|
229
|
+
|
|
230
|
+
def _get_mcp_urn_path_spec() -> List[str]:
    """Path to the urn field inside a raw MCP event."""
    return [MCPConstants.ENTITY_URN]
|
|
232
|
+
|
|
233
|
+
|
|
234
|
+
def assert_mce_entity_urn(
    filter: str, entity_type: str, regex_pattern: str, file: str
) -> int:
    """Assert that all mce entity urns must match the regex pattern passed in.
    Return the number of events matched.

    The ``filter`` parameter is unused but kept for backward compatibility.
    Raises an Exception listing the failing events, or if the file does not
    contain a list.
    """
    test_output = load_json_file(file)
    if isinstance(test_output, list):
        path_spec = _get_mce_urn_path_spec(entity_type)
        filter_operator = _get_filter(mce=True)
        filtered_events = [
            (x, _element_matches_pattern(x, path_spec, regex_pattern))
            for x in test_output
            if filter_operator(x)
        ]
        # An event fails when the urn is absent OR does not match the pattern.
        failed_events = [y for y in filtered_events if not y[1][0] or not y[1][1]]
        if failed_events:
            # BUG FIX: the message was missing its f-prefix, so the
            # placeholder text was emitted literally instead of the events.
            raise Exception(
                f"Failed to match events: {json.dumps(failed_events, indent=2)}"
            )
        return len(filtered_events)
    else:
        raise Exception(
            f"Did not expect the file {file} to not contain a list of items"
        )
|
|
258
|
+
|
|
259
|
+
|
|
260
|
+
def assert_for_each_entity(
    entity_type: str,
    aspect_name: str,
    aspect_field_matcher: Dict[str, Any],
    file: str,
    exception_urns: Optional[List[str]] = None,
) -> int:
    """Assert that an aspect name with the desired fields exists for each entity urn.

    For every entity urn of *entity_type* found in *file* (across both MCE and
    MCP events), require that an MCP carrying *aspect_name* exists and that each
    field in *aspect_field_matcher* equals the corresponding value inside its
    aspect payload. Urns listed in *exception_urns* are excused from having the
    aspect. Returns the number of urns that passed.
    """
    if exception_urns is None:
        exception_urns = []
    test_output = load_json_file(file)
    assert isinstance(test_output, list)
    # mce urns
    mce_urns = {
        _get_element(x, _get_mce_urn_path_spec(entity_type))
        for x in test_output
        if _get_filter(mce=True, entity_type=entity_type)(x)
    }
    mcp_urns = {
        _get_element(x, _get_mcp_urn_path_spec())
        for x in test_output
        if _get_filter(mcp=True, entity_type=entity_type)(x)
    }
    all_urns = mce_urns.union(mcp_urns)
    # there should not be any None urns
    assert None not in all_urns
    # None is the sentinel for "aspect not seen yet" per urn.
    aspect_map = {urn: None for urn in all_urns}
    # iterate over all mcps
    for o in [
        mcp
        for mcp in test_output
        if _get_filter(mcp=True, entity_type=entity_type)(mcp)
    ]:
        if o.get(MCPConstants.ASPECT_NAME) == aspect_name:
            # load the inner aspect payload and assign to this urn
            # (note: a payload without a "json" key leaves the sentinel None,
            # so that urn will still be counted as missing the aspect)
            aspect_map[o[MCPConstants.ENTITY_URN]] = o.get(
                MCPConstants.ASPECT_VALUE, {}
            ).get("json")

    success: List[str] = []
    failures: List[str] = []
    for urn, aspect_val in aspect_map.items():
        if aspect_val is not None:
            # The aspect exists; every matcher field must agree with the payload.
            for f in aspect_field_matcher:
                assert aspect_field_matcher[f] == _get_element(aspect_val, [f]), (
                    f"urn: {urn} -> Field {f} must match value {aspect_field_matcher[f]}, found {_get_element(aspect_val, [f])}"
                )
            success.append(urn)
        elif urn not in exception_urns:
            print(f"Adding {urn} to failures")
            failures.append(urn)

    if success:
        print(f"Succeeded on assertion for urns {success}")
    if failures:
        raise AssertionError(
            f"Failed to find aspect_name {aspect_name} for urns {json.dumps(failures, indent=2)}"
        )

    return len(success)
|
|
320
|
+
|
|
321
|
+
|
|
322
|
+
def assert_entity_mce_aspect(
    entity_urn: str, aspect: Any, aspect_type: Type, file: str
) -> int:
    """Count aspects of *aspect_type* on MCEs for *entity_urn* in *file*,
    asserting each one equals *aspect*. Returns the number of matches."""
    # TODO: Replace with read_metadata_file()
    events = load_json_file(file)
    entity_type = Urn.from_string(entity_urn).entity_type
    assert isinstance(events, list)
    mce_filter = _get_filter(mce=True, entity_type=entity_type)
    urn_path = _get_mce_urn_path_spec(entity_type)
    # Parse only the MCEs that belong to the requested urn.
    matching_mces: List[MetadataChangeEventClass] = [
        MetadataChangeEventClass.from_obj(event)
        for event in events
        if mce_filter(event) and _get_element(event, urn_path) == entity_urn
    ]
    matches = 0
    for mce in matching_mces:
        for candidate in mce.proposedSnapshot.aspects:
            if isinstance(candidate, aspect_type):
                assert candidate == aspect
                matches += 1
    return matches
|
|
343
|
+
|
|
344
|
+
|
|
345
|
+
def assert_entity_mcp_aspect(
    entity_urn: str, aspect_field_matcher: Dict[str, Any], aspect_name: str, file: str
) -> int:
    """Count MCPs named *aspect_name* for *entity_urn* in *file*, asserting
    each field in *aspect_field_matcher* matches the aspect payload.
    Returns the number of matches."""
    # TODO: Replace with read_metadata_file()
    events = load_json_file(file)
    entity_type = Urn.from_string(entity_urn).entity_type
    assert isinstance(events, list)
    mcp_filter = _get_filter(mcp=True, entity_type=entity_type)
    urn_path = _get_mcp_urn_path_spec()
    # Parse only the MCPs that belong to the requested urn.
    selected: List[MetadataChangeProposalWrapper] = [
        MetadataChangeProposalWrapper.from_obj_require_wrapper(event)
        for event in events
        if mcp_filter(event) and _get_element(event, urn_path) == entity_urn
    ]
    matches = 0
    for mcp in selected:
        if mcp.aspectName != aspect_name:
            continue
        assert mcp.aspect
        aspect_val = mcp.aspect.to_obj()
        for field in aspect_field_matcher:
            expected = aspect_field_matcher[field]
            found = _get_element(aspect_val, [field])
            assert expected == found, (
                f"urn: {mcp.entityUrn} -> Field {field} must match value {expected}, found {found}"
            )
        matches += 1
    return matches
|
|
370
|
+
|
|
371
|
+
|
|
372
|
+
def assert_entity_urn_not_like(entity_type: str, regex_pattern: str, file: str) -> int:
    """Assert that there are no entity urns that match the regex pattern passed in.
    Returns the total number of events in the file.

    Raises AssertionError listing any urns that match the deny pattern.
    """
    # TODO: Refactor common code with assert_entity_urn_like.
    test_output = load_json_file(file)
    assert isinstance(test_output, list)
    # mce urns
    mce_urns = {
        _get_element(x, _get_mce_urn_path_spec(entity_type))
        for x in test_output
        if _get_filter(mce=True, entity_type=entity_type)(x)
    }
    mcp_urns = {
        _get_element(x, _get_mcp_urn_path_spec())
        for x in test_output
        if _get_filter(mcp=True, entity_type=entity_type)(x)
    }
    all_urns = mce_urns.union(mcp_urns)
    # Was a bare print(); use the module logger so test output stays clean.
    logger.debug("Checking urns against deny pattern: %s", all_urns)
    # Compile once instead of re-resolving the pattern per urn.
    deny_re = re.compile(regex_pattern)
    matched_urns = [u for u in all_urns if deny_re.match(u)]
    if matched_urns:
        raise AssertionError(f"urns found that match the deny list {matched_urns}")
    return len(test_output)
|
|
395
|
+
|
|
396
|
+
|
|
397
|
+
def assert_entity_urn_like(entity_type: str, regex_pattern: str, file: str) -> int:
    """Assert that there exist entity urns that match the regex pattern passed in.
    Returns the number of matching urns.

    Raises AssertionError if no urn matches the pattern.
    """
    test_output = load_json_file(file)
    assert isinstance(test_output, list)
    # mce urns
    mce_urns = {
        _get_element(x, _get_mce_urn_path_spec(entity_type))
        for x in test_output
        if _get_filter(mce=True, entity_type=entity_type)(x)
    }
    mcp_urns = {
        _get_element(x, _get_mcp_urn_path_spec())
        for x in test_output
        if _get_filter(mcp=True, entity_type=entity_type)(x)
    }
    all_urns = mce_urns.union(mcp_urns)
    # Was a bare print(); use the module logger so test output stays clean.
    logger.debug("Checking urns against allow pattern: %s", all_urns)
    # Compile once instead of re-resolving the pattern per urn.
    allow_re = re.compile(regex_pattern)
    matched_urns = [u for u in all_urns if allow_re.match(u)]
    if matched_urns:
        return len(matched_urns)
    else:
        raise AssertionError(
            f"No urns found that match the pattern {regex_pattern}. Full list is {all_urns}"
        )
|
|
@@ -0,0 +1,12 @@
|
|
|
1
|
+
import pathlib
|
|
2
|
+
|
|
3
|
+
from datahub.sdk.entity import Entity
|
|
4
|
+
from datahub.testing import mce_helpers
|
|
5
|
+
|
|
6
|
+
|
|
7
|
+
def assert_entity_golden(entity: Entity, golden_path: pathlib.Path) -> None:
    """Compare the MCPs produced by *entity* against the golden file,
    requiring the emission order to match exactly."""
    entity_mcps = entity.as_mcps()
    mce_helpers.check_goldens_stream(
        outputs=entity_mcps,
        golden_path=golden_path,
        ignore_order=False,
    )
|
|
File without changes
|
|
File without changes
|
|
File without changes
|