acryl-datahub 0.15.0.3__py3-none-any.whl → 0.15.0.4__py3-none-any.whl
This diff shows the changes between publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release.
This version of acryl-datahub might be problematic.
- acryl_datahub-0.15.0.4.dist-info/LICENSE +202 -0
- {acryl_datahub-0.15.0.3.dist-info → acryl_datahub-0.15.0.4.dist-info}/METADATA +2417 -2414
- {acryl_datahub-0.15.0.3.dist-info → acryl_datahub-0.15.0.4.dist-info}/RECORD +36 -33
- datahub/__init__.py +1 -1
- datahub/cli/container_cli.py +108 -0
- datahub/emitter/enum_helpers.py +4 -2
- datahub/emitter/mce_builder.py +4 -0
- datahub/emitter/mcp_builder.py +19 -0
- datahub/entrypoints.py +2 -0
- datahub/ingestion/api/decorators.py +2 -0
- datahub/ingestion/api/registry.py +3 -1
- datahub/ingestion/api/sink.py +12 -0
- datahub/ingestion/api/source.py +5 -2
- datahub/ingestion/source/aws/glue.py +11 -5
- datahub/ingestion/source/aws/s3_util.py +1 -24
- datahub/ingestion/source/bigquery_v2/bigquery_schema_gen.py +2 -2
- datahub/ingestion/source/dbt/dbt_common.py +2 -2
- datahub/ingestion/source/powerbi/powerbi.py +4 -4
- datahub/ingestion/source/powerbi/rest_api_wrapper/data_classes.py +6 -6
- datahub/ingestion/source/powerbi/rest_api_wrapper/powerbi_api.py +24 -18
- datahub/ingestion/source/s3/source.py +6 -2
- datahub/ingestion/source/slack/slack.py +6 -0
- datahub/ingestion/source/sql/hive_metastore.py +3 -3
- datahub/ingestion/source/sql/mssql/job_models.py +2 -2
- datahub/ingestion/source/sql/mssql/source.py +26 -11
- datahub/ingestion/source/sql/teradata.py +2 -2
- datahub/ingestion/source/tableau/tableau.py +23 -10
- datahub/metadata/_schema_classes.py +401 -401
- datahub/metadata/_urns/urn_defs.py +1857 -1408
- datahub/metadata/schema.avsc +16624 -16266
- datahub/sql_parsing/sql_parsing_aggregator.py +3 -3
- datahub/utilities/groupby.py +17 -0
- datahub/utilities/urns/_urn_base.py +6 -2
- {acryl_datahub-0.15.0.3.dist-info → acryl_datahub-0.15.0.4.dist-info}/WHEEL +0 -0
- {acryl_datahub-0.15.0.3.dist-info → acryl_datahub-0.15.0.4.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-0.15.0.3.dist-info → acryl_datahub-0.15.0.4.dist-info}/top_level.txt +0 -0
datahub/sql_parsing/sql_parsing_aggregator.py
@@ -2,7 +2,6 @@ import contextlib
 import dataclasses
 import enum
 import functools
-import itertools
 import json
 import logging
 import os
@@ -63,6 +62,7 @@ from datahub.utilities.file_backed_collections import (
     FileBackedDict,
     FileBackedList,
 )
+from datahub.utilities.groupby import groupby_unsorted
 from datahub.utilities.lossy_collections import LossyDict, LossyList
 from datahub.utilities.ordered_set import OrderedSet
 from datahub.utilities.perf_timer import PerfTimer
@@ -1314,8 +1314,8 @@ class SqlParsingAggregator(Closeable):
         upstream_aspect.fineGrainedLineages = []
         for downstream_column, all_upstream_columns in cll.items():
             # Group by query ID.
-            for query_id, upstream_columns_for_query in
-
+            for query_id, upstream_columns_for_query in groupby_unsorted(
+                all_upstream_columns.items(),
                 key=lambda x: x[1],
             ):
                 upstream_columns = [x[0] for x in upstream_columns_for_query]
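For context, Python's itertools.groupby (whose import is removed above) only merges consecutive items that share a key, so callers must sort the input by the grouping key first. Below is a minimal standalone sketch of that behavior; the (column, query_id) pairs are illustrative and not taken from the package. The groupby_unsorted helper introduced in the new file below removes the need for that extra sort.

# Standalone illustration (not package code): itertools.groupby on unsorted input.
import itertools

pairs = [("colA", "q1"), ("colB", "q2"), ("colC", "q1")]  # hypothetical (column, query_id) pairs

# Without pre-sorting, equal keys that are not adjacent land in separate groups.
fragmented = [(k, [c for c, _ in g]) for k, g in itertools.groupby(pairs, key=lambda x: x[1])]
print(fragmented)  # [('q1', ['colA']), ('q2', ['colB']), ('q1', ['colC'])]

# Grouping correctly with itertools.groupby requires sorting by the key first.
merged = [
    (k, [c for c, _ in g])
    for k, g in itertools.groupby(sorted(pairs, key=lambda x: x[1]), key=lambda x: x[1])
]
print(merged)  # [('q1', ['colA', 'colC']), ('q2', ['colB'])]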
datahub/utilities/groupby.py (new file)
@@ -0,0 +1,17 @@
+import collections
+from typing import Callable, Iterable, Tuple, TypeVar
+
+T = TypeVar("T")
+K = TypeVar("K")
+
+
+def groupby_unsorted(
+    iterable: Iterable[T], key: Callable[[T], K]
+) -> Iterable[Tuple[K, Iterable[T]]]:
+    """The default itertools.groupby() requires that the iterable is already sorted by the key.
+    This method is similar to groupby() but without the pre-sorted requirement."""
+
+    values = collections.defaultdict(list)
+    for v in iterable:
+        values[key(v)].append(v)
+    return values.items()
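A brief usage sketch of the new helper, assuming it is importable from the module path shown above; the sample pairs are hypothetical. Unlike itertools.groupby, it collects groups in a single pass, returns each group as a fully built list rather than a lazy iterator, and orders groups by the first occurrence of each key, so no prior sort is required.

# Illustrative usage of the new helper; the sample pairs are hypothetical.
from datahub.utilities.groupby import groupby_unsorted

pairs = [("colA", "q1"), ("colB", "q2"), ("colC", "q1")]

for query_id, group in groupby_unsorted(pairs, key=lambda x: x[1]):
    print(query_id, [col for col, _ in group])
# q1 ['colA', 'colC']
# q2 ['colB']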
|
@@ -6,6 +6,7 @@ from typing import ClassVar, Dict, List, Optional, Type, Union
|
|
|
6
6
|
from deprecated import deprecated
|
|
7
7
|
from typing_extensions import Self
|
|
8
8
|
|
|
9
|
+
from datahub._codegen.aspect import _Aspect
|
|
9
10
|
from datahub.utilities.urns.error import InvalidUrnError
|
|
10
11
|
|
|
11
12
|
URN_TYPES: Dict[str, Type["_SpecificUrn"]] = {}
|
|
@@ -270,7 +271,7 @@ class Urn:
|
|
|
270
271
|
|
|
271
272
|
|
|
272
273
|
class _SpecificUrn(Urn):
|
|
273
|
-
ENTITY_TYPE: str = ""
|
|
274
|
+
ENTITY_TYPE: ClassVar[str] = ""
|
|
274
275
|
|
|
275
276
|
def __init_subclass__(cls) -> None:
|
|
276
277
|
# Validate the subclass.
|
|
@@ -286,7 +287,10 @@ class _SpecificUrn(Urn):
|
|
|
286
287
|
return super().__init_subclass__()
|
|
287
288
|
|
|
288
289
|
@classmethod
|
|
289
|
-
def underlying_key_aspect_type(cls) -> Type:
|
|
290
|
+
def underlying_key_aspect_type(cls) -> Type[_Aspect]:
|
|
291
|
+
raise NotImplementedError()
|
|
292
|
+
|
|
293
|
+
def to_key_aspect(self) -> _Aspect:
|
|
290
294
|
raise NotImplementedError()
|
|
291
295
|
|
|
292
296
|
@classmethod
|
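Two typing-level changes here: ENTITY_TYPE is now annotated as ClassVar[str], marking it for type checkers as a class-level constant rather than a per-instance field, and underlying_key_aspect_type() now declares that it returns an _Aspect subclass, with a separate to_key_aspect() stub added alongside it. Below is a standalone sketch of what the ClassVar annotation changes for a type checker; the class and attribute names are illustrative, not the DataHub ones.

# Standalone illustration (not DataHub code) of ClassVar vs. a plain annotation.
from typing import ClassVar


class Base:
    KIND_PLAIN: str = ""               # checkers treat this as an instance attribute
    KIND_CONSTANT: ClassVar[str] = ""  # checkers treat this as a class-level constant


class Child(Base):
    KIND_PLAIN = "dataset"     # overriding in a subclass body is fine either way
    KIND_CONSTANT = "dataset"


obj = Child()
obj.KIND_PLAIN = "chart"       # accepted by mypy/pyright
# obj.KIND_CONSTANT = "chart"  # rejected: cannot assign to a ClassVar through an instance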
|
File without changes
|
|
File without changes
|
|
File without changes
|