acryl-datahub 1.2.0.2rc3__py3-none-any.whl → 1.2.0.3rc2__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
Potentially problematic release: this version of acryl-datahub might be problematic.
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/METADATA +2571 -2571
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/RECORD +21 -21
- datahub/_version.py +1 -1
- datahub/emitter/rest_emitter.py +18 -5
- datahub/ingestion/graph/client.py +19 -3
- datahub/ingestion/sink/datahub_rest.py +2 -0
- datahub/ingestion/source/dbt/dbt_common.py +10 -0
- datahub/ingestion/source/hex/query_fetcher.py +1 -1
- datahub/ingestion/source/sql/athena_properties_extractor.py +2 -2
- datahub/ingestion/source/sql/vertica.py +3 -0
- datahub/ingestion/source/sql_queries.py +86 -44
- datahub/ingestion/source/unity/proxy.py +112 -22
- datahub/ingestion/source/unity/source.py +7 -10
- datahub/metadata/schema.avsc +9 -0
- datahub/metadata/schemas/DataJobInputOutput.avsc +8 -0
- datahub/metadata/schemas/MetadataChangeEvent.avsc +9 -0
- datahub/metadata/schemas/UpstreamLineage.avsc +9 -0
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/entry_points.txt +0 -0
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/licenses/LICENSE +0 -0
- {acryl_datahub-1.2.0.2rc3.dist-info → acryl_datahub-1.2.0.3rc2.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/unity/proxy.py
CHANGED

@@ -4,8 +4,9 @@ Manage the communication with DataBricks Server and provide equivalent dataclass
 
 import dataclasses
 import logging
+from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime
-from typing import Any, Dict, Iterable, List, Optional, Union, cast
+from typing import Any, Dict, Iterable, List, Optional, Sequence, Union, cast
 from unittest.mock import patch
 
 import cachetools
@@ -28,6 +29,7 @@ from databricks.sdk.service.sql import (
 )
 from databricks.sdk.service.workspace import ObjectType
 from databricks.sql import connect
+from databricks.sql.types import Row
 
 from datahub._version import nice_version_name
 from datahub.api.entities.external.unity_catalog_external_entites import UnityCatalogTag
@@ -291,10 +293,59 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
                 method, path, body={**body, "page_token": response["next_page_token"]}
             )
 
+    @cached(cachetools.FIFOCache(maxsize=100))
+    def get_catalog_column_lineage(self, catalog: str) -> Dict[str, Dict[str, dict]]:
+        """Get column lineage for all tables in a catalog."""
+        logger.info(f"Fetching column lineage for catalog: {catalog}")
+        try:
+            query = """
+                SELECT
+                    source_table_catalog, source_table_schema, source_table_name, source_column_name, source_type,
+                    target_table_schema, target_table_name, target_column_name,
+                    max(event_time)
+                FROM system.access.column_lineage
+                WHERE
+                    target_table_catalog = %s
+                    AND target_table_schema IS NOT NULL
+                    AND target_table_name IS NOT NULL
+                    AND target_column_name IS NOT NULL
+                    AND source_table_catalog IS NOT NULL
+                    AND source_table_schema IS NOT NULL
+                    AND source_table_name IS NOT NULL
+                    AND source_column_name IS NOT NULL
+                GROUP BY
+                    source_table_catalog, source_table_schema, source_table_name, source_column_name, source_type,
+                    target_table_schema, target_table_name, target_column_name
+            """
+            rows = self._execute_sql_query(query, (catalog,))
+
+            result_dict: Dict[str, Dict[str, dict]] = {}
+            for row in rows:
+                result_dict.setdefault(row["target_table_schema"], {}).setdefault(
+                    row["target_table_name"], {}
+                ).setdefault(row["target_column_name"], []).append(
+                    # make fields look like the response from the older HTTP API
+                    {
+                        "catalog_name": row["source_table_catalog"],
+                        "schema_name": row["source_table_schema"],
+                        "table_name": row["source_table_name"],
+                        "name": row["source_column_name"],
+                    }
+                )
+
+            return result_dict
+        except Exception as e:
+            logger.warning(
+                f"Error getting column lineage for catalog {catalog}: {e}",
+                exc_info=True,
+            )
+            return {}
+
     def list_lineages_by_table(
         self, table_name: str, include_entity_lineage: bool
     ) -> dict:
         """List table lineage by table name."""
+        logger.debug(f"Getting table lineage for {table_name}")
         return self._workspace_client.api_client.do(  # type: ignore
             method="GET",
             path="/api/2.0/lineage-tracking/table-lineage",
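The new get_catalog_column_lineage method folds the flat rows returned from system.access.column_lineage into a schema -> table -> column mapping whose leaf entries mimic the older HTTP API payload. Below is a standalone sketch of that folding step (not part of the package), using plain dicts as hypothetical stand-ins for the databricks.sql Row objects:

# Sketch only: fold flat lineage rows into
# {target_schema: {target_table: {target_column: [source column dicts]}}}.
from typing import Dict

rows = [
    {
        "source_table_catalog": "raw", "source_table_schema": "sales",
        "source_table_name": "orders", "source_column_name": "order_id",
        "target_table_schema": "mart", "target_table_name": "orders_daily",
        "target_column_name": "order_id",
    },
]

result: Dict[str, Dict[str, dict]] = {}
for row in rows:
    result.setdefault(row["target_table_schema"], {}).setdefault(
        row["target_table_name"], {}
    ).setdefault(row["target_column_name"], []).append(
        {
            "catalog_name": row["source_table_catalog"],
            "schema_name": row["source_table_schema"],
            "table_name": row["source_table_name"],
            "name": row["source_column_name"],
        }
    )

# result["mart"]["orders_daily"]["order_id"] now holds one upstream column:
# [{"catalog_name": "raw", "schema_name": "sales", "table_name": "orders", "name": "order_id"}]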
@@ -304,13 +355,24 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             },
         )
 
-    def list_lineages_by_column(self, table_name: str, column_name: str) -> dict:
+    def list_lineages_by_column(self, table_name: str, column_name: str) -> list:
         """List column lineage by table name and column name."""
-        return self._workspace_client.api_client.do(  # type: ignore
-            "GET",
-            "/api/2.0/lineage-tracking/column-lineage",
-            body={"table_name": table_name, "column_name": column_name},
-        )
+        logger.debug(f"Getting column lineage for {table_name}.{column_name}")
+        try:
+            return (
+                self._workspace_client.api_client.do(  # type: ignore
+                    "GET",
+                    "/api/2.0/lineage-tracking/column-lineage",
+                    body={"table_name": table_name, "column_name": column_name},
+                ).get("upstream_cols")
+                or []
+            )
+        except Exception as e:
+            logger.warning(
+                f"Error getting column lineage on table {table_name}, column {column_name}: {e}",
+                exc_info=True,
+            )
+            return []
 
     def table_lineage(self, table: Table, include_entity_lineage: bool) -> None:
         if table.schema.catalog.type == CustomCatalogType.HIVE_METASTORE_CATALOG:
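The rewritten list_lineages_by_column always returns a list: the .get("upstream_cols") or [] guard covers both a missing key and an explicit null, and any API error degrades to an empty result instead of failing ingestion. A minimal sketch of that guard on hypothetical response payloads:

# Sketch only: how ".get(...) or []" behaves for the payload shapes the
# lineage API can plausibly return (normal, null value, missing key).
responses = [
    {"upstream_cols": [{"name": "id"}]},  # normal payload
    {"upstream_cols": None},              # key present but null
    {},                                   # key absent
]
for resp in responses:
    cols = resp.get("upstream_cols") or []
    print(len(cols))  # -> 1, 0, 0: callers can always iterate safely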
@@ -348,23 +410,51 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
                 f"Error getting lineage on table {table.ref}: {e}", exc_info=True
             )
 
-    def get_column_lineage(
+    def get_column_lineage(
+        self,
+        table: Table,
+        column_names: List[str],
+        *,
+        max_workers: Optional[int] = None,
+    ) -> None:
         try:
-
-
-
-
-
-
-
+            # use the newer system tables if we have a SQL warehouse, otherwise fall back
+            # and use the older (and much slower) HTTP API.
+            if self.warehouse_id:
+                lineage = (
+                    self.get_catalog_column_lineage(table.ref.catalog)
+                    .get(table.ref.schema, {})
+                    .get(table.ref.table, {})
                 )
-
-
-
-
+            else:
+                with ThreadPoolExecutor(max_workers=max_workers) as executor:
+                    futures = [
+                        executor.submit(
+                            self.list_lineages_by_column,
+                            table.ref.qualified_table_name,
+                            column_name,
+                        )
+                        for column_name in column_names
+                    ]
+                    lineage = {
+                        column_name: future.result()
+                        for column_name, future in zip(column_names, futures)
+                    }
+
+            for column_name in column_names:
+                for item in lineage.get(column_name) or []:
+                    table_ref = TableReference.create_from_lineage(
+                        item,
+                        table.schema.catalog.metastore,
+                    )
+                    if table_ref:
+                        table.upstreams.setdefault(table_ref, {}).setdefault(
+                            column_name, []
+                        ).append(item["name"])
+
         except Exception as e:
             logger.warning(
-                f"Error getting column lineage on table {table.ref}
+                f"Error getting column lineage on table {table.ref}: {e}",
                 exc_info=True,
             )
 
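When no SQL warehouse is configured, get_column_lineage falls back to one HTTP call per column, fanned out on a ThreadPoolExecutor and zipped back to the column names so results stay aligned with their inputs. A self-contained sketch of that pattern, with fetch_upstreams as a hypothetical stand-in for list_lineages_by_column:

# Sketch only: submit one task per column, then pair futures back up with
# their column names via zip (executor.submit preserves list order here).
from concurrent.futures import ThreadPoolExecutor
from typing import Dict, List


def fetch_upstreams(table: str, column: str) -> List[dict]:
    # hypothetical stand-in for the per-column lineage HTTP call
    return [{"name": f"src_{column}"}]


columns = ["id", "amount", "created_at"]
with ThreadPoolExecutor(max_workers=4) as executor:
    futures = [
        executor.submit(fetch_upstreams, "mart.orders_daily", column)
        for column in columns
    ]
    lineage: Dict[str, List[dict]] = {
        column: future.result() for column, future in zip(columns, futures)
    }

print(lineage["amount"])  # [{'name': 'src_amount'}]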
@@ -504,14 +594,14 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             executed_as_user_name=info.executed_as_user_name,
         )
 
-    def _execute_sql_query(self, query: str) -> List[
+    def _execute_sql_query(self, query: str, params: Sequence[Any] = ()) -> List[Row]:
         """Execute SQL query using databricks-sql connector for better performance"""
         try:
             with (
                 connect(**self._sql_connection_params) as connection,
                 connection.cursor() as cursor,
             ):
-                cursor.execute(query)
+                cursor.execute(query, list(params))
                 return cursor.fetchall()
 
         except Exception as e:
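_execute_sql_query now accepts a parameter sequence and forwards it to cursor.execute, so the catalog name above is bound as a query parameter rather than interpolated into the SQL string. A hedged sketch of the same call shape against the databricks-sql connector; the connection details are placeholders, and the %s marker mirrors the helper above on the assumption that the connector is configured for inline positional parameters:

# Sketch only, not the package's code path. Hostname, HTTP path, and token are
# placeholders; parameter-marker style depends on connector version and mode.
from databricks.sql import connect

with (
    connect(
        server_hostname="example.cloud.databricks.com",
        http_path="/sql/1.0/warehouses/abc123",
        access_token="dapi-placeholder",
    ) as connection,
    connection.cursor() as cursor,
):
    cursor.execute(
        "SELECT count(*) FROM system.access.column_lineage "
        "WHERE target_table_catalog = %s",
        ["my_catalog"],
    )
    rows = cursor.fetchall()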
datahub/ingestion/source/unity/source.py
CHANGED

@@ -1,7 +1,6 @@
 import logging
 import re
 import time
-from concurrent.futures import ThreadPoolExecutor
 from typing import Dict, Iterable, List, Optional, Set, Tuple, Union
 from urllib.parse import urljoin
 
@@ -657,15 +656,13 @@ class UnityCatalogSource(StatefulIngestionSourceBase, TestableSource):
         if len(table.columns) > self.config.column_lineage_column_limit:
             self.report.num_column_lineage_skipped_column_count += 1
 
-
-
-
-
-
-
-
-                        column.name,
-                    )
+        column_names = [
+            column.name
+            for column in table.columns[: self.config.column_lineage_column_limit]
+        ]
+        self.unity_catalog_api_proxy.get_column_lineage(
+            table, column_names, max_workers=self.config.lineage_max_workers
+        )
 
         return self._generate_lineage_aspect(self.gen_dataset_urn(table.ref), table)
 
datahub/metadata/schema.avsc
CHANGED

@@ -4319,6 +4319,14 @@
         "doc": "The type of upstream entity"
       },
       {
+        "Searchable": {
+          "/*": {
+            "fieldName": "fineGrainedUpstreams",
+            "fieldType": "URN",
+            "hasValuesFieldName": "hasFineGrainedUpstreams",
+            "queryByDefault": false
+          }
+        },
         "Urn": "Urn",
         "urn_is_array": true,
         "type": [
@@ -12875,6 +12883,7 @@
         "Searchable": {
           "fieldName": "upstreams",
           "fieldType": "URN",
+          "hasValuesFieldName": "hasUpstreams",
           "queryByDefault": false
         },
         "java": {
datahub/metadata/schemas/DataJobInputOutput.avsc
CHANGED

@@ -375,6 +375,14 @@
         "doc": "The type of upstream entity"
       },
       {
+        "Searchable": {
+          "/*": {
+            "fieldName": "fineGrainedUpstreams",
+            "fieldType": "URN",
+            "hasValuesFieldName": "hasFineGrainedUpstreams",
+            "queryByDefault": false
+          }
+        },
         "type": [
           "null",
           {
datahub/metadata/schemas/MetadataChangeEvent.avsc
CHANGED

@@ -3070,6 +3070,14 @@
         "doc": "The type of upstream entity"
       },
       {
+        "Searchable": {
+          "/*": {
+            "fieldName": "fineGrainedUpstreams",
+            "fieldType": "URN",
+            "hasValuesFieldName": "hasFineGrainedUpstreams",
+            "queryByDefault": false
+          }
+        },
         "type": [
           "null",
           {

@@ -3691,6 +3699,7 @@
         "Searchable": {
           "fieldName": "upstreams",
           "fieldType": "URN",
+          "hasValuesFieldName": "hasUpstreams",
           "queryByDefault": false
         },
         "java": {
datahub/metadata/schemas/UpstreamLineage.avsc
CHANGED

@@ -94,6 +94,7 @@
         "Searchable": {
           "fieldName": "upstreams",
           "fieldType": "URN",
+          "hasValuesFieldName": "hasUpstreams",
           "queryByDefault": false
         },
         "java": {

@@ -199,6 +200,14 @@
         "doc": "The type of upstream entity"
       },
       {
+        "Searchable": {
+          "/*": {
+            "fieldName": "fineGrainedUpstreams",
+            "fieldType": "URN",
+            "hasValuesFieldName": "hasFineGrainedUpstreams",
+            "queryByDefault": false
+          }
+        },
         "type": [
           "null",
           {