acryl-datahub 1.0.0rc16__py3-none-any.whl → 1.0.0rc18__py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: the registry flags this version of acryl-datahub as possibly problematic.
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/METADATA +2632 -2602
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/RECORD +18 -15
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/entry_points.txt +1 -0
- datahub/_version.py +1 -1
- datahub/configuration/common.py +8 -0
- datahub/emitter/response_helper.py +145 -0
- datahub/emitter/rest_emitter.py +161 -3
- datahub/ingestion/graph/client.py +3 -0
- datahub/ingestion/sink/datahub_rest.py +4 -0
- datahub/ingestion/source/bigquery_v2/bigquery_config.py +2 -46
- datahub/ingestion/source/common/gcp_credentials_config.py +53 -0
- datahub/ingestion/source/salesforce.py +529 -276
- datahub/ingestion/source/snowflake/snowflake_queries.py +16 -13
- datahub/ingestion/source/sql/hive.py +13 -0
- datahub/ingestion/source/vertexai.py +697 -0
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/LICENSE +0 -0
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/WHEEL +0 -0
- {acryl_datahub-1.0.0rc16.dist-info → acryl_datahub-1.0.0rc18.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/snowflake/snowflake_queries.py

```diff
@@ -403,6 +403,7 @@ class SnowflakeQueriesExtractor(SnowflakeStructuredReportMixin, Closeable):
                 res["session_id"],
                 res["query_start_time"],
                 object_modified_by_ddl,
+                res["query_type"],
             )
             if known_ddl_entry:
                 return known_ddl_entry
```
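The new `res["query_type"]` argument threads Snowflake's QUERY_TYPE column through to the DDL parser. A hedged illustration of the shapes involved, with invented values: as the next hunk's condition implies, Snowflake reports a table rename with QUERY_TYPE "RENAME_TABLE" while the OBJECT_MODIFIED_BY_DDL entry still carries operationType "ALTER", so the parser needs both.

```python
# Invented example row; column names follow Snowflake's QUERY_HISTORY
# and ACCESS_HISTORY views, values are made up.
res = {
    "query_text": "ALTER TABLE db.sc.t1 RENAME TO db.sc.t2",
    "session_id": "1234567890",
    "query_start_time": "2025-01-01 00:00:00+00:00",
    "query_type": "RENAME_TABLE",  # the column now passed through
}
object_modified_by_ddl = {
    "operationType": "ALTER",  # renames do NOT surface as "RENAME_TABLE" here
    "objectName": "DB.SC.T1",
    "properties": {"objectName": {"value": "DB.SC.T2"}},
}
```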
datahub/ingestion/source/snowflake/snowflake_queries.py

```diff
@@ -537,40 +538,42 @@ class SnowflakeQueriesExtractor(SnowflakeStructuredReportMixin, Closeable):
         session_id: str,
         timestamp: datetime,
         object_modified_by_ddl: dict,
+        query_type: str,
     ) -> Optional[Union[TableRename, TableSwap]]:
         timestamp = timestamp.astimezone(timezone.utc)
-        if object_modified_by_ddl[
-            "operationType"
-        ] == "ALTER" and object_modified_by_ddl["properties"].get("swapTargetName"):
-            urn1 = self.identifiers.gen_dataset_urn(
+        if (
+            object_modified_by_ddl["operationType"] == "ALTER"
+            and query_type == "RENAME_TABLE"
+            and object_modified_by_ddl["properties"].get("objectName")
+        ):
+            original_un = self.identifiers.gen_dataset_urn(
                 self.identifiers.get_dataset_identifier_from_qualified_name(
                     object_modified_by_ddl["objectName"]
                 )
             )
 
-            urn2 = self.identifiers.gen_dataset_urn(
+            new_urn = self.identifiers.gen_dataset_urn(
                 self.identifiers.get_dataset_identifier_from_qualified_name(
-                    object_modified_by_ddl["properties"]["swapTargetName"]["value"]
+                    object_modified_by_ddl["properties"]["objectName"]["value"]
                 )
             )
-
-            return TableSwap(urn1, urn2, query, session_id, timestamp)
+            return TableRename(original_un, new_urn, query, session_id, timestamp)
         elif object_modified_by_ddl[
             "operationType"
-        ] == "RENAME_TABLE" and object_modified_by_ddl["properties"].get("objectName"):
-            original_un = self.identifiers.gen_dataset_urn(
+        ] == "ALTER" and object_modified_by_ddl["properties"].get("swapTargetName"):
+            urn1 = self.identifiers.gen_dataset_urn(
                 self.identifiers.get_dataset_identifier_from_qualified_name(
                     object_modified_by_ddl["objectName"]
                 )
             )
 
-            new_urn = self.identifiers.gen_dataset_urn(
+            urn2 = self.identifiers.gen_dataset_urn(
                 self.identifiers.get_dataset_identifier_from_qualified_name(
-                    object_modified_by_ddl["properties"]["objectName"]["value"]
+                    object_modified_by_ddl["properties"]["swapTargetName"]["value"]
                 )
             )
 
-            return TableRename(original_un, new_urn, query, session_id, timestamp)
+            return TableSwap(urn1, urn2, query, session_id, timestamp)
         else:
             self.report.num_ddl_queries_dropped += 1
             return None
```
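The reordered branches fix rename detection: in OBJECT_MODIFIED_BY_DDL both renames and swaps surface as operationType "ALTER", so the rename branch now keys off the new query_type argument plus the properties.objectName payload, while swaps keep matching on properties.swapTargetName. A minimal standalone sketch of the resulting dispatch, assuming the payload shapes shown earlier (an illustration, not the exact class method):

```python
def classify_ddl(object_modified_by_ddl: dict, query_type: str) -> str:
    """Mirror of the if/elif/else above, reduced to its decision logic."""
    if object_modified_by_ddl["operationType"] != "ALTER":
        return "dropped"
    props = object_modified_by_ddl.get("properties", {})
    if query_type == "RENAME_TABLE" and props.get("objectName"):
        return "rename"  # ALTER TABLE old RENAME TO new -> TableRename
    if props.get("swapTargetName"):
        return "swap"  # ALTER TABLE a SWAP WITH b -> TableSwap
    return "dropped"  # counted as num_ddl_queries_dropped


assert classify_ddl(
    {"operationType": "ALTER", "properties": {"objectName": {"value": "T2"}}},
    "RENAME_TABLE",
) == "rename"
```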
datahub/ingestion/source/sql/hive.py

```diff
@@ -777,6 +777,7 @@ class HiveSource(TwoTierSQLAlchemySource):
             column,
             inspector,
             pk_constraints,
+            partition_keys=partition_keys,
         )
 
         if self._COMPLEX_TYPE.match(fields[0].nativeDataType) and isinstance(
```
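Forwarding partition_keys lets the base SQLAlchemy source mark Hive partition columns on the emitted schema fields (DataHub's schema model carries an isPartitioningKey flag for this). A hedged, self-contained sketch of the matching logic this enables; it paraphrases the idea rather than copying the sql_common implementation:

```python
from typing import List, Optional


def is_partition_column(column_name: str, partition_keys: Optional[List[str]]) -> bool:
    # True when the column is one of the table's partition keys.
    return bool(partition_keys) and column_name in partition_keys


assert is_partition_column("ds", ["ds", "region"])
assert not is_partition_column("user_id", ["ds", "region"])
```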
```diff
@@ -849,3 +850,15 @@ class HiveSource(TwoTierSQLAlchemySource):
             default_db=default_db,
             default_schema=default_schema,
         )
+
+    def get_partitions(
+        self, inspector: Inspector, schema: str, table: str
+    ) -> Optional[List[str]]:
+        partition_columns: List[dict] = inspector.get_indexes(
+            table_name=table, schema=schema
+        )
+        for partition_column in partition_columns:
+            if partition_column.get("column_names"):
+                return partition_column.get("column_names")
+
+        return []
```
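The new get_partitions helper leans on a PyHive behavior: the Hive SQLAlchemy dialect surfaces partition keys through get_indexes(), typically as a pseudo-index named "partition". A hedged demo against a stub inspector; the return shape below is an assumption modeled on that behavior, not PyHive's documented contract:

```python
from typing import List, Optional


class StubInspector:
    """Fakes the assumed PyHive get_indexes() output for a partitioned table."""

    def get_indexes(self, table_name: str, schema: str) -> List[dict]:
        return [{"name": "partition", "column_names": ["ds", "region"], "unique": False}]


def get_partitions(inspector, schema: str, table: str) -> Optional[List[str]]:
    # Same logic as the method added above, detached from HiveSource.
    for index in inspector.get_indexes(table_name=table, schema=schema):
        if index.get("column_names"):
            return index.get("column_names")
    return []


print(get_partitions(StubInspector(), schema="logs", table="events"))  # ['ds', 'region']
```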