acryl_datahub-1.2.0.7rc3-py3-none-any.whl → acryl_datahub-1.2.0.7rc4-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of acryl-datahub might be problematic; consult the package registry's advisory page for more details.

@@ -1,7 +1,7 @@
1
- acryl_datahub-1.2.0.7rc3.dist-info/licenses/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
1
+ acryl_datahub-1.2.0.7rc4.dist-info/licenses/LICENSE,sha256=9xNHpsD0uYF5ONzXsKDCuHHB-xbiCrSbueWXqrTNsxk,11365
2
2
  datahub/__init__.py,sha256=aq_i5lVREmoLfYIqcx_pEQicO855YlhD19tWc1eZZNI,59
3
3
  datahub/__main__.py,sha256=pegIvQ9hzK7IhqVeUi1MeADSZ2QlP-D3K0OQdEg55RU,106
4
- datahub/_version.py,sha256=AZYPfBKOdNawXnsQEXsb1wp4Gz-aFiiz2KYYWSc4CbM,323
4
+ datahub/_version.py,sha256=82EBfeYSMr3rKnGGc8fqqoIajsOjaTI4AIrgFadY4GE,323
5
5
  datahub/entrypoints.py,sha256=9Qf-37rNnTzbGlx8S75OCDazIclFp6zWNcCEL1zCZto,9015
6
6
  datahub/errors.py,sha256=p5rFAdAGVCk4Lqolol1YvthceadUSwpaCxLXRcyCCFQ,676
7
7
  datahub/py.typed,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -159,7 +159,7 @@ datahub/ingestion/api/auto_work_units/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCe
159
159
  datahub/ingestion/api/auto_work_units/auto_dataset_properties_aspect.py,sha256=ID_6N3nWl2qohsSGizUCqo3d2MNyDeVbyWroQpSOSsc,5059
160
160
  datahub/ingestion/api/auto_work_units/auto_ensure_aspect_size.py,sha256=0BwkpLhORbsiTHq0g_N_1cVVoZYdLR3qz02mNmsV9-M,4444
161
161
  datahub/ingestion/autogenerated/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
162
- datahub/ingestion/autogenerated/capability_summary.json,sha256=c0H4nQnRZpSlTwSU4RPIVdtZ5NHDkhkQ4S9z-Zyw8O8,110231
162
+ datahub/ingestion/autogenerated/capability_summary.json,sha256=4dBPgl4z4kIDPhk3L8GsAzSzJWGpsRlDt0wuPdVey5s,110230
163
163
  datahub/ingestion/autogenerated/lineage.json,sha256=8BdZF-5V5kJbX4mfFav8Zg-jHjzfkAEGk-pu1atLN4I,10029
164
164
  datahub/ingestion/autogenerated/lineage_helper.py,sha256=I_k1pZSCCCjDbUVifPTfy6fkmV8jqdVhbirE8EkpmxI,4748
165
165
  datahub/ingestion/extractor/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
@@ -261,7 +261,7 @@ datahub/ingestion/source/bigquery_v2/bigquery.py,sha256=u4-LLt6ZDe3hKqLWqEByYpc0
261
261
  datahub/ingestion/source/bigquery_v2/bigquery_audit.py,sha256=kEwWhq3ch6WT4q4hcX8-fvQh28KgrNfspFwIytO3vQA,25103
262
262
  datahub/ingestion/source/bigquery_v2/bigquery_audit_log_api.py,sha256=LuGJ6LgPViLIfDQfylxlQ3CA7fZYM5MDt8M-7sfzm84,5096
263
263
  datahub/ingestion/source/bigquery_v2/bigquery_config.py,sha256=DYfZQIwcB9kzf0Kv_m5sUhdBYtrl4LTriZZ7n7mK4-w,21915
264
- datahub/ingestion/source/bigquery_v2/bigquery_connection.py,sha256=sq9Nk11W5cMPs4X9ST6OBb3UnHkSeyBHtB_bKU1lOj8,2799
264
+ datahub/ingestion/source/bigquery_v2/bigquery_connection.py,sha256=6XFCc0oxxU3R4IPyYHaf3YMETlMD4ztkNpkf4kf1Elw,3171
265
265
  datahub/ingestion/source/bigquery_v2/bigquery_data_reader.py,sha256=DeT3v_Z82__8En0FcZ0kavBAWQoRvSZ5Rppm9eeDAb8,2393
266
266
  datahub/ingestion/source/bigquery_v2/bigquery_helper.py,sha256=QER3gY8e_k1_eNVj7cBso7ZzrWl_vO5PYSa6CpvqNx8,1554
267
267
  datahub/ingestion/source/bigquery_v2/bigquery_platform_resource_helper.py,sha256=9_sfX8BE2vt9RjBMyq27UxCxBaSlD5o3L4gQxrwlPvA,4961
@@ -442,7 +442,7 @@ datahub/ingestion/source/qlik_sense/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQ
442
442
  datahub/ingestion/source/qlik_sense/config.py,sha256=oyCqkGrY9tmFJY9cPD9B7DdkmR7eQ30Awt-iqgY-HUs,3892
443
443
  datahub/ingestion/source/qlik_sense/data_classes.py,sha256=3JBELAeadKTjDyfrhx6qhHwPChXRGOL95gRAUyPhKQU,6555
444
444
  datahub/ingestion/source/qlik_sense/qlik_api.py,sha256=KoBaD1VowYrbaRg1rjDP1_mmPk9j-3u1r9JNm15rK_w,13187
445
- datahub/ingestion/source/qlik_sense/qlik_sense.py,sha256=xehXgOnG9ah0GE5b8wm9OJYwjLq5JhkLeRzxv3dvTG4,22598
445
+ datahub/ingestion/source/qlik_sense/qlik_sense.py,sha256=d3oq7de4DE2KZHP8cMm812nQxFU222-LQcTV0SJLcw0,22597
446
446
  datahub/ingestion/source/qlik_sense/websocket_connection.py,sha256=jp39OInvjCN9BtnKsHU_aa1B3X9hVHqSmD25stXuqHk,1940
447
447
  datahub/ingestion/source/redshift/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
448
448
  datahub/ingestion/source/redshift/config.py,sha256=wIRWF6vjsTkYIlAou8MLK_3FHfHrk6TlDXFaI-qMA7I,9193
@@ -561,22 +561,23 @@ datahub/ingestion/source/tableau/tableau_server_wrapper.py,sha256=nSyx9RzC6TCQDm
561
561
  datahub/ingestion/source/tableau/tableau_validation.py,sha256=Hjbfc1AMIkGgzo5ffWXtNRjrxSxzHvw7-dYZDt4d3WE,1819
562
562
  datahub/ingestion/source/unity/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
563
563
  datahub/ingestion/source/unity/analyze_profiler.py,sha256=2pqkFY30CfN4aHgFZZntjeG0hNhBytZJvXC13VfTc1I,4689
564
- datahub/ingestion/source/unity/config.py,sha256=mCHyFnNqVjD_TWInztBMGISbPs1apCeSOo5rrEMb0qA,17364
564
+ datahub/ingestion/source/unity/config.py,sha256=DmG7kdqSA2unVnzCuFCM-LihWzqrhNtbHXRarebNv38,20331
565
565
  datahub/ingestion/source/unity/connection_test.py,sha256=Dwpz4AIc6ZDwq6pWmRCSCuDUgNjPP_bVAVJumgAAS4w,2661
566
566
  datahub/ingestion/source/unity/ge_profiler.py,sha256=NBRHZceq-f95iUn7u0h7cgcd9nAc48Aa-lmp_BqE0As,8409
567
567
  datahub/ingestion/source/unity/hive_metastore_proxy.py,sha256=IAWWJjaW0si_UF52Se2D7wmdYRY_afUG4QlVmQu6xaw,15351
568
568
  datahub/ingestion/source/unity/platform_resource_repository.py,sha256=znDmtddQB6B0owcWgOf5DACV2hbe7lseO7hwGszYTYs,541
569
- datahub/ingestion/source/unity/proxy.py,sha256=7vWdYF0qykEB27SkUSHRG-6tVs8xwTk7Y5mMVDCt7SM,41936
569
+ datahub/ingestion/source/unity/proxy.py,sha256=7TG1B9vdVdM3mmVkHDaLv2AXFfMkx1o8gWTED9_BIUk,45431
570
+ datahub/ingestion/source/unity/proxy_patch.py,sha256=gVYl5Fm_ase0iwBf3yDg7PE3bbTOl92RR-JgofHNkus,12374
570
571
  datahub/ingestion/source/unity/proxy_profiling.py,sha256=WLqvYP6MziaisA4LYL4T_GA-kPt6Xdde7bfaYsjYw40,9663
571
572
  datahub/ingestion/source/unity/proxy_types.py,sha256=dp7fRqIjaFCn6ivbgXOGHcw9bQQhZg6u-fdTK053oFM,10163
572
573
  datahub/ingestion/source/unity/report.py,sha256=_2frKPv_2RRFcCfqlKOks3YR5lrUMIa3zdFJtNO-m6E,3394
573
- datahub/ingestion/source/unity/source.py,sha256=oVFDeG-quEVVeJkiPiX1yEu7vU5kcgPF8q6Ji0JWhhM,57333
574
+ datahub/ingestion/source/unity/source.py,sha256=fC8pgacaXr9AQlwmz9FynJMJgNQ9MsDFbxUGxQRVtWo,57897
574
575
  datahub/ingestion/source/unity/tag_entities.py,sha256=-Z-XYc1XhquE-Eoksn9v0o11ZjV9CWz8n6zeXLbzluQ,7275
575
576
  datahub/ingestion/source/unity/usage.py,sha256=0wETBAaZvHI_EGgBlxX3bKsVHEAdnUV8_bKI_lbyWjY,11500
576
577
  datahub/ingestion/source/usage/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
577
578
  datahub/ingestion/source/usage/clickhouse_usage.py,sha256=M6YVQqwJoFqJPxlTr62lFwxfDeX2-_9Diw6qtcq2XWM,10244
578
579
  datahub/ingestion/source/usage/starburst_trino_usage.py,sha256=E5wlRQ-jkwSqFaHWgwnWRPu5IiWwxOnPXX1bCv_v__E,10815
579
- datahub/ingestion/source/usage/usage_common.py,sha256=uuCgIduhlRL2zIAN8rymZ5cZn1WF6akZ-ZbbaVYo9_w,9813
580
+ datahub/ingestion/source/usage/usage_common.py,sha256=fiId6iRWU71CQg7EkSbqo09z7WIH4_MwxpeH48FmPSk,9876
580
581
  datahub/ingestion/source/vertexai/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
581
582
  datahub/ingestion/source/vertexai/vertexai.py,sha256=RuHda0mbc1DElYZIZ_W_hvkN7Eg4LIvI1fRFMvpHPB0,56012
582
583
  datahub/ingestion/source/vertexai/vertexai_config.py,sha256=uMnsv3b6TsPRH26u_JE_v1u0db7ANEAFlVxU5A6ELRM,989
@@ -956,9 +957,9 @@ datahub/sdk/_all_entities.py,sha256=eQAmD_fcEHlTShe1_nHpdvHxLDN9njk9bdLnuTrYg8M,
956
957
  datahub/sdk/_attribution.py,sha256=0Trh8steVd27GOr9MKCZeawbuDD2_q3GIsZlCtHqEUg,1321
957
958
  datahub/sdk/_shared.py,sha256=DAfClwa-hX8VlZUdNy7TvSgudqKPobf-yFza4VYP0NU,28776
958
959
  datahub/sdk/_utils.py,sha256=oXE2BzsXE5zmSkCP3R1tObD4RHnPeH_ps83D_Dw9JaQ,1169
959
- datahub/sdk/chart.py,sha256=9HgZU3yTmKFJCocbSWTbwW8ROfL7h4UbK_B7pHEG7n0,11102
960
+ datahub/sdk/chart.py,sha256=rFbEyRqgEO5HL7DyaVpR2q9zPR5y2fl52_iayhvJHQc,11756
960
961
  datahub/sdk/container.py,sha256=IjnFVGDpSFDvgHuuMb7C3VdBxhJuIMq0q6crOs5PupE,7899
961
- datahub/sdk/dashboard.py,sha256=kt8vD-DzoHLDOwk4Ik8ZoIKC0tPvgxX8rU9CMobs37o,15539
962
+ datahub/sdk/dashboard.py,sha256=WzZKUrAqsnie_rzoEhWvuv36pf60eYW4FsD_SskYLFo,15139
962
963
  datahub/sdk/dataflow.py,sha256=gdAPVVkyKvsKtsa1AwhN_LpzidG_XzV3nhtd1cjnzDA,11128
963
964
  datahub/sdk/datajob.py,sha256=5kU0txTDcn2ce3AhNry83TazPVhoYZ2rAPPNWM1_FP8,13677
964
965
  datahub/sdk/dataset.py,sha256=eABxeDJm1bxoi3kht-Ix8tbVaslFR1RInBG8AIUMk6k,31076
@@ -1111,8 +1112,8 @@ datahub_provider/operators/datahub_assertion_operator.py,sha256=uvTQ-jk2F0sbqqxp
1111
1112
  datahub_provider/operators/datahub_assertion_sensor.py,sha256=lCBj_3x1cf5GMNpHdfkpHuyHfVxsm6ff5x2Z5iizcAo,140
1112
1113
  datahub_provider/operators/datahub_operation_operator.py,sha256=aevDp2FzX7FxGlXrR0khoHNbxbhKR2qPEX5e8O2Jyzw,174
1113
1114
  datahub_provider/operators/datahub_operation_sensor.py,sha256=8fcdVBCEPgqy1etTXgLoiHoJrRt_nzFZQMdSzHqSG7M,168
1114
- acryl_datahub-1.2.0.7rc3.dist-info/METADATA,sha256=FTXpPemP2Dg9rGAxRL4fBzaLZlcglD_rkTTsSZhM4Do,186578
1115
- acryl_datahub-1.2.0.7rc3.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
1116
- acryl_datahub-1.2.0.7rc3.dist-info/entry_points.txt,sha256=qopCAD6qrsijaZ9mTw3UlPCKsE00C3t9MbkkWow7pi4,9943
1117
- acryl_datahub-1.2.0.7rc3.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
1118
- acryl_datahub-1.2.0.7rc3.dist-info/RECORD,,
1115
+ acryl_datahub-1.2.0.7rc4.dist-info/METADATA,sha256=E15QJWL7lHS1mLe36RJZOMqHRbzN-EvYISjREnc2LZk,186633
1116
+ acryl_datahub-1.2.0.7rc4.dist-info/WHEEL,sha256=_zCd3N1l69ArxyTb8rzEoP9TpbYXkqRFSNOD5OuxnTs,91
1117
+ acryl_datahub-1.2.0.7rc4.dist-info/entry_points.txt,sha256=qopCAD6qrsijaZ9mTw3UlPCKsE00C3t9MbkkWow7pi4,9943
1118
+ acryl_datahub-1.2.0.7rc4.dist-info/top_level.txt,sha256=iLjSrLK5ox1YVYcglRUkcvfZPvKlobBWx7CTUXx8_GI,25
1119
+ acryl_datahub-1.2.0.7rc4.dist-info/RECORD,,
datahub/_version.py CHANGED
@@ -1,6 +1,6 @@
1
1
  # Published at https://pypi.org/project/acryl-datahub/.
2
2
  __package_name__ = "acryl-datahub"
3
- __version__ = "1.2.0.7rc3"
3
+ __version__ = "1.2.0.7rc4"
4
4
 
5
5
 
6
6
  def is_dev_mode() -> bool:
@@ -2575,7 +2575,7 @@
2575
2575
  },
2576
2576
  {
2577
2577
  "capability": "LINEAGE_FINE",
2578
- "description": "Disabled by default. ",
2578
+ "description": "Disabled by default.",
2579
2579
  "subtype_modifier": null,
2580
2580
  "supported": true
2581
2581
  },
@@ -2,16 +2,23 @@ import logging
2
2
  import os
3
3
  from typing import Any, Dict, Optional
4
4
 
5
+ from google.api_core.client_info import ClientInfo
5
6
  from google.cloud import bigquery, datacatalog_v1, resourcemanager_v3
6
7
  from google.cloud.logging_v2.client import Client as GCPLoggingClient
7
8
  from pydantic import Field, PrivateAttr
8
9
 
10
+ from datahub._version import __version__
9
11
  from datahub.configuration.common import ConfigModel
10
12
  from datahub.ingestion.source.common.gcp_credentials_config import GCPCredential
11
13
 
12
14
  logger = logging.getLogger(__name__)
13
15
 
14
16
 
17
+ def _get_bigquery_client_info() -> ClientInfo:
18
+ """Get ClientInfo with DataHub user-agent for BigQuery client identification"""
19
+ return ClientInfo(user_agent=f"datahub/{__version__}")
20
+
21
+
15
22
  class BigQueryConnectionConfig(ConfigModel):
16
23
  credential: Optional[GCPCredential] = Field(
17
24
  default=None, description="BigQuery credential informations"
@@ -41,7 +48,11 @@ class BigQueryConnectionConfig(ConfigModel):
41
48
 
42
49
  def get_bigquery_client(self) -> bigquery.Client:
43
50
  client_options = self.extra_client_options
44
- return bigquery.Client(self.project_on_behalf, **client_options)
51
+ return bigquery.Client(
52
+ self.project_on_behalf,
53
+ client_info=_get_bigquery_client_info(),
54
+ **client_options,
55
+ )
45
56
 
46
57
  def get_projects_client(self) -> resourcemanager_v3.ProjectsClient:
47
58
  return resourcemanager_v3.ProjectsClient()
@@ -101,7 +101,7 @@ logger = logging.getLogger(__name__)
101
101
  )
102
102
  @capability(
103
103
  SourceCapability.LINEAGE_FINE,
104
- "Disabled by default. ",
104
+ "Disabled by default.",
105
105
  )
106
106
  @capability(SourceCapability.PLATFORM_INSTANCE, "Enabled by default")
107
107
  @capability(
@@ -35,6 +35,10 @@ from datahub.utilities.global_warning_util import add_global_warning
35
35
 
36
36
  logger = logging.getLogger(__name__)
37
37
 
38
+ # Configuration default constants
39
+ INCLUDE_TAGS_DEFAULT = True
40
+ INCLUDE_HIVE_METASTORE_DEFAULT = True
41
+
38
42
 
39
43
  class LineageDataSource(ConfigEnum):
40
44
  AUTO = "AUTO"
@@ -137,10 +141,18 @@ class UnityCatalogSourceConfig(
137
141
  )
138
142
  warehouse_id: Optional[str] = pydantic.Field(
139
143
  default=None,
140
- description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.",
144
+ description=(
145
+ "SQL Warehouse id, for running queries. Must be explicitly provided to enable SQL-based features. "
146
+ "Required for the following features that need SQL access: "
147
+ "1) Tag extraction (include_tags=True) - queries system.information_schema.tags "
148
+ "2) Hive Metastore catalog (include_hive_metastore=True) - queries legacy hive_metastore catalog "
149
+ "3) System table lineage (lineage_data_source=SYSTEM_TABLES) - queries system.access.table_lineage/column_lineage "
150
+ "4) Data profiling (profiling.enabled=True) - runs SELECT/ANALYZE queries on tables. "
151
+ "When warehouse_id is missing, these features will be automatically disabled (with warnings) to allow ingestion to continue."
152
+ ),
141
153
  )
142
154
  include_hive_metastore: bool = pydantic.Field(
143
- default=True,
155
+ default=INCLUDE_HIVE_METASTORE_DEFAULT,
144
156
  description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.",
145
157
  )
146
158
  workspace_name: Optional[str] = pydantic.Field(
@@ -236,8 +248,12 @@ class UnityCatalogSourceConfig(
236
248
  )
237
249
 
238
250
  include_tags: bool = pydantic.Field(
239
- default=True,
240
- description="Option to enable/disable column/table tag extraction.",
251
+ default=INCLUDE_TAGS_DEFAULT,
252
+ description=(
253
+ "Option to enable/disable column/table tag extraction. "
254
+ "Requires warehouse_id to be set since tag extraction needs to query system.information_schema.tags. "
255
+ "If warehouse_id is not provided, this will be automatically disabled to allow ingestion to continue."
256
+ ),
241
257
  )
242
258
 
243
259
  _rename_table_ownership = pydantic_renamed_field(
@@ -310,8 +326,6 @@ class UnityCatalogSourceConfig(
310
326
  description="Details about the delta lake, incase to emit siblings",
311
327
  )
312
328
 
313
- scheme: str = DATABRICKS
314
-
315
329
  include_ml_model_aliases: bool = pydantic.Field(
316
330
  default=False,
317
331
  description="Whether to include ML model aliases in the ingestion.",
@@ -323,6 +337,51 @@ class UnityCatalogSourceConfig(
323
337
  description="Maximum number of ML models to ingest.",
324
338
  )
325
339
 
340
+ _forced_disable_tag_extraction: bool = pydantic.PrivateAttr(default=False)
341
+ _forced_disable_hive_metastore_extraction = pydantic.PrivateAttr(default=False)
342
+
343
+ scheme: str = DATABRICKS
344
+
345
+ def __init__(self, **data):
346
+ # First, let the parent handle the root validators and field processing
347
+ super().__init__(**data)
348
+
349
+ # After model creation, check if we need to auto-disable features
350
+ # based on the final warehouse_id value (which may have been set by root validators)
351
+ include_tags_original = data.get("include_tags", INCLUDE_TAGS_DEFAULT)
352
+ include_hive_metastore_original = data.get(
353
+ "include_hive_metastore", INCLUDE_HIVE_METASTORE_DEFAULT
354
+ )
355
+
356
+ # Track what we're force-disabling
357
+ forced_disable_tag_extraction = False
358
+ forced_disable_hive_metastore_extraction = False
359
+
360
+ # Check if features should be auto-disabled based on final warehouse_id
361
+ if include_tags_original and not self.warehouse_id:
362
+ forced_disable_tag_extraction = True
363
+ self.include_tags = False # Modify the model attribute directly
364
+ logger.warning(
365
+ "warehouse_id is not set but include_tags=True. "
366
+ "Automatically disabling tag extraction since it requires SQL queries. "
367
+ "Set warehouse_id to enable tag extraction."
368
+ )
369
+
370
+ if include_hive_metastore_original and not self.warehouse_id:
371
+ forced_disable_hive_metastore_extraction = True
372
+ self.include_hive_metastore = False # Modify the model attribute directly
373
+ logger.warning(
374
+ "warehouse_id is not set but include_hive_metastore=True. "
375
+ "Automatically disabling hive metastore extraction since it requires SQL queries. "
376
+ "Set warehouse_id to enable hive metastore extraction."
377
+ )
378
+
379
+ # Set private attributes
380
+ self._forced_disable_tag_extraction = forced_disable_tag_extraction
381
+ self._forced_disable_hive_metastore_extraction = (
382
+ forced_disable_hive_metastore_extraction
383
+ )
384
+
326
385
  def get_sql_alchemy_url(self, database: Optional[str] = None) -> str:
327
386
  uri_opts = {"http_path": f"/sql/1.0/warehouses/{self.warehouse_id}"}
328
387
  if database:
@@ -392,11 +451,6 @@ class UnityCatalogSourceConfig(
392
451
  "When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`."
393
452
  )
394
453
 
395
- if values.get("include_hive_metastore") and not values.get("warehouse_id"):
396
- raise ValueError(
397
- "When `include_hive_metastore` is set, `warehouse_id` must be set."
398
- )
399
-
400
454
  if values.get("warehouse_id") and profiling and not profiling.warehouse_id:
401
455
  profiling.warehouse_id = values["warehouse_id"]
402
456
 
@@ -4,6 +4,7 @@ Manage the communication with DataBricks Server and provide equivalent dataclass
4
4
 
5
5
  import dataclasses
6
6
  import logging
7
+ import os
7
8
  from concurrent.futures import ThreadPoolExecutor
8
9
  from datetime import datetime
9
10
  from typing import Any, Dict, Iterable, List, Optional, Sequence, Union, cast
@@ -71,6 +72,23 @@ logger: logging.Logger = logging.getLogger(__name__)
71
72
  _MAX_CONCURRENT_CATALOGS = 1
72
73
 
73
74
 
75
+ # Import and apply the proxy patch from separate module
76
+ try:
77
+ from datahub.ingestion.source.unity.proxy_patch import (
78
+ apply_databricks_proxy_fix,
79
+ mask_proxy_credentials,
80
+ )
81
+
82
+ # Apply the fix when the module is imported
83
+ apply_databricks_proxy_fix()
84
+ except ImportError as e:
85
+ logger.debug(f"Could not import proxy patch module: {e}")
86
+
87
+ # Fallback function for masking credentials
88
+ def mask_proxy_credentials(url: Optional[str]) -> str:
89
+ return "***MASKED***" if url else "None"
90
+
91
+
74
92
  @dataclasses.dataclass
75
93
  class TableInfoWithGeneration(TableInfo):
76
94
  generation: Optional[int] = None
@@ -411,7 +429,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
411
429
  query = f"""
412
430
  SELECT
413
431
  entity_type, entity_id,
414
- source_table_full_name, source_type,
432
+ source_table_full_name, source_type, source_path,
415
433
  target_table_full_name, target_type,
416
434
  max(event_time) as last_updated
417
435
  FROM system.access.table_lineage
@@ -420,7 +438,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
420
438
  {additional_where}
421
439
  GROUP BY
422
440
  entity_type, entity_id,
423
- source_table_full_name, source_type,
441
+ source_table_full_name, source_type, source_path,
424
442
  target_table_full_name, target_type
425
443
  """
426
444
  rows = self._execute_sql_query(query, [catalog, catalog])
@@ -432,6 +450,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
432
450
  source_full_name = row["source_table_full_name"]
433
451
  target_full_name = row["target_table_full_name"]
434
452
  source_type = row["source_type"]
453
+ source_path = row["source_path"]
435
454
  last_updated = row["last_updated"]
436
455
 
437
456
  # Initialize TableLineageInfo for both source and target tables if they're in our catalog
@@ -460,7 +479,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
460
479
  # Handle external upstreams (PATH type)
461
480
  elif source_type == "PATH":
462
481
  external_upstream = ExternalUpstream(
463
- path=source_full_name,
482
+ path=source_path,
464
483
  source_type=source_type,
465
484
  last_updated=last_updated,
466
485
  )
@@ -973,16 +992,82 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
973
992
 
974
993
  def _execute_sql_query(self, query: str, params: Sequence[Any] = ()) -> List[Row]:
975
994
  """Execute SQL query using databricks-sql connector for better performance"""
995
+ logger.debug(f"Executing SQL query with {len(params)} parameters")
996
+ if logger.isEnabledFor(logging.DEBUG):
997
+ # Only log full query in debug mode to avoid performance overhead
998
+ logger.debug(f"Full SQL query: {query}")
999
+ if params:
1000
+ logger.debug(f"Query parameters: {params}")
1001
+
1002
+ # Check if warehouse_id is available for SQL operations
1003
+ if not self.warehouse_id:
1004
+ self.report.report_warning(
1005
+ "Cannot execute SQL query",
1006
+ "warehouse_id is not configured. SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration",
1007
+ )
1008
+ logger.warning(
1009
+ "Cannot execute SQL query: warehouse_id is not configured. "
1010
+ "SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration."
1011
+ )
1012
+ return []
1013
+
1014
+ # Log connection parameters (with masked token)
1015
+ masked_params = {**self._sql_connection_params}
1016
+ if "access_token" in masked_params:
1017
+ masked_params["access_token"] = "***MASKED***"
1018
+ logger.debug(f"Using connection parameters: {masked_params}")
1019
+
1020
+ # Log proxy environment variables that affect SQL connections
1021
+ proxy_env_debug = {}
1022
+ for var in ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]:
1023
+ value = os.environ.get(var)
1024
+ if value:
1025
+ proxy_env_debug[var] = mask_proxy_credentials(value)
1026
+
1027
+ if proxy_env_debug:
1028
+ logger.debug(
1029
+ f"SQL connection will use proxy environment variables: {proxy_env_debug}"
1030
+ )
1031
+ else:
1032
+ logger.debug("No proxy environment variables detected for SQL connection")
1033
+
976
1034
  try:
977
1035
  with (
978
1036
  connect(**self._sql_connection_params) as connection,
979
1037
  connection.cursor() as cursor,
980
1038
  ):
981
1039
  cursor.execute(query, list(params))
982
- return cursor.fetchall()
1040
+ rows = cursor.fetchall()
1041
+ logger.debug(
1042
+ f"SQL query executed successfully, returned {len(rows)} rows"
1043
+ )
1044
+ return rows
983
1045
 
984
1046
  except Exception as e:
985
- logger.warning(f"Failed to execute SQL query: {e}")
1047
+ logger.warning(f"Failed to execute SQL query: {e}", exc_info=True)
1048
+ if logger.isEnabledFor(logging.DEBUG):
1049
+ # Only log failed query details in debug mode for security
1050
+ logger.debug(f"SQL query that failed: {query}")
1051
+ logger.debug(f"SQL query parameters: {params}")
1052
+
1053
+ # Check if this might be a proxy-related error
1054
+ error_str = str(e).lower()
1055
+ if any(
1056
+ proxy_keyword in error_str
1057
+ for proxy_keyword in [
1058
+ "proxy",
1059
+ "407",
1060
+ "authentication required",
1061
+ "tunnel",
1062
+ "connect",
1063
+ ]
1064
+ ):
1065
+ logger.error(
1066
+ "SQL query failure appears to be proxy-related. "
1067
+ "Please check proxy configuration and authentication. "
1068
+ f"Proxy environment variables detected: {list(proxy_env_debug.keys())}"
1069
+ )
1070
+
986
1071
  return []
987
1072
 
988
1073
  @cached(cachetools.FIFOCache(maxsize=_MAX_CONCURRENT_CATALOGS))