acryl-datahub 1.2.0.6__py3-none-any.whl → 1.2.0.7__py3-none-any.whl

This diff shows the changes between publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.

Potentially problematic release: this version of acryl-datahub might be problematic.

Files changed (84)
  1. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/METADATA +2629 -2543
  2. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/RECORD +83 -75
  3. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/entry_points.txt +1 -0
  4. datahub/_version.py +1 -1
  5. datahub/api/graphql/operation.py +1 -1
  6. datahub/ingestion/autogenerated/capability_summary.json +46 -6
  7. datahub/ingestion/autogenerated/lineage.json +3 -2
  8. datahub/ingestion/run/pipeline.py +1 -0
  9. datahub/ingestion/source/aws/s3_boto_utils.py +97 -5
  10. datahub/ingestion/source/bigquery_v2/bigquery_connection.py +12 -1
  11. datahub/ingestion/source/common/subtypes.py +3 -0
  12. datahub/ingestion/source/data_lake_common/path_spec.py +1 -1
  13. datahub/ingestion/source/datahub/datahub_database_reader.py +19 -8
  14. datahub/ingestion/source/dbt/dbt_common.py +74 -0
  15. datahub/ingestion/source/dremio/dremio_aspects.py +3 -2
  16. datahub/ingestion/source/dremio/dremio_source.py +4 -0
  17. datahub/ingestion/source/dynamodb/dynamodb.py +10 -7
  18. datahub/ingestion/source/excel/__init__.py +0 -0
  19. datahub/ingestion/source/excel/config.py +92 -0
  20. datahub/ingestion/source/excel/excel_file.py +539 -0
  21. datahub/ingestion/source/excel/profiling.py +308 -0
  22. datahub/ingestion/source/excel/report.py +49 -0
  23. datahub/ingestion/source/excel/source.py +662 -0
  24. datahub/ingestion/source/excel/util.py +18 -0
  25. datahub/ingestion/source/fivetran/fivetran_query.py +8 -1
  26. datahub/ingestion/source/openapi.py +1 -1
  27. datahub/ingestion/source/powerbi/config.py +33 -0
  28. datahub/ingestion/source/powerbi/m_query/data_classes.py +1 -0
  29. datahub/ingestion/source/powerbi/m_query/pattern_handler.py +100 -10
  30. datahub/ingestion/source/powerbi/powerbi.py +5 -0
  31. datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -1
  32. datahub/ingestion/source/redshift/config.py +9 -6
  33. datahub/ingestion/source/redshift/lineage.py +386 -687
  34. datahub/ingestion/source/redshift/redshift.py +19 -106
  35. datahub/ingestion/source/s3/source.py +65 -59
  36. datahub/ingestion/source/snowflake/constants.py +2 -0
  37. datahub/ingestion/source/snowflake/snowflake_config.py +10 -0
  38. datahub/ingestion/source/snowflake/snowflake_connection.py +16 -5
  39. datahub/ingestion/source/snowflake/snowflake_query.py +27 -0
  40. datahub/ingestion/source/snowflake/snowflake_report.py +1 -0
  41. datahub/ingestion/source/snowflake/snowflake_schema.py +179 -7
  42. datahub/ingestion/source/snowflake/snowflake_schema_gen.py +25 -7
  43. datahub/ingestion/source/snowflake/snowflake_summary.py +1 -0
  44. datahub/ingestion/source/snowflake/snowflake_utils.py +18 -5
  45. datahub/ingestion/source/snowflake/snowflake_v2.py +6 -1
  46. datahub/ingestion/source/sql/hive_metastore.py +1 -0
  47. datahub/ingestion/source/sql/mssql/job_models.py +3 -1
  48. datahub/ingestion/source/sql/mssql/source.py +62 -3
  49. datahub/ingestion/source/sql_queries.py +24 -2
  50. datahub/ingestion/source/state/checkpoint.py +3 -28
  51. datahub/ingestion/source/unity/config.py +74 -9
  52. datahub/ingestion/source/unity/proxy.py +167 -5
  53. datahub/ingestion/source/unity/proxy_patch.py +321 -0
  54. datahub/ingestion/source/unity/proxy_types.py +24 -0
  55. datahub/ingestion/source/unity/report.py +5 -0
  56. datahub/ingestion/source/unity/source.py +111 -1
  57. datahub/ingestion/source/usage/usage_common.py +1 -0
  58. datahub/metadata/_internal_schema_classes.py +573 -517
  59. datahub/metadata/_urns/urn_defs.py +1748 -1748
  60. datahub/metadata/schema.avsc +18564 -18484
  61. datahub/metadata/schemas/ChartInfo.avsc +2 -1
  62. datahub/metadata/schemas/DataHubPageModuleProperties.avsc +9 -0
  63. datahub/metadata/schemas/InstitutionalMemory.avsc +9 -0
  64. datahub/metadata/schemas/LogicalParent.avsc +104 -100
  65. datahub/metadata/schemas/MetadataChangeEvent.avsc +81 -45
  66. datahub/metadata/schemas/Ownership.avsc +69 -0
  67. datahub/metadata/schemas/SchemaFieldKey.avsc +3 -1
  68. datahub/metadata/schemas/StructuredProperties.avsc +69 -0
  69. datahub/metadata/schemas/StructuredPropertyDefinition.avsc +3 -0
  70. datahub/metadata/schemas/__init__.py +3 -3
  71. datahub/sdk/chart.py +36 -22
  72. datahub/sdk/dashboard.py +38 -62
  73. datahub/sdk/lineage_client.py +6 -26
  74. datahub/sdk/main_client.py +7 -3
  75. datahub/sdk/search_filters.py +16 -0
  76. datahub/specific/aspect_helpers/siblings.py +73 -0
  77. datahub/specific/dataset.py +2 -0
  78. datahub/sql_parsing/sql_parsing_aggregator.py +3 -0
  79. datahub/sql_parsing/tool_meta_extractor.py +1 -3
  80. datahub/upgrade/upgrade.py +14 -2
  81. datahub/ingestion/source/redshift/lineage_v2.py +0 -466
  82. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/WHEEL +0 -0
  83. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/licenses/LICENSE +0 -0
  84. {acryl_datahub-1.2.0.6.dist-info → acryl_datahub-1.2.0.7.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/unity/config.py

@@ -35,6 +35,10 @@ from datahub.utilities.global_warning_util import add_global_warning
 
 logger = logging.getLogger(__name__)
 
+# Configuration default constants
+INCLUDE_TAGS_DEFAULT = True
+INCLUDE_HIVE_METASTORE_DEFAULT = True
+
 
 class LineageDataSource(ConfigEnum):
     AUTO = "AUTO"
@@ -137,10 +141,18 @@ class UnityCatalogSourceConfig(
     )
     warehouse_id: Optional[str] = pydantic.Field(
         default=None,
-        description="SQL Warehouse id, for running queries. If not set, will use the default warehouse.",
+        description=(
+            "SQL Warehouse id, for running queries. Must be explicitly provided to enable SQL-based features. "
+            "Required for the following features that need SQL access: "
+            "1) Tag extraction (include_tags=True) - queries system.information_schema.tags "
+            "2) Hive Metastore catalog (include_hive_metastore=True) - queries legacy hive_metastore catalog "
+            "3) System table lineage (lineage_data_source=SYSTEM_TABLES) - queries system.access.table_lineage/column_lineage "
+            "4) Data profiling (profiling.enabled=True) - runs SELECT/ANALYZE queries on tables. "
+            "When warehouse_id is missing, these features will be automatically disabled (with warnings) to allow ingestion to continue."
+        ),
     )
     include_hive_metastore: bool = pydantic.Field(
-        default=True,
+        default=INCLUDE_HIVE_METASTORE_DEFAULT,
         description="Whether to ingest legacy `hive_metastore` catalog. This requires executing queries on SQL warehouse.",
     )
     workspace_name: Optional[str] = pydantic.Field(
@@ -236,8 +248,12 @@ class UnityCatalogSourceConfig(
     )
 
     include_tags: bool = pydantic.Field(
-        default=True,
-        description="Option to enable/disable column/table tag extraction.",
+        default=INCLUDE_TAGS_DEFAULT,
+        description=(
+            "Option to enable/disable column/table tag extraction. "
+            "Requires warehouse_id to be set since tag extraction needs to query system.information_schema.tags. "
+            "If warehouse_id is not provided, this will be automatically disabled to allow ingestion to continue."
+        ),
     )
 
     _rename_table_ownership = pydantic_renamed_field(
@@ -310,8 +326,62 @@ class UnityCatalogSourceConfig(
         description="Details about the delta lake, incase to emit siblings",
     )
 
+    include_ml_model_aliases: bool = pydantic.Field(
+        default=False,
+        description="Whether to include ML model aliases in the ingestion.",
+    )
+
+    ml_model_max_results: int = pydantic.Field(
+        default=1000,
+        ge=0,
+        description="Maximum number of ML models to ingest.",
+    )
+
+    _forced_disable_tag_extraction: bool = pydantic.PrivateAttr(default=False)
+    _forced_disable_hive_metastore_extraction = pydantic.PrivateAttr(default=False)
+
     scheme: str = DATABRICKS
 
+    def __init__(self, **data):
+        # First, let the parent handle the root validators and field processing
+        super().__init__(**data)
+
+        # After model creation, check if we need to auto-disable features
+        # based on the final warehouse_id value (which may have been set by root validators)
+        include_tags_original = data.get("include_tags", INCLUDE_TAGS_DEFAULT)
+        include_hive_metastore_original = data.get(
+            "include_hive_metastore", INCLUDE_HIVE_METASTORE_DEFAULT
+        )
+
+        # Track what we're force-disabling
+        forced_disable_tag_extraction = False
+        forced_disable_hive_metastore_extraction = False
+
+        # Check if features should be auto-disabled based on final warehouse_id
+        if include_tags_original and not self.warehouse_id:
+            forced_disable_tag_extraction = True
+            self.include_tags = False  # Modify the model attribute directly
+            logger.warning(
+                "warehouse_id is not set but include_tags=True. "
+                "Automatically disabling tag extraction since it requires SQL queries. "
+                "Set warehouse_id to enable tag extraction."
+            )
+
+        if include_hive_metastore_original and not self.warehouse_id:
+            forced_disable_hive_metastore_extraction = True
+            self.include_hive_metastore = False  # Modify the model attribute directly
+            logger.warning(
+                "warehouse_id is not set but include_hive_metastore=True. "
+                "Automatically disabling hive metastore extraction since it requires SQL queries. "
+                "Set warehouse_id to enable hive metastore extraction."
+            )
+
+        # Set private attributes
+        self._forced_disable_tag_extraction = forced_disable_tag_extraction
+        self._forced_disable_hive_metastore_extraction = (
+            forced_disable_hive_metastore_extraction
+        )
+
     def get_sql_alchemy_url(self, database: Optional[str] = None) -> str:
         uri_opts = {"http_path": f"/sql/1.0/warehouses/{self.warehouse_id}"}
         if database:
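Taken together with the removal of the old ValueError check in the next hunk, the new __init__ override means a missing warehouse_id no longer aborts ingestion: the SQL-dependent flags are switched off with a warning and the decision is recorded in the private _forced_disable_* attributes. A minimal sketch of the resulting behavior, assuming UnityCatalogSourceConfig is imported from datahub.ingestion.source.unity.config and that token and workspace_url are the only other required fields:

from datahub.ingestion.source.unity.config import UnityCatalogSourceConfig

# No warehouse_id supplied: instead of raising, the config auto-disables
# the features that need SQL access and logs warnings.
config = UnityCatalogSourceConfig(
    token="<personal-access-token>",  # placeholder credential
    workspace_url="https://example.cloud.databricks.com",  # placeholder workspace
    include_tags=True,
    include_hive_metastore=True,
)

assert config.include_tags is False
assert config.include_hive_metastore is False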
@@ -381,11 +451,6 @@ class UnityCatalogSourceConfig(
                 "When `warehouse_id` is set, it must match the `warehouse_id` in `profiling`."
             )
 
-        if values.get("include_hive_metastore") and not values.get("warehouse_id"):
-            raise ValueError(
-                "When `include_hive_metastore` is set, `warehouse_id` must be set."
-            )
-
         if values.get("warehouse_id") and profiling and not profiling.warehouse_id:
             profiling.warehouse_id = values["warehouse_id"]
 
datahub/ingestion/source/unity/proxy.py

@@ -4,6 +4,7 @@ Manage the communication with DataBricks Server and provide equivalent dataclass
 
 import dataclasses
 import logging
+import os
 from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime
 from typing import Any, Dict, Iterable, List, Optional, Sequence, Union, cast
@@ -17,6 +18,8 @@ from databricks.sdk.service.catalog import (
     ColumnInfo,
     GetMetastoreSummaryResponse,
     MetastoreInfo,
+    ModelVersionInfo,
+    RegisteredModelInfo,
     SchemaInfo,
     TableInfo,
 )
@@ -49,6 +52,8 @@ from datahub.ingestion.source.unity.proxy_types import (
     CustomCatalogType,
     ExternalTableReference,
     Metastore,
+    Model,
+    ModelVersion,
     Notebook,
     NotebookReference,
     Query,
@@ -67,6 +72,23 @@ logger: logging.Logger = logging.getLogger(__name__)
 _MAX_CONCURRENT_CATALOGS = 1
 
 
+# Import and apply the proxy patch from separate module
+try:
+    from datahub.ingestion.source.unity.proxy_patch import (
+        apply_databricks_proxy_fix,
+        mask_proxy_credentials,
+    )
+
+    # Apply the fix when the module is imported
+    apply_databricks_proxy_fix()
+except ImportError as e:
+    logger.debug(f"Could not import proxy patch module: {e}")
+
+    # Fallback function for masking credentials
+    def mask_proxy_credentials(url: Optional[str]) -> str:
+        return "***MASKED***" if url else "None"
+
+
 @dataclasses.dataclass
 class TableInfoWithGeneration(TableInfo):
     generation: Optional[int] = None
@@ -251,6 +273,40 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
                 logger.warning(f"Error parsing table: {e}")
                 self.report.report_warning("table-parse", str(e))
 
+    def ml_models(
+        self, schema: Schema, max_results: Optional[int] = None
+    ) -> Iterable[Model]:
+        response = self._workspace_client.registered_models.list(
+            catalog_name=schema.catalog.name,
+            schema_name=schema.name,
+            max_results=max_results,
+        )
+        for ml_model in response:
+            optional_ml_model = self._create_ml_model(schema, ml_model)
+            if optional_ml_model:
+                yield optional_ml_model
+
+    def ml_model_versions(
+        self, ml_model: Model, include_aliases: bool = False
+    ) -> Iterable[ModelVersion]:
+        response = self._workspace_client.model_versions.list(
+            full_name=ml_model.id,
+            include_browse=True,
+            max_results=self.databricks_api_page_size,
+        )
+        for version in response:
+            if version.version is not None:
+                if include_aliases:
+                    # to get aliases info, use GET
+                    version = self._workspace_client.model_versions.get(
+                        ml_model.id, version.version, include_aliases=True
+                    )
+                optional_ml_model_version = self._create_ml_model_version(
+                    ml_model, version
+                )
+                if optional_ml_model_version:
+                    yield optional_ml_model_version
+
     def service_principals(self) -> Iterable[ServicePrincipal]:
         for principal in self._workspace_client.service_principals.list():
             optional_sp = self._create_service_principal(principal)
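The two new iterators are thin generators over the Databricks SDK's registered-model and model-version listing APIs. A rough consumption sketch (illustrative only; the helper function, the proxy instance, and the schema object are assumptions, not part of the package):

from datahub.ingestion.source.unity.proxy import UnityCatalogApiProxy


def dump_ml_models(proxy: UnityCatalogApiProxy, schema) -> None:
    # Walk every registered model in the given schema, then its versions.
    for ml_model in proxy.ml_models(schema, max_results=1000):
        print(ml_model.id, ml_model.description)
        # include_aliases=True makes the proxy issue an extra GET per version
        # to retrieve alias information.
        for version in proxy.ml_model_versions(ml_model, include_aliases=True):
            print("  version", version.version, "aliases:", version.aliases)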
@@ -373,7 +429,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
         query = f"""
             SELECT
                 entity_type, entity_id,
-                source_table_full_name, source_type,
+                source_table_full_name, source_type, source_path,
                 target_table_full_name, target_type,
                 max(event_time) as last_updated
             FROM system.access.table_lineage
@@ -382,7 +438,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
                 {additional_where}
             GROUP BY
                 entity_type, entity_id,
-                source_table_full_name, source_type,
+                source_table_full_name, source_type, source_path,
                 target_table_full_name, target_type
         """
         rows = self._execute_sql_query(query, [catalog, catalog])
@@ -394,6 +450,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             source_full_name = row["source_table_full_name"]
             target_full_name = row["target_table_full_name"]
             source_type = row["source_type"]
+            source_path = row["source_path"]
             last_updated = row["last_updated"]
 
             # Initialize TableLineageInfo for both source and target tables if they're in our catalog
@@ -422,7 +479,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             # Handle external upstreams (PATH type)
             elif source_type == "PATH":
                 external_upstream = ExternalUpstream(
-                    path=source_full_name,
+                    path=source_path,
                     source_type=source_type,
                     last_updated=last_updated,
                 )
@@ -862,6 +919,45 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
                 if optional_column:
                     yield optional_column
 
+    def _create_ml_model(
+        self, schema: Schema, obj: RegisteredModelInfo
+    ) -> Optional[Model]:
+        if not obj.name or not obj.full_name:
+            self.report.num_ml_models_missing_name += 1
+            return None
+        return Model(
+            id=obj.full_name,
+            name=obj.name,
+            description=obj.comment,
+            schema_name=schema.name,
+            catalog_name=schema.catalog.name,
+            created_at=parse_ts_millis(obj.created_at),
+            updated_at=parse_ts_millis(obj.updated_at),
+        )
+
+    def _create_ml_model_version(
+        self, model: Model, obj: ModelVersionInfo
+    ) -> Optional[ModelVersion]:
+        if obj.version is None:
+            return None
+
+        aliases = []
+        if obj.aliases:
+            for alias in obj.aliases:
+                if alias.alias_name:
+                    aliases.append(alias.alias_name)
+        return ModelVersion(
+            id=f"{model.id}_{obj.version}",
+            name=f"{model.name}_{obj.version}",
+            model=model,
+            version=str(obj.version),
+            aliases=aliases,
+            description=obj.comment,
+            created_at=parse_ts_millis(obj.created_at),
+            updated_at=parse_ts_millis(obj.updated_at),
+            created_by=obj.created_by,
+        )
+
     def _create_service_principal(
         self, obj: DatabricksServicePrincipal
     ) -> Optional[ServicePrincipal]:
@@ -896,16 +992,82 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
 
     def _execute_sql_query(self, query: str, params: Sequence[Any] = ()) -> List[Row]:
         """Execute SQL query using databricks-sql connector for better performance"""
+        logger.debug(f"Executing SQL query with {len(params)} parameters")
+        if logger.isEnabledFor(logging.DEBUG):
+            # Only log full query in debug mode to avoid performance overhead
+            logger.debug(f"Full SQL query: {query}")
+            if params:
+                logger.debug(f"Query parameters: {params}")
+
+        # Check if warehouse_id is available for SQL operations
+        if not self.warehouse_id:
+            self.report.report_warning(
+                "Cannot execute SQL query",
+                "warehouse_id is not configured. SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration",
+            )
+            logger.warning(
+                "Cannot execute SQL query: warehouse_id is not configured. "
+                "SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration."
+            )
+            return []
+
+        # Log connection parameters (with masked token)
+        masked_params = {**self._sql_connection_params}
+        if "access_token" in masked_params:
+            masked_params["access_token"] = "***MASKED***"
+        logger.debug(f"Using connection parameters: {masked_params}")
+
+        # Log proxy environment variables that affect SQL connections
+        proxy_env_debug = {}
+        for var in ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]:
+            value = os.environ.get(var)
+            if value:
+                proxy_env_debug[var] = mask_proxy_credentials(value)
+
+        if proxy_env_debug:
+            logger.debug(
+                f"SQL connection will use proxy environment variables: {proxy_env_debug}"
+            )
+        else:
+            logger.debug("No proxy environment variables detected for SQL connection")
+
         try:
             with (
                 connect(**self._sql_connection_params) as connection,
                 connection.cursor() as cursor,
             ):
                 cursor.execute(query, list(params))
-                return cursor.fetchall()
+                rows = cursor.fetchall()
+                logger.debug(
+                    f"SQL query executed successfully, returned {len(rows)} rows"
+                )
+                return rows
 
         except Exception as e:
-            logger.warning(f"Failed to execute SQL query: {e}")
+            logger.warning(f"Failed to execute SQL query: {e}", exc_info=True)
+            if logger.isEnabledFor(logging.DEBUG):
+                # Only log failed query details in debug mode for security
+                logger.debug(f"SQL query that failed: {query}")
+                logger.debug(f"SQL query parameters: {params}")
+
+            # Check if this might be a proxy-related error
+            error_str = str(e).lower()
+            if any(
+                proxy_keyword in error_str
+                for proxy_keyword in [
+                    "proxy",
+                    "407",
+                    "authentication required",
+                    "tunnel",
+                    "connect",
+                ]
+            ):
+                logger.error(
+                    "SQL query failure appears to be proxy-related. "
+                    "Please check proxy configuration and authentication. "
+                    f"Proxy environment variables detected: {list(proxy_env_debug.keys())}"
+                )
+
             return []
 
     @cached(cachetools.FIFOCache(maxsize=_MAX_CONCURRENT_CATALOGS))
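The keyword check above is a plain substring match on the lowercased exception text; for example, a typical "407 Proxy Authentication Required" failure matches on both "407" and "authentication required" and is therefore reported as proxy-related. An illustrative snippet, not part of the package:

error_str = "Error during request to server: 407 Proxy Authentication Required".lower()
proxy_keywords = ["proxy", "407", "authentication required", "tunnel", "connect"]
assert any(keyword in error_str for keyword in proxy_keywords)  # classified as proxy-related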
datahub/ingestion/source/unity/proxy_patch.py (new file)

@@ -0,0 +1,321 @@
+"""
+Proxy authentication patch for databricks-sql < 3.0 compatibility.
+
+This module provides proxy authentication fixes for databricks-sql connector < 3.0
+to resolve "407 Proxy Authentication Required" errors that occur even when
+proxy environment variables are correctly set.
+
+The patch implements the same fix as Databricks PR #354:
+https://github.com/databricks/databricks-sql-python/pull/354
+"""
+
+import logging
+import os
+import urllib.parse
+from typing import Dict, Optional
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+PROXY_VARS = ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]
+
+
+def mask_proxy_credentials(url: Optional[str]) -> str:
+    """Mask credentials in proxy URL for safe logging."""
+    if not url:
+        return "None"
+
+    try:
+        parsed = urllib.parse.urlparse(url)
+        if parsed.username:
+            # Replace credentials with masked version
+            masked_netloc = parsed.netloc
+            if parsed.username and parsed.password:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}:{parsed.password}@", f"{parsed.username}:***@"
+                )
+            elif parsed.username:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}@", f"{parsed.username}:***@"
+                )
+
+            return urllib.parse.urlunparse(
+                (
+                    parsed.scheme,
+                    masked_netloc,
+                    parsed.path,
+                    parsed.params,
+                    parsed.query,
+                    parsed.fragment,
+                )
+            )
+        else:
+            return url
+    except Exception:
+        return "***INVALID_URL***"
+
+
+def _ensure_thrift_imports():
+    """Ensure required thrift imports are loaded before accessing thrift_http_client.
+
+    The databricks-sql thrift_http_client requires thrift.transport.THttpClient.THttpClient
+    to be accessible. This is achieved by importing the required modules in the right order.
+    """
+    try:
+        # Import thrift submodules - this makes them accessible as attributes
+        import thrift.transport.THttpClient  # noqa: F401 # Used to make thrift.transport accessible
+
+        logger.debug("Successfully imported required thrift modules")
+    except Exception as e:
+        logger.debug(f"Could not import thrift modules: {e}")
+        raise
+
+
+def _log_proxy_environment():
+    """Log detected proxy environment variables for debugging."""
+    proxy_env_vars = {}
+    for var in PROXY_VARS:
+        value = os.environ.get(var)
+        if value:
+            masked_value = mask_proxy_credentials(value)
+            proxy_env_vars[var] = masked_value
+
+    if proxy_env_vars:
+        logger.info(f"Detected proxy environment variables: {proxy_env_vars}")
+    else:
+        logger.debug("No proxy environment variables detected")
+
+
+def _basic_proxy_auth_header(proxy_url: str) -> Optional[Dict[str, str]]:
+    """Create proxy authentication header using the same method as Databricks >= 3.0.
+
+    Based on the basic_proxy_auth_header method from databricks-sql-connector >= 3.0:
+    https://github.com/databricks/databricks-sql-python/pull/354
+    """
+    try:
+        from urllib3.util import make_headers
+
+        parsed = urllib.parse.urlparse(proxy_url)
+        if parsed.username and parsed.password:
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # URL decode the username and password (same as Databricks method)
+            username = urllib.parse.unquote(parsed.username)
+            password = urllib.parse.unquote(parsed.password)
+            auth_string = f"{username}:{password}"
+
+            # Create proxy URL without credentials
+            proxy_host_port = f"{parsed.scheme}://{parsed.hostname}"
+            if parsed.port:
+                proxy_host_port += f":{parsed.port}"
+
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Use make_headers like the newer Databricks version does
+            proxy_headers = make_headers(proxy_basic_auth=auth_string)
+
+            return {
+                "proxy_url": proxy_host_port,
+                "proxy_headers": proxy_headers,
+                "auth_string": auth_string,  # Keep for backward compatibility with tests
+            }
+    except Exception as e:
+        logger.debug(f"Failed to create proxy auth header from URL {proxy_url}: {e}")
+
+    return None
+
+
+def _handle_proxy_connection(self, original_open, pool_kwargs):
+    """Handle proxy connection setup with authentication headers."""
+    from urllib3.poolmanager import ProxyManager
+
+    logger.info(f"Using proxy for connection to {self.host}:{self.port}")
+    proxy_uri = getattr(self, "proxy_uri", None)
+    logger.debug(
+        f"Proxy URI: {mask_proxy_credentials(proxy_uri) if proxy_uri else 'None'}"
+    )
+
+    # Compute proxy authentication headers properly (the bug fix!)
+    proxy_headers = None
+    proxy_env_found = None
+    for env_var in ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy"]:
+        proxy_url = os.environ.get(env_var)
+        if proxy_url:
+            logger.debug(
+                f"Found proxy URL in {env_var}: {mask_proxy_credentials(proxy_url)}"
+            )
+            auth_info = _basic_proxy_auth_header(proxy_url)
+            if auth_info:
+                proxy_headers = auth_info["proxy_headers"]
+                proxy_env_found = env_var
+                logger.debug(f"Successfully created proxy headers from {env_var}")
+                break
+            else:
+                logger.debug(
+                    f"No authentication info found in proxy URL from {env_var}"
+                )
+
+    if proxy_headers:
+        logger.info(f"Using proxy authentication headers from {proxy_env_found}")
+    else:
+        logger.warning(
+            "No proxy authentication headers could be created from environment variables"
+        )
+
+    proxy_manager = ProxyManager(
+        self.proxy_uri,
+        num_pools=1,
+        proxy_headers=proxy_headers,
+    )
+
+    # Validate proxy manager attributes
+    if not hasattr(self, "realhost") or not hasattr(self, "realport"):
+        logger.warning(
+            "THttpClient missing realhost/realport attributes, falling back to original"
+        )
+        return original_open(self)
+
+    # Set up the connection pool
+    self._THttpClient__pool = proxy_manager.connection_from_host(
+        host=self.realhost,
+        port=self.realport,
+        scheme=self.scheme,
+        pool_kwargs=pool_kwargs,  # type: ignore
+    )
+    logger.debug(f"Created proxy connection pool for {self.realhost}:{self.realport}")
+
+
+def _create_patched_open_method(original_open):
+    """Create the patched THttpClient.open method with proxy authentication fix."""
+
+    def patched_open(self):
+        """Patched version of THttpClient.open following databricks-sql >= 3.0 structure.
+
+        This is largely copied from the >= 3.0 implementation:
+        https://github.com/databricks/databricks-sql-python/pull/354/files
+        """
+        logger.debug(
+            f"Patched THttpClient.open called for host={getattr(self, 'host', 'unknown')}, scheme={getattr(self, 'scheme', 'unknown')}"
+        )
+
+        try:
+            # Validate required attributes
+            required_attrs = ["scheme", "host", "port", "max_connections"]
+            missing_attrs = [attr for attr in required_attrs if not hasattr(self, attr)]
+            if missing_attrs:
+                logger.warning(
+                    f"THttpClient missing required attributes: {missing_attrs}, falling back to original"
+                )
+                return original_open(self)
+
+            # Code structure reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Determine pool class based on scheme
+            if self.scheme == "http":
+                from urllib3 import HTTPConnectionPool
+
+                pool_class = HTTPConnectionPool
+            elif self.scheme == "https":
+                from urllib3 import HTTPSConnectionPool
+
+                pool_class = HTTPSConnectionPool
+            else:
+                logger.warning(
+                    f"Unknown scheme '{self.scheme}', falling back to original"
+                )
+                return original_open(self)
+
+            _pool_kwargs = {"maxsize": self.max_connections}
+            logger.debug(f"Pool kwargs: {_pool_kwargs}")
+
+            if self.using_proxy():
+                return _handle_proxy_connection(self, original_open, _pool_kwargs)
+            else:
+                logger.debug(f"Direct connection (no proxy) to {self.host}:{self.port}")
+                self._THttpClient__pool = pool_class(
+                    self.host, self.port, **_pool_kwargs
+                )
+
+            logger.debug("Patched THttpClient.open completed successfully")
+
+        except Exception as e:
+            logger.warning(
+                f"Error in proxy auth patch: {e}, falling back to original",
+                exc_info=True,
+            )
+            # Fallback to original implementation
+            try:
+                return original_open(self)
+            except Exception as fallback_error:
+                logger.error(
+                    f"Fallback to original THttpClient.open also failed: {fallback_error}",
+                    exc_info=True,
+                )
+                raise
+
+    return patched_open
+
+
+def apply_databricks_proxy_fix():
+    """Apply the databricks-sql < 3.0 proxy authentication fix at module import time.
+
+    This implements the same fix as Databricks PR #354 to resolve
+    "407 Proxy Authentication Required" errors that occur even when
+    all proxy environment variables are correctly set.
+
+    Note: This fix may not work with all thrift versions due to compatibility issues
+    between databricks-sql-connector 2.9.6 and newer thrift versions. The fix will
+    gracefully fail with a warning if thrift compatibility issues are detected.
+    The main SQL functionality will continue to work normally without this fix.
+    """
+    _log_proxy_environment()
+    logger.info("Applying databricks-sql proxy authentication fix...")
+
+    try:
+        _ensure_thrift_imports()
+        import databricks.sql.auth.thrift_http_client as thrift_http
+
+        # Store original method for fallback
+        original_open = getattr(thrift_http.THttpClient, "open", None)
+        if not original_open:
+            logger.warning("Could not find THttpClient.open method to patch")
+            return False
+
+        logger.debug(f"Found THttpClient.open method at {original_open}")
+
+        # Apply the patch
+        patched_open = _create_patched_open_method(original_open)
+        thrift_http.THttpClient.open = patched_open
+        logger.info("Successfully applied databricks-sql proxy authentication fix")
+
+        # Verify the patch was applied
+        current_method = getattr(thrift_http.THttpClient, "open", None)
+        if current_method == patched_open:
+            logger.debug(
+                "Patch verification successful: THttpClient.open is now the patched version"
+            )
+            return True
+        else:
+            logger.warning(
+                "Patch verification failed: THttpClient.open was not replaced correctly"
+            )
+            return False
+
+    except ImportError as e:
+        logger.debug(f"Could not import databricks-sql internals for proxy patch: {e}")
+        return False
+    except AttributeError as e:
+        if "thrift" in str(e).lower() and "transport" in str(e).lower():
+            warning_msg = (
+                f"Databricks-sql proxy authentication patch could not be applied due to thrift version incompatibility: {e}. "
+                "In most environments, the SQL connection will still work without this patch."
+            )
+            logger.warning(warning_msg)
+            # Import here to avoid circular imports
+            from datahub.utilities.global_warning_util import add_global_warning
+
+            add_global_warning(warning_msg)
+        else:
+            logger.error(
+                f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True
+            )
+        return False
+    except Exception as e:
+        logger.error(f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True)
+        return False
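For reference, the credential-masking helper in the new module keeps the username visible and replaces only the password. A short usage sketch; the expected output follows from the urlparse-based replacement above, and the proxy URL shown is a made-up example:

from datahub.ingestion.source.unity.proxy_patch import mask_proxy_credentials

print(mask_proxy_credentials("http://alice:s3cret@proxy.corp.example:8080"))
# http://alice:***@proxy.corp.example:8080
print(mask_proxy_credentials(None))
# None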