acryl-datahub 1.2.0.7rc2__py3-none-any.whl → 1.2.0.7rc4__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of acryl-datahub might be problematic.
Files changed (31)
  1. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/METADATA +2754 -2749
  2. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/RECORD +30 -30
  3. datahub/_version.py +1 -1
  4. datahub/ingestion/autogenerated/capability_summary.json +1 -1
  5. datahub/ingestion/source/bigquery_v2/bigquery_connection.py +12 -1
  6. datahub/ingestion/source/qlik_sense/qlik_sense.py +1 -1
  7. datahub/ingestion/source/redshift/config.py +9 -6
  8. datahub/ingestion/source/redshift/lineage.py +386 -687
  9. datahub/ingestion/source/redshift/redshift.py +19 -106
  10. datahub/ingestion/source/snowflake/snowflake_schema_gen.py +4 -1
  11. datahub/ingestion/source/snowflake/snowflake_v2.py +1 -0
  12. datahub/ingestion/source/sql/mssql/job_models.py +3 -1
  13. datahub/ingestion/source/sql/mssql/source.py +62 -3
  14. datahub/ingestion/source/unity/config.py +74 -9
  15. datahub/ingestion/source/unity/proxy.py +167 -5
  16. datahub/ingestion/source/unity/proxy_patch.py +321 -0
  17. datahub/ingestion/source/unity/proxy_types.py +24 -0
  18. datahub/ingestion/source/unity/report.py +5 -0
  19. datahub/ingestion/source/unity/source.py +111 -1
  20. datahub/ingestion/source/usage/usage_common.py +1 -0
  21. datahub/metadata/_internal_schema_classes.py +5 -5
  22. datahub/metadata/schema.avsc +66 -60
  23. datahub/metadata/schemas/LogicalParent.avsc +104 -100
  24. datahub/metadata/schemas/SchemaFieldKey.avsc +3 -1
  25. datahub/sdk/chart.py +36 -22
  26. datahub/sdk/dashboard.py +38 -62
  27. datahub/ingestion/source/redshift/lineage_v2.py +0 -466
  28. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/WHEEL +0 -0
  29. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/entry_points.txt +0 -0
  30. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/licenses/LICENSE +0 -0
  31. {acryl_datahub-1.2.0.7rc2.dist-info → acryl_datahub-1.2.0.7rc4.dist-info}/top_level.txt +0 -0
datahub/ingestion/source/unity/proxy.py
@@ -4,6 +4,7 @@ Manage the communication with DataBricks Server and provide equivalent dataclass
 
 import dataclasses
 import logging
+import os
 from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime
 from typing import Any, Dict, Iterable, List, Optional, Sequence, Union, cast
@@ -17,6 +18,8 @@ from databricks.sdk.service.catalog import (
     ColumnInfo,
     GetMetastoreSummaryResponse,
     MetastoreInfo,
+    ModelVersionInfo,
+    RegisteredModelInfo,
     SchemaInfo,
     TableInfo,
 )
@@ -49,6 +52,8 @@ from datahub.ingestion.source.unity.proxy_types import (
     CustomCatalogType,
     ExternalTableReference,
     Metastore,
+    Model,
+    ModelVersion,
     Notebook,
     NotebookReference,
     Query,
@@ -67,6 +72,23 @@ logger: logging.Logger = logging.getLogger(__name__)
 _MAX_CONCURRENT_CATALOGS = 1
 
 
+# Import and apply the proxy patch from a separate module
+try:
+    from datahub.ingestion.source.unity.proxy_patch import (
+        apply_databricks_proxy_fix,
+        mask_proxy_credentials,
+    )
+
+    # Apply the fix when the module is imported
+    apply_databricks_proxy_fix()
+except ImportError as e:
+    logger.debug(f"Could not import proxy patch module: {e}")
+
+    # Fallback function for masking credentials
+    def mask_proxy_credentials(url: Optional[str]) -> str:
+        return "***MASKED***" if url else "None"
+
+
 @dataclasses.dataclass
 class TableInfoWithGeneration(TableInfo):
     generation: Optional[int] = None
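
The try/except above makes the proxy patch a soft dependency: if proxy_patch cannot be imported, ingestion still proceeds and a conservative fallback masker is used instead. A minimal, self-contained sketch of the same import-time pattern (module and function names here are hypothetical, not part of this package):

import logging
from typing import Optional

logger = logging.getLogger(__name__)

try:
    # Hypothetical optional module; its absence must not break the importer.
    from my_pkg.patch import apply_fix, mask_secret

    apply_fix()  # side effect runs once, when this module is first imported
except ImportError as e:
    logger.debug(f"Optional patch unavailable: {e}")

    def mask_secret(value: Optional[str]) -> str:
        # Conservative fallback: reveal nothing about the value.
        return "***MASKED***" if value else "None"

Because the fallback is defined inside the except block, callers can rely on mask_secret existing either way.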
@@ -251,6 +273,40 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             logger.warning(f"Error parsing table: {e}")
             self.report.report_warning("table-parse", str(e))
 
+    def ml_models(
+        self, schema: Schema, max_results: Optional[int] = None
+    ) -> Iterable[Model]:
+        response = self._workspace_client.registered_models.list(
+            catalog_name=schema.catalog.name,
+            schema_name=schema.name,
+            max_results=max_results,
+        )
+        for ml_model in response:
+            optional_ml_model = self._create_ml_model(schema, ml_model)
+            if optional_ml_model:
+                yield optional_ml_model
+
+    def ml_model_versions(
+        self, ml_model: Model, include_aliases: bool = False
+    ) -> Iterable[ModelVersion]:
+        response = self._workspace_client.model_versions.list(
+            full_name=ml_model.id,
+            include_browse=True,
+            max_results=self.databricks_api_page_size,
+        )
+        for version in response:
+            if version.version is not None:
+                if include_aliases:
+                    # to get aliases info, use GET
+                    version = self._workspace_client.model_versions.get(
+                        ml_model.id, version.version, include_aliases=True
+                    )
+                optional_ml_model_version = self._create_ml_model_version(
+                    ml_model, version
+                )
+                if optional_ml_model_version:
+                    yield optional_ml_model_version
+
     def service_principals(self) -> Iterable[ServicePrincipal]:
         for principal in self._workspace_client.service_principals.list():
             optional_sp = self._create_service_principal(principal)
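
A hedged usage sketch of the two new generators, assuming an already-constructed UnityCatalogApiProxy instance named proxy and a Schema object named schema (both are built elsewhere in this source):

# Illustrative only: `proxy` and `schema` are assumed, not defined here.
for model in proxy.ml_models(schema, max_results=100):
    print(model.id, model.name)
    # include_aliases=True costs one extra GET per version, per the comment above.
    for version in proxy.ml_model_versions(model, include_aliases=True):
        print("  ", version.version, version.aliases)

Both methods are generators, so models and versions stream lazily; nothing is fetched until iteration begins.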
@@ -373,7 +429,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
         query = f"""
             SELECT
                 entity_type, entity_id,
-                source_table_full_name, source_type,
+                source_table_full_name, source_type, source_path,
                 target_table_full_name, target_type,
                 max(event_time) as last_updated
             FROM system.access.table_lineage
@@ -382,7 +438,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             {additional_where}
             GROUP BY
                 entity_type, entity_id,
-                source_table_full_name, source_type,
+                source_table_full_name, source_type, source_path,
                 target_table_full_name, target_type
         """
         rows = self._execute_sql_query(query, [catalog, catalog])
@@ -394,6 +450,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             source_full_name = row["source_table_full_name"]
             target_full_name = row["target_table_full_name"]
             source_type = row["source_type"]
+            source_path = row["source_path"]
             last_updated = row["last_updated"]
 
             # Initialize TableLineageInfo for both source and target tables if they're in our catalog
@@ -422,7 +479,7 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             # Handle external upstreams (PATH type)
             elif source_type == "PATH":
                 external_upstream = ExternalUpstream(
-                    path=source_full_name,
+                    path=source_path,
                     source_type=source_type,
                     last_updated=last_updated,
                 )
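
This is the substantive fix in the lineage query: for PATH-type sources, system.access.table_lineage carries the location in source_path, while source_table_full_name is typically NULL, so the old code produced empty upstream paths. A hypothetical row illustrates the difference (values invented):

# Hypothetical lineage row for a PATH-type source.
row = {
    "source_table_full_name": None,  # usually not populated for PATH sources
    "source_type": "PATH",
    "source_path": "s3://bucket/landing/events/",
    "target_table_full_name": "main.raw.events",
    "last_updated": "2024-01-01T00:00:00Z",
}
# Before: path=row["source_table_full_name"]  -> None
# After:  path=row["source_path"]             -> "s3://bucket/landing/events/"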
@@ -862,6 +919,45 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
             if optional_column:
                 yield optional_column
 
+    def _create_ml_model(
+        self, schema: Schema, obj: RegisteredModelInfo
+    ) -> Optional[Model]:
+        if not obj.name or not obj.full_name:
+            self.report.num_ml_models_missing_name += 1
+            return None
+        return Model(
+            id=obj.full_name,
+            name=obj.name,
+            description=obj.comment,
+            schema_name=schema.name,
+            catalog_name=schema.catalog.name,
+            created_at=parse_ts_millis(obj.created_at),
+            updated_at=parse_ts_millis(obj.updated_at),
+        )
+
+    def _create_ml_model_version(
+        self, model: Model, obj: ModelVersionInfo
+    ) -> Optional[ModelVersion]:
+        if obj.version is None:
+            return None
+
+        aliases = []
+        if obj.aliases:
+            for alias in obj.aliases:
+                if alias.alias_name:
+                    aliases.append(alias.alias_name)
+        return ModelVersion(
+            id=f"{model.id}_{obj.version}",
+            name=f"{model.name}_{obj.version}",
+            model=model,
+            version=str(obj.version),
+            aliases=aliases,
+            description=obj.comment,
+            created_at=parse_ts_millis(obj.created_at),
+            updated_at=parse_ts_millis(obj.updated_at),
+            created_by=obj.created_by,
+        )
+
     def _create_service_principal(
         self, obj: DatabricksServicePrincipal
     ) -> Optional[ServicePrincipal]:
@@ -896,16 +992,82 @@ class UnityCatalogApiProxy(UnityCatalogProxyProfilingMixin):
 
     def _execute_sql_query(self, query: str, params: Sequence[Any] = ()) -> List[Row]:
         """Execute SQL query using databricks-sql connector for better performance"""
+        logger.debug(f"Executing SQL query with {len(params)} parameters")
+        if logger.isEnabledFor(logging.DEBUG):
+            # Only log full query in debug mode to avoid performance overhead
+            logger.debug(f"Full SQL query: {query}")
+            if params:
+                logger.debug(f"Query parameters: {params}")
+
+        # Check if warehouse_id is available for SQL operations
+        if not self.warehouse_id:
+            self.report.report_warning(
+                "Cannot execute SQL query",
+                "warehouse_id is not configured. SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration",
+            )
+            logger.warning(
+                "Cannot execute SQL query: warehouse_id is not configured. "
+                "SQL operations require a valid warehouse_id to be set in the Unity Catalog configuration."
+            )
+            return []
+
+        # Log connection parameters (with masked token)
+        masked_params = {**self._sql_connection_params}
+        if "access_token" in masked_params:
+            masked_params["access_token"] = "***MASKED***"
+        logger.debug(f"Using connection parameters: {masked_params}")
+
+        # Log proxy environment variables that affect SQL connections
+        proxy_env_debug = {}
+        for var in ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]:
+            value = os.environ.get(var)
+            if value:
+                proxy_env_debug[var] = mask_proxy_credentials(value)
+
+        if proxy_env_debug:
+            logger.debug(
+                f"SQL connection will use proxy environment variables: {proxy_env_debug}"
+            )
+        else:
+            logger.debug("No proxy environment variables detected for SQL connection")
+
         try:
             with (
                 connect(**self._sql_connection_params) as connection,
                 connection.cursor() as cursor,
             ):
                 cursor.execute(query, list(params))
-                return cursor.fetchall()
+                rows = cursor.fetchall()
+                logger.debug(
+                    f"SQL query executed successfully, returned {len(rows)} rows"
+                )
+                return rows
 
         except Exception as e:
-            logger.warning(f"Failed to execute SQL query: {e}")
+            logger.warning(f"Failed to execute SQL query: {e}", exc_info=True)
+            if logger.isEnabledFor(logging.DEBUG):
+                # Only log failed query details in debug mode for security
+                logger.debug(f"SQL query that failed: {query}")
+                logger.debug(f"SQL query parameters: {params}")
+
+            # Check if this might be a proxy-related error
+            error_str = str(e).lower()
+            if any(
+                proxy_keyword in error_str
+                for proxy_keyword in [
+                    "proxy",
+                    "407",
+                    "authentication required",
+                    "tunnel",
+                    "connect",
+                ]
+            ):
+                logger.error(
+                    "SQL query failure appears to be proxy-related. "
+                    "Please check proxy configuration and authentication. "
+                    f"Proxy environment variables detected: {list(proxy_env_debug.keys())}"
+                )
+
             return []
 
     @cached(cachetools.FIFOCache(maxsize=_MAX_CONCURRENT_CATALOGS))
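
All of the new diagnostics are gated on the DEBUG level, so runs with default logging behave as before. To surface the full query, masked connection parameters, and proxy detection output, raise the level of this module's logger (a sketch; it assumes the running process already has a log handler attached, as the datahub CLI does):

import logging

# Emit the verbose SQL/proxy diagnostics added in this hunk.
logging.getLogger("datahub.ingestion.source.unity.proxy").setLevel(logging.DEBUG)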

datahub/ingestion/source/unity/proxy_patch.py (new file)
@@ -0,0 +1,321 @@
+"""
+Proxy authentication patch for databricks-sql < 3.0 compatibility.
+
+This module provides proxy authentication fixes for databricks-sql connector < 3.0
+to resolve "407 Proxy Authentication Required" errors that occur even when
+proxy environment variables are correctly set.
+
+The patch implements the same fix as Databricks PR #354:
+https://github.com/databricks/databricks-sql-python/pull/354
+"""
+
+import logging
+import os
+import urllib.parse
+from typing import Dict, Optional
+
+logger: logging.Logger = logging.getLogger(__name__)
+
+PROXY_VARS = ["HTTP_PROXY", "HTTPS_PROXY", "http_proxy", "https_proxy"]
+
+
+def mask_proxy_credentials(url: Optional[str]) -> str:
+    """Mask credentials in proxy URL for safe logging."""
+    if not url:
+        return "None"
+
+    try:
+        parsed = urllib.parse.urlparse(url)
+        if parsed.username:
+            # Replace credentials with masked version
+            masked_netloc = parsed.netloc
+            if parsed.username and parsed.password:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}:{parsed.password}@", f"{parsed.username}:***@"
+                )
+            elif parsed.username:
+                masked_netloc = masked_netloc.replace(
+                    f"{parsed.username}@", f"{parsed.username}:***@"
+                )
+
+            return urllib.parse.urlunparse(
+                (
+                    parsed.scheme,
+                    masked_netloc,
+                    parsed.path,
+                    parsed.params,
+                    parsed.query,
+                    parsed.fragment,
+                )
+            )
+        else:
+            return url
+    except Exception:
+        return "***INVALID_URL***"
+
+
+def _ensure_thrift_imports():
+    """Ensure required thrift imports are loaded before accessing thrift_http_client.
+
+    The databricks-sql thrift_http_client requires thrift.transport.THttpClient.THttpClient
+    to be accessible. This is achieved by importing the required modules in the right order.
+    """
+    try:
+        # Import thrift submodules - this makes them accessible as attributes
+        import thrift.transport.THttpClient  # noqa: F401 # Used to make thrift.transport accessible
+
+        logger.debug("Successfully imported required thrift modules")
+    except Exception as e:
+        logger.debug(f"Could not import thrift modules: {e}")
+        raise
+
+
+def _log_proxy_environment():
+    """Log detected proxy environment variables for debugging."""
+    proxy_env_vars = {}
+    for var in PROXY_VARS:
+        value = os.environ.get(var)
+        if value:
+            masked_value = mask_proxy_credentials(value)
+            proxy_env_vars[var] = masked_value
+
+    if proxy_env_vars:
+        logger.info(f"Detected proxy environment variables: {proxy_env_vars}")
+    else:
+        logger.debug("No proxy environment variables detected")
+
+
+def _basic_proxy_auth_header(proxy_url: str) -> Optional[Dict[str, str]]:
+    """Create proxy authentication header using the same method as Databricks >= 3.0.
+
+    Based on the basic_proxy_auth_header method from databricks-sql-connector >= 3.0:
+    https://github.com/databricks/databricks-sql-python/pull/354
+    """
+    try:
+        from urllib3.util import make_headers
+
+        parsed = urllib.parse.urlparse(proxy_url)
+        if parsed.username and parsed.password:
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # URL decode the username and password (same as Databricks method)
+            username = urllib.parse.unquote(parsed.username)
+            password = urllib.parse.unquote(parsed.password)
+            auth_string = f"{username}:{password}"
+
+            # Create proxy URL without credentials
+            proxy_host_port = f"{parsed.scheme}://{parsed.hostname}"
+            if parsed.port:
+                proxy_host_port += f":{parsed.port}"
+
+            # Code reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Use make_headers like the newer Databricks version does
+            proxy_headers = make_headers(proxy_basic_auth=auth_string)
+
+            return {
+                "proxy_url": proxy_host_port,
+                "proxy_headers": proxy_headers,
+                "auth_string": auth_string,  # Keep for backward compatibility with tests
+            }
+    except Exception as e:
+        logger.debug(f"Failed to create proxy auth header from URL {proxy_url}: {e}")
+
+    return None
+
+
+def _handle_proxy_connection(self, original_open, pool_kwargs):
+    """Handle proxy connection setup with authentication headers."""
+    from urllib3.poolmanager import ProxyManager
+
+    logger.info(f"Using proxy for connection to {self.host}:{self.port}")
+    proxy_uri = getattr(self, "proxy_uri", None)
+    logger.debug(
+        f"Proxy URI: {mask_proxy_credentials(proxy_uri) if proxy_uri else 'None'}"
+    )
+
+    # Compute proxy authentication headers properly (the bug fix!)
+    proxy_headers = None
+    proxy_env_found = None
+    for env_var in ["HTTPS_PROXY", "https_proxy", "HTTP_PROXY", "http_proxy"]:
+        proxy_url = os.environ.get(env_var)
+        if proxy_url:
+            logger.debug(
+                f"Found proxy URL in {env_var}: {mask_proxy_credentials(proxy_url)}"
+            )
+            auth_info = _basic_proxy_auth_header(proxy_url)
+            if auth_info:
+                proxy_headers = auth_info["proxy_headers"]
+                proxy_env_found = env_var
+                logger.debug(f"Successfully created proxy headers from {env_var}")
+                break
+            else:
+                logger.debug(
+                    f"No authentication info found in proxy URL from {env_var}"
+                )
+
+    if proxy_headers:
+        logger.info(f"Using proxy authentication headers from {proxy_env_found}")
+    else:
+        logger.warning(
+            "No proxy authentication headers could be created from environment variables"
+        )
+
+    proxy_manager = ProxyManager(
+        self.proxy_uri,
+        num_pools=1,
+        proxy_headers=proxy_headers,
+    )
+
+    # Validate proxy manager attributes
+    if not hasattr(self, "realhost") or not hasattr(self, "realport"):
+        logger.warning(
+            "THttpClient missing realhost/realport attributes, falling back to original"
+        )
+        return original_open(self)
+
+    # Set up the connection pool
+    self._THttpClient__pool = proxy_manager.connection_from_host(
+        host=self.realhost,
+        port=self.realport,
+        scheme=self.scheme,
+        pool_kwargs=pool_kwargs,  # type: ignore
+    )
+    logger.debug(f"Created proxy connection pool for {self.realhost}:{self.realport}")
+
+
+def _create_patched_open_method(original_open):
+    """Create the patched THttpClient.open method with proxy authentication fix."""
+
+    def patched_open(self):
+        """Patched version of THttpClient.open following databricks-sql >= 3.0 structure.
+
+        This is largely copied from the >= 3.0 implementation:
+        https://github.com/databricks/databricks-sql-python/pull/354/files
+        """
+        logger.debug(
+            f"Patched THttpClient.open called for host={getattr(self, 'host', 'unknown')}, scheme={getattr(self, 'scheme', 'unknown')}"
+        )
+
+        try:
+            # Validate required attributes
+            required_attrs = ["scheme", "host", "port", "max_connections"]
+            missing_attrs = [attr for attr in required_attrs if not hasattr(self, attr)]
+            if missing_attrs:
+                logger.warning(
+                    f"THttpClient missing required attributes: {missing_attrs}, falling back to original"
+                )
+                return original_open(self)
+
+            # Code structure reused from https://github.com/databricks/databricks-sql-python/pull/354
+            # Determine pool class based on scheme
+            if self.scheme == "http":
+                from urllib3 import HTTPConnectionPool
+
+                pool_class = HTTPConnectionPool
+            elif self.scheme == "https":
+                from urllib3 import HTTPSConnectionPool
+
+                pool_class = HTTPSConnectionPool
+            else:
+                logger.warning(
+                    f"Unknown scheme '{self.scheme}', falling back to original"
+                )
+                return original_open(self)
+
+            _pool_kwargs = {"maxsize": self.max_connections}
+            logger.debug(f"Pool kwargs: {_pool_kwargs}")
+
+            if self.using_proxy():
+                return _handle_proxy_connection(self, original_open, _pool_kwargs)
+            else:
+                logger.debug(f"Direct connection (no proxy) to {self.host}:{self.port}")
+                self._THttpClient__pool = pool_class(
+                    self.host, self.port, **_pool_kwargs
+                )
+
+            logger.debug("Patched THttpClient.open completed successfully")
+
+        except Exception as e:
+            logger.warning(
+                f"Error in proxy auth patch: {e}, falling back to original",
+                exc_info=True,
+            )
+            # Fallback to original implementation
+            try:
+                return original_open(self)
+            except Exception as fallback_error:
+                logger.error(
+                    f"Fallback to original THttpClient.open also failed: {fallback_error}",
+                    exc_info=True,
+                )
+                raise
+
+    return patched_open
+
+
+def apply_databricks_proxy_fix():
+    """Apply the databricks-sql < 3.0 proxy authentication fix at module import time.
+
+    This implements the same fix as Databricks PR #354 to resolve
+    "407 Proxy Authentication Required" errors that occur even when
+    all proxy environment variables are correctly set.
+
+    Note: This fix may not work with all thrift versions due to compatibility issues
+    between databricks-sql-connector 2.9.6 and newer thrift versions. The fix will
+    gracefully fail with a warning if thrift compatibility issues are detected.
+    The main SQL functionality will continue to work normally without this fix.
+    """
+    _log_proxy_environment()
+    logger.info("Applying databricks-sql proxy authentication fix...")
+
+    try:
+        _ensure_thrift_imports()
+        import databricks.sql.auth.thrift_http_client as thrift_http
+
+        # Store original method for fallback
+        original_open = getattr(thrift_http.THttpClient, "open", None)
+        if not original_open:
+            logger.warning("Could not find THttpClient.open method to patch")
+            return False
+
+        logger.debug(f"Found THttpClient.open method at {original_open}")
+
+        # Apply the patch
+        patched_open = _create_patched_open_method(original_open)
+        thrift_http.THttpClient.open = patched_open
+        logger.info("Successfully applied databricks-sql proxy authentication fix")
+
+        # Verify the patch was applied
+        current_method = getattr(thrift_http.THttpClient, "open", None)
+        if current_method == patched_open:
+            logger.debug(
+                "Patch verification successful: THttpClient.open is now the patched version"
+            )
+            return True
+        else:
+            logger.warning(
+                "Patch verification failed: THttpClient.open was not replaced correctly"
+            )
+            return False
+
+    except ImportError as e:
+        logger.debug(f"Could not import databricks-sql internals for proxy patch: {e}")
+        return False
+    except AttributeError as e:
+        if "thrift" in str(e).lower() and "transport" in str(e).lower():
+            warning_msg = (
+                f"Databricks-sql proxy authentication patch could not be applied due to thrift version incompatibility: {e}. "
+                "In most environments, the SQL connection will still work without this patch."
+            )
+            logger.warning(warning_msg)
+            # Import here to avoid circular imports
+            from datahub.utilities.global_warning_util import add_global_warning
+
+            add_global_warning(warning_msg)
+        else:
+            logger.error(
+                f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True
+            )
+        return False
+    except Exception as e:
+        logger.error(f"Failed to apply databricks-sql proxy patch: {e}", exc_info=True)
+        return False
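
A short interaction sketch for the two public entry points above; the expected outputs in the comments follow directly from the masking logic in the new module:

from datahub.ingestion.source.unity.proxy_patch import (
    apply_databricks_proxy_fix,
    mask_proxy_credentials,
)

print(mask_proxy_credentials("http://alice:s3cret@proxy.corp:8080"))
# -> http://alice:***@proxy.corp:8080
print(mask_proxy_credentials("http://proxy.corp:8080"))
# -> http://proxy.corp:8080  (no credentials, returned unchanged)

# True only if THttpClient.open was found, replaced, and verified.
applied = apply_databricks_proxy_fix()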

datahub/ingestion/source/unity/proxy_types.py
@@ -337,3 +337,27 @@ class Notebook:
                 "upstreams": frozenset([*notebook.upstreams, upstream]),
             }
         )
+
+
+@dataclass
+class Model:
+    id: str
+    name: str
+    schema_name: str
+    catalog_name: str
+    description: Optional[str]
+    created_at: Optional[datetime]
+    updated_at: Optional[datetime]
+
+
+@dataclass
+class ModelVersion:
+    id: str
+    name: str
+    model: Model
+    version: str
+    aliases: Optional[List[str]]
+    description: Optional[str]
+    created_at: Optional[datetime]
+    updated_at: Optional[datetime]
+    created_by: Optional[str]
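
A minimal construction sketch for the new dataclasses (field values invented for illustration); note the id/name convention that _create_ml_model_version in proxy.py uses, appending the version to the registered model's identifiers:

from datetime import datetime

from datahub.ingestion.source.unity.proxy_types import Model, ModelVersion

model = Model(
    id="main.ml.churn",  # Unity Catalog three-part full name
    name="churn",
    schema_name="ml",
    catalog_name="main",
    description=None,
    created_at=datetime(2024, 1, 1),
    updated_at=None,
)
version = ModelVersion(
    id=f"{model.id}_3",  # -> "main.ml.churn_3"
    name=f"{model.name}_3",
    model=model,
    version="3",
    aliases=["champion"],
    description=None,
    created_at=None,
    updated_at=None,
    created_by=None,
)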

datahub/ingestion/source/unity/report.py
@@ -31,6 +31,10 @@ class UnityCatalogReport(IngestionStageReport, SQLSourceReport):
     tables: EntityFilterReport = EntityFilterReport.field(type="table/view")
     table_profiles: EntityFilterReport = EntityFilterReport.field(type="table profile")
     notebooks: EntityFilterReport = EntityFilterReport.field(type="notebook")
+    ml_models: EntityFilterReport = EntityFilterReport.field(type="ml_model")
+    ml_model_versions: EntityFilterReport = EntityFilterReport.field(
+        type="ml_model_version"
+    )
 
     hive_metastore_catalog_found: Optional[bool] = None
 
@@ -64,6 +68,7 @@ class UnityCatalogReport(IngestionStageReport, SQLSourceReport):
     num_catalogs_missing_name: int = 0
     num_schemas_missing_name: int = 0
     num_tables_missing_name: int = 0
+    num_ml_models_missing_name: int = 0
     num_columns_missing_name: int = 0
     num_queries_missing_info: int = 0